Compare commits

..

1 Commits

Author SHA1 Message Date
Yaroslav Volovich
fc92e90650 feat(tui): add /title terminal title configuration 2026-03-12 22:45:44 +00:00
284 changed files with 4849 additions and 15577 deletions

View File

@@ -56,7 +56,3 @@ common --jobs=30
common:remote --extra_execution_platforms=//:rbe
common:remote --remote_executor=grpcs://remote.buildbuddy.io
common:remote --jobs=800
# TODO(team): Evaluate if this actually helps, zbarsky is not sure, everything seems bottlenecked on `core` either way.
# Enable pipelined compilation since we are not bound by local CPU count.
#common:remote --@rules_rust//rust/settings:pipelined_compilation

View File

@@ -19,10 +19,6 @@ In the codex-rs folder where the rust code lives:
repo root to refresh `MODULE.bazel.lock`, and include that lockfile update in the same change.
- After dependency changes, run `just bazel-lock-check` from the repo root so lockfile drift is caught
locally before CI.
- Bazel does not automatically make source-tree files available to compile-time Rust file access. If
you add `include_str!`, `include_bytes!`, `sqlx::migrate!`, or similar build-time file or
directory reads, update the crate's `BUILD.bazel` (`compile_data`, `build_script_data`, or test
data) or Bazel may fail even when Cargo passes.
- Do not create small helper methods that are referenced only once.
- Avoid large modules:
- Prefer adding new modules instead of growing existing ones.

View File

@@ -1,7 +1,14 @@
module(name = "codex")
bazel_dep(name = "platforms", version = "1.0.0")
bazel_dep(name = "llvm", version = "0.6.7")
bazel_dep(name = "llvm", version = "0.6.1")
single_version_override(
module_name = "llvm",
patch_strip = 1,
patches = [
"//patches:toolchains_llvm_bootstrapped_resource_dir.patch",
],
)
register_toolchains("@llvm//toolchain:all")
@@ -32,7 +39,7 @@ use_repo(osx, "macos_sdk")
bazel_dep(name = "apple_support", version = "2.1.0")
bazel_dep(name = "rules_cc", version = "0.2.16")
bazel_dep(name = "rules_platform", version = "0.1.0")
bazel_dep(name = "rules_rs", version = "0.0.43")
bazel_dep(name = "rules_rs", version = "0.0.40")
rules_rust = use_extension("@rules_rs//rs/experimental:rules_rust.bzl", "rules_rust")
use_repo(rules_rust, "rules_rust")
@@ -84,6 +91,7 @@ crate.annotation(
inject_repo(crate, "zstd")
bazel_dep(name = "bzip2", version = "1.0.8.bcr.3")
bazel_dep(name = "libcap", version = "2.27.bcr.1")
crate.annotation(
crate = "bzip2-sys",
@@ -141,13 +149,13 @@ crate.annotation(
"@macos_sdk//sysroot",
],
build_script_env = {
"BINDGEN_EXTRA_CLANG_ARGS": "-Xclang -internal-isystem -Xclang $(location @llvm//:builtin_resource_dir)/include",
"BINDGEN_EXTRA_CLANG_ARGS": "-isystem $(location @llvm//:builtin_headers)",
"COREAUDIO_SDK_PATH": "$(location @macos_sdk//sysroot)",
"LIBCLANG_PATH": "$(location @llvm-project//clang:libclang_interface_output)",
},
build_script_tools = [
"@llvm-project//clang:libclang_interface_output",
"@llvm//:builtin_resource_dir",
"@llvm//:builtin_headers",
],
crate = "coreaudio-sys",
gen_build_script = "on",
@@ -176,8 +184,6 @@ inject_repo(crate, "alsa_lib")
use_repo(crate, "crates")
bazel_dep(name = "libcap", version = "2.27.bcr.1")
rbe_platform_repository = use_repo_rule("//:rbe.bzl", "rbe_platform_repository")
rbe_platform_repository(

50
MODULE.bazel.lock generated
View File

@@ -24,6 +24,10 @@
"https://bcr.bazel.build/modules/apple_support/1.24.2/MODULE.bazel": "0e62471818affb9f0b26f128831d5c40b074d32e6dda5a0d3852847215a41ca4",
"https://bcr.bazel.build/modules/apple_support/2.1.0/MODULE.bazel": "b15c125dabed01b6803c129cd384de4997759f02f8ec90dc5136bcf6dfc5086a",
"https://bcr.bazel.build/modules/apple_support/2.1.0/source.json": "78064cfefe18dee4faaf51893661e0d403784f3efe88671d727cdcdc67ed8fb3",
"https://bcr.bazel.build/modules/aspect_bazel_lib/2.14.0/MODULE.bazel": "2b31ffcc9bdc8295b2167e07a757dbbc9ac8906e7028e5170a3708cecaac119f",
"https://bcr.bazel.build/modules/aspect_bazel_lib/2.19.3/MODULE.bazel": "253d739ba126f62a5767d832765b12b59e9f8d2bc88cc1572f4a73e46eb298ca",
"https://bcr.bazel.build/modules/aspect_bazel_lib/2.19.3/source.json": "ffab9254c65ba945f8369297ad97ca0dec213d3adc6e07877e23a48624a8b456",
"https://bcr.bazel.build/modules/aspect_bazel_lib/2.8.1/MODULE.bazel": "812d2dd42f65dca362152101fbec418029cc8fd34cbad1a2fde905383d705838",
"https://bcr.bazel.build/modules/aspect_tools_telemetry/0.3.2/MODULE.bazel": "598e7fe3b54f5fa64fdbeead1027653963a359cc23561d43680006f3b463d5a4",
"https://bcr.bazel.build/modules/aspect_tools_telemetry/0.3.2/source.json": "c6f5c39e6f32eb395f8fdaea63031a233bbe96d49a3bfb9f75f6fce9b74bec6c",
"https://bcr.bazel.build/modules/bazel_features/1.1.1/MODULE.bazel": "27b8c79ef57efe08efccbd9dd6ef70d61b4798320b8d3c134fd571f78963dbcd",
@@ -49,8 +53,8 @@
"https://bcr.bazel.build/modules/bazel_features/1.9.0/MODULE.bazel": "885151d58d90d8d9c811eb75e3288c11f850e1d6b481a8c9f766adee4712358b",
"https://bcr.bazel.build/modules/bazel_features/1.9.1/MODULE.bazel": "8f679097876a9b609ad1f60249c49d68bfab783dd9be012faf9d82547b14815a",
"https://bcr.bazel.build/modules/bazel_lib/3.0.0/MODULE.bazel": "22b70b80ac89ad3f3772526cd9feee2fa412c2b01933fea7ed13238a448d370d",
"https://bcr.bazel.build/modules/bazel_lib/3.2.2/MODULE.bazel": "e2c890c8a515d6bca9c66d47718aa9e44b458fde64ec7204b8030bf2d349058c",
"https://bcr.bazel.build/modules/bazel_lib/3.2.2/source.json": "9e84e115c20e14652c5c21401ae85ff4daa8702e265b5c0b3bf89353f17aa212",
"https://bcr.bazel.build/modules/bazel_lib/3.2.0/MODULE.bazel": "39b50d94b9be6bda507862254e20c263f9b950e3160112348d10a938be9ce2c2",
"https://bcr.bazel.build/modules/bazel_lib/3.2.0/source.json": "a6f45a903134bebbf33a6166dd42b4c7ab45169de094b37a85f348ca41170a84",
"https://bcr.bazel.build/modules/bazel_skylib/1.0.3/MODULE.bazel": "bcb0fd896384802d1ad283b4e4eb4d718eebd8cb820b0a2c3a347fb971afd9d8",
"https://bcr.bazel.build/modules/bazel_skylib/1.1.1/MODULE.bazel": "1add3e7d93ff2e6998f9e118022c84d163917d912f5afafb3058e3d2f1545b5e",
"https://bcr.bazel.build/modules/bazel_skylib/1.2.0/MODULE.bazel": "44fe84260e454ed94ad326352a698422dbe372b21a1ac9f3eab76eb531223686",
@@ -69,8 +73,8 @@
"https://bcr.bazel.build/modules/buildozer/8.2.1/source.json": "7c33f6a26ee0216f85544b4bca5e9044579e0219b6898dd653f5fb449cf2e484",
"https://bcr.bazel.build/modules/bzip2/1.0.8.bcr.3/MODULE.bazel": "29ecf4babfd3c762be00d7573c288c083672ab60e79c833ff7f49ee662e54471",
"https://bcr.bazel.build/modules/bzip2/1.0.8.bcr.3/source.json": "8be4a3ef2599693f759e5c0990a4cc5a246ac08db4c900a38f852ba25b5c39be",
"https://bcr.bazel.build/modules/gawk/5.3.2.bcr.3/MODULE.bazel": "f1b7bb2dd53e8f2ef984b39485ec8a44e9076dda5c4b8efd2fb4c6a6e856a31d",
"https://bcr.bazel.build/modules/gawk/5.3.2.bcr.3/source.json": "ebe931bfe362e4b41e59ee00a528db6074157ff2ced92eb9e970acab2e1089c9",
"https://bcr.bazel.build/modules/gawk/5.3.2.bcr.1/MODULE.bazel": "cdf8cbe5ee750db04b78878c9633cc76e80dcf4416cbe982ac3a9222f80713c8",
"https://bcr.bazel.build/modules/gawk/5.3.2.bcr.1/source.json": "fa7b512dfcb5eafd90ce3959cf42a2a6fe96144ebbb4b3b3928054895f2afac2",
"https://bcr.bazel.build/modules/google_benchmark/1.8.2/MODULE.bazel": "a70cf1bba851000ba93b58ae2f6d76490a9feb74192e57ab8e8ff13c34ec50cb",
"https://bcr.bazel.build/modules/googletest/1.11.0/MODULE.bazel": "3a83f095183f66345ca86aa13c58b59f9f94a2f81999c093d4eeaa2d262d12f4",
"https://bcr.bazel.build/modules/googletest/1.14.0.bcr.1/MODULE.bazel": "22c31a561553727960057361aa33bf20fb2e98584bc4fec007906e27053f80c6",
@@ -78,18 +82,22 @@
"https://bcr.bazel.build/modules/googletest/1.15.2/MODULE.bazel": "6de1edc1d26cafb0ea1a6ab3f4d4192d91a312fd2d360b63adaa213cd00b2108",
"https://bcr.bazel.build/modules/googletest/1.17.0/MODULE.bazel": "dbec758171594a705933a29fcf69293d2468c49ec1f2ebca65c36f504d72df46",
"https://bcr.bazel.build/modules/googletest/1.17.0/source.json": "38e4454b25fc30f15439c0378e57909ab1fd0a443158aa35aec685da727cd713",
"https://bcr.bazel.build/modules/jq.bzl/0.1.0/MODULE.bazel": "2ce69b1af49952cd4121a9c3055faa679e748ce774c7f1fda9657f936cae902f",
"https://bcr.bazel.build/modules/jq.bzl/0.1.0/source.json": "746bf13cac0860f091df5e4911d0c593971cd8796b5ad4e809b2f8e133eee3d5",
"https://bcr.bazel.build/modules/jsoncpp/1.9.5/MODULE.bazel": "31271aedc59e815656f5736f282bb7509a97c7ecb43e927ac1a37966e0578075",
"https://bcr.bazel.build/modules/jsoncpp/1.9.6/MODULE.bazel": "2f8d20d3b7d54143213c4dfc3d98225c42de7d666011528dc8fe91591e2e17b0",
"https://bcr.bazel.build/modules/jsoncpp/1.9.6/source.json": "a04756d367a2126c3541682864ecec52f92cdee80a35735a3cb249ce015ca000",
"https://bcr.bazel.build/modules/libcap/2.27.bcr.1/MODULE.bazel": "7c034d7a4d92b2293294934377f5d1cbc88119710a11079fa8142120f6f08768",
"https://bcr.bazel.build/modules/libcap/2.27.bcr.1/source.json": "3b116cbdbd25a68ffb587b672205f6d353a4c19a35452e480d58fc89531e0a10",
"https://bcr.bazel.build/modules/libpfm/4.11.0/MODULE.bazel": "45061ff025b301940f1e30d2c16bea596c25b176c8b6b3087e92615adbd52902",
"https://bcr.bazel.build/modules/llvm/0.6.7/MODULE.bazel": "d37a2e10571864dc6a5bb53c29216d90b9400bbcadb422337f49107fd2eaf0d2",
"https://bcr.bazel.build/modules/llvm/0.6.7/source.json": "c40bcce08d2adbd658aae609976ce4ae4fdc44f3299fffa29c7fa9bf7e7d6d2b",
"https://bcr.bazel.build/modules/llvm/0.6.0/MODULE.bazel": "42c2182c49f13d2df83a4a4a95ab55d31efda47b2d67acf419bf6b31522b2a30",
"https://bcr.bazel.build/modules/llvm/0.6.1/MODULE.bazel": "29170ab19f4e2dc9b6bbf9b3d101738e84142f63ba29a13cc33e0d40f74c79b0",
"https://bcr.bazel.build/modules/llvm/0.6.1/source.json": "2d8cdd3a5f8e1d16132dbbe97250133101e4863c0376d23273d9afd7363cc331",
"https://bcr.bazel.build/modules/nlohmann_json/3.6.1/MODULE.bazel": "6f7b417dcc794d9add9e556673ad25cb3ba835224290f4f848f8e2db1e1fca74",
"https://bcr.bazel.build/modules/nlohmann_json/3.6.1/source.json": "f448c6e8963fdfa7eb831457df83ad63d3d6355018f6574fb017e8169deb43a9",
"https://bcr.bazel.build/modules/openssl/3.5.4.bcr.0/MODULE.bazel": "0f6b8f20b192b9ff0781406256150bcd46f19e66d807dcb0c540548439d6fc35",
"https://bcr.bazel.build/modules/openssl/3.5.4.bcr.0/source.json": "543ed7627cc18e6460b9c1ae4a1b6b1debc5a5e0aca878b00f7531c7186b73da",
"https://bcr.bazel.build/modules/package_metadata/0.0.2/MODULE.bazel": "fb8d25550742674d63d7b250063d4580ca530499f045d70748b1b142081ebb92",
"https://bcr.bazel.build/modules/package_metadata/0.0.5/MODULE.bazel": "ef4f9439e3270fdd6b9fd4dbc3d2f29d13888e44c529a1b243f7a31dfbc2e8e4",
"https://bcr.bazel.build/modules/package_metadata/0.0.5/source.json": "2326db2f6592578177751c3e1f74786b79382cd6008834c9d01ec865b9126a85",
"https://bcr.bazel.build/modules/platforms/0.0.10/MODULE.bazel": "8cb8efaf200bdeb2150d93e162c40f388529a25852b332cec879373771e48ed5",
@@ -147,6 +155,7 @@
"https://bcr.bazel.build/modules/rules_fuzzing/0.5.2/MODULE.bazel": "40c97d1144356f52905566c55811f13b299453a14ac7769dfba2ac38192337a8",
"https://bcr.bazel.build/modules/rules_java/4.0.0/MODULE.bazel": "5a78a7ae82cd1a33cef56dc578c7d2a46ed0dca12643ee45edbb8417899e6f74",
"https://bcr.bazel.build/modules/rules_java/5.3.5/MODULE.bazel": "a4ec4f2db570171e3e5eb753276ee4b389bae16b96207e9d3230895c99644b86",
"https://bcr.bazel.build/modules/rules_java/6.3.0/MODULE.bazel": "a97c7678c19f236a956ad260d59c86e10a463badb7eb2eda787490f4c969b963",
"https://bcr.bazel.build/modules/rules_java/6.5.2/MODULE.bazel": "1d440d262d0e08453fa0c4d8f699ba81609ed0e9a9a0f02cd10b3e7942e61e31",
"https://bcr.bazel.build/modules/rules_java/7.10.0/MODULE.bazel": "530c3beb3067e870561739f1144329a21c851ff771cd752a49e06e3dc9c2e71a",
"https://bcr.bazel.build/modules/rules_java/7.12.2/MODULE.bazel": "579c505165ee757a4280ef83cda0150eea193eed3bef50b1004ba88b99da6de6",
@@ -195,8 +204,8 @@
"https://bcr.bazel.build/modules/rules_python/1.6.0/MODULE.bazel": "7e04ad8f8d5bea40451cf80b1bd8262552aa73f841415d20db96b7241bd027d8",
"https://bcr.bazel.build/modules/rules_python/1.7.0/MODULE.bazel": "d01f995ecd137abf30238ad9ce97f8fc3ac57289c8b24bd0bf53324d937a14f8",
"https://bcr.bazel.build/modules/rules_python/1.7.0/source.json": "028a084b65dcf8f4dc4f82f8778dbe65df133f234b316828a82e060d81bdce32",
"https://bcr.bazel.build/modules/rules_rs/0.0.43/MODULE.bazel": "7adfc2a97d90218ebeb9882de9eb18d9c6b0b41d2884be6ab92c9daadb17c78d",
"https://bcr.bazel.build/modules/rules_rs/0.0.43/source.json": "c315361abf625411f506ab935e660f49f14dc64fa30c125ca0a177c34cd63a2a",
"https://bcr.bazel.build/modules/rules_rs/0.0.40/MODULE.bazel": "63238bcb69010753dbd37b5ed08cb79d3af2d88a40b0fda0b110f60f307e86d4",
"https://bcr.bazel.build/modules/rules_rs/0.0.40/source.json": "ae3b17d2f9e4fbcd3de543318e71f83d8522c8527f385bf2b2a7665ec504827e",
"https://bcr.bazel.build/modules/rules_shell/0.2.0/MODULE.bazel": "fda8a652ab3c7d8fee214de05e7a9916d8b28082234e8d2c0094505c5268ed3c",
"https://bcr.bazel.build/modules/rules_shell/0.3.0/MODULE.bazel": "de4402cd12f4cc8fda2354fce179fdb068c0b9ca1ec2d2b17b3e21b24c1a937b",
"https://bcr.bazel.build/modules/rules_shell/0.4.1/MODULE.bazel": "00e501db01bbf4e3e1dd1595959092c2fadf2087b2852d3f553b5370f5633592",
@@ -211,17 +220,21 @@
"https://bcr.bazel.build/modules/sed/4.9.bcr.3/source.json": "31c0cf4c135ed3fa58298cd7bcfd4301c54ea4cf59d7c4e2ea0a180ce68eb34f",
"https://bcr.bazel.build/modules/stardoc/0.5.1/MODULE.bazel": "1a05d92974d0c122f5ccf09291442580317cdd859f07a8655f1db9a60374f9f8",
"https://bcr.bazel.build/modules/stardoc/0.5.3/MODULE.bazel": "c7f6948dae6999bf0db32c1858ae345f112cacf98f174c7a8bb707e41b974f1c",
"https://bcr.bazel.build/modules/stardoc/0.6.2/MODULE.bazel": "7060193196395f5dd668eda046ccbeacebfd98efc77fed418dbe2b82ffaa39fd",
"https://bcr.bazel.build/modules/stardoc/0.7.0/MODULE.bazel": "05e3d6d30c099b6770e97da986c53bd31844d7f13d41412480ea265ac9e8079c",
"https://bcr.bazel.build/modules/stardoc/0.7.2/MODULE.bazel": "fc152419aa2ea0f51c29583fab1e8c99ddefd5b3778421845606ee628629e0e5",
"https://bcr.bazel.build/modules/stardoc/0.7.2/source.json": "58b029e5e901d6802967754adf0a9056747e8176f017cfe3607c0851f4d42216",
"https://bcr.bazel.build/modules/swift_argument_parser/1.3.1.1/MODULE.bazel": "5e463fbfba7b1701d957555ed45097d7f984211330106ccd1352c6e0af0dcf91",
"https://bcr.bazel.build/modules/swift_argument_parser/1.3.1.2/MODULE.bazel": "75aab2373a4bbe2a1260b9bf2a1ebbdbf872d3bd36f80bff058dccd82e89422f",
"https://bcr.bazel.build/modules/swift_argument_parser/1.3.1.2/source.json": "5fba48bbe0ba48761f9e9f75f92876cafb5d07c0ce059cc7a8027416de94a05b",
"https://bcr.bazel.build/modules/tar.bzl/0.9.0/MODULE.bazel": "452a22d7f02b1c9d7a22ab25edf20f46f3e1101f0f67dc4bfbf9a474ddf02445",
"https://bcr.bazel.build/modules/tar.bzl/0.9.0/source.json": "c732760a374831a2cf5b08839e4be75017196b4d796a5aa55235272ee17cd839",
"https://bcr.bazel.build/modules/tar.bzl/0.2.1/MODULE.bazel": "52d1c00a80a8cc67acbd01649e83d8dd6a9dc426a6c0b754a04fe8c219c76468",
"https://bcr.bazel.build/modules/tar.bzl/0.6.0/MODULE.bazel": "a3584b4edcfafcabd9b0ef9819808f05b372957bbdff41601429d5fd0aac2e7c",
"https://bcr.bazel.build/modules/tar.bzl/0.6.0/source.json": "4a620381df075a16cb3a7ed57bd1d05f7480222394c64a20fa51bdb636fda658",
"https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/MODULE.bazel": "7298990c00040a0e2f121f6c32544bab27d4452f80d9ce51349b1a28f3005c43",
"https://bcr.bazel.build/modules/with_cfg.bzl/0.12.0/MODULE.bazel": "b573395fe63aef4299ba095173e2f62ccfee5ad9bbf7acaa95dba73af9fc2b38",
"https://bcr.bazel.build/modules/with_cfg.bzl/0.12.0/source.json": "3f3fbaeafecaf629877ad152a2c9def21f8d330d91aa94c5dc75bbb98c10b8b8",
"https://bcr.bazel.build/modules/yq.bzl/0.1.1/MODULE.bazel": "9039681f9bcb8958ee2c87ffc74bdafba9f4369096a2b5634b88abc0eaefa072",
"https://bcr.bazel.build/modules/yq.bzl/0.1.1/source.json": "2d2bad780a9f2b9195a4a370314d2c17ae95eaa745cefc2e12fbc49759b15aa3",
"https://bcr.bazel.build/modules/zlib/1.2.11/MODULE.bazel": "07b389abc85fdbca459b69e2ec656ae5622873af3f845e1c9d80fe179f3effa0",
"https://bcr.bazel.build/modules/zlib/1.3.1.bcr.5/MODULE.bazel": "eec517b5bbe5492629466e11dae908d043364302283de25581e3eb944326c4ca",
"https://bcr.bazel.build/modules/zlib/1.3.1.bcr.8/MODULE.bazel": "772c674bb78a0342b8caf32ab5c25085c493ca4ff08398208dcbe4375fe9f776",
@@ -235,7 +248,7 @@
"@@aspect_tools_telemetry+//:extension.bzl%telemetry": {
"general": {
"bzlTransitiveDigest": "dnnhvKMf9MIXMulhbhHBblZdDAfAkiSVjApIXpUz9Y8=",
"usagesDigest": "aAcu2vTLy2HUXbcYIow0P6OHLLog/f5FFk8maEC/fpQ=",
"usagesDigest": "2ScE07TNSr/xo2GnYHCRI4JX4hiql6iZaNKUIUshUv4=",
"recordedInputs": [
"REPO_MAPPING:aspect_tools_telemetry+,bazel_lib bazel_lib+",
"REPO_MAPPING:aspect_tools_telemetry+,bazel_skylib bazel_skylib+"
@@ -248,17 +261,18 @@
"abseil-cpp": "20250814.1",
"alsa_lib": "1.2.9.bcr.4",
"apple_support": "2.1.0",
"aspect_bazel_lib": "2.19.3",
"aspect_tools_telemetry": "0.3.2",
"bazel_features": "1.42.0",
"bazel_lib": "3.2.2",
"bazel_features": "1.34.0",
"bazel_lib": "3.2.0",
"bazel_skylib": "1.8.2",
"buildozer": "8.2.1",
"bzip2": "1.0.8.bcr.3",
"gawk": "5.3.2.bcr.3",
"gawk": "5.3.2.bcr.1",
"googletest": "1.17.0",
"jq.bzl": "0.1.0",
"jsoncpp": "1.9.6",
"libcap": "2.27.bcr.1",
"llvm": "0.6.7",
"nlohmann_json": "3.6.1",
"openssl": "3.5.4.bcr.0",
"package_metadata": "0.0.5",
@@ -278,15 +292,15 @@
"rules_platform": "0.1.0",
"rules_proto": "7.1.0",
"rules_python": "1.7.0",
"rules_rs": "0.0.40",
"rules_shell": "0.6.1",
"rules_swift": "3.1.2",
"sed": "4.9.bcr.3",
"stardoc": "0.7.2",
"swift_argument_parser": "1.3.1.2",
"tar.bzl": "0.9.0",
"toolchains_llvm_bootstrapped": "0.5.2",
"tar.bzl": "0.6.0",
"toolchains_llvm_bootstrapped": "0.5.6",
"with_cfg.bzl": "0.12.0",
"yq.bzl": "0.1.1",
"zlib": "1.3.1.bcr.8",
"zstd": "1.5.7"
}

13
codex-rs/Cargo.lock generated
View File

@@ -1596,7 +1596,6 @@ version = "0.0.0"
dependencies = [
"anyhow",
"codex-backend-openapi-models",
"codex-client",
"codex-core",
"codex-protocol",
"pretty_assertions",
@@ -1684,22 +1683,15 @@ version = "0.0.0"
dependencies = [
"async-trait",
"bytes",
"codex-utils-cargo-bin",
"codex-utils-rustls-provider",
"eventsource-stream",
"futures",
"http 1.4.0",
"opentelemetry",
"opentelemetry_sdk",
"pretty_assertions",
"rand 0.9.2",
"reqwest",
"rustls",
"rustls-native-certs",
"rustls-pki-types",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.18",
"tokio",
"tracing",
@@ -1740,7 +1732,6 @@ dependencies = [
"base64 0.22.1",
"chrono",
"clap",
"codex-client",
"codex-cloud-tasks-client",
"codex-core",
"codex-login",
@@ -2142,7 +2133,6 @@ dependencies = [
"base64 0.22.1",
"chrono",
"codex-app-server-protocol",
"codex-client",
"codex-core",
"core_test_support",
"pretty_assertions",
@@ -2345,7 +2335,6 @@ version = "0.0.0"
dependencies = [
"anyhow",
"axum",
"codex-client",
"codex-keyring-store",
"codex-protocol",
"codex-utils-cargo-bin",
@@ -2495,13 +2484,11 @@ dependencies = [
"chrono",
"clap",
"codex-ansi-escape",
"codex-app-server-client",
"codex-app-server-protocol",
"codex-arg0",
"codex-backend-client",
"codex-chatgpt",
"codex-cli",
"codex-client",
"codex-cloud-requirements",
"codex-core",
"codex-feedback",

View File

@@ -240,8 +240,6 @@ rustls = { version = "0.23", default-features = false, features = [
"ring",
"std",
] }
rustls-native-certs = "0.8.3"
rustls-pki-types = "1.14.0"
schemars = "0.8.22"
seccompiler = "0.5.0"
semver = "1.0"

View File

@@ -36,12 +36,9 @@ use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::Result as JsonRpcResult;
use codex_arg0::Arg0DispatchPaths;
use codex_core::AuthManager;
use codex_core::ThreadManager;
use codex_core::config::Config;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::LoaderOverrides;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_feedback::CodexFeedback;
use codex_protocol::protocol::SessionSource;
use serde::de::DeserializeOwned;
@@ -126,16 +123,6 @@ impl Error for TypedRequestError {
}
}
#[derive(Clone)]
struct SharedCoreManagers {
// Temporary bootstrap escape hatch for embedders that still need direct
// core handles during the in-process app-server migration. Once TUI/exec
// stop depending on direct manager access, remove this wrapper and keep
// manager ownership entirely inside the app-server runtime.
auth_manager: Arc<AuthManager>,
thread_manager: Arc<ThreadManager>,
}
#[derive(Clone)]
pub struct InProcessClientStartArgs {
/// Resolved argv0 dispatch paths used by command execution internals.
@@ -169,30 +156,6 @@ pub struct InProcessClientStartArgs {
}
impl InProcessClientStartArgs {
fn shared_core_managers(&self) -> SharedCoreManagers {
let auth_manager = AuthManager::shared(
self.config.codex_home.clone(),
self.enable_codex_api_key_env,
self.config.cli_auth_credentials_store_mode,
);
let thread_manager = Arc::new(ThreadManager::new(
self.config.as_ref(),
auth_manager.clone(),
self.session_source.clone(),
CollaborationModesConfig {
default_mode_request_user_input: self
.config
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
));
SharedCoreManagers {
auth_manager,
thread_manager,
}
}
/// Builds initialize params from caller-provided metadata.
pub fn initialize_params(&self) -> InitializeParams {
let capabilities = InitializeCapabilities {
@@ -214,7 +177,7 @@ impl InProcessClientStartArgs {
}
}
fn into_runtime_start_args(self, shared_core: &SharedCoreManagers) -> InProcessStartArgs {
fn into_runtime_start_args(self) -> InProcessStartArgs {
let initialize = self.initialize_params();
InProcessStartArgs {
arg0_paths: self.arg0_paths,
@@ -222,8 +185,6 @@ impl InProcessClientStartArgs {
cli_overrides: self.cli_overrides,
loader_overrides: self.loader_overrides,
cloud_requirements: self.cloud_requirements,
auth_manager: Some(shared_core.auth_manager.clone()),
thread_manager: Some(shared_core.thread_manager.clone()),
feedback: self.feedback,
config_warnings: self.config_warnings,
session_source: self.session_source,
@@ -277,8 +238,6 @@ pub struct InProcessAppServerClient {
command_tx: mpsc::Sender<ClientCommand>,
event_rx: mpsc::Receiver<InProcessServerEvent>,
worker_handle: tokio::task::JoinHandle<()>,
auth_manager: Arc<AuthManager>,
thread_manager: Arc<ThreadManager>,
}
impl InProcessAppServerClient {
@@ -289,9 +248,8 @@ impl InProcessAppServerClient {
/// with overload error instead of being silently dropped.
pub async fn start(args: InProcessClientStartArgs) -> IoResult<Self> {
let channel_capacity = args.channel_capacity.max(1);
let shared_core = args.shared_core_managers();
let mut handle =
codex_app_server::in_process::start(args.into_runtime_start_args(&shared_core)).await?;
codex_app_server::in_process::start(args.into_runtime_start_args()).await?;
let request_sender = handle.sender();
let (command_tx, mut command_rx) = mpsc::channel::<ClientCommand>(channel_capacity);
let (event_tx, event_rx) = mpsc::channel::<InProcessServerEvent>(channel_capacity);
@@ -442,21 +400,9 @@ impl InProcessAppServerClient {
command_tx,
event_rx,
worker_handle,
auth_manager: shared_core.auth_manager,
thread_manager: shared_core.thread_manager,
})
}
/// Temporary bootstrap escape hatch for embedders migrating toward RPC-only usage.
pub fn auth_manager(&self) -> Arc<AuthManager> {
self.auth_manager.clone()
}
/// Temporary bootstrap escape hatch for embedders migrating toward RPC-only usage.
pub fn thread_manager(&self) -> Arc<ThreadManager> {
self.thread_manager.clone()
}
/// Sends a typed client request and returns raw JSON-RPC result.
///
/// Callers that expect a concrete response type should usually prefer
@@ -609,8 +555,6 @@ impl InProcessAppServerClient {
command_tx,
event_rx,
worker_handle,
auth_manager: _,
thread_manager: _,
} = self;
let mut worker_handle = worker_handle;
// Drop the caller-facing receiver before asking the worker to shut
@@ -662,8 +606,6 @@ mod tests {
use codex_app_server_protocol::SessionSource as ApiSessionSource;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_core::AuthManager;
use codex_core::ThreadManager;
use codex_core::config::ConfigBuilder;
use pretty_assertions::assert_eq;
use tokio::time::Duration;
@@ -760,35 +702,6 @@ mod tests {
}
}
#[tokio::test]
async fn shared_thread_manager_tracks_threads_started_via_app_server() {
let client = start_test_client(SessionSource::Cli).await;
let response: ThreadStartResponse = client
.request_typed(ClientRequest::ThreadStart {
request_id: RequestId::Integer(3),
params: ThreadStartParams {
ephemeral: Some(true),
..ThreadStartParams::default()
},
})
.await
.expect("thread/start should succeed");
let created_thread_id = codex_protocol::ThreadId::from_string(&response.thread.id)
.expect("thread id should parse");
timeout(
Duration::from_secs(2),
client.thread_manager().get_thread(created_thread_id),
)
.await
.expect("timed out waiting for retained thread manager to observe started thread")
.expect("started thread should be visible through the shared thread manager");
let thread_ids = client.thread_manager().list_thread_ids().await;
assert!(thread_ids.contains(&created_thread_id));
client.shutdown().await.expect("shutdown should complete");
}
#[tokio::test]
async fn tiny_channel_capacity_still_supports_request_roundtrip() {
let client = start_test_client_with_capacity(SessionSource::Exec, 1).await;
@@ -833,22 +746,6 @@ mod tests {
let (command_tx, _command_rx) = mpsc::channel(1);
let (event_tx, event_rx) = mpsc::channel(1);
let worker_handle = tokio::spawn(async {});
let config = build_test_config().await;
let auth_manager = AuthManager::shared(
config.codex_home.clone(),
false,
config.cli_auth_credentials_store_mode,
);
let thread_manager = Arc::new(ThreadManager::new(
&config,
auth_manager.clone(),
SessionSource::Exec,
CollaborationModesConfig {
default_mode_request_user_input: config
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
));
event_tx
.send(InProcessServerEvent::Lagged { skipped: 3 })
.await
@@ -859,8 +756,6 @@ mod tests {
command_tx,
event_rx,
worker_handle,
auth_manager,
thread_manager,
};
let event = timeout(Duration::from_secs(2), client.next_event())
@@ -903,30 +798,4 @@ mod tests {
skipped: 1
}));
}
#[tokio::test]
async fn accessors_expose_retained_shared_managers() {
let client = start_test_client(SessionSource::Cli).await;
assert!(
Arc::ptr_eq(&client.auth_manager(), &client.auth_manager()),
"auth_manager accessor should clone the retained shared manager"
);
assert!(
Arc::ptr_eq(&client.thread_manager(), &client.thread_manager()),
"thread_manager accessor should clone the retained shared manager"
);
client.shutdown().await.expect("shutdown should complete");
}
#[tokio::test]
async fn shutdown_completes_promptly_with_retained_shared_managers() {
let client = start_test_client(SessionSource::Cli).await;
timeout(Duration::from_secs(1), client.shutdown())
.await
.expect("shutdown should not wait for the 5s fallback timeout")
.expect("shutdown should complete");
}
}

View File

@@ -52,7 +52,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -81,9 +81,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]
@@ -1143,21 +1143,6 @@
},
"type": "object"
},
"PluginReadParams": {
"properties": {
"marketplacePath": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"pluginName": {
"type": "string"
}
},
"required": [
"marketplacePath",
"pluginName"
],
"type": "object"
},
"PluginUninstallParams": {
"properties": {
"pluginId": {
@@ -3574,30 +3559,6 @@
"title": "Plugin/listRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"plugin/read"
],
"title": "Plugin/readRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/PluginReadParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Plugin/readRequest",
"type": "object"
},
{
"properties": {
"id": {

View File

@@ -643,30 +643,6 @@
"title": "Plugin/listRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
},
"method": {
"enum": [
"plugin/read"
],
"title": "Plugin/readRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/v2/PluginReadParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Plugin/readRequest",
"type": "object"
},
{
"properties": {
"id": {
@@ -2171,21 +2147,11 @@
"InitializeResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"platformFamily": {
"description": "Platform family for the running app-server target, for example `\"unix\"` or `\"windows\"`.",
"type": "string"
},
"platformOs": {
"description": "Operating system for the running app-server target, for example `\"macos\"`, `\"linux\"`, or `\"windows\"`.",
"type": "string"
},
"userAgent": {
"type": "string"
}
},
"required": [
"platformFamily",
"platformOs",
"userAgent"
],
"title": "InitializeResponse",
@@ -5061,7 +5027,7 @@
"type": "object"
},
"AppSummary": {
"description": "EXPERIMENTAL - app metadata summary for plugin responses.",
"description": "EXPERIMENTAL - app metadata summary for plugin-install responses.",
"properties": {
"description": {
"type": [
@@ -5226,7 +5192,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -5255,9 +5221,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]
@@ -8550,52 +8516,6 @@
],
"type": "string"
},
"PluginDetail": {
"properties": {
"apps": {
"items": {
"$ref": "#/definitions/v2/AppSummary"
},
"type": "array"
},
"description": {
"type": [
"string",
"null"
]
},
"marketplaceName": {
"type": "string"
},
"marketplacePath": {
"$ref": "#/definitions/v2/AbsolutePathBuf"
},
"mcpServers": {
"items": {
"type": "string"
},
"type": "array"
},
"skills": {
"items": {
"$ref": "#/definitions/v2/SkillSummary"
},
"type": "array"
},
"summary": {
"$ref": "#/definitions/v2/PluginSummary"
}
},
"required": [
"apps",
"marketplaceName",
"marketplacePath",
"mcpServers",
"skills",
"summary"
],
"type": "object"
},
"PluginInstallParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
@@ -8807,36 +8727,6 @@
],
"type": "object"
},
"PluginReadParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"marketplacePath": {
"$ref": "#/definitions/v2/AbsolutePathBuf"
},
"pluginName": {
"type": "string"
}
},
"required": [
"marketplacePath",
"pluginName"
],
"title": "PluginReadParams",
"type": "object"
},
"PluginReadResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"plugin": {
"$ref": "#/definitions/v2/PluginDetail"
}
},
"required": [
"plugin"
],
"title": "PluginReadResponse",
"type": "object"
},
"PluginSource": {
"oneOf": [
{
@@ -10581,41 +10471,6 @@
],
"type": "string"
},
"SkillSummary": {
"properties": {
"description": {
"type": "string"
},
"interface": {
"anyOf": [
{
"$ref": "#/definitions/v2/SkillInterface"
},
{
"type": "null"
}
]
},
"name": {
"type": "string"
},
"path": {
"type": "string"
},
"shortDescription": {
"type": [
"string",
"null"
]
}
},
"required": [
"description",
"name",
"path"
],
"type": "object"
},
"SkillToolDependency": {
"properties": {
"command": {

View File

@@ -473,7 +473,7 @@
"type": "object"
},
"AppSummary": {
"description": "EXPERIMENTAL - app metadata summary for plugin responses.",
"description": "EXPERIMENTAL - app metadata summary for plugin-install responses.",
"properties": {
"description": {
"type": [
@@ -638,7 +638,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -667,9 +667,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]
@@ -1162,30 +1162,6 @@
"title": "Plugin/listRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"plugin/read"
],
"title": "Plugin/readRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/PluginReadParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Plugin/readRequest",
"type": "object"
},
{
"properties": {
"id": {
@@ -5335,52 +5311,6 @@
],
"type": "string"
},
"PluginDetail": {
"properties": {
"apps": {
"items": {
"$ref": "#/definitions/AppSummary"
},
"type": "array"
},
"description": {
"type": [
"string",
"null"
]
},
"marketplaceName": {
"type": "string"
},
"marketplacePath": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"mcpServers": {
"items": {
"type": "string"
},
"type": "array"
},
"skills": {
"items": {
"$ref": "#/definitions/SkillSummary"
},
"type": "array"
},
"summary": {
"$ref": "#/definitions/PluginSummary"
}
},
"required": [
"apps",
"marketplaceName",
"marketplacePath",
"mcpServers",
"skills",
"summary"
],
"type": "object"
},
"PluginInstallParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
@@ -5592,36 +5522,6 @@
],
"type": "object"
},
"PluginReadParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"marketplacePath": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"pluginName": {
"type": "string"
}
},
"required": [
"marketplacePath",
"pluginName"
],
"title": "PluginReadParams",
"type": "object"
},
"PluginReadResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"plugin": {
"$ref": "#/definitions/PluginDetail"
}
},
"required": [
"plugin"
],
"title": "PluginReadResponse",
"type": "object"
},
"PluginSource": {
"oneOf": [
{
@@ -8298,41 +8198,6 @@
],
"type": "string"
},
"SkillSummary": {
"properties": {
"description": {
"type": "string"
},
"interface": {
"anyOf": [
{
"$ref": "#/definitions/SkillInterface"
},
{
"type": "null"
}
]
},
"name": {
"type": "string"
},
"path": {
"type": "string"
},
"shortDescription": {
"type": [
"string",
"null"
]
}
},
"required": [
"description",
"name",
"path"
],
"type": "object"
},
"SkillToolDependency": {
"properties": {
"command": {

View File

@@ -1,21 +1,11 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"platformFamily": {
"description": "Platform family for the running app-server target, for example `\"unix\"` or `\"windows\"`.",
"type": "string"
},
"platformOs": {
"description": "Operating system for the running app-server target, for example `\"macos\"`, `\"linux\"`, or `\"windows\"`.",
"type": "string"
},
"userAgent": {
"type": "string"
}
},
"required": [
"platformFamily",
"platformOs",
"userAgent"
],
"title": "InitializeResponse",

View File

@@ -143,7 +143,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -172,9 +172,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]

View File

@@ -15,7 +15,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -44,9 +44,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]

View File

@@ -2,7 +2,7 @@
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"AppSummary": {
"description": "EXPERIMENTAL - app metadata summary for plugin responses.",
"description": "EXPERIMENTAL - app metadata summary for plugin-install responses.",
"properties": {
"description": {
"type": [

View File

@@ -1,23 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"AbsolutePathBuf": {
"description": "A path that is guaranteed to be absolute and normalized (though it is not guaranteed to be canonicalized or exist on the filesystem).\n\nIMPORTANT: When deserializing an `AbsolutePathBuf`, a base path must be set using [AbsolutePathBufGuard::new]. If no base path is set, the deserialization will fail unless the path being deserialized is already absolute.",
"type": "string"
}
},
"properties": {
"marketplacePath": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"pluginName": {
"type": "string"
}
},
"required": [
"marketplacePath",
"pluginName"
],
"title": "PluginReadParams",
"type": "object"
}

View File

@@ -1,354 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"AbsolutePathBuf": {
"description": "A path that is guaranteed to be absolute and normalized (though it is not guaranteed to be canonicalized or exist on the filesystem).\n\nIMPORTANT: When deserializing an `AbsolutePathBuf`, a base path must be set using [AbsolutePathBufGuard::new]. If no base path is set, the deserialization will fail unless the path being deserialized is already absolute.",
"type": "string"
},
"AppSummary": {
"description": "EXPERIMENTAL - app metadata summary for plugin responses.",
"properties": {
"description": {
"type": [
"string",
"null"
]
},
"id": {
"type": "string"
},
"installUrl": {
"type": [
"string",
"null"
]
},
"name": {
"type": "string"
}
},
"required": [
"id",
"name"
],
"type": "object"
},
"PluginAuthPolicy": {
"enum": [
"ON_INSTALL",
"ON_USE"
],
"type": "string"
},
"PluginDetail": {
"properties": {
"apps": {
"items": {
"$ref": "#/definitions/AppSummary"
},
"type": "array"
},
"description": {
"type": [
"string",
"null"
]
},
"marketplaceName": {
"type": "string"
},
"marketplacePath": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"mcpServers": {
"items": {
"type": "string"
},
"type": "array"
},
"skills": {
"items": {
"$ref": "#/definitions/SkillSummary"
},
"type": "array"
},
"summary": {
"$ref": "#/definitions/PluginSummary"
}
},
"required": [
"apps",
"marketplaceName",
"marketplacePath",
"mcpServers",
"skills",
"summary"
],
"type": "object"
},
"PluginInstallPolicy": {
"enum": [
"NOT_AVAILABLE",
"AVAILABLE",
"INSTALLED_BY_DEFAULT"
],
"type": "string"
},
"PluginInterface": {
"properties": {
"brandColor": {
"type": [
"string",
"null"
]
},
"capabilities": {
"items": {
"type": "string"
},
"type": "array"
},
"category": {
"type": [
"string",
"null"
]
},
"composerIcon": {
"anyOf": [
{
"$ref": "#/definitions/AbsolutePathBuf"
},
{
"type": "null"
}
]
},
"defaultPrompt": {
"type": [
"string",
"null"
]
},
"developerName": {
"type": [
"string",
"null"
]
},
"displayName": {
"type": [
"string",
"null"
]
},
"logo": {
"anyOf": [
{
"$ref": "#/definitions/AbsolutePathBuf"
},
{
"type": "null"
}
]
},
"longDescription": {
"type": [
"string",
"null"
]
},
"privacyPolicyUrl": {
"type": [
"string",
"null"
]
},
"screenshots": {
"items": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"type": "array"
},
"shortDescription": {
"type": [
"string",
"null"
]
},
"termsOfServiceUrl": {
"type": [
"string",
"null"
]
},
"websiteUrl": {
"type": [
"string",
"null"
]
}
},
"required": [
"capabilities",
"screenshots"
],
"type": "object"
},
"PluginSource": {
"oneOf": [
{
"properties": {
"path": {
"$ref": "#/definitions/AbsolutePathBuf"
},
"type": {
"enum": [
"local"
],
"title": "LocalPluginSourceType",
"type": "string"
}
},
"required": [
"path",
"type"
],
"title": "LocalPluginSource",
"type": "object"
}
]
},
"PluginSummary": {
"properties": {
"authPolicy": {
"$ref": "#/definitions/PluginAuthPolicy"
},
"enabled": {
"type": "boolean"
},
"id": {
"type": "string"
},
"installPolicy": {
"$ref": "#/definitions/PluginInstallPolicy"
},
"installed": {
"type": "boolean"
},
"interface": {
"anyOf": [
{
"$ref": "#/definitions/PluginInterface"
},
{
"type": "null"
}
]
},
"name": {
"type": "string"
},
"source": {
"$ref": "#/definitions/PluginSource"
}
},
"required": [
"authPolicy",
"enabled",
"id",
"installPolicy",
"installed",
"name",
"source"
],
"type": "object"
},
"SkillInterface": {
"properties": {
"brandColor": {
"type": [
"string",
"null"
]
},
"defaultPrompt": {
"type": [
"string",
"null"
]
},
"displayName": {
"type": [
"string",
"null"
]
},
"iconLarge": {
"type": [
"string",
"null"
]
},
"iconSmall": {
"type": [
"string",
"null"
]
},
"shortDescription": {
"type": [
"string",
"null"
]
}
},
"type": "object"
},
"SkillSummary": {
"properties": {
"description": {
"type": "string"
},
"interface": {
"anyOf": [
{
"$ref": "#/definitions/SkillInterface"
},
{
"type": "null"
}
]
},
"name": {
"type": "string"
},
"path": {
"type": "string"
},
"shortDescription": {
"type": [
"string",
"null"
]
}
},
"required": [
"description",
"name",
"path"
],
"type": "object"
}
},
"properties": {
"plugin": {
"$ref": "#/definitions/PluginDetail"
}
},
"required": [
"plugin"
],
"title": "PluginReadResponse",
"type": "object"
}

View File

@@ -15,7 +15,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -44,9 +44,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]

View File

@@ -19,7 +19,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -48,9 +48,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]

View File

@@ -15,7 +15,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -44,9 +44,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]

View File

@@ -19,7 +19,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -48,9 +48,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]

View File

@@ -15,7 +15,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -44,9 +44,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]

View File

@@ -19,7 +19,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -48,9 +48,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]

View File

@@ -19,7 +19,7 @@
{
"additionalProperties": false,
"properties": {
"granular": {
"reject": {
"properties": {
"mcp_elicitations": {
"type": "boolean"
@@ -48,9 +48,9 @@
}
},
"required": [
"granular"
"reject"
],
"title": "GranularAskForApproval",
"title": "RejectAskForApproval",
"type": "object"
}
]

View File

@@ -27,7 +27,6 @@ import type { McpServerOauthLoginParams } from "./v2/McpServerOauthLoginParams";
import type { ModelListParams } from "./v2/ModelListParams";
import type { PluginInstallParams } from "./v2/PluginInstallParams";
import type { PluginListParams } from "./v2/PluginListParams";
import type { PluginReadParams } from "./v2/PluginReadParams";
import type { PluginUninstallParams } from "./v2/PluginUninstallParams";
import type { ReviewStartParams } from "./v2/ReviewStartParams";
import type { SkillsConfigWriteParams } from "./v2/SkillsConfigWriteParams";
@@ -55,4 +54,4 @@ import type { WindowsSandboxSetupStartParams } from "./v2/WindowsSandboxSetupSta
/**
* Request from the client to the server.
*/
export type ClientRequest ={ "method": "initialize", id: RequestId, params: InitializeParams, } | { "method": "thread/start", id: RequestId, params: ThreadStartParams, } | { "method": "thread/resume", id: RequestId, params: ThreadResumeParams, } | { "method": "thread/fork", id: RequestId, params: ThreadForkParams, } | { "method": "thread/archive", id: RequestId, params: ThreadArchiveParams, } | { "method": "thread/unsubscribe", id: RequestId, params: ThreadUnsubscribeParams, } | { "method": "thread/name/set", id: RequestId, params: ThreadSetNameParams, } | { "method": "thread/metadata/update", id: RequestId, params: ThreadMetadataUpdateParams, } | { "method": "thread/unarchive", id: RequestId, params: ThreadUnarchiveParams, } | { "method": "thread/compact/start", id: RequestId, params: ThreadCompactStartParams, } | { "method": "thread/rollback", id: RequestId, params: ThreadRollbackParams, } | { "method": "thread/list", id: RequestId, params: ThreadListParams, } | { "method": "thread/loaded/list", id: RequestId, params: ThreadLoadedListParams, } | { "method": "thread/read", id: RequestId, params: ThreadReadParams, } | { "method": "skills/list", id: RequestId, params: SkillsListParams, } | { "method": "plugin/list", id: RequestId, params: PluginListParams, } | { "method": "plugin/read", id: RequestId, params: PluginReadParams, } | { "method": "skills/remote/list", id: RequestId, params: SkillsRemoteReadParams, } | { "method": "skills/remote/export", id: RequestId, params: SkillsRemoteWriteParams, } | { "method": "app/list", id: RequestId, params: AppsListParams, } | { "method": "skills/config/write", id: RequestId, params: SkillsConfigWriteParams, } | { "method": "plugin/install", id: RequestId, params: PluginInstallParams, } | { "method": "plugin/uninstall", id: RequestId, params: PluginUninstallParams, } | { "method": "turn/start", id: RequestId, params: TurnStartParams, } | { "method": "turn/steer", id: RequestId, params: TurnSteerParams, } | { "method": 
"turn/interrupt", id: RequestId, params: TurnInterruptParams, } | { "method": "review/start", id: RequestId, params: ReviewStartParams, } | { "method": "model/list", id: RequestId, params: ModelListParams, } | { "method": "experimentalFeature/list", id: RequestId, params: ExperimentalFeatureListParams, } | { "method": "mcpServer/oauth/login", id: RequestId, params: McpServerOauthLoginParams, } | { "method": "config/mcpServer/reload", id: RequestId, params: undefined, } | { "method": "mcpServerStatus/list", id: RequestId, params: ListMcpServerStatusParams, } | { "method": "windowsSandbox/setupStart", id: RequestId, params: WindowsSandboxSetupStartParams, } | { "method": "account/login/start", id: RequestId, params: LoginAccountParams, } | { "method": "account/login/cancel", id: RequestId, params: CancelLoginAccountParams, } | { "method": "account/logout", id: RequestId, params: undefined, } | { "method": "account/rateLimits/read", id: RequestId, params: undefined, } | { "method": "feedback/upload", id: RequestId, params: FeedbackUploadParams, } | { "method": "command/exec", id: RequestId, params: CommandExecParams, } | { "method": "command/exec/write", id: RequestId, params: CommandExecWriteParams, } | { "method": "command/exec/terminate", id: RequestId, params: CommandExecTerminateParams, } | { "method": "command/exec/resize", id: RequestId, params: CommandExecResizeParams, } | { "method": "config/read", id: RequestId, params: ConfigReadParams, } | { "method": "externalAgentConfig/detect", id: RequestId, params: ExternalAgentConfigDetectParams, } | { "method": "externalAgentConfig/import", id: RequestId, params: ExternalAgentConfigImportParams, } | { "method": "config/value/write", id: RequestId, params: ConfigValueWriteParams, } | { "method": "config/batchWrite", id: RequestId, params: ConfigBatchWriteParams, } | { "method": "configRequirements/read", id: RequestId, params: undefined, } | { "method": "account/read", id: RequestId, params: GetAccountParams, } | { 
"method": "getConversationSummary", id: RequestId, params: GetConversationSummaryParams, } | { "method": "gitDiffToRemote", id: RequestId, params: GitDiffToRemoteParams, } | { "method": "getAuthStatus", id: RequestId, params: GetAuthStatusParams, } | { "method": "fuzzyFileSearch", id: RequestId, params: FuzzyFileSearchParams, };
export type ClientRequest ={ "method": "initialize", id: RequestId, params: InitializeParams, } | { "method": "thread/start", id: RequestId, params: ThreadStartParams, } | { "method": "thread/resume", id: RequestId, params: ThreadResumeParams, } | { "method": "thread/fork", id: RequestId, params: ThreadForkParams, } | { "method": "thread/archive", id: RequestId, params: ThreadArchiveParams, } | { "method": "thread/unsubscribe", id: RequestId, params: ThreadUnsubscribeParams, } | { "method": "thread/name/set", id: RequestId, params: ThreadSetNameParams, } | { "method": "thread/metadata/update", id: RequestId, params: ThreadMetadataUpdateParams, } | { "method": "thread/unarchive", id: RequestId, params: ThreadUnarchiveParams, } | { "method": "thread/compact/start", id: RequestId, params: ThreadCompactStartParams, } | { "method": "thread/rollback", id: RequestId, params: ThreadRollbackParams, } | { "method": "thread/list", id: RequestId, params: ThreadListParams, } | { "method": "thread/loaded/list", id: RequestId, params: ThreadLoadedListParams, } | { "method": "thread/read", id: RequestId, params: ThreadReadParams, } | { "method": "skills/list", id: RequestId, params: SkillsListParams, } | { "method": "plugin/list", id: RequestId, params: PluginListParams, } | { "method": "skills/remote/list", id: RequestId, params: SkillsRemoteReadParams, } | { "method": "skills/remote/export", id: RequestId, params: SkillsRemoteWriteParams, } | { "method": "app/list", id: RequestId, params: AppsListParams, } | { "method": "skills/config/write", id: RequestId, params: SkillsConfigWriteParams, } | { "method": "plugin/install", id: RequestId, params: PluginInstallParams, } | { "method": "plugin/uninstall", id: RequestId, params: PluginUninstallParams, } | { "method": "turn/start", id: RequestId, params: TurnStartParams, } | { "method": "turn/steer", id: RequestId, params: TurnSteerParams, } | { "method": "turn/interrupt", id: RequestId, params: TurnInterruptParams, } | { "method": 
"review/start", id: RequestId, params: ReviewStartParams, } | { "method": "model/list", id: RequestId, params: ModelListParams, } | { "method": "experimentalFeature/list", id: RequestId, params: ExperimentalFeatureListParams, } | { "method": "mcpServer/oauth/login", id: RequestId, params: McpServerOauthLoginParams, } | { "method": "config/mcpServer/reload", id: RequestId, params: undefined, } | { "method": "mcpServerStatus/list", id: RequestId, params: ListMcpServerStatusParams, } | { "method": "windowsSandbox/setupStart", id: RequestId, params: WindowsSandboxSetupStartParams, } | { "method": "account/login/start", id: RequestId, params: LoginAccountParams, } | { "method": "account/login/cancel", id: RequestId, params: CancelLoginAccountParams, } | { "method": "account/logout", id: RequestId, params: undefined, } | { "method": "account/rateLimits/read", id: RequestId, params: undefined, } | { "method": "feedback/upload", id: RequestId, params: FeedbackUploadParams, } | { "method": "command/exec", id: RequestId, params: CommandExecParams, } | { "method": "command/exec/write", id: RequestId, params: CommandExecWriteParams, } | { "method": "command/exec/terminate", id: RequestId, params: CommandExecTerminateParams, } | { "method": "command/exec/resize", id: RequestId, params: CommandExecResizeParams, } | { "method": "config/read", id: RequestId, params: ConfigReadParams, } | { "method": "externalAgentConfig/detect", id: RequestId, params: ExternalAgentConfigDetectParams, } | { "method": "externalAgentConfig/import", id: RequestId, params: ExternalAgentConfigImportParams, } | { "method": "config/value/write", id: RequestId, params: ConfigValueWriteParams, } | { "method": "config/batchWrite", id: RequestId, params: ConfigBatchWriteParams, } | { "method": "configRequirements/read", id: RequestId, params: undefined, } | { "method": "account/read", id: RequestId, params: GetAccountParams, } | { "method": "getConversationSummary", id: RequestId, params: 
GetConversationSummaryParams, } | { "method": "gitDiffToRemote", id: RequestId, params: GitDiffToRemoteParams, } | { "method": "getAuthStatus", id: RequestId, params: GetAuthStatusParams, } | { "method": "fuzzyFileSearch", id: RequestId, params: FuzzyFileSearchParams, };

View File

@@ -2,14 +2,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type InitializeResponse = { userAgent: string,
/**
* Platform family for the running app-server target, for example
* `"unix"` or `"windows"`.
*/
platformFamily: string,
/**
* Operating system for the running app-server target, for example
* `"macos"`, `"linux"`, or `"windows"`.
*/
platformOs: string, };
export type InitializeResponse = { userAgent: string, };

View File

@@ -3,6 +3,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* EXPERIMENTAL - app metadata summary for plugin responses.
* EXPERIMENTAL - app metadata summary for plugin-install responses.
*/
export type AppSummary = { id: string, name: string, description: string | null, installUrl: string | null, };

View File

@@ -2,4 +2,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type AskForApproval = "untrusted" | "on-failure" | "on-request" | { "granular": { sandbox_approval: boolean, rules: boolean, skill_approval: boolean, request_permissions: boolean, mcp_elicitations: boolean, } } | "never";
export type AskForApproval = "untrusted" | "on-failure" | "on-request" | { "reject": { sandbox_approval: boolean, rules: boolean, skill_approval: boolean, request_permissions: boolean, mcp_elicitations: boolean, } } | "never";

View File

@@ -1,9 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
import type { AppSummary } from "./AppSummary";
import type { PluginSummary } from "./PluginSummary";
import type { SkillSummary } from "./SkillSummary";
export type PluginDetail = { marketplaceName: string, marketplacePath: AbsolutePathBuf, summary: PluginSummary, description: string | null, skills: Array<SkillSummary>, apps: Array<AppSummary>, mcpServers: Array<string>, };

View File

@@ -1,6 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
export type PluginReadParams = { marketplacePath: AbsolutePathBuf, pluginName: string, };

View File

@@ -1,6 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PluginDetail } from "./PluginDetail";
export type PluginReadResponse = { plugin: PluginDetail, };

View File

@@ -1,6 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SkillInterface } from "./SkillInterface";
export type SkillSummary = { name: string, description: string, shortDescription: string | null, interface: SkillInterface | null, path: string, };

View File

@@ -176,7 +176,6 @@ export type { PermissionsRequestApprovalParams } from "./PermissionsRequestAppro
export type { PermissionsRequestApprovalResponse } from "./PermissionsRequestApprovalResponse";
export type { PlanDeltaNotification } from "./PlanDeltaNotification";
export type { PluginAuthPolicy } from "./PluginAuthPolicy";
export type { PluginDetail } from "./PluginDetail";
export type { PluginInstallParams } from "./PluginInstallParams";
export type { PluginInstallPolicy } from "./PluginInstallPolicy";
export type { PluginInstallResponse } from "./PluginInstallResponse";
@@ -184,8 +183,6 @@ export type { PluginInterface } from "./PluginInterface";
export type { PluginListParams } from "./PluginListParams";
export type { PluginListResponse } from "./PluginListResponse";
export type { PluginMarketplaceEntry } from "./PluginMarketplaceEntry";
export type { PluginReadParams } from "./PluginReadParams";
export type { PluginReadResponse } from "./PluginReadResponse";
export type { PluginSource } from "./PluginSource";
export type { PluginSummary } from "./PluginSummary";
export type { PluginUninstallParams } from "./PluginUninstallParams";
@@ -216,7 +213,6 @@ export type { SkillErrorInfo } from "./SkillErrorInfo";
export type { SkillInterface } from "./SkillInterface";
export type { SkillMetadata } from "./SkillMetadata";
export type { SkillScope } from "./SkillScope";
export type { SkillSummary } from "./SkillSummary";
export type { SkillToolDependency } from "./SkillToolDependency";
export type { SkillsChangedNotification } from "./SkillsChangedNotification";
export type { SkillsConfigWriteParams } from "./SkillsConfigWriteParams";

View File

@@ -5,7 +5,6 @@ use codex_protocol::protocol::W3cTraceContext;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use std::fmt;
use ts_rs::TS;
pub const JSONRPC_VERSION: &str = "2.0";
@@ -20,15 +19,6 @@ pub enum RequestId {
Integer(i64),
}
impl fmt::Display for RequestId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::String(value) => f.write_str(value),
Self::Integer(value) => write!(f, "{value}"),
}
}
}
pub type Result = serde_json::Value;
/// Refers to any valid JSON-RPC object that can be decoded off the wire, or encoded to be sent.

View File

@@ -296,10 +296,6 @@ client_request_definitions! {
params: v2::PluginListParams,
response: v2::PluginListResponse,
},
PluginRead => "plugin/read" {
params: v2::PluginReadParams,
response: v2::PluginReadResponse,
},
SkillsRemoteList => "skills/remote/list" {
params: v2::SkillsRemoteReadParams,
response: v2::SkillsRemoteReadResponse,

View File

@@ -56,12 +56,6 @@ pub struct InitializeCapabilities {
#[serde(rename_all = "camelCase")]
pub struct InitializeResponse {
pub user_agent: String,
/// Platform family for the running app-server target, for example
/// `"unix"` or `"windows"`.
pub platform_family: String,
/// Operating system for the running app-server target, for example
/// `"macos"`, `"linux"`, or `"windows"`.
pub platform_os: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]

View File

@@ -50,7 +50,6 @@ use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
use codex_protocol::protocol::CodexErrorInfo as CoreCodexErrorInfo;
use codex_protocol::protocol::CreditsSnapshot as CoreCreditsSnapshot;
use codex_protocol::protocol::ExecCommandStatus as CoreExecCommandStatus;
use codex_protocol::protocol::GranularApprovalConfig as CoreGranularApprovalConfig;
use codex_protocol::protocol::HookEventName as CoreHookEventName;
use codex_protocol::protocol::HookExecutionMode as CoreHookExecutionMode;
use codex_protocol::protocol::HookHandlerType as CoreHookHandlerType;
@@ -66,6 +65,7 @@ use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
use codex_protocol::protocol::ReadOnlyAccess as CoreReadOnlyAccess;
use codex_protocol::protocol::RealtimeAudioFrame as CoreRealtimeAudioFrame;
use codex_protocol::protocol::RejectConfig as CoreRejectConfig;
use codex_protocol::protocol::ReviewDecision as CoreReviewDecision;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SkillDependencies as CoreSkillDependencies;
@@ -201,8 +201,8 @@ pub enum AskForApproval {
UnlessTrusted,
OnFailure,
OnRequest,
#[experimental("askForApproval.granular")]
Granular {
#[experimental("askForApproval.reject")]
Reject {
sandbox_approval: bool,
rules: bool,
#[serde(default)]
@@ -220,13 +220,13 @@ impl AskForApproval {
AskForApproval::UnlessTrusted => CoreAskForApproval::UnlessTrusted,
AskForApproval::OnFailure => CoreAskForApproval::OnFailure,
AskForApproval::OnRequest => CoreAskForApproval::OnRequest,
AskForApproval::Granular {
AskForApproval::Reject {
sandbox_approval,
rules,
skill_approval,
request_permissions,
mcp_elicitations,
} => CoreAskForApproval::Granular(CoreGranularApprovalConfig {
} => CoreAskForApproval::Reject(CoreRejectConfig {
sandbox_approval,
rules,
skill_approval,
@@ -244,12 +244,12 @@ impl From<CoreAskForApproval> for AskForApproval {
CoreAskForApproval::UnlessTrusted => AskForApproval::UnlessTrusted,
CoreAskForApproval::OnFailure => AskForApproval::OnFailure,
CoreAskForApproval::OnRequest => AskForApproval::OnRequest,
CoreAskForApproval::Granular(granular_config) => AskForApproval::Granular {
sandbox_approval: granular_config.sandbox_approval,
rules: granular_config.rules,
skill_approval: granular_config.skill_approval,
request_permissions: granular_config.request_permissions,
mcp_elicitations: granular_config.mcp_elicitations,
CoreAskForApproval::Reject(reject_config) => AskForApproval::Reject {
sandbox_approval: reject_config.sandbox_approval,
rules: reject_config.rules,
skill_approval: reject_config.skill_approval,
request_permissions: reject_config.request_permissions,
mcp_elicitations: reject_config.mcp_elicitations,
},
CoreAskForApproval::Never => AskForApproval::Never,
}
@@ -1979,7 +1979,7 @@ pub struct AppInfo {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
/// EXPERIMENTAL - app metadata summary for plugin responses.
/// EXPERIMENTAL - app metadata summary for plugin-install responses.
pub struct AppSummary {
pub id: String,
pub name: String,
@@ -2881,21 +2881,6 @@ pub struct PluginListResponse {
pub remote_sync_error: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct PluginReadParams {
pub marketplace_path: AbsolutePathBuf,
pub plugin_name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct PluginReadResponse {
pub plugin: PluginDetail,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -3107,30 +3092,6 @@ pub struct PluginSummary {
pub interface: Option<PluginInterface>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct PluginDetail {
pub marketplace_name: String,
pub marketplace_path: AbsolutePathBuf,
pub summary: PluginSummary,
pub description: Option<String>,
pub skills: Vec<SkillSummary>,
pub apps: Vec<AppSummary>,
pub mcp_servers: Vec<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SkillSummary {
pub name: String,
pub description: String,
pub short_description: Option<String>,
pub interface: Option<SkillInterface>,
pub path: PathBuf,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -6231,8 +6192,8 @@ mod tests {
}
#[test]
fn ask_for_approval_granular_round_trips_request_permissions_flag() {
let v2_policy = AskForApproval::Granular {
fn ask_for_approval_reject_round_trips_request_permissions_flag() {
let v2_policy = AskForApproval::Reject {
sandbox_approval: true,
rules: false,
skill_approval: false,
@@ -6243,7 +6204,7 @@ mod tests {
let core_policy = v2_policy.to_core();
assert_eq!(
core_policy,
CoreAskForApproval::Granular(CoreGranularApprovalConfig {
CoreAskForApproval::Reject(CoreRejectConfig {
sandbox_approval: true,
rules: false,
skill_approval: false,
@@ -6257,19 +6218,19 @@ mod tests {
}
#[test]
fn ask_for_approval_granular_defaults_missing_optional_flags_to_false() {
fn ask_for_approval_reject_defaults_missing_optional_flags_to_false() {
let decoded = serde_json::from_value::<AskForApproval>(serde_json::json!({
"granular": {
"reject": {
"sandbox_approval": true,
"rules": false,
"mcp_elicitations": true,
}
}))
.expect("granular approval policy should deserialize");
.expect("legacy reject approval policy should deserialize");
assert_eq!(
decoded,
AskForApproval::Granular {
AskForApproval::Reject {
sandbox_approval: true,
rules: false,
skill_approval: false,
@@ -6280,9 +6241,9 @@ mod tests {
}
#[test]
fn ask_for_approval_granular_is_marked_experimental() {
fn ask_for_approval_reject_is_marked_experimental() {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(
&AskForApproval::Granular {
&AskForApproval::Reject {
sandbox_approval: true,
rules: false,
skill_approval: false,
@@ -6291,7 +6252,7 @@ mod tests {
},
);
assert_eq!(reason, Some("askForApproval.granular"));
assert_eq!(reason, Some("askForApproval.reject"));
assert_eq!(
crate::experimental_api::ExperimentalApi::experimental_reason(
&AskForApproval::OnRequest,
@@ -6301,11 +6262,11 @@ mod tests {
}
#[test]
fn profile_v2_granular_approval_policy_is_marked_experimental() {
fn profile_v2_reject_approval_policy_is_marked_experimental() {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&ProfileV2 {
model: None,
model_provider: None,
approval_policy: Some(AskForApproval::Granular {
approval_policy: Some(AskForApproval::Reject {
sandbox_approval: true,
rules: false,
skill_approval: false,
@@ -6322,18 +6283,18 @@ mod tests {
additional: HashMap::new(),
});
assert_eq!(reason, Some("askForApproval.granular"));
assert_eq!(reason, Some("askForApproval.reject"));
}
#[test]
fn config_granular_approval_policy_is_marked_experimental() {
fn config_reject_approval_policy_is_marked_experimental() {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&Config {
model: None,
review_model: None,
model_context_window: None,
model_auto_compact_token_limit: None,
model_provider: None,
approval_policy: Some(AskForApproval::Granular {
approval_policy: Some(AskForApproval::Reject {
sandbox_approval: false,
rules: true,
skill_approval: false,
@@ -6360,11 +6321,11 @@ mod tests {
additional: HashMap::new(),
});
assert_eq!(reason, Some("askForApproval.granular"));
assert_eq!(reason, Some("askForApproval.reject"));
}
#[test]
fn config_nested_profile_granular_approval_policy_is_marked_experimental() {
fn config_nested_profile_reject_approval_policy_is_marked_experimental() {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&Config {
model: None,
review_model: None,
@@ -6384,7 +6345,7 @@ mod tests {
ProfileV2 {
model: None,
model_provider: None,
approval_policy: Some(AskForApproval::Granular {
approval_policy: Some(AskForApproval::Reject {
sandbox_approval: true,
rules: false,
skill_approval: false,
@@ -6413,14 +6374,14 @@ mod tests {
additional: HashMap::new(),
});
assert_eq!(reason, Some("askForApproval.granular"));
assert_eq!(reason, Some("askForApproval.reject"));
}
#[test]
fn config_requirements_granular_allowed_approval_policy_is_marked_experimental() {
fn config_requirements_reject_allowed_approval_policy_is_marked_experimental() {
let reason =
crate::experimental_api::ExperimentalApi::experimental_reason(&ConfigRequirements {
allowed_approval_policies: Some(vec![AskForApproval::Granular {
allowed_approval_policies: Some(vec![AskForApproval::Reject {
sandbox_approval: true,
rules: true,
skill_approval: false,
@@ -6434,16 +6395,16 @@ mod tests {
network: None,
});
assert_eq!(reason, Some("askForApproval.granular"));
assert_eq!(reason, Some("askForApproval.reject"));
}
#[test]
fn client_request_thread_start_granular_approval_policy_is_marked_experimental() {
fn client_request_thread_start_reject_approval_policy_is_marked_experimental() {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(
&crate::ClientRequest::ThreadStart {
request_id: crate::RequestId::Integer(1),
params: ThreadStartParams {
approval_policy: Some(AskForApproval::Granular {
approval_policy: Some(AskForApproval::Reject {
sandbox_approval: true,
rules: false,
skill_approval: false,
@@ -6455,17 +6416,17 @@ mod tests {
},
);
assert_eq!(reason, Some("askForApproval.granular"));
assert_eq!(reason, Some("askForApproval.reject"));
}
#[test]
fn client_request_thread_resume_granular_approval_policy_is_marked_experimental() {
fn client_request_thread_resume_reject_approval_policy_is_marked_experimental() {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(
&crate::ClientRequest::ThreadResume {
request_id: crate::RequestId::Integer(2),
params: ThreadResumeParams {
thread_id: "thr_123".to_string(),
approval_policy: Some(AskForApproval::Granular {
approval_policy: Some(AskForApproval::Reject {
sandbox_approval: false,
rules: true,
skill_approval: false,
@@ -6477,17 +6438,17 @@ mod tests {
},
);
assert_eq!(reason, Some("askForApproval.granular"));
assert_eq!(reason, Some("askForApproval.reject"));
}
#[test]
fn client_request_thread_fork_granular_approval_policy_is_marked_experimental() {
fn client_request_thread_fork_reject_approval_policy_is_marked_experimental() {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(
&crate::ClientRequest::ThreadFork {
request_id: crate::RequestId::Integer(3),
params: ThreadForkParams {
thread_id: "thr_456".to_string(),
approval_policy: Some(AskForApproval::Granular {
approval_policy: Some(AskForApproval::Reject {
sandbox_approval: true,
rules: false,
skill_approval: false,
@@ -6499,18 +6460,18 @@ mod tests {
},
);
assert_eq!(reason, Some("askForApproval.granular"));
assert_eq!(reason, Some("askForApproval.reject"));
}
#[test]
fn client_request_turn_start_granular_approval_policy_is_marked_experimental() {
fn client_request_turn_start_reject_approval_policy_is_marked_experimental() {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(
&crate::ClientRequest::TurnStart {
request_id: crate::RequestId::Integer(4),
params: TurnStartParams {
thread_id: "thr_123".to_string(),
input: Vec::new(),
approval_policy: Some(AskForApproval::Granular {
approval_policy: Some(AskForApproval::Reject {
sandbox_approval: false,
rules: true,
skill_approval: false,
@@ -6522,7 +6483,7 @@ mod tests {
},
);
assert_eq!(reason, Some("askForApproval.granular"));
assert_eq!(reason, Some("askForApproval.reject"));
}
#[test]

View File

@@ -74,7 +74,7 @@ Use the thread APIs to create, list, or archive conversations. Drive a conversat
## Initialization
Clients must send a single `initialize` request per transport connection before invoking any other method on that connection, then acknowledge with an `initialized` notification. The server returns the user agent string it will present to upstream services plus `platformFamily` and `platformOs` strings describing the app-server runtime target; subsequent requests issued before initialization receive a `"Not initialized"` error, and repeated `initialize` calls on the same connection receive an `"Already initialized"` error.
Clients must send a single `initialize` request per transport connection before invoking any other method on that connection, then acknowledge with an `initialized` notification. The server returns the user agent string it will present to upstream services; subsequent requests issued before initialization receive a `"Not initialized"` error, and repeated `initialize` calls on the same connection receive an `"Already initialized"` error.
`initialize.params.capabilities` also supports per-connection notification opt-out via `optOutNotificationMethods`, which is a list of exact method names to suppress for that connection. Matching is exact (no wildcards/prefixes). Unknown method names are accepted and ignored.
@@ -158,7 +158,6 @@ Example with notification opt-out:
- `collaborationMode/list` — list available collaboration mode presets (experimental, no pagination). This response omits built-in developer instructions; clients should either pass `settings.developer_instructions: null` when setting a mode to use Codex's built-in instructions, or provide their own instructions explicitly.
- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
- `plugin/list` — list discovered plugin marketplaces and plugin state, including effective marketplace install/auth policy metadata. `interface.category` uses the marketplace category when present; otherwise it falls back to the plugin manifest category. Pass `forceRemoteSync: true` to refresh curated plugin state before listing (**under development; do not call from production clients yet**).
- `plugin/read` — read one plugin by `marketplacePath` plus `pluginName`, returning marketplace info, a list-style `summary`, manifest descriptions/interface metadata, and bundled skills/apps/MCP server names (**under development; do not call from production clients yet**).
- `skills/changed` — notification emitted when watched local skill files change.
- `skills/remote/list` — list public remote skills (**under development; do not call from production clients yet**).
- `skills/remote/export` — download a remote skill by `hazelnutId` into `skills` under `codex_home` (**under development; do not call from production clients yet**).
@@ -929,7 +928,7 @@ Only the granted subset matters on the wire. Any permissions omitted from `resul
Within the same turn, granted permissions are sticky: later shell-like tool calls can automatically reuse the granted subset without reissuing a separate permission request.
If the session approval policy uses `Granular` with `request_permissions: false`, standalone `request_permissions` tool calls are auto-denied and no `item/permissions/requestApproval` prompt is sent. Inline `with_additional_permissions` command requests remain controlled by `sandbox_approval`, and any previously granted permissions remain sticky for later shell-like calls in the same turn.
If the session approval policy uses `Reject` with `request_permissions: true`, standalone `request_permissions` tool calls are auto-denied and no `item/permissions/requestApproval` prompt is sent. Inline `with_additional_permissions` command requests remain controlled by `sandbox_approval`, and any previously granted permissions remain sticky for later shell-like calls in the same turn.
### Dynamic tool calls (experimental)
@@ -1320,7 +1319,7 @@ Examples of descriptor strings:
- `mock/experimentalMethod` (method-level gate)
- `thread/start.mockExperimentalField` (field-level gate)
- `askForApproval.granular` (enum-variant gate, for `approvalPolicy: { "granular": ... }`)
- `askForApproval.reject` (enum-variant gate, for `approvalPolicy: { "reject": ... }`)
### For maintainers: Adding experimental fields and methods
@@ -1342,8 +1341,8 @@ Enum variants can be gated too:
```rust
#[derive(ExperimentalApi)]
enum AskForApproval {
#[experimental("askForApproval.granular")]
Granular { /* ... */ },
#[experimental("askForApproval.reject")]
Reject { /* ... */ },
}
```

View File

@@ -92,7 +92,7 @@ fn transport_name(transport: AppServerTransport) -> &'static str {
fn app_server_request_span_template(
method: &str,
transport: &'static str,
request_id: &impl std::fmt::Display,
request_id: &impl std::fmt::Debug,
connection_id: ConnectionId,
) -> Span {
info_span!(
@@ -102,8 +102,8 @@ fn app_server_request_span_template(
rpc.system = "jsonrpc",
rpc.method = method,
rpc.transport = transport,
rpc.request_id = %request_id,
app_server.connection_id = %connection_id,
rpc.request_id = ?request_id,
app_server.connection_id = ?connection_id,
app_server.api_version = "v2",
app_server.client_name = field::Empty,
app_server.client_version = field::Empty,
@@ -122,14 +122,14 @@ fn record_client_info(span: &Span, client_name: Option<&str>, client_version: Op
fn attach_parent_context(
span: &Span,
method: &str,
request_id: &impl std::fmt::Display,
request_id: &impl std::fmt::Debug,
parent_trace: Option<&W3cTraceContext>,
) {
if let Some(trace) = parent_trace {
if !set_parent_from_w3c_trace_context(span, trace) {
tracing::warn!(
rpc_method = method,
rpc_request_id = %request_id,
rpc_request_id = ?request_id,
"ignoring invalid inbound request trace carrier"
);
}

View File

@@ -123,7 +123,6 @@ use codex_protocol::protocol::ReviewOutputEvent;
use codex_protocol::protocol::TokenCountEvent;
use codex_protocol::protocol::TurnDiffEvent;
use codex_protocol::request_permissions::PermissionGrantScope as CorePermissionGrantScope;
use codex_protocol::request_permissions::RequestPermissionProfile as CoreRequestPermissionProfile;
use codex_protocol::request_permissions::RequestPermissionsResponse as CoreRequestPermissionsResponse;
use codex_protocol::request_user_input::RequestUserInputAnswer as CoreRequestUserInputAnswer;
use codex_protocol::request_user_input::RequestUserInputResponse as CoreRequestUserInputResponse;
@@ -700,7 +699,7 @@ pub(crate) async fn apply_bespoke_event_handling(
turn_id: request.turn_id.clone(),
item_id: request.call_id.clone(),
reason: request.reason,
permissions: CorePermissionProfile::from(request.permissions).into(),
permissions: request.permissions.into(),
};
let (pending_request_id, rx) = outgoing
.send_request(ServerRequestPayload::PermissionsRequestApproval(params))
@@ -2228,7 +2227,7 @@ fn mcp_server_elicitation_response_from_client_result(
async fn on_request_permissions_response(
call_id: String,
requested_permissions: CoreRequestPermissionProfile,
requested_permissions: CorePermissionProfile,
pending_request_id: RequestId,
receiver: oneshot::Receiver<ClientRequestResult>,
conversation: Arc<CodexThread>,
@@ -2256,7 +2255,7 @@ async fn on_request_permissions_response(
}
fn request_permissions_response_from_client_result(
requested_permissions: CoreRequestPermissionProfile,
requested_permissions: CorePermissionProfile,
response: std::result::Result<ClientRequestResult, oneshot::error::RecvError>,
) -> Option<CoreRequestPermissionsResponse> {
let value = match response {
@@ -2288,10 +2287,9 @@ fn request_permissions_response_from_client_result(
});
Some(CoreRequestPermissionsResponse {
permissions: intersect_permission_profiles(
requested_permissions.into(),
requested_permissions,
response.permissions.into(),
)
.into(),
),
scope: response.scope.to_core(),
})
}
@@ -2648,8 +2646,10 @@ mod tests {
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::TurnPlanStepStatus;
use codex_protocol::mcp::CallToolResult;
use codex_protocol::models::FileSystemPermissions as CoreFileSystemPermissions;
use codex_protocol::models::NetworkPermissions as CoreNetworkPermissions;
use codex_protocol::models::MacOsAutomationPermission;
use codex_protocol::models::MacOsContactsPermission;
use codex_protocol::models::MacOsPreferencesPermission;
use codex_protocol::models::MacOsSeatbeltProfileExtensions;
use codex_protocol::plan_tool::PlanItemArg;
use codex_protocol::plan_tool::StepStatus;
use codex_protocol::protocol::CollabResumeBeginEvent;
@@ -2660,7 +2660,6 @@ mod tests {
use codex_protocol::protocol::RateLimitWindow;
use codex_protocol::protocol::TokenUsage;
use codex_protocol::protocol::TokenUsageInfo;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
use rmcp::model::Content;
use serde_json::Value as JsonValue;
@@ -2722,7 +2721,7 @@ mod tests {
};
let response = request_permissions_response_from_client_result(
CoreRequestPermissionProfile::default(),
CorePermissionProfile::default(),
Ok(Err(error)),
);
@@ -2730,91 +2729,156 @@ mod tests {
}
#[test]
fn request_permissions_response_accepts_partial_network_and_file_system_grants() {
let input_path = if cfg!(target_os = "windows") {
r"C:\tmp\input"
} else {
"/tmp/input"
};
let output_path = if cfg!(target_os = "windows") {
r"C:\tmp\output"
} else {
"/tmp/output"
};
let ignored_path = if cfg!(target_os = "windows") {
r"C:\tmp\ignored"
} else {
"/tmp/ignored"
};
let absolute_path = |path: &str| {
AbsolutePathBuf::try_from(std::path::PathBuf::from(path)).expect("absolute path")
};
let requested_permissions = CoreRequestPermissionProfile {
network: Some(CoreNetworkPermissions {
enabled: Some(true),
}),
file_system: Some(CoreFileSystemPermissions {
read: Some(vec![absolute_path(input_path)]),
write: Some(vec![absolute_path(output_path)]),
fn request_permissions_response_accepts_partial_macos_grants() {
let requested_permissions = CorePermissionProfile {
macos: Some(MacOsSeatbeltProfileExtensions {
macos_preferences: MacOsPreferencesPermission::ReadWrite,
macos_automation: MacOsAutomationPermission::BundleIds(vec![
"com.apple.Notes".to_string(),
"com.apple.Reminders".to_string(),
]),
macos_launch_services: true,
macos_accessibility: true,
macos_calendar: true,
macos_reminders: true,
macos_contacts: MacOsContactsPermission::ReadWrite,
}),
..Default::default()
};
let cases = vec![
(
serde_json::json!({}),
CoreRequestPermissionProfile::default(),
),
(serde_json::json!({}), CorePermissionProfile::default()),
(
serde_json::json!({
"network": {
"enabled": true,
},
"preferences": "read_only",
}),
CoreRequestPermissionProfile {
network: Some(CoreNetworkPermissions {
enabled: Some(true),
CorePermissionProfile {
macos: Some(MacOsSeatbeltProfileExtensions {
macos_preferences: MacOsPreferencesPermission::ReadOnly,
macos_automation: MacOsAutomationPermission::None,
macos_launch_services: false,
macos_accessibility: false,
macos_calendar: false,
macos_reminders: false,
macos_contacts: MacOsContactsPermission::None,
}),
..CoreRequestPermissionProfile::default()
..Default::default()
},
),
(
serde_json::json!({
"fileSystem": {
"write": [output_path],
"automations": {
"bundle_ids": ["com.apple.Notes"],
},
}),
CoreRequestPermissionProfile {
file_system: Some(CoreFileSystemPermissions {
read: None,
write: Some(vec![absolute_path(output_path)]),
CorePermissionProfile {
macos: Some(MacOsSeatbeltProfileExtensions {
macos_preferences: MacOsPreferencesPermission::None,
macos_automation: MacOsAutomationPermission::BundleIds(vec![
"com.apple.Notes".to_string(),
]),
macos_launch_services: false,
macos_accessibility: false,
macos_calendar: false,
macos_reminders: false,
macos_contacts: MacOsContactsPermission::None,
}),
..CoreRequestPermissionProfile::default()
..Default::default()
},
),
(
serde_json::json!({
"fileSystem": {
"read": [input_path],
"write": [output_path, ignored_path],
},
"macos": {
"calendar": true,
},
"launchServices": true,
}),
CoreRequestPermissionProfile {
file_system: Some(CoreFileSystemPermissions {
read: Some(vec![absolute_path(input_path)]),
write: Some(vec![absolute_path(output_path)]),
CorePermissionProfile {
macos: Some(MacOsSeatbeltProfileExtensions {
macos_preferences: MacOsPreferencesPermission::None,
macos_automation: MacOsAutomationPermission::None,
macos_launch_services: true,
macos_accessibility: false,
macos_calendar: false,
macos_reminders: false,
macos_contacts: MacOsContactsPermission::None,
}),
..CoreRequestPermissionProfile::default()
..Default::default()
},
),
(
serde_json::json!({
"accessibility": true,
}),
CorePermissionProfile {
macos: Some(MacOsSeatbeltProfileExtensions {
macos_preferences: MacOsPreferencesPermission::None,
macos_automation: MacOsAutomationPermission::None,
macos_launch_services: false,
macos_accessibility: true,
macos_calendar: false,
macos_reminders: false,
macos_contacts: MacOsContactsPermission::None,
}),
..Default::default()
},
),
(
serde_json::json!({
"calendar": true,
}),
CorePermissionProfile {
macos: Some(MacOsSeatbeltProfileExtensions {
macos_preferences: MacOsPreferencesPermission::None,
macos_automation: MacOsAutomationPermission::None,
macos_launch_services: false,
macos_accessibility: false,
macos_calendar: true,
macos_reminders: false,
macos_contacts: MacOsContactsPermission::None,
}),
..Default::default()
},
),
(
serde_json::json!({
"reminders": true,
}),
CorePermissionProfile {
macos: Some(MacOsSeatbeltProfileExtensions {
macos_preferences: MacOsPreferencesPermission::None,
macos_automation: MacOsAutomationPermission::None,
macos_launch_services: false,
macos_accessibility: false,
macos_calendar: false,
macos_reminders: true,
macos_contacts: MacOsContactsPermission::None,
}),
..Default::default()
},
),
(
serde_json::json!({
"contacts": "read_only",
}),
CorePermissionProfile {
macos: Some(MacOsSeatbeltProfileExtensions {
macos_preferences: MacOsPreferencesPermission::None,
macos_automation: MacOsAutomationPermission::None,
macos_launch_services: false,
macos_accessibility: false,
macos_calendar: false,
macos_reminders: false,
macos_contacts: MacOsContactsPermission::ReadOnly,
}),
..Default::default()
},
),
];
for (granted_permissions, expected_permissions) in cases {
for (granted_macos, expected_permissions) in cases {
let response = request_permissions_response_from_client_result(
requested_permissions.clone(),
Ok(Ok(serde_json::json!({
"permissions": granted_permissions,
"permissions": {
"macos": granted_macos,
},
}))),
)
.expect("response should be accepted");
@@ -2832,7 +2896,7 @@ mod tests {
#[test]
fn request_permissions_response_preserves_session_scope() {
let response = request_permissions_response_from_client_result(
CoreRequestPermissionProfile::default(),
CorePermissionProfile::default(),
Ok(Ok(serde_json::json!({
"scope": "session",
"permissions": {},
@@ -2843,7 +2907,7 @@ mod tests {
assert_eq!(
response,
CoreRequestPermissionsResponse {
permissions: CoreRequestPermissionProfile::default(),
permissions: CorePermissionProfile::default(),
scope: CorePermissionGrantScope::Session,
}
);

View File

@@ -24,6 +24,8 @@ use codex_app_server_protocol::Account;
use codex_app_server_protocol::AccountLoginCompletedNotification;
use codex_app_server_protocol::AccountUpdatedNotification;
use codex_app_server_protocol::AppInfo;
use codex_app_server_protocol::AppListUpdatedNotification;
use codex_app_server_protocol::AppSummary;
use codex_app_server_protocol::AppsListParams;
use codex_app_server_protocol::AppsListResponse;
use codex_app_server_protocol::AskForApproval;
@@ -81,15 +83,12 @@ use codex_app_server_protocol::MockExperimentalMethodParams;
use codex_app_server_protocol::MockExperimentalMethodResponse;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::ModelListResponse;
use codex_app_server_protocol::PluginDetail;
use codex_app_server_protocol::PluginInstallParams;
use codex_app_server_protocol::PluginInstallResponse;
use codex_app_server_protocol::PluginInterface;
use codex_app_server_protocol::PluginListParams;
use codex_app_server_protocol::PluginListResponse;
use codex_app_server_protocol::PluginMarketplaceEntry;
use codex_app_server_protocol::PluginReadParams;
use codex_app_server_protocol::PluginReadResponse;
use codex_app_server_protocol::PluginSource;
use codex_app_server_protocol::PluginSummary;
use codex_app_server_protocol::PluginUninstallParams;
@@ -103,7 +102,6 @@ use codex_app_server_protocol::ReviewTarget as ApiReviewTarget;
use codex_app_server_protocol::SandboxMode;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestResolvedNotification;
use codex_app_server_protocol::SkillSummary;
use codex_app_server_protocol::SkillsConfigWriteParams;
use codex_app_server_protocol::SkillsConfigWriteResponse;
use codex_app_server_protocol::SkillsListParams;
@@ -201,9 +199,9 @@ use codex_core::config::NetworkProxyAuditMetadata;
use codex_core::config::edit::ConfigEdit;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::config_loader::CloudRequirementsLoadError;
use codex_core::config_loader::CloudRequirementsLoadErrorCode;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::connectors::filter_disallowed_connectors;
use codex_core::connectors::merge_plugin_apps;
use codex_core::default_client::set_default_client_residency_requirement;
use codex_core::error::CodexErr;
use codex_core::error::Result as CodexResult;
@@ -224,11 +222,11 @@ use codex_core::mcp::collect_mcp_snapshot;
use codex_core::mcp::group_tools_by_server;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_core::parse_cursor;
use codex_core::plugins::AppConnectorId;
use codex_core::plugins::MarketplaceError;
use codex_core::plugins::MarketplacePluginSourceSummary;
use codex_core::plugins::PluginInstallError as CorePluginInstallError;
use codex_core::plugins::PluginInstallRequest;
use codex_core::plugins::PluginReadRequest;
use codex_core::plugins::PluginUninstallError as CorePluginUninstallError;
use codex_core::plugins::load_plugin_apps;
use codex_core::read_head_for_summary;
@@ -314,9 +312,6 @@ use uuid::Uuid;
#[cfg(test)]
use codex_app_server_protocol::ServerRequest;
mod apps_list_helpers;
mod plugin_app_helpers;
use crate::filters::compute_source_filters;
use crate::filters::source_kind_matches;
use crate::thread_state::ThreadListenerCommand;
@@ -725,10 +720,6 @@ impl CodexMessageProcessor {
self.plugin_list(to_connection_request_id(request_id), params)
.await;
}
ClientRequest::PluginRead { request_id, params } => {
self.plugin_read(to_connection_request_id(request_id), params)
.await;
}
ClientRequest::SkillsRemoteList { request_id, params } => {
self.skills_remote_list(to_connection_request_id(request_id), params)
.await;
@@ -1695,10 +1686,6 @@ impl CodexMessageProcessor {
.map(codex_core::config::StartedNetworkProxy::proxy),
sandbox_permissions: SandboxPermissions::UseDefault,
windows_sandbox_level,
windows_sandbox_private_desktop: self
.config
.permissions
.windows_sandbox_private_desktop,
justification: None,
arg0: None,
};
@@ -1965,7 +1952,11 @@ impl CodexMessageProcessor {
{
Ok(config) => config,
Err(err) => {
let error = config_load_error(&err);
let error = JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: format!("error deriving config: {err}"),
data: None,
};
listener_task_context
.outgoing
.send_error(request_id, error)
@@ -1999,7 +1990,6 @@ impl CodexMessageProcessor {
})
.collect()
};
let core_dynamic_tool_count = core_dynamic_tools.len();
match listener_task_context
.thread_manager
@@ -2010,12 +2000,6 @@ impl CodexMessageProcessor {
service_name,
request_trace,
)
.instrument(tracing::info_span!(
"app_server.thread_start.create_thread",
otel.name = "app_server.thread_start.create_thread",
thread_start.dynamic_tool_count = core_dynamic_tool_count,
thread_start.persist_extended_history = persist_extended_history,
))
.await
{
Ok(new_conv) => {
@@ -2025,13 +2009,7 @@ impl CodexMessageProcessor {
session_configured,
..
} = new_conv;
let config_snapshot = thread
.config_snapshot()
.instrument(tracing::info_span!(
"app_server.thread_start.config_snapshot",
otel.name = "app_server.thread_start.config_snapshot",
))
.await;
let config_snapshot = thread.config_snapshot().await;
let mut thread = build_thread_from_snapshot(
thread_id,
&config_snapshot,
@@ -2047,11 +2025,6 @@ impl CodexMessageProcessor {
experimental_raw_events,
ApiVersion::V2,
)
.instrument(tracing::info_span!(
"app_server.thread_start.attach_listener",
otel.name = "app_server.thread_start.attach_listener",
thread_start.experimental_raw_events = experimental_raw_events,
))
.await,
thread_id,
request_id.connection_id,
@@ -2061,20 +2034,12 @@ impl CodexMessageProcessor {
listener_task_context
.thread_watch_manager
.upsert_thread_silently(thread.clone())
.instrument(tracing::info_span!(
"app_server.thread_start.upsert_thread",
otel.name = "app_server.thread_start.upsert_thread",
))
.await;
thread.status = resolve_thread_status(
listener_task_context
.thread_watch_manager
.loaded_status_for_thread(&thread.id)
.instrument(tracing::info_span!(
"app_server.thread_start.resolve_status",
otel.name = "app_server.thread_start.resolve_status",
))
.await,
false,
);
@@ -2093,20 +2058,12 @@ impl CodexMessageProcessor {
listener_task_context
.outgoing
.send_response(request_id, response)
.instrument(tracing::info_span!(
"app_server.thread_start.send_response",
otel.name = "app_server.thread_start.send_response",
))
.await;
let notif = ThreadStartedNotification { thread };
listener_task_context
.outgoing
.send_server_notification(ServerNotification::ThreadStarted(notif))
.instrument(tracing::info_span!(
"app_server.thread_start.notify_started",
otel.name = "app_server.thread_start.notify_started",
))
.await;
}
Err(err) => {
@@ -3402,7 +3359,11 @@ impl CodexMessageProcessor {
{
Ok(config) => config,
Err(err) => {
let error = config_load_error(&err);
let error = JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: format!("error deriving config: {err}"),
data: None,
};
self.outgoing.send_error(request_id, error).await;
return;
}
@@ -3921,9 +3882,11 @@ impl CodexMessageProcessor {
{
Ok(config) => config,
Err(err) => {
self.outgoing
.send_error(request_id, config_load_error(&err))
.await;
self.send_invalid_request_error(
request_id,
format!("error deriving config: {err}"),
)
.await;
return;
}
};
@@ -5171,19 +5134,18 @@ impl CodexMessageProcessor {
if accessible_connectors.is_some() || all_connectors.is_some() {
let merged = connectors::with_app_enabled_state(
apps_list_helpers::merge_loaded_apps(
Self::merge_loaded_apps(
all_connectors.as_deref(),
accessible_connectors.as_deref(),
),
&config,
);
if apps_list_helpers::should_send_app_list_updated_notification(
if Self::should_send_app_list_updated_notification(
merged.as_slice(),
accessible_loaded,
all_loaded,
) {
apps_list_helpers::send_app_list_updated_notification(&outgoing, merged.clone())
.await;
Self::send_app_list_updated_notification(&outgoing, merged.clone()).await;
last_notified_apps = Some(merged);
}
}
@@ -5257,25 +5219,24 @@ impl CodexMessageProcessor {
accessible_connectors.as_deref()
};
let merged = connectors::with_app_enabled_state(
apps_list_helpers::merge_loaded_apps(
Self::merge_loaded_apps(
all_connectors_for_update,
accessible_connectors_for_update,
),
&config,
);
if apps_list_helpers::should_send_app_list_updated_notification(
if Self::should_send_app_list_updated_notification(
merged.as_slice(),
accessible_loaded,
all_loaded,
) && last_notified_apps.as_ref() != Some(&merged)
{
apps_list_helpers::send_app_list_updated_notification(&outgoing, merged.clone())
.await;
Self::send_app_list_updated_notification(&outgoing, merged.clone()).await;
last_notified_apps = Some(merged.clone());
}
if accessible_loaded && all_loaded {
match apps_list_helpers::paginate_apps(merged.as_slice(), start, limit) {
match Self::paginate_apps(merged.as_slice(), start, limit) {
Ok(response) => {
outgoing.send_response(request_id, response).await;
return;
@@ -5289,6 +5250,92 @@ impl CodexMessageProcessor {
}
}
fn merge_loaded_apps(
all_connectors: Option<&[AppInfo]>,
accessible_connectors: Option<&[AppInfo]>,
) -> Vec<AppInfo> {
let all_connectors_loaded = all_connectors.is_some();
let all = all_connectors.map_or_else(Vec::new, <[AppInfo]>::to_vec);
let accessible = accessible_connectors.map_or_else(Vec::new, <[AppInfo]>::to_vec);
connectors::merge_connectors_with_accessible(all, accessible, all_connectors_loaded)
}
fn plugin_apps_needing_auth(
all_connectors: &[AppInfo],
accessible_connectors: &[AppInfo],
plugin_apps: &[AppConnectorId],
codex_apps_ready: bool,
) -> Vec<AppSummary> {
if !codex_apps_ready {
return Vec::new();
}
let accessible_ids = accessible_connectors
.iter()
.map(|connector| connector.id.as_str())
.collect::<HashSet<_>>();
let plugin_app_ids = plugin_apps
.iter()
.map(|connector_id| connector_id.0.as_str())
.collect::<HashSet<_>>();
all_connectors
.iter()
.filter(|connector| {
plugin_app_ids.contains(connector.id.as_str())
&& !accessible_ids.contains(connector.id.as_str())
})
.cloned()
.map(AppSummary::from)
.collect()
}
fn should_send_app_list_updated_notification(
connectors: &[AppInfo],
accessible_loaded: bool,
all_loaded: bool,
) -> bool {
connectors.iter().any(|connector| connector.is_accessible)
|| (accessible_loaded && all_loaded)
}
fn paginate_apps(
connectors: &[AppInfo],
start: usize,
limit: Option<u32>,
) -> Result<AppsListResponse, JSONRPCErrorError> {
let total = connectors.len();
if start > total {
return Err(JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: format!("cursor {start} exceeds total apps {total}"),
data: None,
});
}
let effective_limit = limit.unwrap_or(total as u32).max(1) as usize;
let end = start.saturating_add(effective_limit).min(total);
let data = connectors[start..end].to_vec();
let next_cursor = if end < total {
Some(end.to_string())
} else {
None
};
Ok(AppsListResponse { data, next_cursor })
}
/// Push an `AppListUpdated` server notification carrying the given app list.
async fn send_app_list_updated_notification(
    outgoing: &Arc<OutgoingMessageSender>,
    data: Vec<AppInfo>,
) {
    let notification = ServerNotification::AppListUpdated(AppListUpdatedNotification { data });
    outgoing.send_server_notification(notification).await;
}
async fn skills_list(&self, request_id: ConnectionRequestId, params: SkillsListParams) {
let SkillsListParams {
cwds,
@@ -5421,10 +5468,29 @@ impl CodexMessageProcessor {
installed: plugin.installed,
enabled: plugin.enabled,
name: plugin.name,
source: marketplace_plugin_source_to_info(plugin.source),
source: match plugin.source {
MarketplacePluginSourceSummary::Local { path } => {
PluginSource::Local { path }
}
},
install_policy: plugin.install_policy.into(),
auth_policy: plugin.auth_policy.into(),
interface: plugin.interface.map(plugin_interface_to_info),
interface: plugin.interface.map(|interface| PluginInterface {
display_name: interface.display_name,
short_description: interface.short_description,
long_description: interface.long_description,
developer_name: interface.developer_name,
category: interface.category,
capabilities: interface.capabilities,
website_url: interface.website_url,
privacy_policy_url: interface.privacy_policy_url,
terms_of_service_url: interface.terms_of_service_url,
default_prompt: interface.default_prompt,
brand_color: interface.brand_color,
composer_icon: interface.composer_icon,
logo: interface.logo,
screenshots: interface.screenshots,
}),
})
.collect(),
})
@@ -5460,73 +5526,6 @@ impl CodexMessageProcessor {
.await;
}
/// Handle a `plugin/read` request: look up one plugin in its marketplace,
/// gather its app summaries, and reply with a `PluginReadResponse`.
///
/// Every failure path replies on `request_id` (error response) and returns
/// early, so the caller always receives exactly one response.
async fn plugin_read(&self, request_id: ConnectionRequestId, params: PluginReadParams) {
    let plugins_manager = self.thread_manager.plugins_manager();
    let PluginReadParams {
        marketplace_path,
        plugin_name,
    } = params;
    // Load config relative to the marketplace file's directory so any
    // directory-scoped configuration applies to this read.
    let config_cwd = marketplace_path.as_path().parent().map(Path::to_path_buf);
    let config = match self.load_latest_config(config_cwd).await {
        Ok(config) => config,
        Err(err) => {
            self.outgoing.send_error(request_id, err).await;
            return;
        }
    };
    let request = PluginReadRequest {
        plugin_name,
        marketplace_path,
    };
    // The marketplace read runs on the blocking pool; clone the config so the
    // closure can own its copy while `config` stays usable below.
    let config_for_read = config.clone();
    let outcome = match tokio::task::spawn_blocking(move || {
        plugins_manager.read_plugin_for_config(&config_for_read, &request)
    })
    .await
    {
        Ok(Ok(outcome)) => outcome,
        // The read itself failed: surface it as a marketplace error.
        Ok(Err(err)) => {
            self.send_marketplace_error(request_id, err, "read plugin details")
                .await;
            return;
        }
        // The blocking task failed (e.g. panicked): report an internal error.
        Err(err) => {
            self.send_internal_error(
                request_id,
                format!("failed to read plugin details: {err}"),
            )
            .await;
            return;
        }
    };
    let app_summaries =
        plugin_app_helpers::load_plugin_app_summaries(&config, &outcome.plugin.apps).await;
    // Assemble the protocol-facing detail view from the core outcome.
    let plugin = PluginDetail {
        marketplace_name: outcome.marketplace_name,
        marketplace_path: outcome.marketplace_path,
        summary: PluginSummary {
            id: outcome.plugin.id,
            name: outcome.plugin.name,
            source: marketplace_plugin_source_to_info(outcome.plugin.source),
            installed: outcome.plugin.installed,
            enabled: outcome.plugin.enabled,
            install_policy: outcome.plugin.install_policy.into(),
            auth_policy: outcome.plugin.auth_policy.into(),
            interface: outcome.plugin.interface.map(plugin_interface_to_info),
        },
        description: outcome.plugin.description,
        skills: plugin_skills_to_info(&outcome.plugin.skills),
        apps: app_summaries,
        mcp_servers: outcome.plugin.mcp_server_names,
    };
    self.outgoing
        .send_response(request_id, PluginReadResponse { plugin })
        .await;
}
async fn skills_remote_list(
&self,
request_id: ConnectionRequestId,
@@ -5673,19 +5672,23 @@ impl CodexMessageProcessor {
);
let all_connectors = match all_connectors_result {
Ok(connectors) => connectors,
Ok(connectors) => filter_disallowed_connectors(merge_plugin_apps(
connectors,
plugin_apps.clone(),
)),
Err(err) => {
warn!(
plugin = result.plugin_id.as_key(),
"failed to load app metadata after plugin install: {err:#}"
);
connectors::list_cached_all_connectors(&config)
.await
.unwrap_or_default()
filter_disallowed_connectors(merge_plugin_apps(
connectors::list_cached_all_connectors(&config)
.await
.unwrap_or_default(),
plugin_apps.clone(),
))
}
};
let all_connectors =
connectors::connectors_for_plugin_apps(all_connectors, &plugin_apps);
let (accessible_connectors, codex_apps_ready) =
match accessible_connectors_result {
Ok(status) => (status.connectors, status.codex_apps_ready),
@@ -5711,7 +5714,7 @@ impl CodexMessageProcessor {
);
}
plugin_app_helpers::plugin_apps_needing_auth(
Self::plugin_apps_needing_auth(
&all_connectors,
&accessible_connectors,
&plugin_apps,
@@ -7433,55 +7436,6 @@ fn skills_to_info(
.collect()
}
/// Convert core skill metadata into the app-server protocol `SkillSummary`
/// shape, including the optional UI interface block.
fn plugin_skills_to_info(skills: &[codex_core::skills::SkillMetadata]) -> Vec<SkillSummary> {
    let mut summaries = Vec::with_capacity(skills.len());
    for skill in skills {
        let interface = skill.interface.clone().map(|interface| {
            codex_app_server_protocol::SkillInterface {
                display_name: interface.display_name,
                short_description: interface.short_description,
                icon_small: interface.icon_small,
                icon_large: interface.icon_large,
                brand_color: interface.brand_color,
                default_prompt: interface.default_prompt,
            }
        });
        summaries.push(SkillSummary {
            name: skill.name.clone(),
            description: skill.description.clone(),
            short_description: skill.short_description.clone(),
            interface,
            path: skill.path_to_skills_md.clone(),
        });
    }
    summaries
}
/// Map a core plugin-manifest interface summary onto the protocol-level
/// `PluginInterface`. This is a straight 1:1 field copy with no defaults or
/// transformation.
fn plugin_interface_to_info(
    interface: codex_core::plugins::PluginManifestInterfaceSummary,
) -> PluginInterface {
    PluginInterface {
        display_name: interface.display_name,
        short_description: interface.short_description,
        long_description: interface.long_description,
        developer_name: interface.developer_name,
        category: interface.category,
        capabilities: interface.capabilities,
        website_url: interface.website_url,
        privacy_policy_url: interface.privacy_policy_url,
        terms_of_service_url: interface.terms_of_service_url,
        default_prompt: interface.default_prompt,
        brand_color: interface.brand_color,
        composer_icon: interface.composer_icon,
        logo: interface.logo,
        screenshots: interface.screenshots,
    }
}
/// Translate a core marketplace plugin source into the protocol `PluginSource`.
/// Only a local-path source exists today; new variants must be mapped here.
fn marketplace_plugin_source_to_info(source: MarketplacePluginSourceSummary) -> PluginSource {
    match source {
        MarketplacePluginSourceSummary::Local { path } => PluginSource::Local { path },
    }
}
fn errors_to_info(
errors: &[codex_core::skills::SkillError],
) -> Vec<codex_app_server_protocol::SkillErrorInfo> {
@@ -7494,42 +7448,6 @@ fn errors_to_info(
.collect()
}
/// Walk the source chain of an I/O error and return the first
/// `CloudRequirementsLoadError` found, if any. The wrapped error itself (from
/// `get_ref`) is inspected first, then each `source()` in turn.
fn cloud_requirements_load_error(err: &std::io::Error) -> Option<&CloudRequirementsLoadError> {
    let root = err
        .get_ref()
        .map(|source| source as &(dyn std::error::Error + 'static));
    std::iter::successors(root, |source| source.source())
        .find_map(|source| source.downcast_ref::<CloudRequirementsLoadError>())
}
/// Build the JSON-RPC error returned when configuration loading fails.
///
/// When the failure chain contains a cloud-requirements error, a structured
/// `data` payload is attached (reason/errorCode/detail, plus the HTTP status
/// when known, and an `action: relogin` hint for auth failures). Plain config
/// failures carry no `data`.
fn config_load_error(err: &std::io::Error) -> JSONRPCErrorError {
    let data = cloud_requirements_load_error(err).map(|cloud_error| {
        let code = cloud_error.code();
        let mut payload = serde_json::json!({
            "reason": "cloudRequirements",
            "errorCode": format!("{code:?}"),
            "detail": cloud_error.to_string(),
        });
        if let Some(status_code) = cloud_error.status_code() {
            payload["statusCode"] = serde_json::json!(status_code);
        }
        // Auth failures can only be resolved by signing in again.
        if code == CloudRequirementsLoadErrorCode::Auth {
            payload["action"] = serde_json::json!("relogin");
        }
        payload
    });
    JSONRPCErrorError {
        code: INVALID_REQUEST_ERROR_CODE,
        message: format!("failed to load configuration: {err}"),
        data,
    }
}
fn validate_dynamic_tools(tools: &[ApiDynamicToolSpec]) -> Result<(), String> {
let mut seen = HashSet::new();
for tool in tools {
@@ -8166,63 +8084,31 @@ mod tests {
}
#[test]
fn config_load_error_marks_cloud_requirements_failures_for_relogin() {
let err = std::io::Error::other(CloudRequirementsLoadError::new(
CloudRequirementsLoadErrorCode::Auth,
Some(401),
"Your authentication session could not be refreshed automatically. Please log out and sign in again.",
));
let error = config_load_error(&err);
fn plugin_apps_needing_auth_returns_empty_when_codex_apps_is_not_ready() {
let all_connectors = vec![AppInfo {
id: "alpha".to_string(),
name: "Alpha".to_string(),
description: Some("Alpha connector".to_string()),
logo_url: None,
logo_url_dark: None,
distribution_channel: None,
branding: None,
app_metadata: None,
labels: None,
install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
is_accessible: false,
is_enabled: true,
plugin_display_names: Vec::new(),
}];
assert_eq!(
error.data,
Some(json!({
"reason": "cloudRequirements",
"errorCode": "Auth",
"action": "relogin",
"statusCode": 401,
"detail": "Your authentication session could not be refreshed automatically. Please log out and sign in again.",
}))
);
assert!(
error.message.contains("failed to load configuration"),
"unexpected error message: {}",
error.message
);
}
#[test]
fn config_load_error_leaves_non_cloud_requirements_failures_unmarked() {
let err = std::io::Error::other("required MCP servers failed to initialize");
let error = config_load_error(&err);
assert_eq!(error.data, None);
assert!(
error.message.contains("failed to load configuration"),
"unexpected error message: {}",
error.message
);
}
#[test]
fn config_load_error_marks_non_auth_cloud_requirements_failures_without_relogin() {
let err = std::io::Error::other(CloudRequirementsLoadError::new(
CloudRequirementsLoadErrorCode::RequestFailed,
None,
"failed to load your workspace-managed config",
));
let error = config_load_error(&err);
assert_eq!(
error.data,
Some(json!({
"reason": "cloudRequirements",
"errorCode": "RequestFailed",
"detail": "failed to load your workspace-managed config",
}))
CodexMessageProcessor::plugin_apps_needing_auth(
&all_connectors,
&[],
&[AppConnectorId("alpha".to_string())],
false,
),
Vec::<AppSummary>::new()
);
}

View File

@@ -1,66 +0,0 @@
use std::sync::Arc;
use codex_app_server_protocol::AppInfo;
use codex_app_server_protocol::AppListUpdatedNotification;
use codex_app_server_protocol::AppsListResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::ServerNotification;
use codex_chatgpt::connectors;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::OutgoingMessageSender;
/// Combine the "all connectors" and "accessible connectors" snapshots into a
/// single merged list. `None` means the snapshot has not loaded yet; the
/// loaded-flag for the full list is forwarded to the merge helper.
pub(super) fn merge_loaded_apps(
    all_connectors: Option<&[AppInfo]>,
    accessible_connectors: Option<&[AppInfo]>,
) -> Vec<AppInfo> {
    let all_connectors_loaded = all_connectors.is_some();
    let all = all_connectors.map_or_else(Vec::new, <[AppInfo]>::to_vec);
    let accessible = accessible_connectors.map_or_else(Vec::new, <[AppInfo]>::to_vec);
    connectors::merge_connectors_with_accessible(all, accessible, all_connectors_loaded)
}
/// True when an `AppListUpdated` notification should go out: at least one
/// connector is accessible, or both snapshots have finished loading.
pub(super) fn should_send_app_list_updated_notification(
    connectors: &[AppInfo],
    accessible_loaded: bool,
    all_loaded: bool,
) -> bool {
    connectors.iter().any(|connector| connector.is_accessible) || (accessible_loaded && all_loaded)
}
/// Slice the connector list into one page starting at `start`.
///
/// Errors with INVALID_REQUEST when the cursor exceeds the list length;
/// otherwise returns the page and a `next_cursor` when more items remain.
pub(super) fn paginate_apps(
    connectors: &[AppInfo],
    start: usize,
    limit: Option<u32>,
) -> Result<AppsListResponse, JSONRPCErrorError> {
    let total = connectors.len();
    if start > total {
        return Err(JSONRPCErrorError {
            code: INVALID_REQUEST_ERROR_CODE,
            message: format!("cursor {start} exceeds total apps {total}"),
            data: None,
        });
    }
    // A missing limit means "everything"; an explicit 0 is bumped to 1.
    let effective_limit = limit.unwrap_or(total as u32).max(1) as usize;
    let end = start.saturating_add(effective_limit).min(total);
    let data = connectors[start..end].to_vec();
    let next_cursor = if end < total {
        Some(end.to_string())
    } else {
        None
    };
    Ok(AppsListResponse { data, next_cursor })
}
/// Emit an `AppListUpdated` server notification carrying the given app list.
pub(super) async fn send_app_list_updated_notification(
    outgoing: &Arc<OutgoingMessageSender>,
    data: Vec<AppInfo>,
) {
    outgoing
        .send_server_notification(ServerNotification::AppListUpdated(
            AppListUpdatedNotification { data },
        ))
        .await;
}

View File

@@ -1,100 +0,0 @@
use std::collections::HashSet;
use codex_app_server_protocol::AppInfo;
use codex_app_server_protocol::AppSummary;
use codex_chatgpt::connectors;
use codex_core::config::Config;
use codex_core::plugins::AppConnectorId;
use tracing::warn;
/// Resolve app summaries for the apps a plugin declares.
///
/// Fetches fresh connector metadata; on failure it logs a warning and falls
/// back to the cached list (or an empty list) so plugin/read still succeeds.
pub(super) async fn load_plugin_app_summaries(
    config: &Config,
    plugin_apps: &[AppConnectorId],
) -> Vec<AppSummary> {
    if plugin_apps.is_empty() {
        return Vec::new();
    }
    let connectors = match connectors::list_all_connectors_with_options(config, false).await {
        Ok(connectors) => connectors,
        Err(err) => {
            // Best effort: fall back to cached data rather than failing the read.
            warn!("failed to load app metadata for plugin/read: {err:#}");
            connectors::list_cached_all_connectors(config)
                .await
                .unwrap_or_default()
        }
    };
    connectors::connectors_for_plugin_apps(connectors, plugin_apps)
        .into_iter()
        .map(AppSummary::from)
        .collect()
}
/// Return the plugin-declared apps that are not yet accessible and therefore
/// still need authentication. Returns nothing until `codex_apps_ready` is
/// true, so callers do not prompt against incomplete accessibility data.
pub(super) fn plugin_apps_needing_auth(
    all_connectors: &[AppInfo],
    accessible_connectors: &[AppInfo],
    plugin_apps: &[AppConnectorId],
    codex_apps_ready: bool,
) -> Vec<AppSummary> {
    if !codex_apps_ready {
        return Vec::new();
    }
    let accessible_ids = accessible_connectors
        .iter()
        .map(|connector| connector.id.as_str())
        .collect::<HashSet<_>>();
    let plugin_app_ids = plugin_apps
        .iter()
        .map(|connector_id| connector_id.0.as_str())
        .collect::<HashSet<_>>();
    // Keep connectors the plugin declared that the user cannot access yet.
    all_connectors
        .iter()
        .filter(|connector| {
            plugin_app_ids.contains(connector.id.as_str())
                && !accessible_ids.contains(connector.id.as_str())
        })
        .cloned()
        .map(AppSummary::from)
        .collect()
}
#[cfg(test)]
mod tests {
    use codex_app_server_protocol::AppInfo;
    use codex_core::plugins::AppConnectorId;
    use pretty_assertions::assert_eq;
    use super::plugin_apps_needing_auth;
    // Before the Codex apps backend reports ready, no auth prompts should be
    // produced even when a declared app is visibly inaccessible.
    #[test]
    fn plugin_apps_needing_auth_returns_empty_when_codex_apps_is_not_ready() {
        let all_connectors = vec![AppInfo {
            id: "alpha".to_string(),
            name: "Alpha".to_string(),
            description: Some("Alpha connector".to_string()),
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            branding: None,
            app_metadata: None,
            labels: None,
            install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
            is_accessible: false,
            is_enabled: true,
            plugin_display_names: Vec::new(),
        }];
        assert_eq!(
            plugin_apps_needing_auth(
                &all_connectors,
                &[],
                &[AppConnectorId("alpha".to_string())],
                false,
            ),
            Vec::new()
        );
    }
}

View File

@@ -733,7 +733,6 @@ mod tests {
expiration: ExecExpiration::DefaultTimeout,
sandbox: SandboxType::WindowsRestrictedToken,
windows_sandbox_level: WindowsSandboxLevel::Disabled,
windows_sandbox_private_desktop: false,
sandbox_permissions: codex_core::sandboxing::SandboxPermissions::UseDefault,
sandbox_policy: sandbox_policy.clone(),
file_system_sandbox_policy: FileSystemSandboxPolicy::from(&sandbox_policy),
@@ -845,7 +844,6 @@ mod tests {
expiration: ExecExpiration::Cancellation(CancellationToken::new()),
sandbox: SandboxType::None,
windows_sandbox_level: WindowsSandboxLevel::Disabled,
windows_sandbox_private_desktop: false,
sandbox_permissions: codex_core::sandboxing::SandboxPermissions::UseDefault,
sandbox_policy: sandbox_policy.clone(),
file_system_sandbox_policy: FileSystemSandboxPolicy::from(&sandbox_policy),

View File

@@ -12,7 +12,6 @@ use codex_app_server_protocol::ConfigWriteResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::NetworkRequirements;
use codex_app_server_protocol::SandboxMode;
use codex_core::AnalyticsEventsClient;
use codex_core::ThreadManager;
use codex_core::config::ConfigService;
use codex_core::config::ConfigServiceError;
@@ -21,9 +20,6 @@ use codex_core::config_loader::ConfigRequirementsToml;
use codex_core::config_loader::LoaderOverrides;
use codex_core::config_loader::ResidencyRequirement as CoreResidencyRequirement;
use codex_core::config_loader::SandboxModeRequirement as CoreSandboxModeRequirement;
use codex_core::plugins::PluginId;
use codex_core::plugins::collect_plugin_enabled_candidates;
use codex_core::plugins::installed_plugin_telemetry_metadata;
use codex_protocol::config_types::WebSearchMode;
use codex_protocol::protocol::Op;
use serde_json::json;
@@ -60,7 +56,6 @@ pub(crate) struct ConfigApi {
loader_overrides: LoaderOverrides,
cloud_requirements: Arc<RwLock<CloudRequirementsLoader>>,
user_config_reloader: Arc<dyn UserConfigReloader>,
analytics_events_client: AnalyticsEventsClient,
}
impl ConfigApi {
@@ -70,7 +65,6 @@ impl ConfigApi {
loader_overrides: LoaderOverrides,
cloud_requirements: Arc<RwLock<CloudRequirementsLoader>>,
user_config_reloader: Arc<dyn UserConfigReloader>,
analytics_events_client: AnalyticsEventsClient,
) -> Self {
Self {
codex_home,
@@ -78,7 +72,6 @@ impl ConfigApi {
loader_overrides,
cloud_requirements,
user_config_reloader,
analytics_events_client,
}
}
@@ -120,15 +113,10 @@ impl ConfigApi {
&self,
params: ConfigValueWriteParams,
) -> Result<ConfigWriteResponse, JSONRPCErrorError> {
let pending_changes =
collect_plugin_enabled_candidates([(&params.key_path, &params.value)].into_iter());
let response = self
.config_service()
self.config_service()
.write_value(params)
.await
.map_err(map_error)?;
self.emit_plugin_toggle_events(pending_changes);
Ok(response)
.map_err(map_error)
}
pub(crate) async fn batch_write(
@@ -136,38 +124,16 @@ impl ConfigApi {
params: ConfigBatchWriteParams,
) -> Result<ConfigWriteResponse, JSONRPCErrorError> {
let reload_user_config = params.reload_user_config;
let pending_changes = collect_plugin_enabled_candidates(
params
.edits
.iter()
.map(|edit| (&edit.key_path, &edit.value)),
);
let response = self
.config_service()
.batch_write(params)
.await
.map_err(map_error)?;
self.emit_plugin_toggle_events(pending_changes);
if reload_user_config {
self.user_config_reloader.reload_user_config().await;
}
Ok(response)
}
fn emit_plugin_toggle_events(&self, pending_changes: std::collections::BTreeMap<String, bool>) {
for (plugin_id, enabled) in pending_changes {
let Ok(plugin_id) = PluginId::parse(&plugin_id) else {
continue;
};
let metadata =
installed_plugin_telemetry_metadata(self.codex_home.as_path(), &plugin_id);
if enabled {
self.analytics_events_client.track_plugin_enabled(metadata);
} else {
self.analytics_events_client.track_plugin_disabled(metadata);
}
}
}
}
fn map_requirements_toml_to_api(requirements: ConfigRequirementsToml) -> ConfigRequirements {
@@ -263,7 +229,6 @@ fn config_write_error(code: ConfigWriteErrorCode, message: impl Into<String>) ->
#[cfg(test)]
mod tests {
use super::*;
use codex_core::AnalyticsEventsClient;
use codex_core::config_loader::NetworkRequirementsToml as CoreNetworkRequirementsToml;
use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
use pretty_assertions::assert_eq;
@@ -305,7 +270,6 @@ mod tests {
]),
}),
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: Some(CoreResidencyRequirement::Us),
network: Some(CoreNetworkRequirementsToml {
@@ -376,7 +340,6 @@ mod tests {
allowed_web_search_modes: Some(Vec::new()),
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -396,24 +359,12 @@ mod tests {
let user_config_path = codex_home.path().join("config.toml");
std::fs::write(&user_config_path, "").expect("write config");
let reloader = Arc::new(RecordingUserConfigReloader::default());
let analytics_config = Arc::new(
codex_core::config::ConfigBuilder::default()
.build()
.await
.expect("load analytics config"),
);
let config_api = ConfigApi::new(
codex_home.path().to_path_buf(),
Vec::new(),
LoaderOverrides::default(),
Arc::new(RwLock::new(CloudRequirementsLoader::default())),
reloader.clone(),
AnalyticsEventsClient::new(
analytics_config,
codex_core::test_support::auth_manager_from_auth(
codex_core::CodexAuth::from_api_key("test"),
),
),
);
let response = config_api

View File

@@ -74,8 +74,6 @@ use codex_app_server_protocol::Result;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequest;
use codex_arg0::Arg0DispatchPaths;
use codex_core::AuthManager;
use codex_core::ThreadManager;
use codex_core::config::Config;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::LoaderOverrides;
@@ -124,10 +122,6 @@ pub struct InProcessStartArgs {
pub loader_overrides: LoaderOverrides,
/// Preloaded cloud requirements provider.
pub cloud_requirements: CloudRequirementsLoader,
/// Optional prebuilt auth manager reused by an embedding caller.
pub auth_manager: Option<Arc<AuthManager>>,
/// Optional prebuilt thread manager reused by an embedding caller.
pub thread_manager: Option<Arc<ThreadManager>>,
/// Feedback sink used by app-server/core telemetry and logs.
pub feedback: CodexFeedback,
/// Startup warnings emitted after initialize succeeds.
@@ -410,8 +404,6 @@ fn start_uninitialized(args: InProcessStartArgs) -> InProcessClientHandle {
cli_overrides: args.cli_overrides,
loader_overrides: args.loader_overrides,
cloud_requirements: args.cloud_requirements,
auth_manager: args.auth_manager,
thread_manager: args.thread_manager,
feedback: args.feedback,
log_db: None,
config_warnings: args.config_warnings,
@@ -483,7 +475,6 @@ fn start_uninitialized(args: InProcessStartArgs) -> InProcessClientHandle {
}
}
processor.clear_runtime_references();
processor.drain_background_tasks().await;
processor.shutdown_threads().await;
processor.connection_closed(IN_PROCESS_CONNECTION_ID).await;
@@ -758,8 +749,6 @@ mod tests {
cli_overrides: Vec::new(),
loader_overrides: LoaderOverrides::default(),
cloud_requirements: CloudRequirementsLoader::default(),
auth_manager: None,
thread_manager: None,
feedback: CodexFeedback::new(),
config_warnings: Vec::new(),
session_source,

View File

@@ -607,8 +607,6 @@ pub async fn run_main_with_transport(
cli_overrides,
loader_overrides,
cloud_requirements: cloud_requirements.clone(),
auth_manager: None,
thread_manager: None,
feedback: feedback.clone(),
log_db,
config_warnings,

View File

@@ -39,7 +39,6 @@ use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestPayload;
use codex_app_server_protocol::experimental_required_message;
use codex_arg0::Arg0DispatchPaths;
use codex_core::AnalyticsEventsClient;
use codex_core::AuthManager;
use codex_core::ThreadManager;
use codex_core::auth::ExternalAuthRefreshContext;
@@ -139,7 +138,6 @@ pub(crate) struct MessageProcessor {
codex_message_processor: CodexMessageProcessor,
config_api: ConfigApi,
external_agent_config_api: ExternalAgentConfigApi,
auth_manager: Arc<AuthManager>,
config: Arc<Config>,
config_warnings: Arc<Vec<ConfigWarningNotification>>,
}
@@ -160,8 +158,6 @@ pub(crate) struct MessageProcessorArgs {
pub(crate) cli_overrides: Vec<(String, TomlValue)>,
pub(crate) loader_overrides: LoaderOverrides,
pub(crate) cloud_requirements: CloudRequirementsLoader,
pub(crate) auth_manager: Option<Arc<AuthManager>>,
pub(crate) thread_manager: Option<Arc<ThreadManager>>,
pub(crate) feedback: CodexFeedback,
pub(crate) log_db: Option<LogDbLayer>,
pub(crate) config_warnings: Vec<ConfigWarningNotification>,
@@ -180,52 +176,38 @@ impl MessageProcessor {
cli_overrides,
loader_overrides,
cloud_requirements,
auth_manager,
thread_manager,
feedback,
log_db,
config_warnings,
session_source,
enable_codex_api_key_env,
} = args;
let (auth_manager, thread_manager) = match (auth_manager, thread_manager) {
(Some(auth_manager), Some(thread_manager)) => (auth_manager, thread_manager),
(None, None) => {
let auth_manager = AuthManager::shared(
config.codex_home.clone(),
enable_codex_api_key_env,
config.cli_auth_credentials_store_mode,
);
let thread_manager = Arc::new(ThreadManager::new(
config.as_ref(),
auth_manager.clone(),
session_source,
CollaborationModesConfig {
default_mode_request_user_input: config
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
));
(auth_manager, thread_manager)
}
_ => panic!("MessageProcessorArgs must provide both auth_manager and thread_manager"),
};
let auth_manager = AuthManager::shared(
config.codex_home.clone(),
enable_codex_api_key_env,
config.cli_auth_credentials_store_mode,
);
auth_manager.set_forced_chatgpt_workspace_id(config.forced_chatgpt_workspace_id.clone());
auth_manager.set_external_auth_refresher(Arc::new(ExternalAuthRefreshBridge {
outgoing: outgoing.clone(),
}));
let analytics_events_client =
AnalyticsEventsClient::new(Arc::clone(&config), Arc::clone(&auth_manager));
thread_manager
.plugins_manager()
.set_analytics_events_client(analytics_events_client.clone());
let thread_manager = Arc::new(ThreadManager::new(
config.as_ref(),
auth_manager.clone(),
session_source,
CollaborationModesConfig {
default_mode_request_user_input: config
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
));
// TODO(xl): Move into PluginManager once this no longer depends on config feature gating.
thread_manager
.plugins_manager()
.maybe_start_curated_repo_sync_for_config(&config);
let cloud_requirements = Arc::new(RwLock::new(cloud_requirements));
let codex_message_processor = CodexMessageProcessor::new(CodexMessageProcessorArgs {
auth_manager: auth_manager.clone(),
auth_manager,
thread_manager: Arc::clone(&thread_manager),
outgoing: outgoing.clone(),
arg0_paths,
@@ -241,7 +223,6 @@ impl MessageProcessor {
loader_overrides,
cloud_requirements,
thread_manager,
analytics_events_client,
);
let external_agent_config_api = ExternalAgentConfigApi::new(config.codex_home.clone());
@@ -250,16 +231,11 @@ impl MessageProcessor {
codex_message_processor,
config_api,
external_agent_config_api,
auth_manager,
config,
config_warnings: Arc::new(config_warnings),
}
}
pub(crate) fn clear_runtime_references(&self) {
self.auth_manager.clear_external_auth_refresher();
}
pub(crate) async fn process_request(
&mut self,
connection_id: ConnectionId,
@@ -560,11 +536,7 @@ impl MessageProcessor {
}
let user_agent = get_codex_user_agent();
let response = InitializeResponse {
user_agent,
platform_family: std::env::consts::FAMILY.to_string(),
platform_os: std::env::consts::OS.to_string(),
};
let response = InitializeResponse { user_agent };
self.outgoing
.send_response(connection_request_id, response)
.await;

View File

@@ -47,6 +47,8 @@ use tracing_subscriber::layer::SubscriberExt;
use wiremock::MockServer;
const TEST_CONNECTION_ID: ConnectionId = ConnectionId(7);
const CORE_TURN_SANITY_SPAN_NAMES: &[&str] =
&["submission_dispatch", "session_task.turn", "run_turn"];
struct TestTracing {
exporter: InMemorySpanExporter,
@@ -239,8 +241,6 @@ fn build_test_processor(
cli_overrides: Vec::new(),
loader_overrides: LoaderOverrides::default(),
cloud_requirements: CloudRequirementsLoader::default(),
auth_manager: None,
thread_manager: None,
feedback: CodexFeedback::new(),
log_db: None,
config_warnings: Vec::new(),
@@ -282,21 +282,17 @@ fn find_rpc_span_with_trace<'a>(
})
}
fn find_span_with_trace<'a, F>(
fn find_span_by_name_with_trace<'a>(
spans: &'a [SpanData],
name: &str,
trace_id: TraceId,
description: &str,
predicate: F,
) -> &'a SpanData
where
F: Fn(&SpanData) -> bool,
{
) -> &'a SpanData {
spans
.iter()
.find(|span| span.span_context.trace_id() == trace_id && predicate(span))
.find(|span| span.name.as_ref() == name && span.span_context.trace_id() == trace_id)
.unwrap_or_else(|| {
panic!(
"missing span matching {description} for trace={trace_id}; exported spans:\n{}",
"missing span named {name} for trace={trace_id}; exported spans:\n{}",
format_spans(spans)
)
})
@@ -321,17 +317,12 @@ fn format_spans(spans: &[SpanData]) -> String {
.join("\n")
}
fn span_depth_from_ancestor(
spans: &[SpanData],
child: &SpanData,
ancestor: &SpanData,
) -> Option<usize> {
fn assert_span_descends_from(spans: &[SpanData], child: &SpanData, ancestor: &SpanData) {
let ancestor_span_id = ancestor.span_context.span_id();
let mut parent_span_id = child.parent_span_id;
let mut depth = 1;
while parent_span_id != SpanId::INVALID {
if parent_span_id == ancestor_span_id {
return Some(depth);
return;
}
let Some(parent_span) = spans
.iter()
@@ -340,15 +331,6 @@ fn span_depth_from_ancestor(
break;
};
parent_span_id = parent_span.parent_span_id;
depth += 1;
}
None
}
fn assert_span_descends_from(spans: &[SpanData], child: &SpanData, ancestor: &SpanData) {
if span_depth_from_ancestor(spans, child, ancestor).is_some() {
return;
}
panic!(
@@ -359,27 +341,6 @@ fn assert_span_descends_from(spans: &[SpanData], child: &SpanData, ancestor: &Sp
);
}
fn assert_has_internal_descendant_at_min_depth(
spans: &[SpanData],
ancestor: &SpanData,
min_depth: usize,
) {
if spans.iter().any(|span| {
span.span_kind == SpanKind::Internal
&& span.span_context.trace_id() == ancestor.span_context.trace_id()
&& span_depth_from_ancestor(spans, span, ancestor)
.is_some_and(|depth| depth >= min_depth)
}) {
return;
}
panic!(
"missing internal descendant at depth >= {min_depth} below {}; exported spans:\n{}",
ancestor.name,
format_spans(spans)
);
}
async fn read_response<T: serde::de::DeserializeOwned>(
outgoing_rx: &mut mpsc::Receiver<crate::outgoing_message::OutgoingEnvelope>,
request_id: i64,
@@ -480,21 +441,6 @@ where
);
}
async fn wait_for_new_exported_spans<F>(
tracing: &TestTracing,
baseline_len: usize,
predicate: F,
) -> Vec<SpanData>
where
F: Fn(&[SpanData]) -> bool,
{
let spans = wait_for_exported_spans(tracing, |spans| {
spans.len() > baseline_len && predicate(&spans[baseline_len..])
})
.await;
spans.into_iter().skip(baseline_len).collect()
}
#[tokio::test(flavor = "current_thread")]
async fn thread_start_jsonrpc_span_exports_server_span_and_parents_children() -> Result<()> {
let _guard = tracing_test_guard().lock().await;
@@ -502,65 +448,33 @@ async fn thread_start_jsonrpc_span_exports_server_span_and_parents_children() ->
let RemoteTrace {
trace_id: remote_trace_id,
parent_span_id: remote_parent_span_id,
context: remote_trace,
..
} = RemoteTrace::new("00000000000000000000000000000011", "0000000000000022");
let _: ThreadStartResponse = harness.start_thread(20_002, None).await;
let untraced_spans = wait_for_exported_spans(harness.tracing, |spans| {
spans.iter().any(|span| {
span.span_kind == SpanKind::Server
&& span_attr(span, "rpc.method") == Some("thread/start")
})
})
.await;
let untraced_server_span = find_rpc_span_with_trace(
&untraced_spans,
SpanKind::Server,
"thread/start",
untraced_spans
.iter()
.rev()
.find(|span| {
span.span_kind == SpanKind::Server
&& span_attr(span, "rpc.system") == Some("jsonrpc")
&& span_attr(span, "rpc.method") == Some("thread/start")
})
.unwrap_or_else(|| {
panic!(
"missing latest thread/start server span; exported spans:\n{}",
format_spans(&untraced_spans)
)
})
.span_context
.trace_id(),
);
assert_has_internal_descendant_at_min_depth(&untraced_spans, untraced_server_span, 1);
let baseline_len = untraced_spans.len();
let _: ThreadStartResponse = harness.start_thread(20_003, Some(remote_trace)).await;
let spans = wait_for_new_exported_spans(harness.tracing, baseline_len, |spans| {
let _: ThreadStartResponse = harness.start_thread(2, Some(remote_trace)).await;
let spans = wait_for_exported_spans(harness.tracing, |spans| {
spans.iter().any(|span| {
span.span_kind == SpanKind::Server
&& span_attr(span, "rpc.method") == Some("thread/start")
&& span.span_context.trace_id() == remote_trace_id
}) && spans.iter().any(|span| {
span.name.as_ref() == "app_server.thread_start.notify_started"
&& span.span_context.trace_id() == remote_trace_id
span.name.as_ref() == "thread_spawn" && span.span_context.trace_id() == remote_trace_id
}) && spans.iter().any(|span| {
span.name.as_ref() == "session_init" && span.span_context.trace_id() == remote_trace_id
})
})
.await;
let server_request_span =
find_rpc_span_with_trace(&spans, SpanKind::Server, "thread/start", remote_trace_id);
let thread_spawn_span = find_span_by_name_with_trace(&spans, "thread_spawn", remote_trace_id);
let session_init_span = find_span_by_name_with_trace(&spans, "session_init", remote_trace_id);
assert_eq!(server_request_span.name.as_ref(), "thread/start");
assert_eq!(server_request_span.parent_span_id, remote_parent_span_id);
assert!(server_request_span.parent_span_is_remote);
assert_eq!(server_request_span.span_context.trace_id(), remote_trace_id);
assert_ne!(server_request_span.span_context.span_id(), SpanId::INVALID);
assert_has_internal_descendant_at_min_depth(&spans, server_request_span, 1);
assert_has_internal_descendant_at_min_depth(&spans, server_request_span, 2);
assert_span_descends_from(&spans, thread_spawn_span, server_request_span);
assert_span_descends_from(&spans, session_init_span, server_request_span);
harness.shutdown().await;
Ok(())
@@ -611,7 +525,7 @@ async fn turn_start_jsonrpc_span_parents_core_turn_spans() -> Result<()> {
&& span_attr(span, "rpc.method") == Some("turn/start")
&& span.span_context.trace_id() == remote_trace_id
}) && spans.iter().any(|span| {
span_attr(span, "codex.op") == Some("user_input")
CORE_TURN_SANITY_SPAN_NAMES.contains(&span.name.as_ref())
&& span.span_context.trace_id() == remote_trace_id
})
})
@@ -619,9 +533,17 @@ async fn turn_start_jsonrpc_span_parents_core_turn_spans() -> Result<()> {
let server_request_span =
find_rpc_span_with_trace(&spans, SpanKind::Server, "turn/start", remote_trace_id);
let core_turn_span =
find_span_with_trace(&spans, remote_trace_id, "codex.op=user_input", |span| {
span_attr(span, "codex.op") == Some("user_input")
let core_turn_span = spans
.iter()
.find(|span| {
CORE_TURN_SANITY_SPAN_NAMES.contains(&span.name.as_ref())
&& span.span_context.trace_id() == remote_trace_id
})
.unwrap_or_else(|| {
panic!(
"missing representative core turn span for trace={remote_trace_id}; exported spans:\n{}",
format_spans(&spans)
)
});
assert_eq!(server_request_span.parent_span_id, remote_parent_span_id);

View File

@@ -1,5 +1,4 @@
use std::collections::HashMap;
use std::fmt;
use std::sync::Arc;
use std::sync::atomic::AtomicI64;
use std::sync::atomic::Ordering;
@@ -33,12 +32,6 @@ pub(crate) type ClientRequestResult = std::result::Result<Result, JSONRPCErrorEr
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub(crate) struct ConnectionId(pub(crate) u64);
impl fmt::Display for ConnectionId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
/// Stable identifier for a client request scoped to a transport connection.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub(crate) struct ConnectionRequestId {

View File

@@ -1,16 +0,0 @@
use anyhow::Result;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
pub async fn start_analytics_events_server() -> Result<MockServer> {
let server = MockServer::start().await;
Mock::given(method("POST"))
.and(path("/codex/analytics-events/events"))
.respond_with(ResponseTemplate::new(200))
.mount(&server)
.await;
Ok(server)
}

View File

@@ -1,4 +1,3 @@
mod analytics_server;
mod auth_fixtures;
mod config;
mod mcp_process;
@@ -7,7 +6,6 @@ mod models_cache;
mod responses;
mod rollout;
pub use analytics_server::start_analytics_events_server;
pub use auth_fixtures::ChatGptAuthFixture;
pub use auth_fixtures::ChatGptIdTokenClaims;
pub use auth_fixtures::encode_id_token;

View File

@@ -41,7 +41,6 @@ use codex_app_server_protocol::MockExperimentalMethodParams;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::PluginInstallParams;
use codex_app_server_protocol::PluginListParams;
use codex_app_server_protocol::PluginReadParams;
use codex_app_server_protocol::PluginUninstallParams;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ReviewStartParams;
@@ -474,15 +473,6 @@ impl McpProcess {
self.send_request("plugin/list", params).await
}
/// Send a `plugin/read` JSON-RPC request.
pub async fn send_plugin_read_request(
&mut self,
params: PluginReadParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("plugin/read", params).await
}
/// Send a JSON-RPC request with raw params for protocol-level validation tests.
pub async fn send_raw_request(
&mut self,

View File

@@ -47,7 +47,6 @@ fn preset_to_info(preset: &ModelPreset, priority: i32) -> ModelInfo {
input_modalities: default_input_modalities(),
prefer_websockets: false,
used_fallback_model_metadata: false,
supports_search_tool: false,
}
}

View File

@@ -159,8 +159,7 @@ async fn thread_start_without_dynamic_tools_allows_without_experimental_api_capa
}
#[tokio::test]
async fn thread_start_granular_approval_policy_requires_experimental_api_capability() -> Result<()>
{
async fn thread_start_reject_approval_policy_requires_experimental_api_capability() -> Result<()> {
let server = create_mock_responses_server_sequence_unchecked(Vec::new()).await;
let codex_home = TempDir::new()?;
create_config_toml(codex_home.path(), &server.uri())?;
@@ -181,7 +180,7 @@ async fn thread_start_granular_approval_policy_requires_experimental_api_capabil
let request_id = mcp
.send_thread_start_request(ThreadStartParams {
approval_policy: Some(AskForApproval::Granular {
approval_policy: Some(AskForApproval::Reject {
sandbox_approval: true,
rules: false,
skill_approval: false,
@@ -197,7 +196,7 @@ async fn thread_start_granular_approval_policy_requires_experimental_api_capabil
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await??;
assert_experimental_capability_error(error, "askForApproval.granular");
assert_experimental_capability_error(error, "askForApproval.reject");
Ok(())
}

View File

@@ -46,15 +46,9 @@ async fn initialize_uses_client_info_name_as_originator() -> Result<()> {
let JSONRPCMessage::Response(response) = message else {
anyhow::bail!("expected initialize response, got {message:?}");
};
let InitializeResponse {
user_agent,
platform_family,
platform_os,
} = to_response::<InitializeResponse>(response)?;
let InitializeResponse { user_agent } = to_response::<InitializeResponse>(response)?;
assert!(user_agent.starts_with("codex_vscode/"));
assert_eq!(platform_family, std::env::consts::FAMILY);
assert_eq!(platform_os, std::env::consts::OS);
Ok(())
}
@@ -86,15 +80,9 @@ async fn initialize_respects_originator_override_env_var() -> Result<()> {
let JSONRPCMessage::Response(response) = message else {
anyhow::bail!("expected initialize response, got {message:?}");
};
let InitializeResponse {
user_agent,
platform_family,
platform_os,
} = to_response::<InitializeResponse>(response)?;
let InitializeResponse { user_agent } = to_response::<InitializeResponse>(response)?;
assert!(user_agent.starts_with("codex_originator_via_env_var/"));
assert_eq!(platform_family, std::env::consts::FAMILY);
assert_eq!(platform_os, std::env::consts::OS);
Ok(())
}

View File

@@ -19,7 +19,6 @@ mod output_schema;
mod plan_item;
mod plugin_install;
mod plugin_list;
mod plugin_read;
mod plugin_uninstall;
mod rate_limits;
mod realtime_conversation;

View File

@@ -5,9 +5,7 @@ use std::time::Duration;
use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::DEFAULT_CLIENT_NAME;
use app_test_support::McpProcess;
use app_test_support::start_analytics_events_server;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use axum::Json;
@@ -138,85 +136,6 @@ async fn plugin_install_returns_invalid_request_for_not_available_plugin() -> Re
Ok(())
}
#[tokio::test]
async fn plugin_install_tracks_analytics_event() -> Result<()> {
let analytics_server = start_analytics_events_server().await?;
let codex_home = TempDir::new()?;
write_analytics_config(codex_home.path(), &analytics_server.uri())?;
write_chatgpt_auth(
codex_home.path(),
ChatGptAuthFixture::new("chatgpt-token")
.account_id("account-123")
.chatgpt_user_id("user-123")
.chatgpt_account_id("account-123"),
AuthCredentialsStoreMode::File,
)?;
let repo_root = TempDir::new()?;
write_plugin_marketplace(
repo_root.path(),
"debug",
"sample-plugin",
"./sample-plugin",
None,
None,
)?;
write_plugin_source(repo_root.path(), "sample-plugin", &[])?;
let marketplace_path =
AbsolutePathBuf::try_from(repo_root.path().join(".agents/plugins/marketplace.json"))?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_plugin_install_request(PluginInstallParams {
marketplace_path,
plugin_name: "sample-plugin".to_string(),
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let response: PluginInstallResponse = to_response(response)?;
assert_eq!(response.apps_needing_auth, Vec::<AppSummary>::new());
let payloads = timeout(DEFAULT_TIMEOUT, async {
loop {
let Some(requests) = analytics_server.received_requests().await else {
tokio::time::sleep(Duration::from_millis(25)).await;
continue;
};
if !requests.is_empty() {
break requests;
}
tokio::time::sleep(Duration::from_millis(25)).await;
}
})
.await?;
let payload: serde_json::Value =
serde_json::from_slice(&payloads[0].body).expect("analytics payload");
assert_eq!(
payload,
json!({
"events": [{
"event_type": "codex_plugin_installed",
"event_params": {
"plugin_id": "sample-plugin@debug",
"plugin_name": "sample-plugin",
"marketplace_name": "debug",
"has_skills": false,
"mcp_server_count": 0,
"connector_ids": [],
"product_client_id": DEFAULT_CLIENT_NAME,
}
}]
})
);
Ok(())
}
#[tokio::test]
async fn plugin_install_returns_apps_needing_auth() -> Result<()> {
let connectors = vec![
@@ -542,13 +461,6 @@ connectors = true
)
}
fn write_analytics_config(codex_home: &std::path::Path, base_url: &str) -> std::io::Result<()> {
std::fs::write(
codex_home.join("config.toml"),
format!("chatgpt_base_url = \"{base_url}\"\n"),
)
}
fn write_plugin_marketplace(
repo_root: &std::path::Path,
marketplace_name: &str,

View File

@@ -1,303 +0,0 @@
use std::time::Duration;
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::PluginAuthPolicy;
use codex_app_server_protocol::PluginInstallPolicy;
use codex_app_server_protocol::PluginReadParams;
use codex_app_server_protocol::PluginReadResponse;
use codex_app_server_protocol::RequestId;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use tokio::time::timeout;
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(10);
#[tokio::test]
async fn plugin_read_returns_plugin_details_with_bundle_contents() -> Result<()> {
let codex_home = TempDir::new()?;
let repo_root = TempDir::new()?;
let plugin_root = repo_root.path().join("plugins/demo-plugin");
std::fs::create_dir_all(repo_root.path().join(".git"))?;
std::fs::create_dir_all(repo_root.path().join(".agents/plugins"))?;
std::fs::create_dir_all(plugin_root.join(".codex-plugin"))?;
std::fs::create_dir_all(plugin_root.join("skills/thread-summarizer"))?;
std::fs::write(
repo_root.path().join(".agents/plugins/marketplace.json"),
r#"{
"name": "codex-curated",
"plugins": [
{
"name": "demo-plugin",
"source": {
"source": "local",
"path": "./plugins/demo-plugin"
},
"installPolicy": "AVAILABLE",
"authPolicy": "ON_INSTALL",
"category": "Design"
}
]
}"#,
)?;
std::fs::write(
plugin_root.join(".codex-plugin/plugin.json"),
r##"{
"name": "demo-plugin",
"description": "Longer manifest description",
"interface": {
"displayName": "Plugin Display Name",
"shortDescription": "Short description for subtitle",
"longDescription": "Long description for details page",
"developerName": "OpenAI",
"category": "Productivity",
"capabilities": ["Interactive", "Write"],
"websiteURL": "https://openai.com/",
"privacyPolicyURL": "https://openai.com/policies/row-privacy-policy/",
"termsOfServiceURL": "https://openai.com/policies/row-terms-of-use/",
"defaultPrompt": "Starter prompt for trying a plugin",
"brandColor": "#3B82F6",
"composerIcon": "./assets/icon.png",
"logo": "./assets/logo.png",
"screenshots": ["./assets/screenshot1.png"]
}
}"##,
)?;
std::fs::write(
plugin_root.join("skills/thread-summarizer/SKILL.md"),
r#"---
name: thread-summarizer
description: Summarize email threads
---
# Thread Summarizer
"#,
)?;
std::fs::write(
plugin_root.join(".app.json"),
r#"{
"apps": {
"gmail": {
"id": "gmail"
}
}
}"#,
)?;
std::fs::write(
plugin_root.join(".mcp.json"),
r#"{
"mcpServers": {
"demo": {
"command": "demo-server"
}
}
}"#,
)?;
std::fs::write(
codex_home.path().join("config.toml"),
r#"[features]
plugins = true
[plugins."demo-plugin@codex-curated"]
enabled = true
"#,
)?;
write_installed_plugin(&codex_home, "codex-curated", "demo-plugin")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let marketplace_path =
AbsolutePathBuf::try_from(repo_root.path().join(".agents/plugins/marketplace.json"))?;
let request_id = mcp
.send_plugin_read_request(PluginReadParams {
marketplace_path: marketplace_path.clone(),
plugin_name: "demo-plugin".to_string(),
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let response: PluginReadResponse = to_response(response)?;
assert_eq!(response.plugin.marketplace_name, "codex-curated");
assert_eq!(response.plugin.marketplace_path, marketplace_path);
assert_eq!(response.plugin.summary.id, "demo-plugin@codex-curated");
assert_eq!(response.plugin.summary.name, "demo-plugin");
assert_eq!(
response.plugin.description.as_deref(),
Some("Longer manifest description")
);
assert_eq!(response.plugin.summary.installed, true);
assert_eq!(response.plugin.summary.enabled, true);
assert_eq!(
response.plugin.summary.install_policy,
PluginInstallPolicy::Available
);
assert_eq!(
response.plugin.summary.auth_policy,
PluginAuthPolicy::OnInstall
);
assert_eq!(
response
.plugin
.summary
.interface
.as_ref()
.and_then(|interface| interface.display_name.as_deref()),
Some("Plugin Display Name")
);
assert_eq!(
response
.plugin
.summary
.interface
.as_ref()
.and_then(|interface| interface.category.as_deref()),
Some("Design")
);
assert_eq!(response.plugin.skills.len(), 1);
assert_eq!(
response.plugin.skills[0].name,
"demo-plugin:thread-summarizer"
);
assert_eq!(
response.plugin.skills[0].description,
"Summarize email threads"
);
assert_eq!(response.plugin.apps.len(), 1);
assert_eq!(response.plugin.apps[0].id, "gmail");
assert_eq!(response.plugin.apps[0].name, "gmail");
assert_eq!(
response.plugin.apps[0].install_url.as_deref(),
Some("https://chatgpt.com/apps/gmail/gmail")
);
assert_eq!(response.plugin.mcp_servers.len(), 1);
assert_eq!(response.plugin.mcp_servers[0], "demo");
Ok(())
}
#[tokio::test]
async fn plugin_read_returns_invalid_request_when_plugin_is_missing() -> Result<()> {
let codex_home = TempDir::new()?;
let repo_root = TempDir::new()?;
std::fs::create_dir_all(repo_root.path().join(".git"))?;
std::fs::create_dir_all(repo_root.path().join(".agents/plugins"))?;
std::fs::write(
repo_root.path().join(".agents/plugins/marketplace.json"),
r#"{
"name": "codex-curated",
"plugins": [
{
"name": "demo-plugin",
"source": {
"source": "local",
"path": "./plugins/demo-plugin"
}
}
]
}"#,
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_plugin_read_request(PluginReadParams {
marketplace_path: AbsolutePathBuf::try_from(
repo_root.path().join(".agents/plugins/marketplace.json"),
)?,
plugin_name: "missing-plugin".to_string(),
})
.await?;
let err = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await??;
assert_eq!(err.error.code, -32600);
assert!(
err.error
.message
.contains("plugin `missing-plugin` was not found")
);
Ok(())
}
#[tokio::test]
async fn plugin_read_returns_invalid_request_when_plugin_manifest_is_missing() -> Result<()> {
let codex_home = TempDir::new()?;
let repo_root = TempDir::new()?;
let plugin_root = repo_root.path().join("plugins/demo-plugin");
std::fs::create_dir_all(repo_root.path().join(".git"))?;
std::fs::create_dir_all(repo_root.path().join(".agents/plugins"))?;
std::fs::create_dir_all(&plugin_root)?;
std::fs::write(
repo_root.path().join(".agents/plugins/marketplace.json"),
r#"{
"name": "codex-curated",
"plugins": [
{
"name": "demo-plugin",
"source": {
"source": "local",
"path": "./plugins/demo-plugin"
}
}
]
}"#,
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_plugin_read_request(PluginReadParams {
marketplace_path: AbsolutePathBuf::try_from(
repo_root.path().join(".agents/plugins/marketplace.json"),
)?,
plugin_name: "demo-plugin".to_string(),
})
.await?;
let err = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await??;
assert_eq!(err.error.code, -32600);
assert!(
err.error
.message
.contains("missing or invalid .codex-plugin/plugin.json")
);
Ok(())
}
fn write_installed_plugin(
codex_home: &TempDir,
marketplace_name: &str,
plugin_name: &str,
) -> Result<()> {
let plugin_root = codex_home
.path()
.join("plugins/cache")
.join(marketplace_name)
.join(plugin_name)
.join("local/.codex-plugin");
std::fs::create_dir_all(&plugin_root)?;
std::fs::write(
plugin_root.join("plugin.json"),
format!(r#"{{"name":"{plugin_name}"}}"#),
)?;
Ok(())
}

View File

@@ -1,19 +1,13 @@
use std::time::Duration;
use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::DEFAULT_CLIENT_NAME;
use app_test_support::McpProcess;
use app_test_support::start_analytics_events_server;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::PluginUninstallParams;
use codex_app_server_protocol::PluginUninstallResponse;
use codex_app_server_protocol::RequestId;
use codex_core::auth::AuthCredentialsStoreMode;
use pretty_assertions::assert_eq;
use serde_json::json;
use tempfile::TempDir;
use tokio::time::timeout;
@@ -70,78 +64,6 @@ enabled = true
Ok(())
}
#[tokio::test]
async fn plugin_uninstall_tracks_analytics_event() -> Result<()> {
let analytics_server = start_analytics_events_server().await?;
let codex_home = TempDir::new()?;
write_installed_plugin(&codex_home, "debug", "sample-plugin")?;
std::fs::write(
codex_home.path().join("config.toml"),
format!(
"chatgpt_base_url = \"{}\"\n\n[features]\nplugins = true\n\n[plugins.\"sample-plugin@debug\"]\nenabled = true\n",
analytics_server.uri()
),
)?;
write_chatgpt_auth(
codex_home.path(),
ChatGptAuthFixture::new("chatgpt-token")
.account_id("account-123")
.chatgpt_user_id("user-123")
.chatgpt_account_id("account-123"),
AuthCredentialsStoreMode::File,
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_plugin_uninstall_request(PluginUninstallParams {
plugin_id: "sample-plugin@debug".to_string(),
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let response: PluginUninstallResponse = to_response(response)?;
assert_eq!(response, PluginUninstallResponse {});
let payloads = timeout(DEFAULT_TIMEOUT, async {
loop {
let Some(requests) = analytics_server.received_requests().await else {
tokio::time::sleep(Duration::from_millis(25)).await;
continue;
};
if !requests.is_empty() {
break requests;
}
tokio::time::sleep(Duration::from_millis(25)).await;
}
})
.await?;
let payload: serde_json::Value =
serde_json::from_slice(&payloads[0].body).expect("analytics payload");
assert_eq!(
payload,
json!({
"events": [{
"event_type": "codex_plugin_uninstalled",
"event_params": {
"plugin_id": "sample-plugin@debug",
"plugin_name": "sample-plugin",
"marketplace_name": "debug",
"has_skills": false,
"mcp_server_count": 0,
"connector_ids": [],
"product_client_id": DEFAULT_CLIENT_NAME,
}
}]
})
);
Ok(())
}
fn write_installed_plugin(
codex_home: &TempDir,
marketplace_name: &str,

View File

@@ -51,7 +51,7 @@ async fn realtime_conversation_streams_v2_notifications() -> Result<()> {
vec![],
vec![
json!({
"type": "response.output_audio.delta",
"type": "conversation.output_audio.delta",
"delta": "AQID",
"sample_rate": 24_000,
"channels": 1,
@@ -403,10 +403,6 @@ sandbox_mode = "read-only"
model_provider = "mock_provider"
experimental_realtime_ws_base_url = "{realtime_server_uri}"
[realtime]
version = "v2"
type = "conversational"
[features]
{realtime_feature_key} = {realtime_enabled}

View File

@@ -1,10 +1,8 @@
use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout;
use app_test_support::create_mock_responses_server_repeating_assistant;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCResponse;
@@ -24,19 +22,11 @@ use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
use pretty_assertions::assert_eq;
use serde_json::Value;
use serde_json::json;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
@@ -222,102 +212,6 @@ async fn thread_fork_rejects_unmaterialized_thread() -> Result<()> {
Ok(())
}
#[tokio::test]
async fn thread_fork_surfaces_cloud_requirements_load_errors() -> Result<()> {
let server = MockServer::start().await;
Mock::given(method("GET"))
.and(path("/backend-api/wham/config/requirements"))
.respond_with(
ResponseTemplate::new(401)
.insert_header("content-type", "text/html")
.set_body_string("<html>nope</html>"),
)
.mount(&server)
.await;
Mock::given(method("POST"))
.and(path("/oauth/token"))
.respond_with(ResponseTemplate::new(401).set_body_json(json!({
"error": { "code": "refresh_token_invalidated" }
})))
.mount(&server)
.await;
let codex_home = TempDir::new()?;
let model_server = create_mock_responses_server_repeating_assistant("Done").await;
let chatgpt_base_url = format!("{}/backend-api", server.uri());
create_config_toml_with_chatgpt_base_url(
codex_home.path(),
&model_server.uri(),
&chatgpt_base_url,
)?;
write_chatgpt_auth(
codex_home.path(),
ChatGptAuthFixture::new("chatgpt-token")
.refresh_token("stale-refresh-token")
.plan_type("business")
.chatgpt_user_id("user-123")
.chatgpt_account_id("account-123")
.account_id("account-123"),
AuthCredentialsStoreMode::File,
)?;
let conversation_id = create_fake_rollout(
codex_home.path(),
"2025-01-05T12-00-00",
"2025-01-05T12:00:00Z",
"Saved user message",
Some("mock_provider"),
None,
)?;
let refresh_token_url = format!("{}/oauth/token", server.uri());
let mut mcp = McpProcess::new_with_env(
codex_home.path(),
&[
("OPENAI_API_KEY", None),
(
REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR,
Some(refresh_token_url.as_str()),
),
],
)
.await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let fork_id = mcp
.send_thread_fork_request(ThreadForkParams {
thread_id: conversation_id,
..Default::default()
})
.await?;
let fork_err: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(fork_id)),
)
.await??;
assert!(
fork_err
.error
.message
.contains("failed to load configuration"),
"unexpected fork error: {}",
fork_err.error.message
);
assert_eq!(
fork_err.error.data,
Some(json!({
"reason": "cloudRequirements",
"errorCode": "Auth",
"action": "relogin",
"statusCode": 401,
"detail": "Your access token could not be refreshed because your refresh token was revoked. Please log out and sign in again.",
}))
);
Ok(())
}
#[tokio::test]
async fn thread_fork_ephemeral_remains_pathless_and_omits_listing() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
@@ -504,31 +398,3 @@ stream_max_retries = 0
),
)
}
fn create_config_toml_with_chatgpt_base_url(
codex_home: &Path,
server_uri: &str,
chatgpt_base_url: &str,
) -> std::io::Result<()> {
let config_toml = codex_home.join("config.toml");
std::fs::write(
config_toml,
format!(
r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"
chatgpt_base_url = "{chatgpt_base_url}"
model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
),
)
}

View File

@@ -1,5 +1,4 @@
use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::create_apply_patch_sse_response;
use app_test_support::create_fake_rollout_with_text_elements;
@@ -9,7 +8,6 @@ use app_test_support::create_mock_responses_server_sequence_unchecked;
use app_test_support::create_shell_command_sse_response;
use app_test_support::rollout_path;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use chrono::Utc;
use codex_app_server_protocol::AskForApproval;
use codex_app_server_protocol::CommandExecutionApprovalDecision;
@@ -38,8 +36,6 @@ use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
use codex_protocol::ThreadId;
use codex_protocol::config_types::Personality;
use codex_protocol::models::ContentItem;
@@ -64,11 +60,6 @@ use std::process::Command;
use tempfile::TempDir;
use tokio::time::timeout;
use uuid::Uuid;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const CODEX_5_2_INSTRUCTIONS_TEMPLATE_DEFAULT: &str = "You are Codex, a coding agent based on GPT-5. You and the user share the same workspace and collaborate to achieve the user's goals.";
@@ -1418,98 +1409,6 @@ async fn thread_resume_fails_when_required_mcp_server_fails_to_initialize() -> R
Ok(())
}
#[tokio::test]
async fn thread_resume_surfaces_cloud_requirements_load_errors() -> Result<()> {
let server = MockServer::start().await;
Mock::given(method("GET"))
.and(path("/backend-api/wham/config/requirements"))
.respond_with(
ResponseTemplate::new(401)
.insert_header("content-type", "text/html")
.set_body_string("<html>nope</html>"),
)
.mount(&server)
.await;
Mock::given(method("POST"))
.and(path("/oauth/token"))
.respond_with(ResponseTemplate::new(401).set_body_json(json!({
"error": { "code": "refresh_token_invalidated" }
})))
.mount(&server)
.await;
let codex_home = TempDir::new()?;
let model_server = create_mock_responses_server_repeating_assistant("Done").await;
let chatgpt_base_url = format!("{}/backend-api", server.uri());
create_config_toml_with_chatgpt_base_url(
codex_home.path(),
&model_server.uri(),
&chatgpt_base_url,
)?;
write_chatgpt_auth(
codex_home.path(),
ChatGptAuthFixture::new("chatgpt-token")
.refresh_token("stale-refresh-token")
.plan_type("business")
.chatgpt_user_id("user-123")
.chatgpt_account_id("account-123")
.account_id("account-123"),
AuthCredentialsStoreMode::File,
)?;
let conversation_id = create_fake_rollout_with_text_elements(
codex_home.path(),
"2025-01-05T12-00-00",
"2025-01-05T12:00:00Z",
"Saved user message",
Vec::new(),
Some("mock_provider"),
None,
)?;
let refresh_token_url = format!("{}/oauth/token", server.uri());
let mut mcp = McpProcess::new_with_env(
codex_home.path(),
&[
("OPENAI_API_KEY", None),
(
REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR,
Some(refresh_token_url.as_str()),
),
],
)
.await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let resume_id = mcp
.send_thread_resume_request(ThreadResumeParams {
thread_id: conversation_id,
..Default::default()
})
.await?;
let err: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(resume_id)),
)
.await??;
assert!(
err.error.message.contains("failed to load configuration"),
"unexpected error message: {}",
err.error.message
);
assert_eq!(
err.error.data,
Some(json!({
"reason": "cloudRequirements",
"errorCode": "Auth",
"action": "relogin",
"statusCode": 401,
"detail": "Your access token could not be refreshed because your refresh token was revoked. Please log out and sign in again.",
}))
);
Ok(())
}
#[tokio::test]
async fn thread_resume_prefers_path_over_thread_id() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
@@ -1835,37 +1734,6 @@ stream_max_retries = 0
)
}
fn create_config_toml_with_chatgpt_base_url(
codex_home: &std::path::Path,
server_uri: &str,
chatgpt_base_url: &str,
) -> std::io::Result<()> {
let config_toml = codex_home.join("config.toml");
std::fs::write(
config_toml,
format!(
r#"
model = "gpt-5.2-codex"
approval_policy = "never"
sandbox_mode = "read-only"
chatgpt_base_url = "{chatgpt_base_url}"
model_provider = "mock_provider"
[features]
personality = true
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
),
)
}
fn create_config_toml_with_required_broken_mcp(
codex_home: &std::path::Path,
server_uri: &str,

View File

@@ -1,9 +1,7 @@
use anyhow::Result;
use app_test_support::ChatGptAuthFixture;
use app_test_support::McpProcess;
use app_test_support::create_mock_responses_server_repeating_assistant;
use app_test_support::to_response;
use app_test_support::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCResponse;
@@ -13,23 +11,15 @@ use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStartedNotification;
use codex_app_server_protocol::ThreadStatus;
use codex_app_server_protocol::ThreadStatusChangedNotification;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
use codex_core::config::set_project_trust_level;
use codex_protocol::config_types::ServiceTier;
use codex_protocol::config_types::TrustLevel;
use codex_protocol::openai_models::ReasoningEffort;
use pretty_assertions::assert_eq;
use serde_json::Value;
use serde_json::json;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
@@ -328,88 +318,6 @@ async fn thread_start_fails_when_required_mcp_server_fails_to_initialize() -> Re
Ok(())
}
#[tokio::test]
async fn thread_start_surfaces_cloud_requirements_load_errors() -> Result<()> {
let server = MockServer::start().await;
Mock::given(method("GET"))
.and(path("/backend-api/wham/config/requirements"))
.respond_with(
ResponseTemplate::new(401)
.insert_header("content-type", "text/html")
.set_body_string("<html>nope</html>"),
)
.mount(&server)
.await;
Mock::given(method("POST"))
.and(path("/oauth/token"))
.respond_with(ResponseTemplate::new(401).set_body_json(json!({
"error": { "code": "refresh_token_invalidated" }
})))
.mount(&server)
.await;
let codex_home = TempDir::new()?;
let model_server = create_mock_responses_server_repeating_assistant("Done").await;
let chatgpt_base_url = format!("{}/backend-api", server.uri());
create_config_toml_with_chatgpt_base_url(
codex_home.path(),
&model_server.uri(),
&chatgpt_base_url,
)?;
write_chatgpt_auth(
codex_home.path(),
ChatGptAuthFixture::new("chatgpt-token")
.refresh_token("stale-refresh-token")
.plan_type("business")
.chatgpt_user_id("user-123")
.chatgpt_account_id("account-123")
.account_id("account-123"),
AuthCredentialsStoreMode::File,
)?;
let refresh_token_url = format!("{}/oauth/token", server.uri());
let mut mcp = McpProcess::new_with_env(
codex_home.path(),
&[
("OPENAI_API_KEY", None),
(
REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR,
Some(refresh_token_url.as_str()),
),
],
)
.await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let req_id = mcp
.send_thread_start_request(ThreadStartParams::default())
.await?;
let err: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(req_id)),
)
.await??;
assert!(
err.error.message.contains("failed to load configuration"),
"unexpected error message: {}",
err.error.message
);
assert_eq!(
err.error.data,
Some(json!({
"reason": "cloudRequirements",
"errorCode": "Auth",
"action": "relogin",
"statusCode": 401,
"detail": "Your access token could not be refreshed because your refresh token was revoked. Please log out and sign in again.",
}))
);
Ok(())
}
// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
let config_toml = codex_home.join("config.toml");
@@ -434,34 +342,6 @@ stream_max_retries = 0
)
}
fn create_config_toml_with_chatgpt_base_url(
codex_home: &Path,
server_uri: &str,
chatgpt_base_url: &str,
) -> std::io::Result<()> {
let config_toml = codex_home.join("config.toml");
std::fs::write(
config_toml,
format!(
r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"
chatgpt_base_url = "{chatgpt_base_url}"
model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
),
)
}
fn create_config_toml_with_required_broken_mcp(
codex_home: &Path,
server_uri: &str,

View File

@@ -14,7 +14,6 @@ serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
codex-backend-openapi-models = { path = "../codex-backend-openapi-models" }
codex-client = { workspace = true }
codex-protocol = { workspace = true }
codex-core = { workspace = true }

View File

@@ -4,7 +4,6 @@ use crate::types::PaginatedListTaskListItem;
use crate::types::RateLimitStatusPayload;
use crate::types::TurnAttemptsSiblingTurnsResponse;
use anyhow::Result;
use codex_client::build_reqwest_client_with_custom_ca;
use codex_core::auth::CodexAuth;
use codex_core::default_client::get_codex_user_agent;
use codex_protocol::account::PlanType as AccountPlanType;
@@ -121,7 +120,7 @@ impl Client {
{
base_url = format!("{base_url}/backend-api");
}
let http = build_reqwest_client_with_custom_ca(reqwest::Client::builder())?;
let http = reqwest::Client::builder().build()?;
let path_style = PathStyle::from_base_url(&base_url);
Ok(Self {
base_url,

View File

@@ -21,7 +21,6 @@ pub use codex_core::connectors::list_cached_accessible_connectors_from_mcp_tools
use codex_core::connectors::merge_connectors;
use codex_core::connectors::merge_plugin_apps;
pub use codex_core::connectors::with_app_enabled_state;
use codex_core::plugins::AppConnectorId;
use codex_core::plugins::PluginsManager;
const DIRECTORY_CONNECTORS_TIMEOUT: Duration = Duration::from_secs(60);
@@ -119,21 +118,6 @@ fn plugin_apps_for_config(config: &Config) -> Vec<codex_core::plugins::AppConnec
.effective_apps()
}
pub fn connectors_for_plugin_apps(
connectors: Vec<AppInfo>,
plugin_apps: &[AppConnectorId],
) -> Vec<AppInfo> {
let plugin_app_ids = plugin_apps
.iter()
.map(|connector_id| connector_id.0.as_str())
.collect::<HashSet<_>>();
filter_disallowed_connectors(merge_plugin_apps(connectors, plugin_apps.to_vec()))
.into_iter()
.filter(|connector| plugin_app_ids.contains(connector.id.as_str()))
.collect()
}
pub fn merge_connectors_with_accessible(
connectors: Vec<AppInfo>,
accessible_connectors: Vec<AppInfo>,
@@ -159,7 +143,6 @@ pub fn merge_connectors_with_accessible(
mod tests {
use super::*;
use codex_core::connectors::connector_install_url;
use codex_core::plugins::AppConnectorId;
use pretty_assertions::assert_eq;
fn app(id: &str) -> AppInfo {
@@ -260,27 +243,4 @@ mod tests {
vec![merged_app("alpha", true), merged_app("beta", true)]
);
}
#[test]
fn connectors_for_plugin_apps_returns_only_requested_plugin_apps() {
let connectors = connectors_for_plugin_apps(
vec![app("alpha"), app("beta")],
&[
AppConnectorId("alpha".to_string()),
AppConnectorId("gmail".to_string()),
],
);
assert_eq!(connectors, vec![app("alpha"), merged_app("gmail", false)]);
}
#[test]
fn connectors_for_plugin_apps_filters_disallowed_plugin_apps() {
let connectors = connectors_for_plugin_apps(
Vec::new(),
&[AppConnectorId(
"asdk_app_6938a94a61d881918ef32cb999ff937c".to_string(),
)],
);
assert_eq!(connectors, Vec::<AppInfo>::new());
}
}

View File

@@ -165,7 +165,6 @@ async fn run_command_under_sandbox(
&cwd_clone,
env_map,
None,
config.permissions.windows_sandbox_private_desktop,
)
} else {
run_windows_sandbox_capture(
@@ -176,7 +175,6 @@ async fn run_command_under_sandbox(
&cwd_clone,
env_map,
None,
config.permissions.windows_sandbox_private_desktop,
)
}
})

View File

@@ -976,12 +976,7 @@ async fn run_interactive_tui(
}
}
codex_tui::run_main(
interactive,
arg0_paths,
codex_core::config_loader::LoaderOverrides::default(),
)
.await
codex_tui::run_main(interactive, arg0_paths).await
}
fn confirm(prompt: &str) -> std::io::Result<bool> {

View File

@@ -19,7 +19,6 @@ use codex_core::AuthManager;
use codex_core::auth::CodexAuth;
use codex_core::auth::RefreshTokenError;
use codex_core::config_loader::CloudRequirementsLoadError;
use codex_core::config_loader::CloudRequirementsLoadErrorCode;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigRequirementsToml;
use codex_core::util::backoff;
@@ -83,7 +82,7 @@ enum FetchAttemptError {
Retryable(RetryableFailureKind),
Unauthorized {
status_code: Option<u16>,
message: String,
error: CloudRequirementsLoadError,
},
}
@@ -225,7 +224,7 @@ impl RequirementsFetcher for BackendRequirementsFetcher {
if err.is_unauthorized() {
FetchAttemptError::Unauthorized {
status_code,
message: err.to_string(),
error: CloudRequirementsLoadError::new(err.to_string()),
}
} else {
FetchAttemptError::Retryable(RetryableFailureKind::Request { status_code })
@@ -283,14 +282,10 @@ impl CloudRequirementsService {
emit_load_metric("startup", "error");
})
.map_err(|_| {
CloudRequirementsLoadError::new(
CloudRequirementsLoadErrorCode::Timeout,
None,
format!(
"timed out waiting for cloud requirements after {}s",
self.timeout.as_secs()
),
)
CloudRequirementsLoadError::new(format!(
"timed out waiting for cloud requirements after {}s",
self.timeout.as_secs()
))
})?;
let result = match fetch_result {
@@ -386,10 +381,7 @@ impl CloudRequirementsService {
attempt += 1;
continue;
}
Err(FetchAttemptError::Unauthorized {
status_code,
message,
}) => {
Err(FetchAttemptError::Unauthorized { status_code, error }) => {
last_status_code = status_code;
emit_fetch_attempt_metric(trigger, attempt, "unauthorized", status_code);
if auth_recovery.has_next() {
@@ -399,7 +391,7 @@ impl CloudRequirementsService {
"Cloud requirements request was unauthorized; attempting auth recovery"
);
match auth_recovery.next().await {
Ok(_) => {
Ok(()) => {
let Some(refreshed_auth) = self.auth_manager.auth().await else {
tracing::error!(
"Auth recovery succeeded but no auth is available for cloud requirements"
@@ -412,8 +404,6 @@ impl CloudRequirementsService {
status_code,
);
return Err(CloudRequirementsLoadError::new(
CloudRequirementsLoadErrorCode::Auth,
status_code,
CLOUD_REQUIREMENTS_AUTH_RECOVERY_FAILED_MESSAGE,
));
};
@@ -432,11 +422,7 @@ impl CloudRequirementsService {
attempt,
status_code,
);
return Err(CloudRequirementsLoadError::new(
CloudRequirementsLoadErrorCode::Auth,
status_code,
failed.message,
));
return Err(CloudRequirementsLoadError::new(failed.message));
}
Err(RefreshTokenError::Transient(recovery_err)) => {
if attempt < CLOUD_REQUIREMENTS_MAX_ATTEMPTS {
@@ -455,7 +441,7 @@ impl CloudRequirementsService {
}
tracing::warn!(
error = %message,
error = %error,
"Cloud requirements request was unauthorized and no auth recovery is available"
);
emit_fetch_final_metric(
@@ -466,8 +452,6 @@ impl CloudRequirementsService {
status_code,
);
return Err(CloudRequirementsLoadError::new(
CloudRequirementsLoadErrorCode::Auth,
status_code,
CLOUD_REQUIREMENTS_AUTH_RECOVERY_FAILED_MESSAGE,
));
}
@@ -486,8 +470,6 @@ impl CloudRequirementsService {
last_status_code,
);
return Err(CloudRequirementsLoadError::new(
CloudRequirementsLoadErrorCode::Parse,
None,
CLOUD_REQUIREMENTS_LOAD_FAILED_MESSAGE,
));
}
@@ -516,8 +498,6 @@ impl CloudRequirementsService {
"{CLOUD_REQUIREMENTS_LOAD_FAILED_MESSAGE}"
);
Err(CloudRequirementsLoadError::new(
CloudRequirementsLoadErrorCode::RequestFailed,
last_status_code,
CLOUD_REQUIREMENTS_LOAD_FAILED_MESSAGE,
))
}
@@ -706,11 +686,7 @@ pub fn cloud_requirements_loader(
CloudRequirementsLoader::new(async move {
task.await.map_err(|err| {
tracing::error!(error = %err, "Cloud requirements task failed");
CloudRequirementsLoadError::new(
CloudRequirementsLoadErrorCode::Internal,
None,
format!("cloud requirements load failed: {err}"),
)
CloudRequirementsLoadError::new(format!("cloud requirements load failed: {err}"))
})?
})
}
@@ -805,7 +781,6 @@ mod tests {
use codex_protocol::protocol::AskForApproval;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::collections::BTreeMap;
use std::collections::VecDeque;
use std::future::pending;
use std::path::Path;
@@ -1034,7 +1009,7 @@ mod tests {
} else {
Err(FetchAttemptError::Unauthorized {
status_code: Some(401),
message: "GET /config/requirements failed: 401".to_string(),
error: CloudRequirementsLoadError::new("GET /config/requirements failed: 401"),
})
}
}
@@ -1054,7 +1029,7 @@ mod tests {
self.request_count.fetch_add(1, Ordering::SeqCst);
Err(FetchAttemptError::Unauthorized {
status_code: Some(401),
message: self.message.clone(),
error: CloudRequirementsLoadError::new(self.message.clone()),
})
}
}
@@ -1105,7 +1080,6 @@ mod tests {
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1149,7 +1123,6 @@ mod tests {
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1157,31 +1130,6 @@ mod tests {
);
}
#[tokio::test]
async fn fetch_cloud_requirements_parses_apps_requirements_toml() {
let result = parse_for_fetch(Some(
r#"
[apps.connector_5f3c8c41a1e54ad7a76272c89e2554fa]
enabled = false
"#,
));
assert_eq!(
result,
Some(ConfigRequirementsToml {
apps: Some(codex_core::config_loader::AppsRequirementsToml {
apps: BTreeMap::from([(
"connector_5f3c8c41a1e54ad7a76272c89e2554fa".to_string(),
codex_core::config_loader::AppRequirementToml {
enabled: Some(false),
},
)]),
}),
..Default::default()
})
);
}
#[tokio::test(start_paused = true)]
async fn fetch_cloud_requirements_times_out() {
let auth_manager = auth_manager_with_plan("enterprise");
@@ -1229,7 +1177,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1280,7 +1227,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1331,7 +1277,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1440,8 +1385,6 @@ enabled = false
err.to_string(),
CLOUD_REQUIREMENTS_AUTH_RECOVERY_FAILED_MESSAGE
);
assert_eq!(err.code(), CloudRequirementsLoadErrorCode::Auth);
assert_eq!(err.status_code(), Some(401));
assert_eq!(fetcher.request_count.load(Ordering::SeqCst), 1);
}
@@ -1492,7 +1435,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1521,7 +1463,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1570,7 +1511,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1618,7 +1558,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1670,7 +1609,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1723,7 +1661,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1776,7 +1713,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1831,7 +1767,6 @@ enabled = false
err.to_string(),
"failed to load your workspace-managed config"
);
assert_eq!(err.code(), CloudRequirementsLoadErrorCode::RequestFailed);
assert_eq!(
fetcher.request_count.load(Ordering::SeqCst),
CLOUD_REQUIREMENTS_MAX_ATTEMPTS
@@ -1862,7 +1797,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -1887,7 +1821,6 @@ enabled = false
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,

View File

@@ -20,7 +20,6 @@ codex-cloud-tasks-client = { path = "../cloud-tasks-client", features = [
"mock",
"online",
] }
codex-client = { workspace = true }
codex-core = { path = "../core" }
codex-login = { path = "../login" }
codex-tui = { path = "../tui" }

View File

@@ -1,4 +1,3 @@
use codex_client::build_reqwest_client_with_custom_ca;
use reqwest::header::CONTENT_TYPE;
use reqwest::header::HeaderMap;
use std::collections::HashMap;
@@ -74,7 +73,7 @@ pub async fn autodetect_environment_id(
};
crate::append_error_log(format!("env: GET {list_url}"));
// Fetch and log the full environments JSON for debugging
let http = build_reqwest_client_with_custom_ca(reqwest::Client::builder())?;
let http = reqwest::Client::builder().build()?;
let res = http.get(&list_url).headers(headers.clone()).send().await?;
let status = res.status();
let ct = res
@@ -148,7 +147,7 @@ async fn get_json<T: serde::de::DeserializeOwned>(
url: &str,
headers: &HeaderMap,
) -> anyhow::Result<T> {
let http = build_reqwest_client_with_custom_ca(reqwest::Client::builder())?;
let http = reqwest::Client::builder().build()?;
let res = http.get(url).headers(headers.clone()).send().await?;
let status = res.status();
let ct = res

View File

@@ -21,12 +21,6 @@ pub struct CompactionInput<'a> {
pub model: &'a str,
pub input: &'a [ResponseItem],
pub instructions: &'a str,
pub tools: Vec<Value>,
pub parallel_tool_calls: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub reasoning: Option<Reasoning>,
#[serde(skip_serializing_if = "Option::is_none")]
pub text: Option<TextControls>,
}
/// Canonical input payload for the memory summarize endpoint.

View File

@@ -1,31 +1,24 @@
use crate::endpoint::realtime_websocket::protocol::ConversationFunctionCallOutputItem;
use crate::endpoint::realtime_websocket::protocol::ConversationItem;
use crate::endpoint::realtime_websocket::protocol::ConversationItemContent;
use crate::endpoint::realtime_websocket::protocol::ConversationItemPayload;
use crate::endpoint::realtime_websocket::protocol::ConversationMessageItem;
use crate::endpoint::realtime_websocket::protocol::RealtimeAudioFrame;
use crate::endpoint::realtime_websocket::protocol::RealtimeEvent;
use crate::endpoint::realtime_websocket::protocol::RealtimeEventParser;
use crate::endpoint::realtime_websocket::protocol::RealtimeOutboundMessage;
use crate::endpoint::realtime_websocket::protocol::RealtimeSessionConfig;
use crate::endpoint::realtime_websocket::protocol::RealtimeSessionMode;
use crate::endpoint::realtime_websocket::protocol::RealtimeTranscriptDelta;
use crate::endpoint::realtime_websocket::protocol::RealtimeTranscriptEntry;
use crate::endpoint::realtime_websocket::protocol::SessionAudio;
use crate::endpoint::realtime_websocket::protocol::SessionAudioFormat;
use crate::endpoint::realtime_websocket::protocol::SessionAudioInput;
use crate::endpoint::realtime_websocket::protocol::SessionAudioOutput;
use crate::endpoint::realtime_websocket::protocol::SessionFunctionTool;
use crate::endpoint::realtime_websocket::protocol::SessionUpdateSession;
use crate::endpoint::realtime_websocket::protocol::parse_realtime_event;
use crate::error::ApiError;
use crate::provider::Provider;
use codex_client::maybe_build_rustls_client_config_with_custom_ca;
use codex_utils_rustls_provider::ensure_rustls_crypto_provider;
use futures::SinkExt;
use futures::StreamExt;
use http::HeaderMap;
use http::HeaderValue;
use serde_json::json;
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
@@ -46,23 +39,6 @@ use tracing::trace;
use tungstenite::protocol::WebSocketConfig;
use url::Url;
const REALTIME_AUDIO_SAMPLE_RATE: u32 = 24_000;
const REALTIME_AUDIO_VOICE: &str = "fathom";
const REALTIME_V1_SESSION_TYPE: &str = "quicksilver";
const REALTIME_V2_SESSION_TYPE: &str = "realtime";
const REALTIME_V2_CODEX_TOOL_NAME: &str = "codex";
const REALTIME_V2_CODEX_TOOL_DESCRIPTION: &str = "Delegate work to Codex and return the result.";
fn normalized_session_mode(
event_parser: RealtimeEventParser,
session_mode: RealtimeSessionMode,
) -> RealtimeSessionMode {
match event_parser {
RealtimeEventParser::V1 => RealtimeSessionMode::Conversational,
RealtimeEventParser::RealtimeV2 => session_mode,
}
}
struct WsStream {
tx_command: mpsc::Sender<WsCommand>,
pump_task: tokio::task::JoinHandle<()>,
@@ -219,14 +195,12 @@ pub struct RealtimeWebsocketConnection {
pub struct RealtimeWebsocketWriter {
stream: Arc<WsStream>,
is_closed: Arc<AtomicBool>,
event_parser: RealtimeEventParser,
}
#[derive(Clone)]
pub struct RealtimeWebsocketEvents {
rx_message: Arc<Mutex<mpsc::UnboundedReceiver<Result<Message, WsError>>>>,
active_transcript: Arc<Mutex<ActiveTranscriptState>>,
event_parser: RealtimeEventParser,
is_closed: Arc<AtomicBool>,
}
@@ -273,7 +247,6 @@ impl RealtimeWebsocketConnection {
fn new(
stream: WsStream,
rx_message: mpsc::UnboundedReceiver<Result<Message, WsError>>,
event_parser: RealtimeEventParser,
) -> Self {
let stream = Arc::new(stream);
let is_closed = Arc::new(AtomicBool::new(false));
@@ -281,12 +254,10 @@ impl RealtimeWebsocketConnection {
writer: RealtimeWebsocketWriter {
stream: Arc::clone(&stream),
is_closed: Arc::clone(&is_closed),
event_parser,
},
events: RealtimeWebsocketEvents {
rx_message: Arc::new(Mutex::new(rx_message)),
active_transcript: Arc::new(Mutex::new(ActiveTranscriptState::default())),
event_parser,
is_closed,
},
}
@@ -300,19 +271,15 @@ impl RealtimeWebsocketWriter {
}
pub async fn send_conversation_item_create(&self, text: String) -> Result<(), ApiError> {
let content_kind = match self.event_parser {
RealtimeEventParser::V1 => "text",
RealtimeEventParser::RealtimeV2 => "input_text",
};
self.send_json(RealtimeOutboundMessage::ConversationItemCreate {
item: ConversationItemPayload::Message(ConversationMessageItem {
item: ConversationItem {
kind: "message".to_string(),
role: "user".to_string(),
content: vec![ConversationItemContent {
kind: content_kind.to_string(),
kind: "text".to_string(),
text,
}],
}),
},
})
.await
}
@@ -322,80 +289,29 @@ impl RealtimeWebsocketWriter {
handoff_id: String,
output_text: String,
) -> Result<(), ApiError> {
let message = match self.event_parser {
RealtimeEventParser::V1 => RealtimeOutboundMessage::ConversationHandoffAppend {
handoff_id,
output_text,
},
RealtimeEventParser::RealtimeV2 => RealtimeOutboundMessage::ConversationItemCreate {
item: ConversationItemPayload::FunctionCallOutput(
ConversationFunctionCallOutputItem {
kind: "function_call_output".to_string(),
call_id: handoff_id,
output: output_text,
},
),
},
};
self.send_json(message).await
self.send_json(RealtimeOutboundMessage::ConversationHandoffAppend {
handoff_id,
output_text,
})
.await
}
pub async fn send_session_update(
&self,
instructions: String,
session_mode: RealtimeSessionMode,
) -> Result<(), ApiError> {
let session_mode = normalized_session_mode(self.event_parser, session_mode);
let (session_kind, session_instructions, output_audio) = match session_mode {
RealtimeSessionMode::Conversational => {
let kind = match self.event_parser {
RealtimeEventParser::V1 => REALTIME_V1_SESSION_TYPE.to_string(),
RealtimeEventParser::RealtimeV2 => REALTIME_V2_SESSION_TYPE.to_string(),
};
(
kind,
Some(instructions),
Some(SessionAudioOutput {
voice: REALTIME_AUDIO_VOICE.to_string(),
}),
)
}
RealtimeSessionMode::Transcription => ("transcription".to_string(), None, None),
};
let tools = match self.event_parser {
RealtimeEventParser::RealtimeV2 => Some(vec![SessionFunctionTool {
kind: "function".to_string(),
name: REALTIME_V2_CODEX_TOOL_NAME.to_string(),
description: REALTIME_V2_CODEX_TOOL_DESCRIPTION.to_string(),
parameters: json!({
"type": "object",
"properties": {
"prompt": {
"type": "string",
"description": "Prompt text for the delegated Codex task."
}
},
"required": ["prompt"],
"additionalProperties": false
}),
}]),
RealtimeEventParser::V1 => None,
};
pub async fn send_session_update(&self, instructions: String) -> Result<(), ApiError> {
self.send_json(RealtimeOutboundMessage::SessionUpdate {
session: SessionUpdateSession {
kind: session_kind,
instructions: session_instructions,
kind: "quicksilver".to_string(),
instructions,
audio: SessionAudio {
input: SessionAudioInput {
format: SessionAudioFormat {
kind: "audio/pcm".to_string(),
rate: REALTIME_AUDIO_SAMPLE_RATE,
rate: 24_000,
},
},
output: output_audio,
output: SessionAudioOutput {
voice: "fathom".to_string(),
},
},
tools,
},
})
.await
@@ -459,7 +375,7 @@ impl RealtimeWebsocketEvents {
match msg {
Message::Text(text) => {
if let Some(mut event) = parse_realtime_event(&text, self.event_parser) {
if let Some(mut event) = parse_realtime_event(&text) {
self.update_active_transcript(&mut event).await;
debug!(?event, "realtime websocket parsed event");
return Ok(Some(event));
@@ -544,8 +460,6 @@ impl RealtimeWebsocketClient {
self.provider.base_url.as_str(),
self.provider.query_params.as_ref(),
config.model.as_deref(),
config.event_parser,
config.session_mode,
)?;
let mut request = ws_url
@@ -560,19 +474,12 @@ impl RealtimeWebsocketClient {
request.headers_mut().extend(headers);
info!("connecting realtime websocket: {ws_url}");
// Realtime websocket TLS should honor the same custom-CA env vars as the rest of Codex's
// outbound HTTPS and websocket traffic.
let connector = maybe_build_rustls_client_config_with_custom_ca()
.map_err(|err| ApiError::Stream(format!("failed to configure websocket TLS: {err}")))?
.map(tokio_tungstenite::Connector::Rustls);
let (stream, response) = tokio_tungstenite::connect_async_tls_with_config(
request,
Some(websocket_config()),
false,
connector,
)
.await
.map_err(|err| ApiError::Stream(format!("failed to connect realtime websocket: {err}")))?;
let (stream, response) =
tokio_tungstenite::connect_async_with_config(request, Some(websocket_config()), false)
.await
.map_err(|err| {
ApiError::Stream(format!("failed to connect realtime websocket: {err}"))
})?;
info!(
ws_url = %ws_url,
status = %response.status(),
@@ -580,14 +487,14 @@ impl RealtimeWebsocketClient {
);
let (stream, rx_message) = WsStream::new(stream);
let connection = RealtimeWebsocketConnection::new(stream, rx_message, config.event_parser);
let connection = RealtimeWebsocketConnection::new(stream, rx_message);
debug!(
session_id = config.session_id.as_deref().unwrap_or("<none>"),
"realtime websocket sending session.update"
);
connection
.writer
.send_session_update(config.instructions, config.session_mode)
.send_session_update(config.instructions)
.await?;
Ok(connection)
}
@@ -632,8 +539,6 @@ fn websocket_url_from_api_url(
api_url: &str,
query_params: Option<&HashMap<String, String>>,
model: Option<&str>,
event_parser: RealtimeEventParser,
_session_mode: RealtimeSessionMode,
) -> Result<Url, ApiError> {
let mut url = Url::parse(api_url)
.map_err(|err| ApiError::Stream(format!("failed to parse realtime api_url: {err}")))?;
@@ -653,20 +558,9 @@ fn websocket_url_from_api_url(
}
}
let intent = match event_parser {
RealtimeEventParser::V1 => Some("quicksilver"),
RealtimeEventParser::RealtimeV2 => None,
};
let has_extra_query_params = query_params.is_some_and(|query_params| {
query_params
.iter()
.any(|(key, _)| key != "intent" && !(key == "model" && model.is_some()))
});
if intent.is_some() || model.is_some() || has_extra_query_params {
{
let mut query = url.query_pairs_mut();
if let Some(intent) = intent {
query.append_pair("intent", intent);
}
query.append_pair("intent", "quicksilver");
if let Some(model) = model {
query.append_pair("model", model);
}
@@ -734,7 +628,7 @@ mod tests {
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::V1),
parse_realtime_event(payload.as_str()),
Some(RealtimeEvent::SessionUpdated {
session_id: "sess_123".to_string(),
instructions: Some("backend prompt".to_string()),
@@ -753,7 +647,7 @@ mod tests {
})
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::V1),
parse_realtime_event(payload.as_str()),
Some(RealtimeEvent::AudioOut(RealtimeAudioFrame {
data: "AAA=".to_string(),
sample_rate: 48000,
@@ -771,7 +665,7 @@ mod tests {
})
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::V1),
parse_realtime_event(payload.as_str()),
Some(RealtimeEvent::ConversationItemAdded(
json!({"type": "message", "seq": 7})
))
@@ -786,7 +680,7 @@ mod tests {
})
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::V1),
parse_realtime_event(payload.as_str()),
Some(RealtimeEvent::ConversationItemDone {
item_id: "item_123".to_string(),
})
@@ -804,7 +698,7 @@ mod tests {
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::V1),
parse_realtime_event(payload.as_str()),
Some(RealtimeEvent::HandoffRequested(RealtimeHandoffRequested {
handoff_id: "handoff_123".to_string(),
item_id: "item_123".to_string(),
@@ -823,7 +717,7 @@ mod tests {
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::V1),
parse_realtime_event(payload.as_str()),
Some(RealtimeEvent::InputTranscriptDelta(
RealtimeTranscriptDelta {
delta: "hello ".to_string(),
@@ -841,7 +735,7 @@ mod tests {
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::V1),
parse_realtime_event(payload.as_str()),
Some(RealtimeEvent::OutputTranscriptDelta(
RealtimeTranscriptDelta {
delta: "hi".to_string(),
@@ -850,68 +744,6 @@ mod tests {
);
}
#[test]
fn parse_realtime_v2_handoff_tool_call_event() {
let payload = json!({
"type": "conversation.item.done",
"item": {
"id": "item_123",
"type": "function_call",
"name": "codex",
"call_id": "call_123",
"arguments": "{\"prompt\":\"delegate this\"}"
}
})
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::RealtimeV2),
Some(RealtimeEvent::HandoffRequested(RealtimeHandoffRequested {
handoff_id: "call_123".to_string(),
item_id: "item_123".to_string(),
input_transcript: "delegate this".to_string(),
active_transcript: Vec::new(),
}))
);
}
#[test]
fn parse_realtime_v2_input_audio_transcription_delta_event() {
let payload = json!({
"type": "conversation.item.input_audio_transcription.delta",
"delta": "hello"
})
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::RealtimeV2),
Some(RealtimeEvent::InputTranscriptDelta(
RealtimeTranscriptDelta {
delta: "hello".to_string(),
}
))
);
}
#[test]
fn parse_realtime_v2_output_audio_delta_defaults_audio_shape() {
let payload = json!({
"type": "response.output_audio.delta",
"delta": "AQID"
})
.to_string();
assert_eq!(
parse_realtime_event(payload.as_str(), RealtimeEventParser::RealtimeV2),
Some(RealtimeEvent::AudioOut(RealtimeAudioFrame {
data: "AQID".to_string(),
sample_rate: 24_000,
num_channels: 1,
samples_per_channel: None,
}))
);
}
#[test]
fn merge_request_headers_matches_http_precedence() {
let mut provider_headers = HeaderMap::new();
@@ -947,14 +779,8 @@ mod tests {
#[test]
fn websocket_url_from_http_base_defaults_to_ws_path() {
let url = websocket_url_from_api_url(
"http://127.0.0.1:8011",
None,
None,
RealtimeEventParser::V1,
RealtimeSessionMode::Conversational,
)
.expect("build ws url");
let url =
websocket_url_from_api_url("http://127.0.0.1:8011", None, None).expect("build ws url");
assert_eq!(
url.as_str(),
"ws://127.0.0.1:8011/v1/realtime?intent=quicksilver"
@@ -963,14 +789,9 @@ mod tests {
#[test]
fn websocket_url_from_ws_base_defaults_to_ws_path() {
let url = websocket_url_from_api_url(
"wss://example.com",
None,
Some("realtime-test-model"),
RealtimeEventParser::V1,
RealtimeSessionMode::Conversational,
)
.expect("build ws url");
let url =
websocket_url_from_api_url("wss://example.com", None, Some("realtime-test-model"))
.expect("build ws url");
assert_eq!(
url.as_str(),
"wss://example.com/v1/realtime?intent=quicksilver&model=realtime-test-model"
@@ -979,14 +800,8 @@ mod tests {
#[test]
fn websocket_url_from_v1_base_appends_realtime_path() {
let url = websocket_url_from_api_url(
"https://api.openai.com/v1",
None,
Some("snapshot"),
RealtimeEventParser::V1,
RealtimeSessionMode::Conversational,
)
.expect("build ws url");
let url = websocket_url_from_api_url("https://api.openai.com/v1", None, Some("snapshot"))
.expect("build ws url");
assert_eq!(
url.as_str(),
"wss://api.openai.com/v1/realtime?intent=quicksilver&model=snapshot"
@@ -995,14 +810,9 @@ mod tests {
#[test]
fn websocket_url_from_nested_v1_base_appends_realtime_path() {
let url = websocket_url_from_api_url(
"https://example.com/openai/v1",
None,
Some("snapshot"),
RealtimeEventParser::V1,
RealtimeSessionMode::Conversational,
)
.expect("build ws url");
let url =
websocket_url_from_api_url("https://example.com/openai/v1", None, Some("snapshot"))
.expect("build ws url");
assert_eq!(
url.as_str(),
"wss://example.com/openai/v1/realtime?intent=quicksilver&model=snapshot"
@@ -1018,8 +828,6 @@ mod tests {
("intent".to_string(), "ignored".to_string()),
])),
Some("snapshot"),
RealtimeEventParser::V1,
RealtimeSessionMode::Conversational,
)
.expect("build ws url");
assert_eq!(
@@ -1028,54 +836,6 @@ mod tests {
);
}
#[test]
fn websocket_url_v1_ignores_transcription_mode() {
let url = websocket_url_from_api_url(
"https://example.com",
None,
None,
RealtimeEventParser::V1,
RealtimeSessionMode::Transcription,
)
.expect("build ws url");
assert_eq!(
url.as_str(),
"wss://example.com/v1/realtime?intent=quicksilver"
);
}
#[test]
fn websocket_url_omits_intent_for_realtime_v2_conversational_mode() {
let url = websocket_url_from_api_url(
"https://example.com/v1/realtime?foo=bar",
Some(&HashMap::from([
("trace".to_string(), "1".to_string()),
("intent".to_string(), "ignored".to_string()),
])),
Some("snapshot"),
RealtimeEventParser::RealtimeV2,
RealtimeSessionMode::Conversational,
)
.expect("build ws url");
assert_eq!(
url.as_str(),
"wss://example.com/v1/realtime?foo=bar&model=snapshot&trace=1"
);
}
#[test]
fn websocket_url_omits_intent_for_realtime_v2_transcription_mode() {
let url = websocket_url_from_api_url(
"https://example.com",
None,
None,
RealtimeEventParser::RealtimeV2,
RealtimeSessionMode::Transcription,
)
.expect("build ws url");
assert_eq!(url.as_str(), "wss://example.com/v1/realtime");
}
#[tokio::test]
async fn e2e_connect_and_exchange_events_against_mock_ws_server() {
let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
@@ -1240,8 +1000,6 @@ mod tests {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_1".to_string()),
event_parser: RealtimeEventParser::V1,
session_mode: RealtimeSessionMode::Conversational,
},
HeaderMap::new(),
HeaderMap::new(),
@@ -1362,352 +1120,6 @@ mod tests {
server.await.expect("server task");
}
#[tokio::test]
async fn realtime_v2_session_update_includes_codex_tool_and_handoff_output_item() {
let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
let addr = listener.local_addr().expect("local addr");
let server = tokio::spawn(async move {
let (stream, _) = listener.accept().await.expect("accept");
let mut ws = accept_async(stream).await.expect("accept ws");
let first = ws
.next()
.await
.expect("first msg")
.expect("first msg ok")
.into_text()
.expect("text");
let first_json: Value = serde_json::from_str(&first).expect("json");
assert_eq!(first_json["type"], "session.update");
assert_eq!(
first_json["session"]["type"],
Value::String("realtime".to_string())
);
assert_eq!(
first_json["session"]["tools"][0]["type"],
Value::String("function".to_string())
);
assert_eq!(
first_json["session"]["tools"][0]["name"],
Value::String("codex".to_string())
);
assert_eq!(
first_json["session"]["tools"][0]["parameters"]["required"],
json!(["prompt"])
);
ws.send(Message::Text(
json!({
"type": "session.updated",
"session": {"id": "sess_v2", "instructions": "backend prompt"}
})
.to_string()
.into(),
))
.await
.expect("send session.updated");
let second = ws
.next()
.await
.expect("second msg")
.expect("second msg ok")
.into_text()
.expect("text");
let second_json: Value = serde_json::from_str(&second).expect("json");
assert_eq!(second_json["type"], "conversation.item.create");
assert_eq!(
second_json["item"]["type"],
Value::String("message".to_string())
);
assert_eq!(
second_json["item"]["content"][0]["type"],
Value::String("input_text".to_string())
);
assert_eq!(
second_json["item"]["content"][0]["text"],
Value::String("delegate this".to_string())
);
let third = ws
.next()
.await
.expect("third msg")
.expect("third msg ok")
.into_text()
.expect("text");
let third_json: Value = serde_json::from_str(&third).expect("json");
assert_eq!(third_json["type"], "conversation.item.create");
assert_eq!(
third_json["item"]["type"],
Value::String("function_call_output".to_string())
);
assert_eq!(
third_json["item"]["call_id"],
Value::String("call_1".to_string())
);
assert_eq!(
third_json["item"]["output"],
Value::String("delegated result".to_string())
);
});
let provider = Provider {
name: "test".to_string(),
base_url: format!("http://{addr}"),
query_params: Some(HashMap::new()),
headers: HeaderMap::new(),
retry: crate::provider::RetryConfig {
max_attempts: 1,
base_delay: Duration::from_millis(1),
retry_429: false,
retry_5xx: false,
retry_transport: false,
},
stream_idle_timeout: Duration::from_secs(5),
};
let client = RealtimeWebsocketClient::new(provider);
let connection = client
.connect(
RealtimeSessionConfig {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_1".to_string()),
event_parser: RealtimeEventParser::RealtimeV2,
session_mode: RealtimeSessionMode::Conversational,
},
HeaderMap::new(),
HeaderMap::new(),
)
.await
.expect("connect");
let created = connection
.next_event()
.await
.expect("next event")
.expect("event");
assert_eq!(
created,
RealtimeEvent::SessionUpdated {
session_id: "sess_v2".to_string(),
instructions: Some("backend prompt".to_string()),
}
);
connection
.send_conversation_item_create("delegate this".to_string())
.await
.expect("send text item");
connection
.send_conversation_handoff_append("call_1".to_string(), "delegated result".to_string())
.await
.expect("send handoff output");
connection.close().await.expect("close");
server.await.expect("server task");
}
#[tokio::test]
async fn transcription_mode_session_update_omits_output_audio_and_instructions() {
let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
let addr = listener.local_addr().expect("local addr");
let server = tokio::spawn(async move {
let (stream, _) = listener.accept().await.expect("accept");
let mut ws = accept_async(stream).await.expect("accept ws");
let first = ws
.next()
.await
.expect("first msg")
.expect("first msg ok")
.into_text()
.expect("text");
let first_json: Value = serde_json::from_str(&first).expect("json");
assert_eq!(first_json["type"], "session.update");
assert_eq!(
first_json["session"]["type"],
Value::String("transcription".to_string())
);
assert!(first_json["session"].get("instructions").is_none());
assert!(first_json["session"]["audio"].get("output").is_none());
assert_eq!(
first_json["session"]["tools"][0]["name"],
Value::String("codex".to_string())
);
ws.send(Message::Text(
json!({
"type": "session.updated",
"session": {"id": "sess_transcription"}
})
.to_string()
.into(),
))
.await
.expect("send session.updated");
let second = ws
.next()
.await
.expect("second msg")
.expect("second msg ok")
.into_text()
.expect("text");
let second_json: Value = serde_json::from_str(&second).expect("json");
assert_eq!(second_json["type"], "input_audio_buffer.append");
});
let provider = Provider {
name: "test".to_string(),
base_url: format!("http://{addr}"),
query_params: Some(HashMap::new()),
headers: HeaderMap::new(),
retry: crate::provider::RetryConfig {
max_attempts: 1,
base_delay: Duration::from_millis(1),
retry_429: false,
retry_5xx: false,
retry_transport: false,
},
stream_idle_timeout: Duration::from_secs(5),
};
let client = RealtimeWebsocketClient::new(provider);
let connection = client
.connect(
RealtimeSessionConfig {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_1".to_string()),
event_parser: RealtimeEventParser::RealtimeV2,
session_mode: RealtimeSessionMode::Transcription,
},
HeaderMap::new(),
HeaderMap::new(),
)
.await
.expect("connect");
let created = connection
.next_event()
.await
.expect("next event")
.expect("event");
assert_eq!(
created,
RealtimeEvent::SessionUpdated {
session_id: "sess_transcription".to_string(),
instructions: None,
}
);
connection
.send_audio_frame(RealtimeAudioFrame {
data: "AQID".to_string(),
sample_rate: 24_000,
num_channels: 1,
samples_per_channel: Some(480),
})
.await
.expect("send audio");
connection.close().await.expect("close");
server.await.expect("server task");
}
#[tokio::test]
async fn v1_transcription_mode_is_treated_as_conversational() {
let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
let addr = listener.local_addr().expect("local addr");
let server = tokio::spawn(async move {
let (stream, _) = listener.accept().await.expect("accept");
let mut ws = accept_async(stream).await.expect("accept ws");
let first = ws
.next()
.await
.expect("first msg")
.expect("first msg ok")
.into_text()
.expect("text");
let first_json: Value = serde_json::from_str(&first).expect("json");
assert_eq!(first_json["type"], "session.update");
assert_eq!(
first_json["session"]["type"],
Value::String("quicksilver".to_string())
);
assert_eq!(
first_json["session"]["instructions"],
Value::String("backend prompt".to_string())
);
assert_eq!(
first_json["session"]["audio"]["output"]["voice"],
Value::String("fathom".to_string())
);
assert!(first_json["session"].get("tools").is_none());
ws.send(Message::Text(
json!({
"type": "session.updated",
"session": {"id": "sess_v1_mode"}
})
.to_string()
.into(),
))
.await
.expect("send session.updated");
});
let provider = Provider {
name: "test".to_string(),
base_url: format!("http://{addr}"),
query_params: Some(HashMap::new()),
headers: HeaderMap::new(),
retry: crate::provider::RetryConfig {
max_attempts: 1,
base_delay: Duration::from_millis(1),
retry_429: false,
retry_5xx: false,
retry_transport: false,
},
stream_idle_timeout: Duration::from_secs(5),
};
let client = RealtimeWebsocketClient::new(provider);
let connection = client
.connect(
RealtimeSessionConfig {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_1".to_string()),
event_parser: RealtimeEventParser::V1,
session_mode: RealtimeSessionMode::Transcription,
},
HeaderMap::new(),
HeaderMap::new(),
)
.await
.expect("connect");
let created = connection
.next_event()
.await
.expect("next event")
.expect("event");
assert_eq!(
created,
RealtimeEvent::SessionUpdated {
session_id: "sess_v1_mode".to_string(),
instructions: None,
}
);
connection.close().await.expect("close");
server.await.expect("server task");
}
#[tokio::test]
async fn send_does_not_block_while_next_event_waits_for_inbound_data() {
let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
@@ -1770,8 +1182,6 @@ mod tests {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_1".to_string()),
event_parser: RealtimeEventParser::V1,
session_mode: RealtimeSessionMode::Conversational,
},
HeaderMap::new(),
HeaderMap::new(),

View File

@@ -1,8 +1,5 @@
pub mod methods;
pub mod protocol;
mod protocol_common;
mod protocol_v1;
mod protocol_v2;
pub use codex_protocol::protocol::RealtimeAudioFrame;
pub use codex_protocol::protocol::RealtimeEvent;
@@ -10,6 +7,4 @@ pub use methods::RealtimeWebsocketClient;
pub use methods::RealtimeWebsocketConnection;
pub use methods::RealtimeWebsocketEvents;
pub use methods::RealtimeWebsocketWriter;
pub use protocol::RealtimeEventParser;
pub use protocol::RealtimeSessionConfig;
pub use protocol::RealtimeSessionMode;

View File

@@ -1,5 +1,3 @@
use crate::endpoint::realtime_websocket::protocol_v1::parse_realtime_event_v1;
use crate::endpoint::realtime_websocket::protocol_v2::parse_realtime_event_v2;
pub use codex_protocol::protocol::RealtimeAudioFrame;
pub use codex_protocol::protocol::RealtimeEvent;
pub use codex_protocol::protocol::RealtimeHandoffRequested;
@@ -7,26 +5,13 @@ pub use codex_protocol::protocol::RealtimeTranscriptDelta;
pub use codex_protocol::protocol::RealtimeTranscriptEntry;
use serde::Serialize;
use serde_json::Value;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RealtimeEventParser {
V1,
RealtimeV2,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RealtimeSessionMode {
Conversational,
Transcription,
}
use tracing::debug;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RealtimeSessionConfig {
pub instructions: String,
pub model: Option<String>,
pub session_id: Option<String>,
pub event_parser: RealtimeEventParser,
pub session_mode: RealtimeSessionMode,
}
#[derive(Debug, Clone, Serialize)]
@@ -42,25 +27,21 @@ pub(super) enum RealtimeOutboundMessage {
#[serde(rename = "session.update")]
SessionUpdate { session: SessionUpdateSession },
#[serde(rename = "conversation.item.create")]
ConversationItemCreate { item: ConversationItemPayload },
ConversationItemCreate { item: ConversationItem },
}
#[derive(Debug, Clone, Serialize)]
pub(super) struct SessionUpdateSession {
#[serde(rename = "type")]
pub(super) kind: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub(super) instructions: Option<String>,
pub(super) instructions: String,
pub(super) audio: SessionAudio,
#[serde(skip_serializing_if = "Option::is_none")]
pub(super) tools: Option<Vec<SessionFunctionTool>>,
}
#[derive(Debug, Clone, Serialize)]
pub(super) struct SessionAudio {
pub(super) input: SessionAudioInput,
#[serde(skip_serializing_if = "Option::is_none")]
pub(super) output: Option<SessionAudioOutput>,
pub(super) output: SessionAudioOutput,
}
#[derive(Debug, Clone, Serialize)]
@@ -81,28 +62,13 @@ pub(super) struct SessionAudioOutput {
}
#[derive(Debug, Clone, Serialize)]
pub(super) struct ConversationMessageItem {
pub(super) struct ConversationItem {
#[serde(rename = "type")]
pub(super) kind: String,
pub(super) role: String,
pub(super) content: Vec<ConversationItemContent>,
}
#[derive(Debug, Clone, Serialize)]
#[serde(untagged)]
pub(super) enum ConversationItemPayload {
Message(ConversationMessageItem),
FunctionCallOutput(ConversationFunctionCallOutputItem),
}
#[derive(Debug, Clone, Serialize)]
pub(super) struct ConversationFunctionCallOutputItem {
#[serde(rename = "type")]
pub(super) kind: String,
pub(super) call_id: String,
pub(super) output: String,
}
#[derive(Debug, Clone, Serialize)]
pub(super) struct ConversationItemContent {
#[serde(rename = "type")]
@@ -110,21 +76,124 @@ pub(super) struct ConversationItemContent {
pub(super) text: String,
}
#[derive(Debug, Clone, Serialize)]
pub(super) struct SessionFunctionTool {
#[serde(rename = "type")]
pub(super) kind: String,
pub(super) name: String,
pub(super) description: String,
pub(super) parameters: Value,
}
pub(super) fn parse_realtime_event(payload: &str) -> Option<RealtimeEvent> {
let parsed: Value = match serde_json::from_str(payload) {
Ok(msg) => msg,
Err(err) => {
debug!("failed to parse realtime event: {err}, data: {payload}");
return None;
}
};
pub(super) fn parse_realtime_event(
payload: &str,
event_parser: RealtimeEventParser,
) -> Option<RealtimeEvent> {
match event_parser {
RealtimeEventParser::V1 => parse_realtime_event_v1(payload),
RealtimeEventParser::RealtimeV2 => parse_realtime_event_v2(payload),
let message_type = match parsed.get("type").and_then(Value::as_str) {
Some(message_type) => message_type,
None => {
debug!("received realtime event without type field: {payload}");
return None;
}
};
match message_type {
"session.updated" => {
let session_id = parsed
.get("session")
.and_then(Value::as_object)
.and_then(|session| session.get("id"))
.and_then(Value::as_str)
.map(str::to_string);
let instructions = parsed
.get("session")
.and_then(Value::as_object)
.and_then(|session| session.get("instructions"))
.and_then(Value::as_str)
.map(str::to_string);
session_id.map(|session_id| RealtimeEvent::SessionUpdated {
session_id,
instructions,
})
}
"conversation.output_audio.delta" => {
let data = parsed
.get("delta")
.and_then(Value::as_str)
.or_else(|| parsed.get("data").and_then(Value::as_str))
.map(str::to_string)?;
let sample_rate = parsed
.get("sample_rate")
.and_then(Value::as_u64)
.and_then(|v| u32::try_from(v).ok())?;
let num_channels = parsed
.get("channels")
.or_else(|| parsed.get("num_channels"))
.and_then(Value::as_u64)
.and_then(|v| u16::try_from(v).ok())?;
Some(RealtimeEvent::AudioOut(RealtimeAudioFrame {
data,
sample_rate,
num_channels,
samples_per_channel: parsed
.get("samples_per_channel")
.and_then(Value::as_u64)
.and_then(|v| u32::try_from(v).ok()),
}))
}
"conversation.input_transcript.delta" => parsed
.get("delta")
.and_then(Value::as_str)
.map(str::to_string)
.map(|delta| RealtimeEvent::InputTranscriptDelta(RealtimeTranscriptDelta { delta })),
"conversation.output_transcript.delta" => parsed
.get("delta")
.and_then(Value::as_str)
.map(str::to_string)
.map(|delta| RealtimeEvent::OutputTranscriptDelta(RealtimeTranscriptDelta { delta })),
"conversation.item.added" => parsed
.get("item")
.cloned()
.map(RealtimeEvent::ConversationItemAdded),
"conversation.item.done" => parsed
.get("item")
.and_then(Value::as_object)
.and_then(|item| item.get("id"))
.and_then(Value::as_str)
.map(str::to_string)
.map(|item_id| RealtimeEvent::ConversationItemDone { item_id }),
"conversation.handoff.requested" => {
let handoff_id = parsed
.get("handoff_id")
.and_then(Value::as_str)
.map(str::to_string)?;
let item_id = parsed
.get("item_id")
.and_then(Value::as_str)
.map(str::to_string)?;
let input_transcript = parsed
.get("input_transcript")
.and_then(Value::as_str)
.map(str::to_string)?;
Some(RealtimeEvent::HandoffRequested(RealtimeHandoffRequested {
handoff_id,
item_id,
input_transcript,
active_transcript: Vec::new(),
}))
}
"error" => parsed
.get("message")
.and_then(Value::as_str)
.map(str::to_string)
.or_else(|| {
parsed
.get("error")
.and_then(Value::as_object)
.and_then(|error| error.get("message"))
.and_then(Value::as_str)
.map(str::to_string)
})
.or_else(|| parsed.get("error").map(std::string::ToString::to_string))
.map(RealtimeEvent::Error),
_ => {
debug!("received unsupported realtime event type: {message_type}, data: {payload}");
None
}
}
}

View File

@@ -1,71 +0,0 @@
use codex_protocol::protocol::RealtimeEvent;
use codex_protocol::protocol::RealtimeTranscriptDelta;
use serde_json::Value;
use tracing::debug;
pub(super) fn parse_realtime_payload(payload: &str, parser_name: &str) -> Option<(Value, String)> {
let parsed: Value = match serde_json::from_str(payload) {
Ok(message) => message,
Err(err) => {
debug!("failed to parse {parser_name} event: {err}, data: {payload}");
return None;
}
};
let message_type = match parsed.get("type").and_then(Value::as_str) {
Some(message_type) => message_type.to_string(),
None => {
debug!("received {parser_name} event without type field: {payload}");
return None;
}
};
Some((parsed, message_type))
}
pub(super) fn parse_session_updated_event(parsed: &Value) -> Option<RealtimeEvent> {
let session_id = parsed
.get("session")
.and_then(Value::as_object)
.and_then(|session| session.get("id"))
.and_then(Value::as_str)
.map(str::to_string)?;
let instructions = parsed
.get("session")
.and_then(Value::as_object)
.and_then(|session| session.get("instructions"))
.and_then(Value::as_str)
.map(str::to_string);
Some(RealtimeEvent::SessionUpdated {
session_id,
instructions,
})
}
pub(super) fn parse_transcript_delta_event(
parsed: &Value,
field: &str,
) -> Option<RealtimeTranscriptDelta> {
parsed
.get(field)
.and_then(Value::as_str)
.map(str::to_string)
.map(|delta| RealtimeTranscriptDelta { delta })
}
pub(super) fn parse_error_event(parsed: &Value) -> Option<RealtimeEvent> {
parsed
.get("message")
.and_then(Value::as_str)
.map(str::to_string)
.or_else(|| {
parsed
.get("error")
.and_then(Value::as_object)
.and_then(|error| error.get("message"))
.and_then(Value::as_str)
.map(str::to_string)
})
.or_else(|| parsed.get("error").map(ToString::to_string))
.map(RealtimeEvent::Error)
}

View File

@@ -1,83 +0,0 @@
use crate::endpoint::realtime_websocket::protocol_common::parse_error_event;
use crate::endpoint::realtime_websocket::protocol_common::parse_realtime_payload;
use crate::endpoint::realtime_websocket::protocol_common::parse_session_updated_event;
use crate::endpoint::realtime_websocket::protocol_common::parse_transcript_delta_event;
use codex_protocol::protocol::RealtimeAudioFrame;
use codex_protocol::protocol::RealtimeEvent;
use codex_protocol::protocol::RealtimeHandoffRequested;
use serde_json::Value;
use tracing::debug;
pub(super) fn parse_realtime_event_v1(payload: &str) -> Option<RealtimeEvent> {
let (parsed, message_type) = parse_realtime_payload(payload, "realtime v1")?;
match message_type.as_str() {
"session.updated" => parse_session_updated_event(&parsed),
"conversation.output_audio.delta" => {
let data = parsed
.get("delta")
.and_then(Value::as_str)
.or_else(|| parsed.get("data").and_then(Value::as_str))
.map(str::to_string)?;
let sample_rate = parsed
.get("sample_rate")
.and_then(Value::as_u64)
.and_then(|value| u32::try_from(value).ok())?;
let num_channels = parsed
.get("channels")
.or_else(|| parsed.get("num_channels"))
.and_then(Value::as_u64)
.and_then(|value| u16::try_from(value).ok())?;
Some(RealtimeEvent::AudioOut(RealtimeAudioFrame {
data,
sample_rate,
num_channels,
samples_per_channel: parsed
.get("samples_per_channel")
.and_then(Value::as_u64)
.and_then(|value| u32::try_from(value).ok()),
}))
}
"conversation.input_transcript.delta" => {
parse_transcript_delta_event(&parsed, "delta").map(RealtimeEvent::InputTranscriptDelta)
}
"conversation.output_transcript.delta" => {
parse_transcript_delta_event(&parsed, "delta").map(RealtimeEvent::OutputTranscriptDelta)
}
"conversation.item.added" => parsed
.get("item")
.cloned()
.map(RealtimeEvent::ConversationItemAdded),
"conversation.item.done" => parsed
.get("item")
.and_then(Value::as_object)
.and_then(|item| item.get("id"))
.and_then(Value::as_str)
.map(str::to_string)
.map(|item_id| RealtimeEvent::ConversationItemDone { item_id }),
"conversation.handoff.requested" => {
let handoff_id = parsed
.get("handoff_id")
.and_then(Value::as_str)
.map(str::to_string)?;
let item_id = parsed
.get("item_id")
.and_then(Value::as_str)
.map(str::to_string)?;
let input_transcript = parsed
.get("input_transcript")
.and_then(Value::as_str)
.map(str::to_string)?;
Some(RealtimeEvent::HandoffRequested(RealtimeHandoffRequested {
handoff_id,
item_id,
input_transcript,
active_transcript: Vec::new(),
}))
}
"error" => parse_error_event(&parsed),
_ => {
debug!("received unsupported realtime v1 event type: {message_type}, data: {payload}");
None
}
}
}

View File

@@ -1,130 +0,0 @@
use crate::endpoint::realtime_websocket::protocol_common::parse_error_event;
use crate::endpoint::realtime_websocket::protocol_common::parse_realtime_payload;
use crate::endpoint::realtime_websocket::protocol_common::parse_session_updated_event;
use crate::endpoint::realtime_websocket::protocol_common::parse_transcript_delta_event;
use codex_protocol::protocol::RealtimeAudioFrame;
use codex_protocol::protocol::RealtimeEvent;
use codex_protocol::protocol::RealtimeHandoffRequested;
use serde_json::Map as JsonMap;
use serde_json::Value;
use tracing::debug;
const CODEX_TOOL_NAME: &str = "codex";
const DEFAULT_AUDIO_SAMPLE_RATE: u32 = 24_000;
const DEFAULT_AUDIO_CHANNELS: u16 = 1;
const TOOL_ARGUMENT_KEYS: [&str; 5] = ["input_transcript", "input", "text", "prompt", "query"];
pub(super) fn parse_realtime_event_v2(payload: &str) -> Option<RealtimeEvent> {
let (parsed, message_type) = parse_realtime_payload(payload, "realtime v2")?;
match message_type.as_str() {
"session.updated" => parse_session_updated_event(&parsed),
"response.output_audio.delta" => parse_output_audio_delta_event(&parsed),
"conversation.item.input_audio_transcription.delta" => {
parse_transcript_delta_event(&parsed, "delta").map(RealtimeEvent::InputTranscriptDelta)
}
"conversation.item.input_audio_transcription.completed" => {
parse_transcript_delta_event(&parsed, "transcript")
.map(RealtimeEvent::InputTranscriptDelta)
}
"response.output_text.delta" | "response.output_audio_transcript.delta" => {
parse_transcript_delta_event(&parsed, "delta").map(RealtimeEvent::OutputTranscriptDelta)
}
"conversation.item.added" => parsed
.get("item")
.cloned()
.map(RealtimeEvent::ConversationItemAdded),
"conversation.item.done" => parse_conversation_item_done_event(&parsed),
"error" => parse_error_event(&parsed),
_ => {
debug!("received unsupported realtime v2 event type: {message_type}, data: {payload}");
None
}
}
}
fn parse_output_audio_delta_event(parsed: &Value) -> Option<RealtimeEvent> {
let data = parsed
.get("delta")
.and_then(Value::as_str)
.map(str::to_string)?;
let sample_rate = parsed
.get("sample_rate")
.and_then(Value::as_u64)
.and_then(|value| u32::try_from(value).ok())
.unwrap_or(DEFAULT_AUDIO_SAMPLE_RATE);
let num_channels = parsed
.get("channels")
.or_else(|| parsed.get("num_channels"))
.and_then(Value::as_u64)
.and_then(|value| u16::try_from(value).ok())
.unwrap_or(DEFAULT_AUDIO_CHANNELS);
Some(RealtimeEvent::AudioOut(RealtimeAudioFrame {
data,
sample_rate,
num_channels,
samples_per_channel: parsed
.get("samples_per_channel")
.and_then(Value::as_u64)
.and_then(|value| u32::try_from(value).ok()),
}))
}
fn parse_conversation_item_done_event(parsed: &Value) -> Option<RealtimeEvent> {
let item = parsed.get("item")?.as_object()?;
if let Some(handoff) = parse_handoff_requested_event(item) {
return Some(handoff);
}
item.get("id")
.and_then(Value::as_str)
.map(str::to_string)
.map(|item_id| RealtimeEvent::ConversationItemDone { item_id })
}
fn parse_handoff_requested_event(item: &JsonMap<String, Value>) -> Option<RealtimeEvent> {
let item_type = item.get("type").and_then(Value::as_str);
let item_name = item.get("name").and_then(Value::as_str);
if item_type != Some("function_call") || item_name != Some(CODEX_TOOL_NAME) {
return None;
}
let call_id = item
.get("call_id")
.and_then(Value::as_str)
.or_else(|| item.get("id").and_then(Value::as_str))?;
let item_id = item
.get("id")
.and_then(Value::as_str)
.unwrap_or(call_id)
.to_string();
let arguments = item.get("arguments").and_then(Value::as_str).unwrap_or("");
Some(RealtimeEvent::HandoffRequested(RealtimeHandoffRequested {
handoff_id: call_id.to_string(),
item_id,
input_transcript: extract_input_transcript(arguments),
active_transcript: Vec::new(),
}))
}
fn extract_input_transcript(arguments: &str) -> String {
if arguments.is_empty() {
return String::new();
}
if let Ok(arguments_json) = serde_json::from_str::<Value>(arguments)
&& let Some(arguments_object) = arguments_json.as_object()
{
for key in TOOL_ARGUMENT_KEYS {
if let Some(value) = arguments_object.get(key).and_then(Value::as_str) {
let trimmed = value.trim();
if !trimmed.is_empty() {
return trimmed.to_string();
}
}
}
}
arguments.to_string()
}

View File

@@ -21,7 +21,6 @@ use http::Method;
use serde_json::Value;
use std::sync::Arc;
use std::sync::OnceLock;
use tracing::instrument;
pub struct ResponsesClient<T: HttpTransport, A: AuthProvider> {
session: EndpointSession<T, A>,
@@ -56,16 +55,6 @@ impl<T: HttpTransport, A: AuthProvider> ResponsesClient<T, A> {
}
}
#[instrument(
name = "responses.stream_request",
level = "info",
skip_all,
fields(
transport = "responses_http",
http.method = "POST",
api.path = "responses"
)
)]
pub async fn stream_request(
&self,
request: ResponsesApiRequest,
@@ -101,17 +90,6 @@ impl<T: HttpTransport, A: AuthProvider> ResponsesClient<T, A> {
"responses"
}
#[instrument(
name = "responses.stream",
level = "info",
skip_all,
fields(
transport = "responses_http",
http.method = "POST",
api.path = "responses",
turn.has_state = turn_state.is_some()
)
)]
pub async fn stream(
&self,
body: Value,

View File

@@ -10,7 +10,6 @@ use crate::sse::responses::ResponsesStreamEvent;
use crate::sse::responses::process_responses_event;
use crate::telemetry::WebsocketTelemetry;
use codex_client::TransportError;
use codex_client::maybe_build_rustls_client_config_with_custom_ca;
use codex_utils_rustls_provider::ensure_rustls_crypto_provider;
use futures::SinkExt;
use futures::StreamExt;
@@ -31,16 +30,12 @@ use tokio::sync::oneshot;
use tokio::time::Instant;
use tokio_tungstenite::MaybeTlsStream;
use tokio_tungstenite::WebSocketStream;
use tokio_tungstenite::connect_async_tls_with_config;
use tokio_tungstenite::tungstenite::Error as WsError;
use tokio_tungstenite::tungstenite::Message;
use tokio_tungstenite::tungstenite::client::IntoClientRequest;
use tracing::Instrument;
use tracing::Span;
use tracing::debug;
use tracing::error;
use tracing::info;
use tracing::instrument;
use tracing::trace;
use tungstenite::extensions::ExtensionsConfig;
use tungstenite::extensions::compression::deflate::DeflateConfig;
@@ -205,16 +200,9 @@ impl ResponsesWebsocketConnection {
self.stream.lock().await.is_none()
}
#[instrument(
name = "responses_websocket.stream_request",
level = "info",
skip_all,
fields(transport = "responses_websocket", api.path = "responses")
)]
pub async fn stream_request(
&self,
request: ResponsesWsRequest,
connection_reused: bool,
) -> Result<ResponseStream, ApiError> {
let (tx_event, rx_event) =
mpsc::channel::<std::result::Result<ResponseEvent, ApiError>>(1600);
@@ -228,53 +216,48 @@ impl ResponsesWebsocketConnection {
ApiError::Stream(format!("failed to encode websocket request: {err}"))
})?;
let current_span = Span::current();
tokio::spawn(
async move {
if let Some(model) = server_model {
let _ = tx_event.send(Ok(ResponseEvent::ServerModel(model))).await;
}
if let Some(etag) = models_etag {
let _ = tx_event.send(Ok(ResponseEvent::ModelsEtag(etag))).await;
}
if server_reasoning_included {
tokio::spawn(async move {
if let Some(model) = server_model {
let _ = tx_event.send(Ok(ResponseEvent::ServerModel(model))).await;
}
if let Some(etag) = models_etag {
let _ = tx_event.send(Ok(ResponseEvent::ModelsEtag(etag))).await;
}
if server_reasoning_included {
let _ = tx_event
.send(Ok(ResponseEvent::ServerReasoningIncluded(true)))
.await;
}
let mut guard = stream.lock().await;
let result = {
let Some(ws_stream) = guard.as_mut() else {
let _ = tx_event
.send(Ok(ResponseEvent::ServerReasoningIncluded(true)))
.send(Err(ApiError::Stream(
"websocket connection is closed".to_string(),
)))
.await;
}
let mut guard = stream.lock().await;
let result = {
let Some(ws_stream) = guard.as_mut() else {
let _ = tx_event
.send(Err(ApiError::Stream(
"websocket connection is closed".to_string(),
)))
.await;
return;
};
run_websocket_response_stream(
ws_stream,
tx_event.clone(),
request_body,
idle_timeout,
telemetry,
connection_reused,
)
.await
return;
};
if let Err(err) = result {
// A terminal stream error should reach the caller immediately. Waiting for a
// graceful close handshake here can stall indefinitely and mask the error.
let failed_stream = guard.take();
drop(guard);
drop(failed_stream);
let _ = tx_event.send(Err(err)).await;
}
run_websocket_response_stream(
ws_stream,
tx_event.clone(),
request_body,
idle_timeout,
telemetry,
)
.await
};
if let Err(err) = result {
// A terminal stream error should reach the caller immediately. Waiting for a
// graceful close handshake here can stall indefinitely and mask the error.
let failed_stream = guard.take();
drop(guard);
drop(failed_stream);
let _ = tx_event.send(Err(err)).await;
}
.instrument(current_span),
);
});
Ok(ResponseStream { rx_event })
}
@@ -290,12 +273,6 @@ impl<A: AuthProvider> ResponsesWebsocketClient<A> {
Self { provider, auth }
}
#[instrument(
name = "responses_websocket.connect",
level = "info",
skip_all,
fields(transport = "responses_websocket", api.path = "responses")
)]
pub async fn connect(
&self,
extra_headers: HeaderMap,
@@ -354,18 +331,10 @@ async fn connect_websocket(
.map_err(|err| ApiError::Stream(format!("failed to build websocket request: {err}")))?;
request.headers_mut().extend(headers);
// Secure websocket traffic needs the same custom-CA policy as reqwest-based HTTPS traffic.
// If a Codex-specific CA bundle is configured, build an explicit rustls connector so this
// websocket path does not fall back to tungstenite's default native-roots-only behavior.
let connector = maybe_build_rustls_client_config_with_custom_ca()
.map_err(|err| ApiError::Stream(format!("failed to configure websocket TLS: {err}")))?
.map(tokio_tungstenite::Connector::Rustls);
let response = connect_async_tls_with_config(
let response = tokio_tungstenite::connect_async_with_config(
request,
Some(websocket_config()),
false, // `false` means "do not disable Nagle", which is tungstenite's recommended default.
connector,
)
.await;
@@ -536,7 +505,6 @@ async fn run_websocket_response_stream(
request_body: Value,
idle_timeout: Duration,
telemetry: Option<Arc<dyn WebsocketTelemetry>>,
connection_reused: bool,
) -> Result<(), ApiError> {
let mut last_server_model: Option<String> = None;
let request_text = match serde_json::to_string(&request_body) {
@@ -556,11 +524,7 @@ async fn run_websocket_response_stream(
.map_err(|err| ApiError::Stream(format!("failed to send websocket request: {err}")));
if let Some(t) = telemetry.as_ref() {
t.on_ws_request(
request_start.elapsed(),
result.as_ref().err(),
connection_reused,
);
t.on_ws_request(request_start.elapsed(), result.as_ref().err());
}
result?;

View File

@@ -12,7 +12,6 @@ use http::HeaderMap;
use http::Method;
use serde_json::Value;
use std::sync::Arc;
use tracing::instrument;
pub(crate) struct EndpointSession<T: HttpTransport, A: AuthProvider> {
transport: T,
@@ -69,12 +68,6 @@ impl<T: HttpTransport, A: AuthProvider> EndpointSession<T, A> {
.await
}
#[instrument(
name = "endpoint_session.execute_with",
level = "info",
skip_all,
fields(http.method = %method, api.path = path)
)]
pub(crate) async fn execute_with<C>(
&self,
method: Method,
@@ -103,12 +96,6 @@ impl<T: HttpTransport, A: AuthProvider> EndpointSession<T, A> {
Ok(response)
}
#[instrument(
name = "endpoint_session.stream_with",
level = "info",
skip_all,
fields(http.method = %method, api.path = path)
)]
pub(crate) async fn stream_with<C>(
&self,
method: Method,

View File

@@ -27,9 +27,7 @@ pub use crate::common::create_text_param_for_request;
pub use crate::endpoint::compact::CompactClient;
pub use crate::endpoint::memories::MemoriesClient;
pub use crate::endpoint::models::ModelsClient;
pub use crate::endpoint::realtime_websocket::RealtimeEventParser;
pub use crate::endpoint::realtime_websocket::RealtimeSessionConfig;
pub use crate::endpoint::realtime_websocket::RealtimeSessionMode;
pub use crate::endpoint::realtime_websocket::RealtimeWebsocketClient;
pub use crate::endpoint::realtime_websocket::RealtimeWebsocketConnection;
pub use crate::endpoint::responses::ResponsesClient;

View File

@@ -33,7 +33,7 @@ pub trait SseTelemetry: Send + Sync {
/// Telemetry for Responses WebSocket transport.
pub trait WebsocketTelemetry: Send + Sync {
fn on_ws_request(&self, duration: Duration, error: Option<&ApiError>, connection_reused: bool);
fn on_ws_request(&self, duration: Duration, error: Option<&ApiError>);
fn on_ws_event(
&self,

View File

@@ -95,7 +95,6 @@ async fn models_client_hits_models_endpoint() {
input_modalities: default_input_modalities(),
prefer_websockets: false,
used_fallback_model_metadata: false,
supports_search_tool: false,
}],
};

View File

@@ -4,13 +4,10 @@ use std::time::Duration;
use codex_api::RealtimeAudioFrame;
use codex_api::RealtimeEvent;
use codex_api::RealtimeEventParser;
use codex_api::RealtimeSessionConfig;
use codex_api::RealtimeSessionMode;
use codex_api::RealtimeWebsocketClient;
use codex_api::provider::Provider;
use codex_api::provider::RetryConfig;
use codex_protocol::protocol::RealtimeHandoffRequested;
use futures::SinkExt;
use futures::StreamExt;
use http::HeaderMap;
@@ -142,8 +139,6 @@ async fn realtime_ws_e2e_session_create_and_event_flow() {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_123".to_string()),
event_parser: RealtimeEventParser::V1,
session_mode: RealtimeSessionMode::Conversational,
},
HeaderMap::new(),
HeaderMap::new(),
@@ -236,8 +231,6 @@ async fn realtime_ws_e2e_send_while_next_event_waits() {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_123".to_string()),
event_parser: RealtimeEventParser::V1,
session_mode: RealtimeSessionMode::Conversational,
},
HeaderMap::new(),
HeaderMap::new(),
@@ -301,8 +294,6 @@ async fn realtime_ws_e2e_disconnected_emitted_once() {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_123".to_string()),
event_parser: RealtimeEventParser::V1,
session_mode: RealtimeSessionMode::Conversational,
},
HeaderMap::new(),
HeaderMap::new(),
@@ -363,8 +354,6 @@ async fn realtime_ws_e2e_ignores_unknown_text_events() {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_123".to_string()),
event_parser: RealtimeEventParser::V1,
session_mode: RealtimeSessionMode::Conversational,
},
HeaderMap::new(),
HeaderMap::new(),
@@ -388,70 +377,3 @@ async fn realtime_ws_e2e_ignores_unknown_text_events() {
connection.close().await.expect("close");
server.await.expect("server task");
}
#[tokio::test]
async fn realtime_ws_e2e_realtime_v2_parser_emits_handoff_requested() {
let (addr, server) = spawn_realtime_ws_server(|mut ws: RealtimeWsStream| async move {
let first = ws
.next()
.await
.expect("first msg")
.expect("first msg ok")
.into_text()
.expect("text");
let first_json: Value = serde_json::from_str(&first).expect("json");
assert_eq!(first_json["type"], "session.update");
ws.send(Message::Text(
json!({
"type": "conversation.item.done",
"item": {
"id": "item_123",
"type": "function_call",
"name": "codex",
"call_id": "call_123",
"arguments": "{\"prompt\":\"delegate now\"}"
}
})
.to_string()
.into(),
))
.await
.expect("send function call");
})
.await;
let client = RealtimeWebsocketClient::new(test_provider(format!("http://{addr}")));
let connection = client
.connect(
RealtimeSessionConfig {
instructions: "backend prompt".to_string(),
model: Some("realtime-test-model".to_string()),
session_id: Some("conv_123".to_string()),
event_parser: RealtimeEventParser::RealtimeV2,
session_mode: RealtimeSessionMode::Conversational,
},
HeaderMap::new(),
HeaderMap::new(),
)
.await
.expect("connect");
let event = connection
.next_event()
.await
.expect("next event")
.expect("event");
assert_eq!(
event,
RealtimeEvent::HandoffRequested(RealtimeHandoffRequested {
handoff_id: "call_123".to_string(),
item_id: "item_123".to_string(),
input_transcript: "delegate now".to_string(),
active_transcript: Vec::new(),
})
);
connection.close().await.expect("close");
server.await.expect("server task");
}

View File

@@ -3,5 +3,4 @@ load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "codex-client",
crate_name = "codex_client",
compile_data = glob(["tests/fixtures/**"]),
)

View File

@@ -13,24 +13,17 @@ http = { workspace = true }
opentelemetry = { workspace = true }
rand = { workspace = true }
reqwest = { workspace = true, features = ["json", "stream"] }
rustls = { workspace = true }
rustls-native-certs = { workspace = true }
rustls-pki-types = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt", "time", "sync"] }
tracing = { workspace = true }
tracing-opentelemetry = { workspace = true }
codex-utils-rustls-provider = { workspace = true }
zstd = { workspace = true }
[lints]
workspace = true
[dev-dependencies]
codex-utils-cargo-bin = { workspace = true }
opentelemetry_sdk = { workspace = true }
pretty_assertions = { workspace = true }
tempfile = { workspace = true }
tracing-subscriber = { workspace = true }

View File

@@ -1,29 +0,0 @@
//! Helper binary for exercising shared custom CA environment handling in tests.
//!
//! The shared reqwest client honors `CODEX_CA_CERTIFICATE` and `SSL_CERT_FILE`, but those
//! environment variables are process-global and unsafe to mutate in parallel test execution. This
//! probe keeps the behavior under test while letting integration tests (`tests/ca_env.rs`) set
//! env vars per-process, proving:
//!
//! - env precedence is respected,
//! - multi-cert PEM bundles load,
//! - error messages guide users when CA files are invalid.
//!
//! The detailed explanation of what "hermetic" means here lives in `codex_client::custom_ca`.
//! This binary exists so the tests can exercise
//! [`codex_client::build_reqwest_client_for_subprocess_tests`] in a separate process without
//! duplicating client-construction logic.
use std::process;
/// Entry point for the CA-probe helper binary.
///
/// Builds the shared reqwest client through the test-only subprocess path.
/// On success prints "ok" to stdout; on failure prints the error to stderr
/// and exits non-zero, so the `tests/ca_env.rs` integration tests can assert
/// on both outcomes from a separate process.
fn main() {
    let build_result =
        codex_client::build_reqwest_client_for_subprocess_tests(reqwest::Client::builder());
    if let Err(error) = build_result {
        eprintln!("{error}");
        process::exit(1);
    }
    println!("ok");
}

View File

@@ -1,788 +0,0 @@
//! Custom CA handling for Codex outbound HTTP and websocket clients.
//!
//! Codex constructs outbound reqwest clients and secure websocket connections in a few crates, but
//! they all need the same trust-store policy when enterprise proxies or gateways intercept TLS.
//! This module centralizes that policy so callers can start from an ordinary
//! `reqwest::ClientBuilder` or rustls client config, layer in custom CA support, and either get
//! back a configured transport or a user-facing error that explains how to fix a misconfigured CA
//! bundle.
//!
//! The module intentionally has a narrow responsibility:
//!
//! - read CA material from `CODEX_CA_CERTIFICATE`, falling back to `SSL_CERT_FILE`
//! - normalize PEM variants that show up in real deployments, including OpenSSL-style
//! `TRUSTED CERTIFICATE` labels and bundles that also contain CRLs
//! - return user-facing errors that explain how to fix misconfigured CA files
//!
//! It does not validate certificate chains or perform a handshake in tests. Its contract is
//! narrower: produce a transport configuration whose root store contains every parseable
//! certificate block from the configured PEM bundle, or fail early with a precise error before
//! the caller starts network traffic.
//!
//! In this module's test setup, a hermetic test is one whose result depends only on the CA file
//! and environment variables that the test chose for itself. That matters here because the normal
//! reqwest client-construction path is not hermetic enough for environment-sensitive tests:
//!
//! - on macOS seatbelt runs, `reqwest::Client::builder().build()` can panic inside
//! `system-configuration` while probing platform proxy settings, which means the process can die
//! before the custom-CA code reports success or a structured error. That matters in practice
//! because Codex itself commonly runs spawned test processes under seatbelt, so this is not just
//! a hypothetical CI edge case.
//! - child processes inherit CA-related environment variables by default, which lets developer
//! shell state or CI configuration affect a test unless the test scrubs those variables first
//!
//! The tests in this crate therefore stay split across two layers:
//!
//! - unit tests in this module cover env-selection logic without constructing a real client
//! - subprocess integration tests under `tests/` cover real client construction through
//! [`build_reqwest_client_for_subprocess_tests`], which disables reqwest proxy autodetection so
//! the tests can observe custom-CA success and failure directly
//! - those subprocess tests also scrub inherited CA environment variables before launch so their
//! result depends only on the test fixtures and env vars set by the test itself
use std::env;
use std::fs;
use std::io;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use codex_utils_rustls_provider::ensure_rustls_crypto_provider;
use rustls::ClientConfig;
use rustls::RootCertStore;
use rustls_pki_types::CertificateDer;
use rustls_pki_types::pem::PemObject;
use rustls_pki_types::pem::SectionKind;
use rustls_pki_types::pem::{self};
use thiserror::Error;
use tracing::info;
use tracing::warn;
pub const CODEX_CA_CERT_ENV: &str = "CODEX_CA_CERTIFICATE";
pub const SSL_CERT_FILE_ENV: &str = "SSL_CERT_FILE";
const CA_CERT_HINT: &str = "If you set CODEX_CA_CERTIFICATE or SSL_CERT_FILE, ensure it points to a PEM file containing one or more CERTIFICATE blocks, or unset it to use system roots.";
type PemSection = (SectionKind, Vec<u8>);
/// Describes why a transport using shared custom CA support could not be constructed.
///
/// These failure modes apply to both reqwest client construction and websocket TLS
/// configuration. A build can fail because the configured CA file could not be read, could not be
/// parsed as certificates, contained certs that the target TLS stack refused to register, or
/// because the final reqwest client builder failed. Callers that do not care about the
/// distinction can rely on the `From<BuildCustomCaTransportError> for io::Error` conversion.
#[derive(Debug, Error)]
pub enum BuildCustomCaTransportError {
    /// Reading the selected CA file from disk failed before any PEM parsing could happen.
    #[error(
        "Failed to read CA certificate file {} selected by {}: {source}. {hint}",
        path.display(),
        source_env,
        hint = CA_CERT_HINT
    )]
    ReadCaFile {
        // Which env var (CODEX_CA_CERTIFICATE or SSL_CERT_FILE) selected the file.
        source_env: &'static str,
        path: PathBuf,
        // Original filesystem error; its kind is preserved by the io::Error conversion.
        source: io::Error,
    },
    /// The selected CA file was readable, but did not produce usable certificate material.
    #[error(
        "Failed to load CA certificates from {} selected by {}: {detail}. {hint}",
        path.display(),
        source_env,
        hint = CA_CERT_HINT
    )]
    InvalidCaFile {
        source_env: &'static str,
        path: PathBuf,
        // Human-readable parse failure detail embedded in the message.
        detail: String,
    },
    /// One parsed certificate block could not be registered with the reqwest client builder.
    #[error(
        "Failed to parse certificate #{certificate_index} from {} selected by {}: {source}. {hint}",
        path.display(),
        source_env,
        hint = CA_CERT_HINT
    )]
    RegisterCertificate {
        source_env: &'static str,
        path: PathBuf,
        // 1-based index of the offending certificate block within the bundle.
        certificate_index: usize,
        source: reqwest::Error,
    },
    /// Reqwest rejected the final client configuration after a custom CA bundle was loaded.
    #[error(
        "Failed to build HTTP client while using CA bundle from {} ({}): {source}",
        source_env,
        path.display()
    )]
    BuildClientWithCustomCa {
        source_env: &'static str,
        path: PathBuf,
        #[source]
        source: reqwest::Error,
    },
    /// Reqwest rejected the final client configuration while using only system roots.
    #[error("Failed to build HTTP client while using system root certificates: {0}")]
    BuildClientWithSystemRoots(#[source] reqwest::Error),
    /// One parsed certificate block could not be registered with the websocket TLS root store.
    #[error(
        "Failed to register certificate #{certificate_index} from {} selected by {} in rustls root store: {source}. {hint}",
        path.display(),
        source_env,
        hint = CA_CERT_HINT
    )]
    RegisterRustlsCertificate {
        source_env: &'static str,
        path: PathBuf,
        // 1-based index of the offending certificate block within the bundle.
        certificate_index: usize,
        source: rustls::Error,
    },
}
impl From<BuildCustomCaTransportError> for io::Error {
fn from(error: BuildCustomCaTransportError) -> Self {
match error {
BuildCustomCaTransportError::ReadCaFile { ref source, .. } => {
io::Error::new(source.kind(), error)
}
BuildCustomCaTransportError::InvalidCaFile { .. }
| BuildCustomCaTransportError::RegisterCertificate { .. }
| BuildCustomCaTransportError::RegisterRustlsCertificate { .. } => {
io::Error::new(io::ErrorKind::InvalidData, error)
}
BuildCustomCaTransportError::BuildClientWithCustomCa { .. }
| BuildCustomCaTransportError::BuildClientWithSystemRoots(_) => io::Error::other(error),
}
}
}
/// Builds a reqwest client that honors Codex custom CA environment variables.
///
/// Callers supply the baseline builder configuration they need, and this helper layers in custom
/// CA handling before finally constructing the client. `CODEX_CA_CERTIFICATE` takes precedence
/// over `SSL_CERT_FILE`, and empty values for either are treated as unset so callers do not
/// accidentally turn `VAR=""` into a bogus path lookup.
///
/// Callers that build a raw `reqwest::Client` directly bypass this policy entirely. That is an
/// easy mistake to make when adding a new outbound Codex HTTP path, and the resulting bug only
/// shows up in environments where a proxy or gateway requires a custom root CA.
///
/// # Errors
///
/// Returns a [`BuildCustomCaTransportError`] when the configured CA file is unreadable,
/// malformed, or contains a certificate block that `reqwest` cannot register as a root.
pub fn build_reqwest_client_with_custom_ca(
    builder: reqwest::ClientBuilder,
) -> Result<reqwest::Client, BuildCustomCaTransportError> {
    // Production callers always read CA config from the real process environment.
    build_reqwest_client_with_env(&ProcessEnv, builder)
}
/// Builds a rustls client config when a Codex custom CA bundle is configured.
///
/// This is the websocket-facing sibling of [`build_reqwest_client_with_custom_ca`]. When
/// `CODEX_CA_CERTIFICATE` or `SSL_CERT_FILE` selects a CA bundle, the returned config starts from
/// the platform native roots and then adds the configured custom CA certificates. When no custom
/// CA env var is set, this returns `Ok(None)` so websocket callers can keep using their ordinary
/// default connector path.
///
/// Callers that let tungstenite build its default TLS connector directly bypass this policy
/// entirely. That bug only shows up in environments where secure websocket traffic needs the same
/// enterprise root CA bundle as HTTPS traffic.
pub fn maybe_build_rustls_client_config_with_custom_ca()
-> Result<Option<Arc<ClientConfig>>, BuildCustomCaTransportError> {
    // Same env-injectable implementation as the reqwest path, bound to the real process env.
    maybe_build_rustls_client_config_with_env(&ProcessEnv)
}
/// Builds a reqwest client for spawned subprocess tests that exercise CA behavior.
///
/// This is the test-only client-construction path used by the subprocess coverage in `tests/`.
/// The module-level docs explain the hermeticity problem in full; this helper only addresses the
/// reqwest proxy-discovery panic side of that problem by disabling proxy autodetection. The tests
/// still scrub inherited CA environment variables themselves. Normal production callers should use
/// [`build_reqwest_client_with_custom_ca`] so test-only proxy behavior does not leak into
/// ordinary client construction.
pub fn build_reqwest_client_for_subprocess_tests(
    builder: reqwest::ClientBuilder,
) -> Result<reqwest::Client, BuildCustomCaTransportError> {
    // `no_proxy()` skips reqwest's platform proxy discovery, which per the module docs can
    // panic under macOS seatbelt before the custom-CA code reports anything.
    build_reqwest_client_with_env(&ProcessEnv, builder.no_proxy())
}
/// Builds the rustls client config for an injected environment source.
///
/// Returns `Ok(None)` when `configured_ca_bundle()` selects nothing, so callers can fall back
/// to their default TLS connector. Otherwise the config combines the platform native roots with
/// every certificate from the configured bundle, or fails with a
/// [`BuildCustomCaTransportError::RegisterRustlsCertificate`] naming the offending block.
fn maybe_build_rustls_client_config_with_env(
    env_source: &dyn EnvSource,
) -> Result<Option<Arc<ClientConfig>>, BuildCustomCaTransportError> {
    let Some(bundle) = env_source.configured_ca_bundle() else {
        return Ok(None);
    };
    ensure_rustls_crypto_provider();
    // Start from the platform roots so websocket callers keep the same baseline trust behavior
    // they would get from tungstenite's default rustls connector, then layer in the Codex custom
    // CA bundle on top when configured.
    let mut root_store = RootCertStore::empty();
    let rustls_native_certs::CertificateResult { certs, errors, .. } =
        rustls_native_certs::load_native_certs();
    if !errors.is_empty() {
        warn!(
            native_root_error_count = errors.len(),
            "encountered errors while loading native root certificates"
        );
    }
    // Native-root load errors are logged but tolerated; unparsable native certs are dropped.
    let _ = root_store.add_parsable_certificates(certs);
    let certificates = bundle.load_certificates()?;
    for (idx, cert) in certificates.into_iter().enumerate() {
        if let Err(source) = root_store.add(cert) {
            warn!(
                source_env = bundle.source_env,
                ca_path = %bundle.path.display(),
                certificate_index = idx + 1,
                error = %source,
                "failed to register CA certificate in rustls root store"
            );
            // certificate_index is 1-based in logs and errors for operator readability.
            return Err(BuildCustomCaTransportError::RegisterRustlsCertificate {
                source_env: bundle.source_env,
                path: bundle.path.clone(),
                certificate_index: idx + 1,
                source,
            });
        }
    }
    Ok(Some(Arc::new(
        ClientConfig::builder()
            .with_root_certificates(root_store)
            .with_no_client_auth(),
    )))
}
/// Builds a reqwest client using an injected environment source and reqwest builder.
///
/// This exists so tests can exercise precedence behavior deterministically without mutating the
/// real process environment. It selects the CA bundle, delegates file parsing to
/// [`ConfiguredCaBundle::load_certificates`], preserves the caller's chosen `reqwest` builder
/// configuration, and finally registers each parsed certificate with that builder.
fn build_reqwest_client_with_env(
    env_source: &dyn EnvSource,
    mut builder: reqwest::ClientBuilder,
) -> Result<reqwest::Client, BuildCustomCaTransportError> {
    if let Some(bundle) = env_source.configured_ca_bundle() {
        let certificates = bundle.load_certificates()?;
        for (idx, cert) in certificates.iter().enumerate() {
            let certificate = match reqwest::Certificate::from_der(cert.as_ref()) {
                Ok(certificate) => certificate,
                Err(source) => {
                    warn!(
                        source_env = bundle.source_env,
                        ca_path = %bundle.path.display(),
                        certificate_index = idx + 1,
                        error = %source,
                        "failed to register CA certificate"
                    );
                    // certificate_index is 1-based in logs and errors for operator readability.
                    return Err(BuildCustomCaTransportError::RegisterCertificate {
                        source_env: bundle.source_env,
                        path: bundle.path.clone(),
                        certificate_index: idx + 1,
                        source,
                    });
                }
            };
            builder = builder.add_root_certificate(certificate);
        }
        // Custom-CA path: a builder failure here is attributed to the loaded bundle so the
        // error message can point at the configured file.
        return match builder.build() {
            Ok(client) => Ok(client),
            Err(source) => {
                warn!(
                    source_env = bundle.source_env,
                    ca_path = %bundle.path.display(),
                    error = %source,
                    "failed to build client after loading custom CA bundle"
                );
                Err(BuildCustomCaTransportError::BuildClientWithCustomCa {
                    source_env: bundle.source_env,
                    path: bundle.path.clone(),
                    source,
                })
            }
        };
    }
    // No override configured: record that decision, then build against system roots only.
    info!(
        codex_ca_certificate_configured = false,
        ssl_cert_file_configured = false,
        "using system root certificates because no CA override environment variable was selected"
    );
    match builder.build() {
        Ok(client) => Ok(client),
        Err(source) => {
            warn!(
                error = %source,
                "failed to build client while using system root certificates"
            );
            Err(BuildCustomCaTransportError::BuildClientWithSystemRoots(
                source,
            ))
        }
    }
}
/// Environment accessor abstraction, injected so precedence tests can run against in-memory
/// maps instead of mutating process-global variables.
trait EnvSource {
    /// Returns the raw value of `key`, or `None` when this source considers it unset.
    ///
    /// Implementations may collapse unreadable process-environment states into `None`, since
    /// the custom CA logic treats both cases as "no override configured". Precedence and
    /// empty-string handling are layered on top of this method, so the value is returned
    /// untrimmed and unnormalized.
    fn var(&self, key: &str) -> Option<String>;

    /// Interprets a non-empty value of `key` as a filesystem path.
    ///
    /// Empty strings count as unset: presence acts as a boolean "custom CA override requested"
    /// signal, and `VAR=""` must never become a bogus path lookup.
    fn non_empty_path(&self, key: &str) -> Option<PathBuf> {
        match self.var(key) {
            Some(value) if !value.is_empty() => Some(PathBuf::from(value)),
            _ => None,
        }
    }

    /// Selects the configured CA bundle, recording which environment variable chose it.
    ///
    /// `CODEX_CA_CERTIFICATE` is consulted before `SSL_CERT_FILE` because it is the
    /// Codex-specific override. Carrying the winning variable name alongside the path lets
    /// later logging explain not only which file was used but also why it was chosen.
    fn configured_ca_bundle(&self) -> Option<ConfiguredCaBundle> {
        for source_env in [CODEX_CA_CERT_ENV, SSL_CERT_FILE_ENV] {
            if let Some(path) = self.non_empty_path(source_env) {
                return Some(ConfiguredCaBundle { source_env, path });
            }
        }
        None
    }
}
/// Production [`EnvSource`] backed by the real process environment.
///
/// Used by [`build_reqwest_client_with_custom_ca`]; tests substitute in-memory maps so
/// precedence and empty-value behavior can be exercised without touching process-global state.
struct ProcessEnv;

impl EnvSource for ProcessEnv {
    fn var(&self, key: &str) -> Option<String> {
        // Any read failure (absent or non-unicode) is treated as "unset".
        match env::var(key) {
            Ok(value) => Some(value),
            Err(_) => None,
        }
    }
}
/// Identifies the CA bundle selected for a client and the policy decision that selected it.
///
/// This is the concrete output of the environment-precedence logic. Callers use `source_env` for
/// logging and diagnostics, while `path` is the bundle that will actually be loaded.
struct ConfiguredCaBundle {
    /// The environment variable that won the precedence check for this bundle
    /// (`CODEX_CA_CERTIFICATE` or `SSL_CERT_FILE`).
    source_env: &'static str,
    /// The filesystem path that should be read as PEM certificate input.
    path: PathBuf,
}
impl ConfiguredCaBundle {
    /// Loads certificates from this selected CA bundle.
    ///
    /// The bundle already represents the output of environment-precedence selection, so this is
    /// the natural point where the file-loading phase begins. The method owns the high-level
    /// success/failure logs for that phase and keeps the source env and path together for lower-
    /// level parsing and error shaping.
    fn load_certificates(
        &self,
    ) -> Result<Vec<CertificateDer<'static>>, BuildCustomCaTransportError> {
        match self.parse_certificates() {
            Ok(certificates) => {
                info!(
                    source_env = self.source_env,
                    ca_path = %self.path.display(),
                    certificate_count = certificates.len(),
                    "loaded certificates from custom CA bundle"
                );
                Ok(certificates)
            }
            Err(error) => {
                warn!(
                    source_env = self.source_env,
                    ca_path = %self.path.display(),
                    error = %error,
                    "failed to load custom CA bundle"
                );
                Err(error)
            }
        }
    }

    /// Loads every certificate block from a PEM file intended for Codex CA overrides.
    ///
    /// This accepts a few common real-world variants so Codex behaves like other CA-aware tooling:
    /// leading comments are preserved, `TRUSTED CERTIFICATE` labels are normalized to standard
    /// certificate labels, and embedded CRLs are ignored when they are well-formed enough for the
    /// section iterator to classify them.
    fn parse_certificates(
        &self,
    ) -> Result<Vec<CertificateDer<'static>>, BuildCustomCaTransportError> {
        let pem_data = self.read_pem_data()?;
        let normalized_pem = NormalizedPem::from_pem_data(self.source_env, &self.path, &pem_data);
        let mut certificates = Vec::new();
        // Log the CRL-ignored notice at most once per bundle, however many CRL sections appear.
        let mut logged_crl_presence = false;
        for section_result in normalized_pem.sections() {
            // Known limitation: if `rustls-pki-types` fails while parsing a malformed CRL section,
            // that error is reported here before we can classify the block as ignorable. A bundle
            // containing valid certificates plus a malformed `X509 CRL` therefore still fails to
            // load today, even though well-formed CRLs are ignored.
            let (section_kind, der) = match section_result {
                Ok(section) => section,
                Err(error) => return Err(self.pem_parse_error(&error)),
            };
            match section_kind {
                SectionKind::Certificate => {
                    // Standard CERTIFICATE blocks already decode to the exact DER bytes reqwest
                    // wants. Only OpenSSL TRUSTED CERTIFICATE blocks need trimming to drop any
                    // trailing X509_AUX trust metadata before registration.
                    let cert_der = normalized_pem.certificate_der(&der).ok_or_else(|| {
                        self.invalid_ca_file(
                            "failed to extract certificate data from TRUSTED CERTIFICATE: invalid DER length",
                        )
                    })?;
                    certificates.push(CertificateDer::from(cert_der.to_vec()));
                }
                SectionKind::Crl => {
                    if !logged_crl_presence {
                        info!(
                            source_env = self.source_env,
                            ca_path = %self.path.display(),
                            "ignoring X509 CRL entries found in custom CA bundle"
                        );
                        logged_crl_presence = true;
                    }
                }
                // Other PEM section kinds (keys, parameters, ...) are silently skipped.
                _ => {}
            }
        }
        // A bundle holding only CRLs or non-certificate sections is still unusable CA input.
        if certificates.is_empty() {
            return Err(self.pem_parse_error(&pem::Error::NoItemsFound));
        }
        Ok(certificates)
    }

    /// Reads the CA bundle bytes while preserving the original filesystem error kind.
    ///
    /// The caller wants a user-facing error that includes the bundle path and remediation hint, but
    /// higher-level surfaces still benefit from distinguishing "not found" from other I/O
    /// failures. This helper keeps both pieces together.
    fn read_pem_data(&self) -> Result<Vec<u8>, BuildCustomCaTransportError> {
        fs::read(&self.path).map_err(|source| BuildCustomCaTransportError::ReadCaFile {
            source_env: self.source_env,
            path: self.path.clone(),
            source,
        })
    }

    /// Rewrites PEM parsing failures into user-facing configuration errors.
    ///
    /// The underlying parser knows whether the file was empty, malformed, or contained unsupported
    /// PEM content, but callers need a message that also points them back to the relevant
    /// environment variables and the expected remediation.
    fn pem_parse_error(&self, error: &pem::Error) -> BuildCustomCaTransportError {
        let detail = match error {
            pem::Error::NoItemsFound => "no certificates found in PEM file".to_string(),
            _ => format!("failed to parse PEM file: {error}"),
        };
        self.invalid_ca_file(detail)
    }

    /// Creates an invalid-CA error tied to this file path.
    ///
    /// Most parse-time failures in this module eventually collapse to "the configured CA bundle is
    /// not usable", but the detailed reason still matters for operator debugging. Centralizing that
    /// formatting keeps the path and hint text consistent across the different parser branches.
    fn invalid_ca_file(&self, detail: impl std::fmt::Display) -> BuildCustomCaTransportError {
        BuildCustomCaTransportError::InvalidCaFile {
            source_env: self.source_env,
            path: self.path.clone(),
            detail: detail.to_string(),
        }
    }
}
/// The PEM text shape after OpenSSL compatibility normalization.
///
/// `Standard` means the input already used ordinary PEM certificate labels. `TrustedCertificate`
/// means the input used OpenSSL's `TRUSTED CERTIFICATE` labels, so callers must also be prepared
/// to trim trailing `X509_AUX` bytes from decoded certificate sections.
enum NormalizedPem {
    /// PEM contents that already used ordinary `CERTIFICATE` labels.
    Standard(String),
    /// PEM contents rewritten from OpenSSL `TRUSTED CERTIFICATE` labels to `CERTIFICATE`.
    /// Sections decoded from this shape may carry trailing `X509_AUX` trust metadata.
    TrustedCertificate(String),
}
impl NormalizedPem {
    /// Normalizes CA-bundle PEM text into the label shape this module expects.
    ///
    /// Operators sometimes point Codex at bundles produced by OpenSSL tooling (e.g.
    /// `openssl x509 -trustout`), which emits `TRUSTED CERTIFICATE` labels and may append
    /// auxiliary trust metadata this crate does not consume. The rustls ecosystem does not
    /// accept that label as a standard certificate label upstream, so this constructor
    /// rewrites only the PEM labels and leaves the mixed-section parser to treat the file as
    /// ordinary certificate input.
    ///
    /// See also:
    /// - rustls/pemfile issue #52, closed as not planned, documenting that
    ///   `BEGIN TRUSTED CERTIFICATE` blocks are ignored upstream:
    ///   <https://github.com/rustls/pemfile/issues/52>
    /// - OpenSSL `x509 -trustout`, which emits `TRUSTED CERTIFICATE` PEM blocks:
    ///   <https://docs.openssl.org/master/man1/openssl-x509/>
    /// - OpenSSL PEM readers, which document that plain `PEM_read_bio_X509()` discards
    ///   auxiliary trust settings:
    ///   <https://docs.openssl.org/master/man3/PEM_read_bio_PrivateKey/>
    fn from_pem_data(source_env: &'static str, path: &Path, pem_data: &[u8]) -> Self {
        let text = String::from_utf8_lossy(pem_data);
        if !text.contains("TRUSTED CERTIFICATE") {
            return Self::Standard(text.into_owned());
        }
        info!(
            source_env,
            ca_path = %path.display(),
            "normalizing OpenSSL TRUSTED CERTIFICATE labels in custom CA bundle"
        );
        let relabeled = text
            .replace("BEGIN TRUSTED CERTIFICATE", "BEGIN CERTIFICATE")
            .replace("END TRUSTED CERTIFICATE", "END CERTIFICATE");
        Self::TrustedCertificate(relabeled)
    }

    /// Returns the normalized PEM text, whichever label shape produced it.
    fn contents(&self) -> &str {
        match self {
            Self::TrustedCertificate(text) => text,
            Self::Standard(text) => text,
        }
    }

    /// Iterates over every recognized PEM section in this normalized PEM text.
    ///
    /// `rustls-pki-types` exposes mixed-section parsing through a `PemObject` implementation
    /// on the `(SectionKind, Vec<u8>)` tuple; keeping that type-directed API here lets callers
    /// iterate in terms of normalized sections rather than trait plumbing.
    fn sections(&self) -> impl Iterator<Item = Result<PemSection, pem::Error>> + '_ {
        let bytes = self.contents().as_bytes();
        PemSection::pem_slice_iter(bytes)
    }

    /// Extracts the certificate DER bytes from one decoded PEM certificate section.
    ///
    /// Standard PEM certificates already decode to the exact DER bytes `reqwest` wants.
    /// OpenSSL `TRUSTED CERTIFICATE` sections may append `X509_AUX` bytes after the
    /// certificate, so those sections are trimmed to their first DER object before
    /// registration.
    fn certificate_der<'a>(&self, der: &'a [u8]) -> Option<&'a [u8]> {
        if let Self::TrustedCertificate(_) = self {
            first_der_item(der)
        } else {
            Some(der)
        }
    }
}
/// Slices `der` down to its first top-level DER object, dropping anything after it.
///
/// A PEM `CERTIFICATE` block usually decodes to exactly one DER blob — the certificate
/// itself — but OpenSSL's `TRUSTED CERTIFICATE` variant may append extra `X509_AUX` bytes
/// describing OpenSSL-specific trust settings. `reqwest::Certificate::from_der` only
/// understands the certificate object, so callers use this helper to keep just the leading
/// object. Returns `None` when no complete leading DER object can be measured, in which
/// case the input is treated as malformed CA data.
fn first_der_item(der: &[u8]) -> Option<&[u8]> {
    let length = der_item_length(der)?;
    Some(&der[..length])
}

/// Measures the total byte length (tag + length bytes + content) of the first DER item.
///
/// Each DER object starts with a tag byte, one or more length bytes, and then that many
/// content bytes; a certificate is stored as one complete top-level object, so its declared
/// length tells us exactly where it ends and any trailing `X509_AUX` data begins. Only the
/// outer length framing is parsed here — validating the inner certificate structure is left
/// to `reqwest`, which performs the real parsing.
///
/// Both DER length forms are supported:
///
/// - short form: the length lives directly in the second byte
/// - long form: the second byte says how many following bytes encode the length
///
/// Indefinite lengths are rejected (DER forbids them), as is any declared length that would
/// run past the end of the input.
fn der_item_length(der: &[u8]) -> Option<usize> {
    let length_octet = *der.get(1)?;
    // Short form: bits 0-6 carry the content length directly.
    if length_octet < 0x80 {
        let total = 2 + usize::from(length_octet);
        return (total <= der.len()).then_some(total);
    }
    // Long form: bits 0-6 give the count of subsequent length bytes.
    let num_length_bytes = usize::from(length_octet & 0x7f);
    if num_length_bytes == 0 {
        // A bare 0x80 means indefinite length, which DER does not permit.
        return None;
    }
    let header_len = 2usize.checked_add(num_length_bytes)?;
    let encoded = der.get(2..header_len)?;
    // Accumulate the big-endian length value, refusing anything that overflows usize.
    let mut content_len: usize = 0;
    for &byte in encoded {
        content_len = content_len.checked_mul(256)?.checked_add(usize::from(byte))?;
    }
    let total = header_len.checked_add(content_len)?;
    (total <= der.len()).then_some(total)
}
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use std::fs;
    use std::path::PathBuf;

    use pretty_assertions::assert_eq;
    use tempfile::TempDir;

    use super::BuildCustomCaTransportError;
    use super::CODEX_CA_CERT_ENV;
    use super::EnvSource;
    use super::SSL_CERT_FILE_ENV;
    use super::maybe_build_rustls_client_config_with_env;

    const TEST_CERT: &str = include_str!("../tests/fixtures/test-ca.pem");

    /// Deterministic `EnvSource` backed by an in-memory map instead of real process variables.
    struct MapEnv {
        values: HashMap<String, String>,
    }

    impl EnvSource for MapEnv {
        fn var(&self, key: &str) -> Option<String> {
            self.values.get(key).cloned()
        }
    }

    /// Builds a `MapEnv` from `(key, value)` pairs.
    fn map_env(pairs: &[(&str, &str)]) -> MapEnv {
        let values = pairs
            .iter()
            .map(|&(key, value)| (key.to_string(), value.to_string()))
            .collect();
        MapEnv { values }
    }

    /// Writes a certificate fixture into `temp_dir` and returns its path.
    fn write_cert_file(temp_dir: &TempDir, name: &str, contents: &str) -> PathBuf {
        let path = temp_dir.path().join(name);
        if let Err(error) = fs::write(&path, contents) {
            panic!("write cert fixture failed for {}: {error}", path.display());
        }
        path
    }

    #[test]
    fn ca_path_prefers_codex_env() {
        let env = map_env(&[
            (CODEX_CA_CERT_ENV, "/tmp/codex.pem"),
            (SSL_CERT_FILE_ENV, "/tmp/fallback.pem"),
        ]);
        let selected = env.configured_ca_bundle().map(|bundle| bundle.path);
        assert_eq!(selected, Some(PathBuf::from("/tmp/codex.pem")));
    }

    #[test]
    fn ca_path_falls_back_to_ssl_cert_file() {
        let env = map_env(&[(SSL_CERT_FILE_ENV, "/tmp/fallback.pem")]);
        let selected = env.configured_ca_bundle().map(|bundle| bundle.path);
        assert_eq!(selected, Some(PathBuf::from("/tmp/fallback.pem")));
    }

    #[test]
    fn ca_path_ignores_empty_values() {
        let env = map_env(&[
            (CODEX_CA_CERT_ENV, ""),
            (SSL_CERT_FILE_ENV, "/tmp/fallback.pem"),
        ]);
        let selected = env.configured_ca_bundle().map(|bundle| bundle.path);
        assert_eq!(selected, Some(PathBuf::from("/tmp/fallback.pem")));
    }

    #[test]
    fn rustls_config_uses_custom_ca_bundle_when_configured() {
        let temp_dir = TempDir::new().expect("tempdir");
        let cert_path = write_cert_file(&temp_dir, "ca.pem", TEST_CERT);
        let cert_path_str = cert_path.to_string_lossy();
        let env = map_env(&[(CODEX_CA_CERT_ENV, cert_path_str.as_ref())]);
        let config = maybe_build_rustls_client_config_with_env(&env)
            .expect("rustls config")
            .expect("custom CA config should be present");
        assert!(config.enable_sni);
    }

    #[test]
    fn rustls_config_reports_invalid_ca_file() {
        let temp_dir = TempDir::new().expect("tempdir");
        let cert_path = write_cert_file(&temp_dir, "empty.pem", "");
        let cert_path_str = cert_path.to_string_lossy();
        let env = map_env(&[(CODEX_CA_CERT_ENV, cert_path_str.as_ref())]);
        let error = maybe_build_rustls_client_config_with_env(&env).expect_err("invalid CA");
        let is_invalid_ca = matches!(error, BuildCustomCaTransportError::InvalidCaFile { .. });
        assert!(is_invalid_ca);
    }
}

View File

@@ -1,12 +1,12 @@
use http::Error as HttpError;
use http::HeaderMap;
use http::HeaderName;
use http::HeaderValue;
use opentelemetry::global;
use opentelemetry::propagation::Injector;
use reqwest::IntoUrl;
use reqwest::Method;
use reqwest::Response;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;
use serde::Serialize;
use std::fmt::Display;
use std::time::Duration;

View File

@@ -1,4 +1,3 @@
mod custom_ca;
mod default_client;
mod error;
mod request;
@@ -7,16 +6,6 @@ mod sse;
mod telemetry;
mod transport;
pub use crate::custom_ca::BuildCustomCaTransportError;
/// Test-only subprocess hook for custom CA coverage.
///
/// This stays public only so the `custom_ca_probe` binary target can reuse the shared helper. It
/// is hidden from normal docs because ordinary callers should use
/// [`build_reqwest_client_with_custom_ca`] instead.
#[doc(hidden)]
pub use crate::custom_ca::build_reqwest_client_for_subprocess_tests;
pub use crate::custom_ca::build_reqwest_client_with_custom_ca;
pub use crate::custom_ca::maybe_build_rustls_client_config_with_custom_ca;
pub use crate::default_client::CodexHttpClient;
pub use crate::default_client::CodexRequestBuilder;
pub use crate::error::StreamError;

View File

@@ -1,145 +0,0 @@
//! Subprocess coverage for custom CA behavior that must build a real reqwest client.
//!
//! These tests intentionally run through `custom_ca_probe` and
//! `build_reqwest_client_for_subprocess_tests` instead of calling the helper in-process. The
//! detailed explanation of what "hermetic" means here lives in `codex_client::custom_ca`; these
//! tests add the process-level half of that contract by scrubbing inherited CA environment
//! variables before each subprocess launch. They still stop at client construction: the
//! assertions here cover CA file selection, PEM parsing, and user-facing errors, not a full TLS
//! handshake.
use codex_utils_cargo_bin::cargo_bin;
use std::fs;
use std::path::Path;
use std::process::Command;
use tempfile::TempDir;
const CODEX_CA_CERT_ENV: &str = "CODEX_CA_CERTIFICATE";
const SSL_CERT_FILE_ENV: &str = "SSL_CERT_FILE";
const TEST_CERT_1: &str = include_str!("fixtures/test-ca.pem");
const TEST_CERT_2: &str = include_str!("fixtures/test-intermediate.pem");
const TRUSTED_TEST_CERT: &str = include_str!("fixtures/test-ca-trusted.pem");
fn write_cert_file(temp_dir: &TempDir, name: &str, contents: &str) -> std::path::PathBuf {
let path = temp_dir.path().join(name);
fs::write(&path, contents).unwrap_or_else(|error| {
panic!("write cert fixture failed for {}: {error}", path.display())
});
path
}
fn run_probe(envs: &[(&str, &Path)]) -> std::process::Output {
let mut cmd = Command::new(
cargo_bin("custom_ca_probe")
.unwrap_or_else(|error| panic!("failed to locate custom_ca_probe: {error}")),
);
// `Command` inherits the parent environment by default, so scrub CA-related variables first or
// these tests can accidentally pass/fail based on the developer shell or CI runner.
cmd.env_remove(CODEX_CA_CERT_ENV);
cmd.env_remove(SSL_CERT_FILE_ENV);
for (key, value) in envs {
cmd.env(key, value);
}
cmd.output()
.unwrap_or_else(|error| panic!("failed to run custom_ca_probe: {error}"))
}
#[test]
fn uses_codex_ca_cert_env() {
let temp_dir = TempDir::new().expect("tempdir");
let cert_path = write_cert_file(&temp_dir, "ca.pem", TEST_CERT_1);
let output = run_probe(&[(CODEX_CA_CERT_ENV, cert_path.as_path())]);
assert!(output.status.success());
}
#[test]
fn falls_back_to_ssl_cert_file() {
let temp_dir = TempDir::new().expect("tempdir");
let cert_path = write_cert_file(&temp_dir, "ssl.pem", TEST_CERT_1);
let output = run_probe(&[(SSL_CERT_FILE_ENV, cert_path.as_path())]);
assert!(output.status.success());
}
#[test]
fn prefers_codex_ca_cert_over_ssl_cert_file() {
let temp_dir = TempDir::new().expect("tempdir");
let cert_path = write_cert_file(&temp_dir, "ca.pem", TEST_CERT_1);
let bad_path = write_cert_file(&temp_dir, "bad.pem", "");
let output = run_probe(&[
(CODEX_CA_CERT_ENV, cert_path.as_path()),
(SSL_CERT_FILE_ENV, bad_path.as_path()),
]);
assert!(output.status.success());
}
#[test]
fn handles_multi_certificate_bundle() {
let temp_dir = TempDir::new().expect("tempdir");
let bundle = format!("{TEST_CERT_1}\n{TEST_CERT_2}");
let cert_path = write_cert_file(&temp_dir, "bundle.pem", &bundle);
let output = run_probe(&[(CODEX_CA_CERT_ENV, cert_path.as_path())]);
assert!(output.status.success());
}
#[test]
fn rejects_empty_pem_file_with_hint() {
let temp_dir = TempDir::new().expect("tempdir");
let cert_path = write_cert_file(&temp_dir, "empty.pem", "");
let output = run_probe(&[(CODEX_CA_CERT_ENV, cert_path.as_path())]);
assert!(!output.status.success());
let stderr = String::from_utf8_lossy(&output.stderr);
assert!(stderr.contains("no certificates found in PEM file"));
assert!(stderr.contains("CODEX_CA_CERTIFICATE"));
assert!(stderr.contains("SSL_CERT_FILE"));
}
#[test]
fn rejects_malformed_pem_with_hint() {
let temp_dir = TempDir::new().expect("tempdir");
let cert_path = write_cert_file(
&temp_dir,
"malformed.pem",
"-----BEGIN CERTIFICATE-----\nMIIBroken",
);
let output = run_probe(&[(CODEX_CA_CERT_ENV, cert_path.as_path())]);
assert!(!output.status.success());
let stderr = String::from_utf8_lossy(&output.stderr);
assert!(stderr.contains("failed to parse PEM file"));
assert!(stderr.contains("CODEX_CA_CERTIFICATE"));
assert!(stderr.contains("SSL_CERT_FILE"));
}
#[test]
fn accepts_openssl_trusted_certificate() {
let temp_dir = TempDir::new().expect("tempdir");
let cert_path = write_cert_file(&temp_dir, "trusted.pem", TRUSTED_TEST_CERT);
let output = run_probe(&[(CODEX_CA_CERT_ENV, cert_path.as_path())]);
assert!(output.status.success());
}
#[test]
fn accepts_bundle_with_crl() {
let temp_dir = TempDir::new().expect("tempdir");
let crl = "-----BEGIN X509 CRL-----\nMIIC\n-----END X509 CRL-----";
let bundle = format!("{TEST_CERT_1}\n{crl}");
let cert_path = write_cert_file(&temp_dir, "bundle_crl.pem", &bundle);
let output = run_probe(&[(CODEX_CA_CERT_ENV, cert_path.as_path())]);
assert!(output.status.success());
}

View File

@@ -1,25 +0,0 @@
# Test-only OpenSSL trusted-certificate fixture generated from test-ca.pem with
# `openssl x509 -addtrust serverAuth -trustout`.
# The extra trailing bytes model the OpenSSL X509_AUX data that follows the
# certificate DER in real TRUSTED CERTIFICATE bundles.
# This fixture exists to validate the X509_AUX trimming path against a real
# OpenSSL-generated artifact, not just label normalization.
-----BEGIN TRUSTED CERTIFICATE-----
MIIDBTCCAe2gAwIBAgIUZYhGvBUG7SucNzYh9VIeZ7b9zHowDQYJKoZIhvcNAQEL
BQAwEjEQMA4GA1UEAwwHdGVzdC1jYTAeFw0yNTEyMTEyMzEyNTFaFw0zNTEyMDky
MzEyNTFaMBIxEDAOBgNVBAMMB3Rlc3QtY2EwggEiMA0GCSqGSIb3DQEBAQUAA4IB
DwAwggEKAoIBAQC+NJRZAdn15FFBN8eR1HTAe+LMVpO19kKtiCsQjyqHONfhfHcF
7zQfwmH6MqeNpC/5k5m8V1uSIhyHBskQm83Jv8/vHlffNxE/hl0Na/Yd1bc+2kxH
twIAsF32GKnSKnFva/iGczV81+/ETgG6RXfTfy/Xs6fXL8On8SRRmTcMw0bEfwko
ziid87VOHg2JfdRKN5QpS9lvQ8q4q2M3jMftolpUTpwlR0u8j9OXnZfn+ja33X0l
kjkoCbXE2fVbAzO/jhUHQX1H5RbTGGUnrrCWAj84Rq/E80KK1nrRF91K+vgZmilM
gOZosLMMI1PeqTakwg1yIRngpTyk0eJP+haxAgMBAAGjUzBRMB0GA1UdDgQWBBT6
sqvfjMIl0DFZkeu8LU577YqMVDAfBgNVHSMEGDAWgBT6sqvfjMIl0DFZkeu8LU57
7YqMVDAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBQ1sYs2RvB
TZ+xSBglLwH/S7zXVJIDwQ23Rlj11dgnVvcilSJCX24Rr+pfIVLpYNDdZzc/DIJd
S1dt2JuLnvXnle29rU7cxuzYUkUkRtaeY2Sj210vsE3lqUFyIy8XCc/lteb+FiJ7
zo/gPk7P+y4ihK9Mm6SBqkDVEYSFSn9bgoemK+0e93jGe2182PyuTwfTmZgENSBO
2f9dSuay4C7e5UO8bhVccQJg6f4d70zUNG0oPHrnVxJLjwCd++jx25Gh4U7+ek13
CW57pxJrpPMDWb2YK64rT2juHMKF73YuplW92SInd+QLpI2ekTLc+bRw8JvqzXg+
SprtRUBjlWzjMAwwCgYIKwYBBQUHAwE=
-----END TRUSTED CERTIFICATE-----

View File

@@ -1,21 +0,0 @@
# Test-only self-signed CA fixture used for single-certificate loading.
# These tests only verify PEM parsing and root-certificate registration, not a TLS handshake.
-----BEGIN CERTIFICATE-----
MIIDBTCCAe2gAwIBAgIUZYhGvBUG7SucNzYh9VIeZ7b9zHowDQYJKoZIhvcNAQEL
BQAwEjEQMA4GA1UEAwwHdGVzdC1jYTAeFw0yNTEyMTEyMzEyNTFaFw0zNTEyMDky
MzEyNTFaMBIxEDAOBgNVBAMMB3Rlc3QtY2EwggEiMA0GCSqGSIb3DQEBAQUAA4IB
DwAwggEKAoIBAQC+NJRZAdn15FFBN8eR1HTAe+LMVpO19kKtiCsQjyqHONfhfHcF
7zQfwmH6MqeNpC/5k5m8V1uSIhyHBskQm83Jv8/vHlffNxE/hl0Na/Yd1bc+2kxH
twIAsF32GKnSKnFva/iGczV81+/ETgG6RXfTfy/Xs6fXL8On8SRRmTcMw0bEfwko
ziid87VOHg2JfdRKN5QpS9lvQ8q4q2M3jMftolpUTpwlR0u8j9OXnZfn+ja33X0l
kjkoCbXE2fVbAzO/jhUHQX1H5RbTGGUnrrCWAj84Rq/E80KK1nrRF91K+vgZmilM
gOZosLMMI1PeqTakwg1yIRngpTyk0eJP+haxAgMBAAGjUzBRMB0GA1UdDgQWBBT6
sqvfjMIl0DFZkeu8LU577YqMVDAfBgNVHSMEGDAWgBT6sqvfjMIl0DFZkeu8LU57
7YqMVDAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBQ1sYs2RvB
TZ+xSBglLwH/S7zXVJIDwQ23Rlj11dgnVvcilSJCX24Rr+pfIVLpYNDdZzc/DIJd
S1dt2JuLnvXnle29rU7cxuzYUkUkRtaeY2Sj210vsE3lqUFyIy8XCc/lteb+FiJ7
zo/gPk7P+y4ihK9Mm6SBqkDVEYSFSn9bgoemK+0e93jGe2182PyuTwfTmZgENSBO
2f9dSuay4C7e5UO8bhVccQJg6f4d70zUNG0oPHrnVxJLjwCd++jx25Gh4U7+ek13
CW57pxJrpPMDWb2YK64rT2juHMKF73YuplW92SInd+QLpI2ekTLc+bRw8JvqzXg+
SprtRUBjlWzj
-----END CERTIFICATE-----

View File

@@ -1,21 +0,0 @@
# Second valid test-only certificate used for multi-certificate bundle coverage.
# It is intentionally distinct from test-ca.pem; chain validation is not part of these tests.
-----BEGIN CERTIFICATE-----
MIIDGTCCAgGgAwIBAgIUWxlcvHzwITWAHWHbKMFUTgeDmjwwDQYJKoZIhvcNAQEL
BQAwHDEaMBgGA1UEAwwRdGVzdC1pbnRlcm1lZGlhdGUwHhcNMjUxMTE5MTU1MDIz
WhcNMjYxMTE5MTU1MDIzWjAcMRowGAYDVQQDDBF0ZXN0LWludGVybWVkaWF0ZTCC
ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANq7xbeYpC2GaXANqD1nLk0t
j9j2sOk6e7DqTapxnIUijS7z4DF0Vo1xHM07wK1m+wsB/t9CubNYRvtn6hrIzx7K
jjlmvxo4/YluwO1EDMQWZAXkaY2O28ESKVx7QLfBPYAc4bf/5B4Nmt6KX5sQyyyH
2qTfzVBUCAl3sI+Ydd3mx7NOye1yNNkCNqyK3Hj45F1JuH8NZxcb4OlKssZhMlD+
EQx4G46AzKE9Ho8AqlQvg/tiWrMHRluw7zolMJ/AXzedAXedNIrX4fCOmZwcTkA1
a8eLPP8oM9VFrr67a7on6p4zPqugUEQ4fawp7A5KqSjUAVCt1FXmn2V8N8V6W/sC
AwEAAaNTMFEwHQYDVR0OBBYEFBEwRwW0gm3IjhLw1U3eOAvR0r6SMB8GA1UdIwQY
MBaAFBEwRwW0gm3IjhLw1U3eOAvR0r6SMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI
hvcNAQELBQADggEBAB2fjAlpevK42Odv8XUEgV6VWlEP9HAmkRvugW9hjhzx1Iz9
Vh/l9VcxL7PcqdpyGH+BIRvQIMokcYF5TXzf/KV1T2y56U8AWaSd2/xSjYNWwkgE
TLE5V+H/YDKzvTe58UrOaxa5N3URscQL9f+ZKworODmfMlkJ1mlREK130ZMlBexB
p9w5wo1M1fjx76Rqzq9MkpwBSbIO2zx/8+qy4BAH23MPGW+9OOnnq2DiIX3qUu1v
hnjYOxYpCB28MZEJmqsjFJQQ9RF+Te4U2/oknVcf8lZIMJ2ZBOwt2zg8RqCtM52/
IbATwYj77wg3CFLFKcDYs3tdUqpiniabKcf6zAs=
-----END CERTIFICATE-----

View File

@@ -6,43 +6,18 @@ use std::fmt;
use std::future::Future;
use thiserror::Error;
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum CloudRequirementsLoadErrorCode {
Auth,
Timeout,
Parse,
RequestFailed,
Internal,
}
#[derive(Clone, Debug, Eq, Error, PartialEq)]
#[error("{message}")]
pub struct CloudRequirementsLoadError {
code: CloudRequirementsLoadErrorCode,
message: String,
status_code: Option<u16>,
}
impl CloudRequirementsLoadError {
pub fn new(
code: CloudRequirementsLoadErrorCode,
status_code: Option<u16>,
message: impl Into<String>,
) -> Self {
pub fn new(message: impl Into<String>) -> Self {
Self {
code,
message: message.into(),
status_code,
}
}
pub fn code(&self) -> CloudRequirementsLoadErrorCode {
self.code
}
pub fn status_code(&self) -> Option<u16> {
self.status_code
}
}
#[derive(Clone)]

View File

@@ -245,43 +245,6 @@ impl FeatureRequirementsToml {
}
}
#[derive(Deserialize, Debug, Clone, Default, PartialEq, Eq)]
pub struct AppRequirementToml {
pub enabled: Option<bool>,
}
#[derive(Deserialize, Debug, Clone, Default, PartialEq, Eq)]
pub struct AppsRequirementsToml {
#[serde(default, flatten)]
pub apps: BTreeMap<String, AppRequirementToml>,
}
impl AppsRequirementsToml {
pub fn is_empty(&self) -> bool {
self.apps.values().all(|app| app.enabled.is_none())
}
}
/// Merge `enabled` configs from a lower-precedence source into an existing higher-precedence set.
/// This lets managed sources (for example Cloud/MDM) enforce setting disablement across layers.
/// Implemented with AppsRequirementsToml for now, could be abstracted if we have more enablement-style configs in the future.
pub(crate) fn merge_enablement_settings_descending(
base: &mut AppsRequirementsToml,
incoming: AppsRequirementsToml,
) {
for (app_id, incoming_requirement) in incoming.apps {
let base_requirement = base.apps.entry(app_id).or_default();
let higher_precedence = base_requirement.enabled;
let lower_precedence = incoming_requirement.enabled;
base_requirement.enabled =
if higher_precedence == Some(false) || lower_precedence == Some(false) {
Some(false)
} else {
higher_precedence.or(lower_precedence)
};
}
}
/// Base config deserialized from system `requirements.toml` or MDM.
#[derive(Deserialize, Debug, Clone, Default, PartialEq)]
pub struct ConfigRequirementsToml {
@@ -291,7 +254,6 @@ pub struct ConfigRequirementsToml {
#[serde(rename = "features", alias = "feature_requirements")]
pub feature_requirements: Option<FeatureRequirementsToml>,
pub mcp_servers: Option<BTreeMap<String, McpServerRequirement>>,
pub apps: Option<AppsRequirementsToml>,
pub rules: Option<RequirementsExecPolicyToml>,
pub enforce_residency: Option<ResidencyRequirement>,
#[serde(rename = "experimental_network")]
@@ -327,7 +289,6 @@ pub struct ConfigRequirementsWithSources {
pub allowed_web_search_modes: Option<Sourced<Vec<WebSearchModeRequirement>>>,
pub feature_requirements: Option<Sourced<FeatureRequirementsToml>>,
pub mcp_servers: Option<Sourced<BTreeMap<String, McpServerRequirement>>>,
pub apps: Option<Sourced<AppsRequirementsToml>>,
pub rules: Option<Sourced<RequirementsExecPolicyToml>>,
pub enforce_residency: Option<Sourced<ResidencyRequirement>>,
pub network: Option<Sourced<NetworkRequirementsToml>>,
@@ -339,6 +300,10 @@ impl ConfigRequirementsWithSources {
// in `self` is `None`, copy the value from `other` into `self`.
macro_rules! fill_missing_take {
($base:expr, $other:expr, $source:expr, { $($field:ident),+ $(,)? }) => {
// Destructure without `..` so adding fields to `ConfigRequirementsToml`
// forces this merge logic to be updated.
let ConfigRequirementsToml { $($field: _,)+ } = &$other;
$(
if $base.$field.is_none()
&& let Some(value) = $other.$field.take()
@@ -349,20 +314,6 @@ impl ConfigRequirementsWithSources {
};
}
// Destructure without `..` so adding fields to `ConfigRequirementsToml`
// forces this merge logic to be updated.
let ConfigRequirementsToml {
allowed_approval_policies: _,
allowed_sandbox_modes: _,
allowed_web_search_modes: _,
feature_requirements: _,
mcp_servers: _,
apps: _,
rules: _,
enforce_residency: _,
network: _,
} = &other;
let mut other = other;
fill_missing_take!(
self,
@@ -379,14 +330,6 @@ impl ConfigRequirementsWithSources {
network,
}
);
if let Some(incoming_apps) = other.apps.take() {
if let Some(existing_apps) = self.apps.as_mut() {
merge_enablement_settings_descending(&mut existing_apps.value, incoming_apps);
} else {
self.apps = Some(Sourced::new(incoming_apps, source));
}
}
}
pub fn into_toml(self) -> ConfigRequirementsToml {
@@ -396,7 +339,6 @@ impl ConfigRequirementsWithSources {
allowed_web_search_modes,
feature_requirements,
mcp_servers,
apps,
rules,
enforce_residency,
network,
@@ -407,7 +349,6 @@ impl ConfigRequirementsWithSources {
allowed_web_search_modes: allowed_web_search_modes.map(|sourced| sourced.value),
feature_requirements: feature_requirements.map(|sourced| sourced.value),
mcp_servers: mcp_servers.map(|sourced| sourced.value),
apps: apps.map(|sourced| sourced.value),
rules: rules.map(|sourced| sourced.value),
enforce_residency: enforce_residency.map(|sourced| sourced.value),
network: network.map(|sourced| sourced.value),
@@ -458,10 +399,6 @@ impl ConfigRequirementsToml {
.as_ref()
.is_none_or(FeatureRequirementsToml::is_empty)
&& self.mcp_servers.is_none()
&& self
.apps
.as_ref()
.is_none_or(AppsRequirementsToml::is_empty)
&& self.rules.is_none()
&& self.enforce_residency.is_none()
&& self.network.is_none()
@@ -478,7 +415,6 @@ impl TryFrom<ConfigRequirementsWithSources> for ConfigRequirements {
allowed_web_search_modes,
feature_requirements,
mcp_servers,
apps: _apps,
rules,
enforce_residency,
network,
@@ -686,7 +622,6 @@ mod tests {
allowed_web_search_modes,
feature_requirements,
mcp_servers,
apps,
rules,
enforce_residency,
network,
@@ -701,7 +636,6 @@ mod tests {
feature_requirements: feature_requirements
.map(|value| Sourced::new(value, RequirementSource::Unknown)),
mcp_servers: mcp_servers.map(|value| Sourced::new(value, RequirementSource::Unknown)),
apps: apps.map(|value| Sourced::new(value, RequirementSource::Unknown)),
rules: rules.map(|value| Sourced::new(value, RequirementSource::Unknown)),
enforce_residency: enforce_residency
.map(|value| Sourced::new(value, RequirementSource::Unknown)),
@@ -737,7 +671,6 @@ mod tests {
allowed_web_search_modes: Some(allowed_web_search_modes.clone()),
feature_requirements: Some(feature_requirements.clone()),
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: Some(enforce_residency),
network: None,
@@ -762,7 +695,6 @@ mod tests {
enforce_source.clone(),
)),
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: Some(Sourced::new(enforce_residency, enforce_source)),
network: None,
@@ -796,7 +728,6 @@ mod tests {
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -838,7 +769,6 @@ mod tests {
allowed_web_search_modes: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
rules: None,
enforce_residency: None,
network: None,
@@ -847,174 +777,6 @@ mod tests {
Ok(())
}
#[test]
fn deserialize_apps_requirements() -> Result<()> {
let toml_str = r#"
[apps.connector_123123]
enabled = false
"#;
let requirements: ConfigRequirementsToml = from_str(toml_str)?;
assert_eq!(
requirements.apps,
Some(AppsRequirementsToml {
apps: BTreeMap::from([(
"connector_123123".to_string(),
AppRequirementToml {
enabled: Some(false),
},
)]),
})
);
Ok(())
}
fn apps_requirements(entries: &[(&str, Option<bool>)]) -> AppsRequirementsToml {
AppsRequirementsToml {
apps: entries
.iter()
.map(|(app_id, enabled)| {
(
(*app_id).to_string(),
AppRequirementToml { enabled: *enabled },
)
})
.collect(),
}
}
#[test]
fn merge_enablement_settings_descending_unions_distinct_apps() {
let mut merged = apps_requirements(&[("connector_high", Some(false))]);
let lower = apps_requirements(&[("connector_low", Some(true))]);
merge_enablement_settings_descending(&mut merged, lower);
assert_eq!(
merged,
apps_requirements(&[
("connector_high", Some(false)),
("connector_low", Some(true))
]),
);
}
#[test]
fn merge_enablement_settings_descending_prefers_false_from_lower_precedence() {
let mut merged = apps_requirements(&[("connector_123123", Some(true))]);
let lower = apps_requirements(&[("connector_123123", Some(false))]);
merge_enablement_settings_descending(&mut merged, lower);
assert_eq!(
merged,
apps_requirements(&[("connector_123123", Some(false))]),
);
}
#[test]
fn merge_enablement_settings_descending_keeps_higher_true_when_lower_is_unset() {
let mut merged = apps_requirements(&[("connector_123123", Some(true))]);
let lower = apps_requirements(&[("connector_123123", None)]);
merge_enablement_settings_descending(&mut merged, lower);
assert_eq!(
merged,
apps_requirements(&[("connector_123123", Some(true))]),
);
}
#[test]
fn merge_enablement_settings_descending_uses_lower_value_when_higher_missing() {
let mut merged = apps_requirements(&[]);
let lower = apps_requirements(&[("connector_123123", Some(true))]);
merge_enablement_settings_descending(&mut merged, lower);
assert_eq!(
merged,
apps_requirements(&[("connector_123123", Some(true))]),
);
}
#[test]
fn merge_enablement_settings_descending_preserves_higher_false_when_lower_missing_app() {
let mut merged = apps_requirements(&[("connector_123123", Some(false))]);
let lower = apps_requirements(&[]);
merge_enablement_settings_descending(&mut merged, lower);
assert_eq!(
merged,
apps_requirements(&[("connector_123123", Some(false))]),
);
}
#[test]
fn merge_unset_fields_merges_apps_across_sources_with_enabled_evaluation() {
let higher_source = RequirementSource::CloudRequirements;
let lower_source = RequirementSource::LegacyManagedConfigTomlFromMdm;
let mut target = ConfigRequirementsWithSources::default();
target.merge_unset_fields(
higher_source.clone(),
ConfigRequirementsToml {
apps: Some(apps_requirements(&[
("connector_high", Some(true)),
("connector_shared", Some(true)),
])),
..Default::default()
},
);
target.merge_unset_fields(
lower_source,
ConfigRequirementsToml {
apps: Some(apps_requirements(&[
("connector_low", Some(false)),
("connector_shared", Some(false)),
])),
..Default::default()
},
);
let apps = target.apps.expect("apps should be present");
assert_eq!(
apps.value,
apps_requirements(&[
("connector_high", Some(true)),
("connector_low", Some(false)),
("connector_shared", Some(false)),
])
);
assert_eq!(apps.source, higher_source);
}
#[test]
fn merge_unset_fields_apps_empty_higher_source_does_not_block_lower_disables() {
let mut target = ConfigRequirementsWithSources::default();
target.merge_unset_fields(
RequirementSource::CloudRequirements,
ConfigRequirementsToml {
apps: Some(apps_requirements(&[])),
..Default::default()
},
);
target.merge_unset_fields(
RequirementSource::LegacyManagedConfigTomlFromMdm,
ConfigRequirementsToml {
apps: Some(apps_requirements(&[("connector_123123", Some(false))])),
..Default::default()
},
);
assert_eq!(
target.apps.map(|apps| apps.value),
Some(apps_requirements(&[("connector_123123", Some(false))])),
);
}
#[test]
fn constraint_error_includes_requirement_source() -> Result<()> {
let source: ConfigRequirementsToml = from_str(

View File

@@ -11,10 +11,7 @@ mod state;
pub const CONFIG_TOML_FILE: &str = "config.toml";
pub use cloud_requirements::CloudRequirementsLoadError;
pub use cloud_requirements::CloudRequirementsLoadErrorCode;
pub use cloud_requirements::CloudRequirementsLoader;
pub use config_requirements::AppRequirementToml;
pub use config_requirements::AppsRequirementsToml;
pub use config_requirements::ConfigRequirements;
pub use config_requirements::ConfigRequirementsToml;
pub use config_requirements::ConfigRequirementsWithSources;

View File

@@ -48,18 +48,6 @@ Seatbelt also supports macOS permission-profile extensions layered on top of
Expects the binary containing `codex-core` to run the equivalent of `codex sandbox linux` (legacy alias: `codex debug landlock`) when `arg0` is `codex-linux-sandbox`. See the `codex-arg0` crate for details.
Legacy `SandboxPolicy` / `sandbox_mode` configs are still supported on Linux.
They can continue to use the legacy Landlock path when the split filesystem
policy is sandbox-equivalent to the legacy model after `cwd` resolution.
Split filesystem policies that need direct `FileSystemSandboxPolicy`
enforcement, such as read-only or denied carveouts under a broader writable
root, automatically route through bubblewrap. The legacy Landlock path is used
only when the split filesystem policy round-trips through the legacy
`SandboxPolicy` model without changing semantics. That includes overlapping
cases like `/repo = write`, `/repo/a = none`, `/repo/a/b = write`, where the
more specific writable child must reopen under a denied parent.
### All Platforms
Expects the binary containing `codex-core` to simulate the virtual `apply_patch` CLI when `arg1` is `--codex-run-as-apply-patch`. See the `codex-arg0` crate for details.

View File

@@ -231,14 +231,14 @@
},
{
"additionalProperties": false,
"description": "Fine-grained controls for individual approval flows.\n\nWhen a field is `true`, commands in that category are allowed. When it is `false`, those requests are automatically rejected instead of shown to the user.",
"description": "Fine-grained rejection controls for approval prompts.\n\nWhen a field is `true`, prompts of that category are automatically rejected instead of shown to the user.",
"properties": {
"granular": {
"$ref": "#/definitions/GranularApprovalConfig"
"reject": {
"$ref": "#/definitions/RejectConfig"
}
},
"required": [
"granular"
"reject"
],
"type": "object"
},
@@ -339,9 +339,6 @@
"code_mode": {
"type": "boolean"
},
"code_mode_only": {
"type": "boolean"
},
"codex_git_commit": {
"type": "boolean"
},
@@ -372,9 +369,6 @@
"enable_request_compression": {
"type": "boolean"
},
"exec_permission_approvals": {
"type": "boolean"
},
"experimental_use_freeform_apply_patch": {
"type": "boolean"
},
@@ -657,38 +651,6 @@
},
"type": "object"
},
"GranularApprovalConfig": {
"properties": {
"mcp_elicitations": {
"description": "Whether to allow MCP elicitation prompts.",
"type": "boolean"
},
"request_permissions": {
"default": false,
"description": "Whether to allow prompts triggered by the `request_permissions` tool.",
"type": "boolean"
},
"rules": {
"description": "Whether to allow prompts triggered by execpolicy `prompt` rules.",
"type": "boolean"
},
"sandbox_approval": {
"description": "Whether to allow shell command approval requests, including inline `with_additional_permissions` and `require_escalated` requests.",
"type": "boolean"
},
"skill_approval": {
"default": false,
"description": "Whether to allow approval prompts triggered by skill script execution.",
"type": "boolean"
}
},
"required": [
"mcp_elicitations",
"rules",
"sandbox_approval"
],
"type": "object"
},
"History": {
"additionalProperties": false,
"description": "Settings that govern if and what will be written to `~/.codex/history.jsonl`.",
@@ -1342,32 +1304,6 @@
},
"type": "object"
},
"RealtimeToml": {
"additionalProperties": false,
"properties": {
"type": {
"$ref": "#/definitions/RealtimeWsMode"
},
"version": {
"$ref": "#/definitions/RealtimeWsVersion"
}
},
"type": "object"
},
"RealtimeWsMode": {
"enum": [
"conversational",
"transcription"
],
"type": "string"
},
"RealtimeWsVersion": {
"enum": [
"v1",
"v2"
],
"type": "string"
},
"ReasoningEffort": {
"description": "See https://platform.openai.com/docs/guides/reasoning?api-mode=responses#get-started-with-reasoning",
"enum": [
@@ -1400,6 +1336,38 @@
}
]
},
"RejectConfig": {
"properties": {
"mcp_elicitations": {
"description": "Reject MCP elicitation prompts.",
"type": "boolean"
},
"request_permissions": {
"default": false,
"description": "Reject `request_permissions` tool requests.",
"type": "boolean"
},
"rules": {
"description": "Reject prompts triggered by execpolicy `prompt` rules.",
"type": "boolean"
},
"sandbox_approval": {
"description": "Reject shell command approval requests, including inline `with_additional_permissions` and `require_escalated` requests.",
"type": "boolean"
},
"skill_approval": {
"default": false,
"description": "Reject approval prompts triggered by skill script execution.",
"type": "boolean"
}
},
"required": [
"mcp_elicitations",
"rules",
"sandbox_approval"
],
"type": "object"
},
"SandboxMode": {
"enum": [
"read-only",
@@ -1617,6 +1585,14 @@
},
"type": "array"
},
"terminal_title": {
"default": null,
"description": "Ordered list of terminal title item identifiers.\n\nWhen set, the TUI renders the selected items into the terminal window/tab title. When unset, the TUI defaults to: `spinner` and `project`.",
"items": {
"type": "string"
},
"type": "array"
},
"theme": {
"default": null,
"description": "Syntax highlighting theme name (kebab-case).\n\nWhen set, overrides automatic light/dark theme detection. Use `/theme` in the TUI or see `$CODEX_HOME/themes` for custom themes.",
@@ -1718,10 +1694,6 @@
"properties": {
"sandbox": {
"$ref": "#/definitions/WindowsSandboxModeToml"
},
"sandbox_private_desktop": {
"description": "Defaults to `true`. Set to `false` to launch the final sandboxed child process on `Winsta0\\\\Default` instead of a private desktop.",
"type": "boolean"
}
},
"type": "object"
@@ -1883,9 +1855,6 @@
"code_mode": {
"type": "boolean"
},
"code_mode_only": {
"type": "boolean"
},
"codex_git_commit": {
"type": "boolean"
},
@@ -1916,9 +1885,6 @@
"enable_request_compression": {
"type": "boolean"
},
"exec_permission_approvals": {
"type": "boolean"
},
"experimental_use_freeform_apply_patch": {
"type": "boolean"
},
@@ -2320,15 +2286,6 @@
},
"type": "object"
},
"realtime": {
"allOf": [
{
"$ref": "#/definitions/RealtimeToml"
}
],
"default": null,
"description": "Experimental / do not use. Realtime websocket session selection. `version` controls v1/v2 and `type` controls conversational/transcription."
},
"review_model": {
"description": "Review model override used by the `/review` feature.",
"type": "string"

Some files were not shown because too many files have changed in this diff Show More