Mirror of https://github.com/openai/codex.git, synced 2026-02-03 07:23:39 +00:00

Compare commits: `concurrent` ... `compact` (42 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | bd2a53d1cd |  |
|  | 094d7af8c3 |  |
|  | 2d2df891bb |  |
|  | 80c19ea77c |  |
|  | 19bef7659f |  |
|  | 5ebb7dd34c |  |
|  | d76f96ce79 |  |
|  | fcd197d596 |  |
|  | 9102255854 |  |
|  | 7ecd3153a8 |  |
|  | 2405c40026 |  |
|  | 58bed77ba7 |  |
|  | 5a0079fea2 |  |
|  | c66c99c5b5 |  |
|  | 75b4008094 |  |
|  | 7ee87123a6 |  |
|  | 994c9a874d |  |
|  | 480e82b00d |  |
|  | 508abbe990 |  |
|  | a1641743a8 |  |
|  | c9e2def494 |  |
|  | 7af9cedbd7 |  |
|  | 2437a8d17a |  |
|  | d2be0720b5 |  |
|  | 173386eeac |  |
|  | 4a57afaaf2 |  |
|  | 9f645353e9 |  |
|  | db84722080 |  |
|  | 6e1838e0d8 |  |
|  | 4fc4e410bd |  |
|  | 6dd62ffa3b |  |
|  | b4ab7c1b73 |  |
|  | 084236f717 |  |
|  | bc944e77f5 |  |
|  | 591cb6149a |  |
|  | d6c4083f98 |  |
|  | 3ef544fb95 |  |
|  | 01c0896f0f |  |
|  | e744548aae |  |
|  | 8b23e160c4 |  |
|  | 3df732caa1 |  |
|  | 3a4f5435e8 |  |
12  .github/actions/codex/bun.lock (vendored)
@@ -8,8 +8,8 @@
|
||||
"@actions/github": "^6.0.1",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "^1.2.18",
|
||||
"@types/node": "^24.0.13",
|
||||
"@types/bun": "^1.2.19",
|
||||
"@types/node": "^24.1.0",
|
||||
"prettier": "^3.6.2",
|
||||
"typescript": "^5.8.3",
|
||||
},
|
||||
@@ -48,15 +48,15 @@
|
||||
|
||||
"@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
|
||||
|
||||
"@types/bun": ["@types/bun@1.2.18", "", { "dependencies": { "bun-types": "1.2.18" } }, "sha512-Xf6RaWVheyemaThV0kUfaAUvCNokFr+bH8Jxp+tTZfx7dAPA8z9ePnP9S9+Vspzuxxx9JRAXhnyccRj3GyCMdQ=="],
|
||||
"@types/bun": ["@types/bun@1.2.19", "", { "dependencies": { "bun-types": "1.2.19" } }, "sha512-d9ZCmrH3CJ2uYKXQIUuZ/pUnTqIvLDS0SK7pFmbx8ma+ziH/FRMoAq5bYpRG7y+w1gl+HgyNZbtqgMq4W4e2Lg=="],
|
||||
|
||||
"@types/node": ["@types/node@24.0.13", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-Qm9OYVOFHFYg3wJoTSrz80hoec5Lia/dPp84do3X7dZvLikQvM1YpmvTBEdIr/e+U8HTkFjLHLnl78K/qjf+jQ=="],
|
||||
"@types/node": ["@types/node@24.1.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w=="],
|
||||
|
||||
"@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],
|
||||
|
||||
"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],
|
||||
|
||||
"bun-types": ["bun-types@1.2.18", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-04+Eha5NP7Z0A9YgDAzMk5PHR16ZuLVa83b26kH5+cp1qZW4F6FmAURngE7INf4tKOvCE69vYvDEwoNl1tGiWw=="],
|
||||
"bun-types": ["bun-types@1.2.19", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-uAOTaZSPuYsWIXRpj7o56Let0g/wjihKCkeRqUBhlLVM/Bt+Fj9xTo+LhC1OV1XDaGkz4hNC80et5xgy+9KTHQ=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
@@ -82,6 +82,8 @@
|
||||
|
||||
"@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"bun-types/@types/node": ["@types/node@24.0.13", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-Qm9OYVOFHFYg3wJoTSrz80hoec5Lia/dPp84do3X7dZvLikQvM1YpmvTBEdIr/e+U8HTkFjLHLnl78K/qjf+jQ=="],
|
||||
|
||||
"@octokit/plugin-paginate-rest/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
4  .github/actions/codex/package.json (vendored)
@@ -13,8 +13,8 @@
"@actions/github": "^6.0.1"
},
"devDependencies": {
"@types/bun": "^1.2.18",
"@types/node": "^24.0.13",
"@types/bun": "^1.2.19",
"@types/node": "^24.1.0",
"prettier": "^3.6.2",
"typescript": "^5.8.3"
}
2  .github/workflows/rust-release.yml (vendored)
@@ -93,7 +93,7 @@ jobs:
sudo apt install -y musl-tools pkg-config

- name: Cargo build
run: cargo build --target ${{ matrix.target }} --release --all-targets --all-features
run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-exec --bin codex-linux-sandbox

- name: Stage artifacts
shell: bash
208  codex-rs/Cargo.lock (generated)
@@ -463,18 +463,18 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
|
||||
|
||||
[[package]]
|
||||
name = "castaway"
|
||||
version = "0.2.3"
|
||||
version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0abae9be0aaf9ea96a3b1b8b1b55c602ca751eba1b1500220cea4ecbafe7c0d5"
|
||||
checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a"
|
||||
dependencies = [
|
||||
"rustversion",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.29"
|
||||
version = "1.2.30"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c1599538de2394445747c8cf7935946e3cc27e9625f889d979bfb2aaf569362"
|
||||
checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7"
|
||||
dependencies = [
|
||||
"jobserver",
|
||||
"libc",
|
||||
@@ -570,9 +570,9 @@ checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
|
||||
|
||||
[[package]]
|
||||
name = "clipboard-win"
|
||||
version = "5.4.0"
|
||||
version = "5.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "15efe7a882b08f34e38556b14f2fb3daa98769d06c7f0c1b076dfd0d983bc892"
|
||||
checksum = "bde03770d3df201d4fb868f2c9c59e66a3e4e2bd06692a0fe701e7103c7e84d4"
|
||||
dependencies = [
|
||||
"error-code",
|
||||
]
|
||||
@@ -605,6 +605,18 @@ dependencies = [
|
||||
"tree-sitter-bash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-arg0"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"codex-apply-patch",
|
||||
"codex-core",
|
||||
"codex-linux-sandbox",
|
||||
"dotenvy",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-chatgpt"
|
||||
version = "0.0.0"
|
||||
@@ -628,11 +640,11 @@ dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"clap_complete",
|
||||
"codex-arg0",
|
||||
"codex-chatgpt",
|
||||
"codex-common",
|
||||
"codex-core",
|
||||
"codex-exec",
|
||||
"codex-linux-sandbox",
|
||||
"codex-login",
|
||||
"codex-mcp-server",
|
||||
"codex-tui",
|
||||
@@ -649,7 +661,7 @@ dependencies = [
|
||||
"clap",
|
||||
"codex-core",
|
||||
"serde",
|
||||
"toml 0.9.1",
|
||||
"toml 0.9.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -663,6 +675,7 @@ dependencies = [
|
||||
"bytes",
|
||||
"codex-apply-patch",
|
||||
"codex-mcp-client",
|
||||
"core_test_support",
|
||||
"dirs",
|
||||
"env-flags",
|
||||
"eventsource-stream",
|
||||
@@ -676,25 +689,27 @@ dependencies = [
|
||||
"openssl-sys",
|
||||
"predicates",
|
||||
"pretty_assertions",
|
||||
"rand 0.9.1",
|
||||
"rand 0.9.2",
|
||||
"reqwest",
|
||||
"seccompiler",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha1",
|
||||
"strum_macros 0.27.1",
|
||||
"shlex",
|
||||
"strum_macros 0.27.2",
|
||||
"tempfile",
|
||||
"thiserror 2.0.12",
|
||||
"time",
|
||||
"tokio",
|
||||
"tokio-test",
|
||||
"tokio-util",
|
||||
"toml 0.9.1",
|
||||
"toml 0.9.2",
|
||||
"tracing",
|
||||
"tree-sitter",
|
||||
"tree-sitter-bash",
|
||||
"uuid",
|
||||
"walkdir",
|
||||
"whoami",
|
||||
"wildmatch",
|
||||
"wiremock",
|
||||
]
|
||||
@@ -704,14 +719,17 @@ name = "codex-exec"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assert_cmd",
|
||||
"chrono",
|
||||
"clap",
|
||||
"codex-arg0",
|
||||
"codex-common",
|
||||
"codex-core",
|
||||
"codex-linux-sandbox",
|
||||
"owo-colors",
|
||||
"predicates",
|
||||
"serde_json",
|
||||
"shlex",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
@@ -756,6 +774,7 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"codex-common",
|
||||
"codex-core",
|
||||
"landlock",
|
||||
"libc",
|
||||
@@ -794,9 +813,10 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assert_cmd",
|
||||
"codex-arg0",
|
||||
"codex-core",
|
||||
"codex-linux-sandbox",
|
||||
"mcp-types",
|
||||
"mcp_test_support",
|
||||
"pretty_assertions",
|
||||
"schemars 0.8.22",
|
||||
"serde",
|
||||
@@ -805,7 +825,7 @@ dependencies = [
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tokio-test",
|
||||
"toml 0.9.1",
|
||||
"toml 0.9.2",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"uuid",
|
||||
@@ -820,10 +840,10 @@ dependencies = [
|
||||
"base64 0.22.1",
|
||||
"clap",
|
||||
"codex-ansi-escape",
|
||||
"codex-arg0",
|
||||
"codex-common",
|
||||
"codex-core",
|
||||
"codex-file-search",
|
||||
"codex-linux-sandbox",
|
||||
"codex-login",
|
||||
"color-eyre",
|
||||
"crossterm",
|
||||
@@ -836,10 +856,12 @@ dependencies = [
|
||||
"ratatui",
|
||||
"ratatui-image",
|
||||
"regex-lite",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shlex",
|
||||
"strum 0.27.1",
|
||||
"strum_macros 0.27.1",
|
||||
"strum 0.27.2",
|
||||
"strum_macros 0.27.2",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"tracing-appender",
|
||||
@@ -848,6 +870,7 @@ dependencies = [
|
||||
"tui-markdown",
|
||||
"tui-textarea",
|
||||
"unicode-segmentation",
|
||||
"unicode-width 0.1.14",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -950,6 +973,16 @@ version = "0.8.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
|
||||
|
||||
[[package]]
|
||||
name = "core_test_support"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"codex-core",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cpufeatures"
|
||||
version = "0.2.17"
|
||||
@@ -961,9 +994,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "crc32fast"
|
||||
version = "1.4.2"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3"
|
||||
checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
@@ -1272,6 +1305,12 @@ version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
|
||||
|
||||
[[package]]
|
||||
name = "dotenvy"
|
||||
version = "0.15.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
|
||||
|
||||
[[package]]
|
||||
name = "dupe"
|
||||
version = "0.9.1"
|
||||
@@ -1504,7 +1543,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"rustix 1.0.7",
|
||||
"rustix 1.0.8",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
@@ -1953,9 +1992,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hyper-util"
|
||||
version = "0.1.15"
|
||||
version = "0.1.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f66d5bd4c6f02bf0542fad85d626775bab9258cf795a4256dcaf3161114d1df"
|
||||
checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
@@ -1969,7 +2008,7 @@ dependencies = [
|
||||
"libc",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"socket2",
|
||||
"socket2 0.6.0",
|
||||
"system-configuration",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
@@ -2222,9 +2261,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "instability"
|
||||
version = "0.3.7"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0bf9fed6d91cfb734e7476a06bde8300a1b94e217e1b523b6f0cd1a01998c71d"
|
||||
checksum = "435d80800b936787d62688c927b6490e887c7ef5ff9ce922c6c6050fca75eb9a"
|
||||
dependencies = [
|
||||
"darling",
|
||||
"indoc",
|
||||
@@ -2255,9 +2294,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "io-uring"
|
||||
version = "0.7.8"
|
||||
version = "0.7.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
|
||||
checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"cfg-if",
|
||||
@@ -2461,9 +2500,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "libredox"
|
||||
version = "0.1.4"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1580801010e535496706ba011c15f8532df6b42297d2e471fec38ceadd8c0638"
|
||||
checksum = "4488594b9328dee448adb906d8b126d9b7deb7cf5c22161ee591610bb1be83c0"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"libc",
|
||||
@@ -2596,6 +2635,22 @@ dependencies = [
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mcp_test_support"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assert_cmd",
|
||||
"codex-mcp-server",
|
||||
"mcp-types",
|
||||
"pretty_assertions",
|
||||
"serde_json",
|
||||
"shlex",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"wiremock",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.5"
|
||||
@@ -3271,9 +3326,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.9.1"
|
||||
version = "0.9.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
|
||||
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
|
||||
dependencies = [
|
||||
"rand_chacha 0.9.0",
|
||||
"rand_core 0.9.3",
|
||||
@@ -3320,8 +3375,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "ratatui"
|
||||
version = "0.29.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eabd94c2f37801c20583fc49dd5cd6b0ba68c716787c2dd6ed18571e1e63117b"
|
||||
source = "git+https://github.com/nornagon/ratatui?branch=nornagon-v0.29.0-patch#bca287ddc5d38fe088c79e2eda22422b96226f2e"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"cassowary",
|
||||
@@ -3426,9 +3480,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.13"
|
||||
version = "0.5.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6"
|
||||
checksum = "7e8af0dde094006011e6a740d4879319439489813bd0bcdc7d821beaeeff48ec"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
]
|
||||
@@ -3576,9 +3630,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rgb"
|
||||
version = "0.8.51"
|
||||
version = "0.8.52"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a457e416a0f90d246a4c3288bd7a25b2304ca727f253f95be383dd17af56be8f"
|
||||
checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce"
|
||||
|
||||
[[package]]
|
||||
name = "ring"
|
||||
@@ -3654,22 +3708,22 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "1.0.7"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266"
|
||||
checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys 0.9.4",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustls"
|
||||
version = "0.23.28"
|
||||
version = "0.23.29"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7160e3e10bf4535308537f3c4e1641468cd0e485175d6163087c0393c7d46643"
|
||||
checksum = "2491382039b29b9b11ff08b76ff6c97cf287671dbb74f0be44bda389fffe9bd1"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"rustls-pki-types",
|
||||
@@ -3689,9 +3743,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustls-webpki"
|
||||
version = "0.103.3"
|
||||
version = "0.103.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e4a72fe2bcf7a6ac6fd7d0b9e5cb68aeb7d4c0a0271730218b3e92d43b4eb435"
|
||||
checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc"
|
||||
dependencies = [
|
||||
"ring",
|
||||
"rustls-pki-types",
|
||||
@@ -3917,9 +3971,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.140"
|
||||
version = "1.0.141"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
|
||||
checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
|
||||
dependencies = [
|
||||
"indexmap 2.10.0",
|
||||
"itoa",
|
||||
@@ -4112,6 +4166,16 @@ dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "socket2"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stable_deref_trait"
|
||||
version = "1.2.0"
|
||||
@@ -4255,9 +4319,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "strum"
|
||||
version = "0.27.1"
|
||||
version = "0.27.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
|
||||
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
|
||||
|
||||
[[package]]
|
||||
name = "strum_macros"
|
||||
@@ -4274,14 +4338,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "strum_macros"
|
||||
version = "0.27.1"
|
||||
version = "0.27.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8"
|
||||
checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rustversion",
|
||||
"syn 2.0.104",
|
||||
]
|
||||
|
||||
@@ -4404,7 +4467,7 @@ dependencies = [
|
||||
"fastrand",
|
||||
"getrandom 0.3.3",
|
||||
"once_cell",
|
||||
"rustix 1.0.7",
|
||||
"rustix 1.0.8",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
@@ -4425,7 +4488,7 @@ version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed"
|
||||
dependencies = [
|
||||
"rustix 1.0.7",
|
||||
"rustix 1.0.8",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
@@ -4571,7 +4634,7 @@ dependencies = [
|
||||
"pin-project-lite",
|
||||
"signal-hook-registry",
|
||||
"slab",
|
||||
"socket2",
|
||||
"socket2 0.5.10",
|
||||
"tokio-macros",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
@@ -4658,9 +4721,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.9.1"
|
||||
version = "0.9.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0207d6ed1852c2a124c1fbec61621acb8330d2bf969a5d0643131e9affd985a5"
|
||||
checksum = "ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac"
|
||||
dependencies = [
|
||||
"indexmap 2.10.0",
|
||||
"serde",
|
||||
@@ -4704,18 +4767,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "toml_parser"
|
||||
version = "1.0.0"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b5c1c469eda89749d2230d8156a5969a69ffe0d6d01200581cdc6110674d293e"
|
||||
checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30"
|
||||
dependencies = [
|
||||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_writer"
|
||||
version = "1.0.0"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b679217f2848de74cabd3e8fc5e6d66f40b7da40f8e1954d92054d9010690fd5"
|
||||
checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64"
|
||||
|
||||
[[package]]
|
||||
name = "tower"
|
||||
@@ -4848,9 +4911,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter"
|
||||
version = "0.25.6"
|
||||
version = "0.25.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7cf18d43cbf0bfca51f657132cc616a5097edc4424d538bae6fa60142eaf9f0"
|
||||
checksum = "6d7b8994f367f16e6fa14b5aebbcb350de5d7cbea82dc5b00ae997dd71680dd2"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"regex",
|
||||
@@ -5092,6 +5155,12 @@ dependencies = [
|
||||
"wit-bindgen-rt",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasite"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.100"
|
||||
@@ -5192,6 +5261,17 @@ version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3"
|
||||
|
||||
[[package]]
|
||||
name = "whoami"
|
||||
version = "1.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7"
|
||||
dependencies = [
|
||||
"redox_syscall",
|
||||
"wasite",
|
||||
"web-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wildmatch"
|
||||
version = "2.4.0"
|
||||
@@ -5520,9 +5600,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
|
||||
|
||||
[[package]]
|
||||
name = "winnow"
|
||||
version = "0.7.11"
|
||||
version = "0.7.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd"
|
||||
checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
@@ -3,6 +3,7 @@ resolver = "2"
members = [
"ansi-escape",
"apply-patch",
"arg0",
"cli",
"common",
"core",
@@ -40,3 +41,8 @@ strip = "symbols"

# See https://github.com/openai/codex/issues/1411 for details.
codegen-units = 1

[patch.crates-io]
# ratatui = { path = "../../ratatui" }
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }
@@ -14,7 +14,7 @@ workspace = true
anyhow = "1"
similar = "2.7.0"
thiserror = "2.0.12"
tree-sitter = "0.25.3"
tree-sitter = "0.25.8"
tree-sitter-bash = "0.25.0"

[dev-dependencies]
19  codex-rs/arg0/Cargo.toml (Normal file)
@@ -0,0 +1,19 @@
[package]
name = "codex-arg0"
version = { workspace = true }
edition = "2024"

[lib]
name = "codex_arg0"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
anyhow = "1"
codex-apply-patch = { path = "../apply-patch" }
codex-core = { path = "../core" }
codex-linux-sandbox = { path = "../linux-sandbox" }
dotenvy = "0.15.7"
tokio = { version = "1", features = ["rt-multi-thread"] }
89  codex-rs/arg0/src/lib.rs (Normal file)
@@ -0,0 +1,89 @@
use std::future::Future;
use std::path::Path;
use std::path::PathBuf;

/// While we want to deploy the Codex CLI as a single executable for simplicity,
/// we also want to expose some of its functionality as distinct CLIs, so we use
/// the "arg0 trick" to determine which CLI to dispatch. This effectively allows
/// us to simulate deploying multiple executables as a single binary on Mac and
/// Linux (but not Windows).
///
/// When the current executable is invoked through the hard-link or alias named
/// `codex-linux-sandbox` we *directly* execute
/// [`codex_linux_sandbox::run_main`] (which never returns). Otherwise we:
///
/// 1. Use [`dotenvy::from_path`] and [`dotenvy::dotenv`] to modify the
///    environment before creating any threads.
/// 2. Construct a Tokio multi-thread runtime.
/// 3. Derive the path to the current executable (so children can re-invoke the
///    sandbox) when running on Linux.
/// 4. Execute the provided async `main_fn` inside that runtime, forwarding any
///    error. Note that `main_fn` receives `codex_linux_sandbox_exe:
///    Option<PathBuf>`, as an argument, which is generally needed as part of
///    constructing [`codex_core::config::Config`].
///
/// This function should be used to wrap any `main()` function in binary crates
/// in this workspace that depends on these helper CLIs.
pub fn arg0_dispatch_or_else<F, Fut>(main_fn: F) -> anyhow::Result<()>
where
    F: FnOnce(Option<PathBuf>) -> Fut,
    Fut: Future<Output = anyhow::Result<()>>,
{
    // Determine if we were invoked via the special alias.
    let mut args = std::env::args_os();
    let argv0 = args.next().unwrap_or_default();
    let exe_name = Path::new(&argv0)
        .file_name()
        .and_then(|s| s.to_str())
        .unwrap_or("");

    if exe_name == "codex-linux-sandbox" {
        // Safety: [`run_main`] never returns.
        codex_linux_sandbox::run_main();
    }

    let argv1 = args.next().unwrap_or_default();
    if argv1 == "--codex-run-as-apply-patch" {
        let patch_arg = args.next().and_then(|s| s.to_str().map(|s| s.to_owned()));
        let exit_code = match patch_arg {
            Some(patch_arg) => {
                let mut stdout = std::io::stdout();
                let mut stderr = std::io::stderr();
                match codex_apply_patch::apply_patch(&patch_arg, &mut stdout, &mut stderr) {
                    Ok(()) => 0,
                    Err(_) => 1,
                }
            }
            None => {
                eprintln!("Error: --codex-run-as-apply-patch requires a UTF-8 PATCH argument.");
                1
            }
        };
        std::process::exit(exit_code);
    }

    // This modifies the environment, which is not thread-safe, so do this
    // before creating any threads/the Tokio runtime.
    load_dotenv();

    // Regular invocation – create a Tokio runtime and execute the provided
    // async entry-point.
    let runtime = tokio::runtime::Runtime::new()?;
    runtime.block_on(async move {
        let codex_linux_sandbox_exe: Option<PathBuf> = if cfg!(target_os = "linux") {
            std::env::current_exe().ok()
        } else {
            None
        };

        main_fn(codex_linux_sandbox_exe).await
    })
}

/// Load env vars from ~/.codex/.env and `$(pwd)/.env`.
fn load_dotenv() {
    if let Ok(codex_home) = codex_core::config::find_codex_home() {
        dotenvy::from_path(codex_home.join(".env")).ok();
    }
    dotenvy::dotenv().ok();
}
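As a usage sketch, mirroring the `codex-rs/cli/src/main.rs` change further down in this compare, a binary crate in the workspace wraps its `main()` like this; `run_my_cli` is a hypothetical entry point standing in for the real `cli_main`:

```rust
use std::path::PathBuf;

use codex_arg0::arg0_dispatch_or_else;

fn main() -> anyhow::Result<()> {
    // arg0/arg1 dispatch happens before the Tokio runtime is created; the
    // closure only runs for a "regular" invocation of the binary.
    arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
        run_my_cli(codex_linux_sandbox_exe).await?;
        Ok(())
    })
}

// Hypothetical async entry point; in the real CLI this is `cli_main`, which
// threads `codex_linux_sandbox_exe` into `codex_core::config::Config`.
async fn run_my_cli(_codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
    Ok(())
}
```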
@@ -1,3 +1,5 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::Parser;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::config::Config;
|
||||
@@ -17,7 +19,10 @@ pub struct ApplyCommand {
|
||||
#[clap(flatten)]
|
||||
pub config_overrides: CliConfigOverrides,
|
||||
}
|
||||
pub async fn run_apply_command(apply_cli: ApplyCommand) -> anyhow::Result<()> {
|
||||
pub async fn run_apply_command(
|
||||
apply_cli: ApplyCommand,
|
||||
cwd: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
let config = Config::load_with_cli_overrides(
|
||||
apply_cli
|
||||
.config_overrides
|
||||
@@ -29,10 +34,13 @@ pub async fn run_apply_command(apply_cli: ApplyCommand) -> anyhow::Result<()> {
|
||||
init_chatgpt_token_from_auth(&config.codex_home).await?;
|
||||
|
||||
let task_response = get_task(&config, apply_cli.task_id).await?;
|
||||
apply_diff_from_task(task_response).await
|
||||
apply_diff_from_task(task_response, cwd).await
|
||||
}
|
||||
|
||||
pub async fn apply_diff_from_task(task_response: GetTaskResponse) -> anyhow::Result<()> {
|
||||
pub async fn apply_diff_from_task(
|
||||
task_response: GetTaskResponse,
|
||||
cwd: Option<PathBuf>,
|
||||
) -> anyhow::Result<()> {
|
||||
let diff_turn = match task_response.current_diff_task_turn {
|
||||
Some(turn) => turn,
|
||||
None => anyhow::bail!("No diff turn found"),
|
||||
@@ -42,13 +50,17 @@ pub async fn apply_diff_from_task(task_response: GetTaskResponse) -> anyhow::Res
|
||||
_ => None,
|
||||
});
|
||||
match output_diff {
|
||||
Some(output_diff) => apply_diff(&output_diff.diff).await,
|
||||
Some(output_diff) => apply_diff(&output_diff.diff, cwd).await,
|
||||
None => anyhow::bail!("No PR output item found"),
|
||||
}
|
||||
}
|
||||
|
||||
async fn apply_diff(diff: &str) -> anyhow::Result<()> {
|
||||
let toplevel_output = tokio::process::Command::new("git")
|
||||
async fn apply_diff(diff: &str, cwd: Option<PathBuf>) -> anyhow::Result<()> {
|
||||
let mut cmd = tokio::process::Command::new("git");
|
||||
if let Some(cwd) = cwd {
|
||||
cmd.current_dir(cwd);
|
||||
}
|
||||
let toplevel_output = cmd
|
||||
.args(vec!["rev-parse", "--show-toplevel"])
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
@@ -78,17 +78,7 @@ async fn test_apply_command_creates_fibonacci_file() {
|
||||
.await
|
||||
.expect("Failed to load fixture");
|
||||
|
||||
let original_dir = std::env::current_dir().expect("Failed to get current dir");
|
||||
std::env::set_current_dir(repo_path).expect("Failed to change directory");
|
||||
struct DirGuard(std::path::PathBuf);
|
||||
impl Drop for DirGuard {
|
||||
fn drop(&mut self) {
|
||||
let _ = std::env::set_current_dir(&self.0);
|
||||
}
|
||||
}
|
||||
let _guard = DirGuard(original_dir);
|
||||
|
||||
apply_diff_from_task(task_response)
|
||||
apply_diff_from_task(task_response, Some(repo_path.to_path_buf()))
|
||||
.await
|
||||
.expect("Failed to apply diff from task");
|
||||
|
||||
@@ -173,7 +163,7 @@ console.log(fib(10));
|
||||
.await
|
||||
.expect("Failed to load fixture");
|
||||
|
||||
let apply_result = apply_diff_from_task(task_response).await;
|
||||
let apply_result = apply_diff_from_task(task_response, Some(repo_path.to_path_buf())).await;
|
||||
|
||||
assert!(
|
||||
apply_result.is_err(),
|
||||
|
||||
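A hedged sketch of the new call shape after this change: callers hand `run_apply_command` an explicit working directory instead of mutating the process-wide current dir (construction of `ApplyCommand` is elided because its full field list is not shown in this diff):

```rust
use std::path::PathBuf;

use codex_chatgpt::apply_command::ApplyCommand;
use codex_chatgpt::apply_command::run_apply_command;

async fn apply_in(repo_path: PathBuf, apply_cli: ApplyCommand) -> anyhow::Result<()> {
    // `None` keeps the previous behavior (use the process's current directory);
    // `Some(path)` makes `git rev-parse --show-toplevel` and the patch
    // application run against `path` instead.
    run_apply_command(apply_cli, Some(repo_path)).await
}
```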
@@ -18,12 +18,12 @@ workspace = true
|
||||
anyhow = "1"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
clap_complete = "4"
|
||||
codex-arg0 = { path = "../arg0" }
|
||||
codex-chatgpt = { path = "../chatgpt" }
|
||||
codex-core = { path = "../core" }
|
||||
codex-common = { path = "../common", features = ["cli"] }
|
||||
codex-exec = { path = "../exec" }
|
||||
codex-login = { path = "../login" }
|
||||
codex-linux-sandbox = { path = "../linux-sandbox" }
|
||||
codex-mcp-server = { path = "../mcp-server" }
|
||||
codex-tui = { path = "../tui" }
|
||||
serde_json = "1"
|
||||
|
||||
@@ -2,6 +2,7 @@ use clap::CommandFactory;
|
||||
use clap::Parser;
|
||||
use clap_complete::Shell;
|
||||
use clap_complete::generate;
|
||||
use codex_arg0::arg0_dispatch_or_else;
|
||||
use codex_chatgpt::apply_command::ApplyCommand;
|
||||
use codex_chatgpt::apply_command::run_apply_command;
|
||||
use codex_cli::LandlockCommand;
|
||||
@@ -92,7 +93,7 @@ struct LoginCommand {
|
||||
}
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
codex_linux_sandbox::run_with_sandbox(|codex_linux_sandbox_exe| async move {
|
||||
arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
|
||||
cli_main(codex_linux_sandbox_exe).await?;
|
||||
Ok(())
|
||||
})
|
||||
@@ -105,7 +106,8 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
None => {
|
||||
let mut tui_cli = cli.interactive;
|
||||
prepend_config_flags(&mut tui_cli.config_overrides, cli.config_overrides);
|
||||
codex_tui::run_main(tui_cli, codex_linux_sandbox_exe)?;
|
||||
let usage = codex_tui::run_main(tui_cli, codex_linux_sandbox_exe)?;
|
||||
println!("{}", codex_core::protocol::FinalOutput::from(usage));
|
||||
}
|
||||
Some(Subcommand::Exec(mut exec_cli)) => {
|
||||
prepend_config_flags(&mut exec_cli.config_overrides, cli.config_overrides);
|
||||
@@ -145,7 +147,7 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
},
|
||||
Some(Subcommand::Apply(mut apply_cli)) => {
|
||||
prepend_config_flags(&mut apply_cli.config_overrides, cli.config_overrides);
|
||||
run_apply_command(apply_cli).await?;
|
||||
run_apply_command(apply_cli, None).await?;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ use std::sync::Arc;
|
||||
use clap::Parser;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::Codex;
|
||||
use codex_core::CodexSpawnOk;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::protocol::Submission;
|
||||
@@ -35,7 +36,7 @@ pub async fn run_main(opts: ProtoCli) -> anyhow::Result<()> {
|
||||
|
||||
let config = Config::load_with_cli_overrides(overrides_vec, ConfigOverrides::default())?;
|
||||
let ctrl_c = notify_on_sigint();
|
||||
let (codex, _init_id, _session_id) = Codex::spawn(config, ctrl_c.clone()).await?;
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(config, ctrl_c.clone()).await?;
|
||||
let codex = Arc::new(codex);
|
||||
|
||||
// Task that reads JSON lines from stdin and forwards to Submission Queue
|
||||
|
||||
@@ -498,14 +498,5 @@ Options that are specific to the TUI.

```toml
[tui]
# This will make it so that Codex does not try to process mouse events, which
# means your Terminal's native drag-to-text to text selection and copy/paste
# should work. The tradeoff is that Codex will not receive any mouse events, so
# it will not be possible to use the mouse to scroll conversation history.
#
# Note that most terminals support holding down a modifier key when using the
# mouse to support text selection. For example, even if Codex mouse capture is
# enabled (i.e., this is set to `false`), you can still hold down alt while
# dragging the mouse to select text.
disable_mouse_capture = true # defaults to `false`
# More to come here
```
@@ -30,7 +30,8 @@ reqwest = { version = "0.12", features = ["json", "stream"] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
sha1 = "0.10.6"
|
||||
strum_macros = "0.27.1"
|
||||
shlex = "1.3.0"
|
||||
strum_macros = "0.27.2"
|
||||
thiserror = "2.0.12"
|
||||
time = { version = "0.3", features = ["formatting", "local-offset", "macros"] }
|
||||
tokio = { version = "1", features = [
|
||||
@@ -41,12 +42,14 @@ tokio = { version = "1", features = [
|
||||
"signal",
|
||||
] }
|
||||
tokio-util = "0.7.14"
|
||||
toml = "0.9.1"
|
||||
toml = "0.9.2"
|
||||
tracing = { version = "0.1.41", features = ["log"] }
|
||||
tree-sitter = "0.25.3"
|
||||
tree-sitter = "0.25.8"
|
||||
tree-sitter-bash = "0.25.0"
|
||||
uuid = { version = "1", features = ["serde", "v4"] }
|
||||
wildmatch = "2.4.0"
|
||||
whoami = "1.6.0"
|
||||
|
||||
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
landlock = "0.4.1"
|
||||
@@ -62,6 +65,7 @@ openssl-sys = { version = "*", features = ["vendored"] }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2"
|
||||
core_test_support = { path = "tests/common" }
|
||||
maplit = "1.0.2"
|
||||
predicates = "3"
|
||||
pretty_assertions = "1.4.1"
|
||||
|
||||
@@ -2,9 +2,18 @@

This crate implements the business logic for Codex. It is designed to be used by the various Codex UIs written in Rust.

Though for non-Rust UIs, we are also working to define a _protocol_ for talking to Codex. See:
## Dependencies

- [Specification](../docs/protocol_v1.md)
- [Rust types](./src/protocol.rs)
Note that `codex-core` makes some assumptions about certain helper utilities being available in the environment. Currently, this

You can use the `proto` subcommand using the executable in the [`cli` crate](../cli) to speak the protocol using newline-delimited-JSON over stdin/stdout.
### macOS

Expects `/usr/bin/sandbox-exec` to be present.

### Linux

Expects the binary containing `codex-core` to run the equivalent of `codex debug landlock` when `arg0` is `codex-linux-sandbox`. See the `codex-arg0` crate for details.

### All Platforms

Expects the binary containing `codex-core` to simulate the virtual `apply_patch` CLI when `arg1` is `--codex-run-as-apply-patch`. See the `codex-arg0` crate for details.
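A minimal sketch of what the "virtual `apply_patch` CLI" contract above looks like from the caller's side: re-invoke the current executable with `--codex-run-as-apply-patch` as `arg1`, and the dispatch in `codex-arg0` (shown earlier in this compare) applies the patch and exits. The patch text here is a placeholder; see `codex-apply-patch` for the format it actually accepts.

```rust
use std::process::Command;

fn run_virtual_apply_patch(patch: &str) -> std::io::Result<std::process::ExitStatus> {
    // Re-invoke the binary that embeds codex-core; arg1 routes the invocation
    // to the embedded apply_patch logic instead of the normal CLI.
    let exe = std::env::current_exe()?;
    Command::new(exe)
        .arg("--codex-run-as-apply-patch")
        .arg(patch)
        .status()
}
```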
406  codex-rs/core/src/apply_patch.rs (Normal file)
@@ -0,0 +1,406 @@
|
||||
use crate::codex::Session;
|
||||
use crate::models::FunctionCallOutputPayload;
|
||||
use crate::models::ResponseInputItem;
|
||||
use crate::protocol::Event;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::FileChange;
|
||||
use crate::protocol::PatchApplyBeginEvent;
|
||||
use crate::protocol::PatchApplyEndEvent;
|
||||
use crate::protocol::ReviewDecision;
|
||||
use crate::safety::SafetyCheck;
|
||||
use crate::safety::assess_patch_safety;
|
||||
use anyhow::Context;
|
||||
use codex_apply_patch::AffectedPaths;
|
||||
use codex_apply_patch::ApplyPatchAction;
|
||||
use codex_apply_patch::ApplyPatchFileChange;
|
||||
use codex_apply_patch::print_summary;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub(crate) async fn apply_patch(
|
||||
sess: &Session,
|
||||
sub_id: String,
|
||||
call_id: String,
|
||||
action: ApplyPatchAction,
|
||||
) -> ResponseInputItem {
|
||||
let writable_roots_snapshot = {
|
||||
#[allow(clippy::unwrap_used)]
|
||||
let guard = sess.writable_roots.lock().unwrap();
|
||||
guard.clone()
|
||||
};
|
||||
|
||||
let auto_approved = match assess_patch_safety(
|
||||
&action,
|
||||
sess.approval_policy,
|
||||
&writable_roots_snapshot,
|
||||
&sess.cwd,
|
||||
) {
|
||||
SafetyCheck::AutoApprove { .. } => true,
|
||||
SafetyCheck::AskUser => {
|
||||
// Compute a readable summary of path changes to include in the
|
||||
// approval request so the user can make an informed decision.
|
||||
let rx_approve = sess
|
||||
.request_patch_approval(sub_id.clone(), call_id.clone(), &action, None, None)
|
||||
.await;
|
||||
match rx_approve.await.unwrap_or_default() {
|
||||
ReviewDecision::Approved | ReviewDecision::ApprovedForSession => false,
|
||||
ReviewDecision::Denied | ReviewDecision::Abort => {
|
||||
return ResponseInputItem::FunctionCallOutput {
|
||||
call_id,
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "patch rejected by user".to_string(),
|
||||
success: Some(false),
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
SafetyCheck::Reject { reason } => {
|
||||
return ResponseInputItem::FunctionCallOutput {
|
||||
call_id,
|
||||
output: FunctionCallOutputPayload {
|
||||
content: format!("patch rejected: {reason}"),
|
||||
success: Some(false),
|
||||
},
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
// Verify write permissions before touching the filesystem.
|
||||
let writable_snapshot = {
|
||||
#[allow(clippy::unwrap_used)]
|
||||
sess.writable_roots.lock().unwrap().clone()
|
||||
};
|
||||
|
||||
if let Some(offending) = first_offending_path(&action, &writable_snapshot, &sess.cwd) {
|
||||
let root = offending.parent().unwrap_or(&offending).to_path_buf();
|
||||
|
||||
let reason = Some(format!(
|
||||
"grant write access to {} for this session",
|
||||
root.display()
|
||||
));
|
||||
|
||||
let rx = sess
|
||||
.request_patch_approval(
|
||||
sub_id.clone(),
|
||||
call_id.clone(),
|
||||
&action,
|
||||
reason.clone(),
|
||||
Some(root.clone()),
|
||||
)
|
||||
.await;
|
||||
|
||||
if !matches!(
|
||||
rx.await.unwrap_or_default(),
|
||||
ReviewDecision::Approved | ReviewDecision::ApprovedForSession
|
||||
) {
|
||||
return ResponseInputItem::FunctionCallOutput {
|
||||
call_id,
|
||||
output: FunctionCallOutputPayload {
|
||||
content: "patch rejected by user".to_string(),
|
||||
success: Some(false),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// user approved, extend writable roots for this session
|
||||
#[allow(clippy::unwrap_used)]
|
||||
sess.writable_roots.lock().unwrap().push(root);
|
||||
}
|
||||
|
||||
let _ = sess
|
||||
.tx_event
|
||||
.send(Event {
|
||||
id: sub_id.clone(),
|
||||
msg: EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
|
||||
call_id: call_id.clone(),
|
||||
auto_approved,
|
||||
changes: convert_apply_patch_to_protocol(&action),
|
||||
}),
|
||||
})
|
||||
.await;
|
||||
|
||||
let mut stdout = Vec::new();
|
||||
let mut stderr = Vec::new();
|
||||
// Enforce writable roots. If a write is blocked, collect offending root
|
||||
// and prompt the user to extend permissions.
|
||||
let mut result = apply_changes_from_apply_patch_and_report(&action, &mut stdout, &mut stderr);
|
||||
|
||||
if let Err(err) = &result {
|
||||
if err.kind() == std::io::ErrorKind::PermissionDenied {
|
||||
// Determine first offending path.
|
||||
let offending_opt = action
|
||||
.changes()
|
||||
.iter()
|
||||
.flat_map(|(path, change)| match change {
|
||||
ApplyPatchFileChange::Add { .. } => vec![path.as_ref()],
|
||||
ApplyPatchFileChange::Delete => vec![path.as_ref()],
|
||||
ApplyPatchFileChange::Update {
|
||||
move_path: Some(move_path),
|
||||
..
|
||||
} => {
|
||||
vec![path.as_ref(), move_path.as_ref()]
|
||||
}
|
||||
ApplyPatchFileChange::Update {
|
||||
move_path: None, ..
|
||||
} => vec![path.as_ref()],
|
||||
})
|
||||
.find_map(|path: &Path| {
|
||||
// ApplyPatchAction promises to guarantee absolute paths.
|
||||
if !path.is_absolute() {
|
||||
panic!("apply_patch invariant failed: path is not absolute: {path:?}");
|
||||
}
|
||||
|
||||
let writable = {
|
||||
#[allow(clippy::unwrap_used)]
|
||||
let roots = sess.writable_roots.lock().unwrap();
|
||||
roots.iter().any(|root| path.starts_with(root))
|
||||
};
|
||||
if writable {
|
||||
None
|
||||
} else {
|
||||
Some(path.to_path_buf())
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(offending) = offending_opt {
|
||||
let root = offending.parent().unwrap_or(&offending).to_path_buf();
|
||||
|
||||
let reason = Some(format!(
|
||||
"grant write access to {} for this session",
|
||||
root.display()
|
||||
));
|
||||
let rx = sess
|
||||
.request_patch_approval(
|
||||
sub_id.clone(),
|
||||
call_id.clone(),
|
||||
&action,
|
||||
reason.clone(),
|
||||
Some(root.clone()),
|
||||
)
|
||||
.await;
|
||||
if matches!(
|
||||
rx.await.unwrap_or_default(),
|
||||
ReviewDecision::Approved | ReviewDecision::ApprovedForSession
|
||||
) {
|
||||
// Extend writable roots.
|
||||
#[allow(clippy::unwrap_used)]
|
||||
sess.writable_roots.lock().unwrap().push(root);
|
||||
stdout.clear();
|
||||
stderr.clear();
|
||||
result = apply_changes_from_apply_patch_and_report(
|
||||
&action,
|
||||
&mut stdout,
|
||||
&mut stderr,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Emit PatchApplyEnd event.
|
||||
let success_flag = result.is_ok();
|
||||
let _ = sess
|
||||
.tx_event
|
||||
.send(Event {
|
||||
id: sub_id.clone(),
|
||||
msg: EventMsg::PatchApplyEnd(PatchApplyEndEvent {
|
||||
call_id: call_id.clone(),
|
||||
stdout: String::from_utf8_lossy(&stdout).to_string(),
|
||||
stderr: String::from_utf8_lossy(&stderr).to_string(),
|
||||
success: success_flag,
|
||||
}),
|
||||
})
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Ok(_) => ResponseInputItem::FunctionCallOutput {
|
||||
call_id,
|
||||
output: FunctionCallOutputPayload {
|
||||
content: String::from_utf8_lossy(&stdout).to_string(),
|
||||
success: None,
|
||||
},
|
||||
},
|
||||
Err(e) => ResponseInputItem::FunctionCallOutput {
|
||||
call_id,
|
||||
output: FunctionCallOutputPayload {
|
||||
content: format!("error: {e:#}, stderr: {}", String::from_utf8_lossy(&stderr)),
|
||||
success: Some(false),
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the first path in `hunks` that is NOT under any of the
|
||||
/// `writable_roots` (after normalising). If all paths are acceptable,
|
||||
/// returns None.
|
||||
fn first_offending_path(
|
||||
action: &ApplyPatchAction,
|
||||
writable_roots: &[PathBuf],
|
||||
cwd: &Path,
|
||||
) -> Option<PathBuf> {
|
||||
let changes = action.changes();
|
||||
for (path, change) in changes {
|
||||
let candidate = match change {
|
||||
ApplyPatchFileChange::Add { .. } => path,
|
||||
ApplyPatchFileChange::Delete => path,
|
||||
ApplyPatchFileChange::Update { move_path, .. } => move_path.as_ref().unwrap_or(path),
|
||||
};
|
||||
|
||||
let abs = if candidate.is_absolute() {
|
||||
candidate.clone()
|
||||
} else {
|
||||
cwd.join(candidate)
|
||||
};
|
||||
|
||||
let mut allowed = false;
|
||||
for root in writable_roots {
|
||||
let root_abs = if root.is_absolute() {
|
||||
root.clone()
|
||||
} else {
|
||||
cwd.join(root)
|
||||
};
|
||||
if abs.starts_with(&root_abs) {
|
||||
allowed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !allowed {
|
||||
return Some(candidate.clone());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub(crate) fn convert_apply_patch_to_protocol(
|
||||
action: &ApplyPatchAction,
|
||||
) -> HashMap<PathBuf, FileChange> {
|
||||
let changes = action.changes();
|
||||
let mut result = HashMap::with_capacity(changes.len());
|
||||
for (path, change) in changes {
|
||||
let protocol_change = match change {
|
||||
ApplyPatchFileChange::Add { content } => FileChange::Add {
|
||||
content: content.clone(),
|
||||
},
|
||||
ApplyPatchFileChange::Delete => FileChange::Delete,
|
||||
ApplyPatchFileChange::Update {
|
||||
unified_diff,
|
||||
move_path,
|
||||
new_content: _new_content,
|
||||
} => FileChange::Update {
|
||||
unified_diff: unified_diff.clone(),
|
||||
move_path: move_path.clone(),
|
||||
},
|
||||
};
|
||||
result.insert(path.clone(), protocol_change);
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn apply_changes_from_apply_patch_and_report(
|
||||
action: &ApplyPatchAction,
|
||||
stdout: &mut impl std::io::Write,
|
||||
stderr: &mut impl std::io::Write,
|
||||
) -> std::io::Result<()> {
|
||||
match apply_changes_from_apply_patch(action) {
|
||||
Ok(affected_paths) => {
|
||||
print_summary(&affected_paths, stdout)?;
|
||||
}
|
||||
Err(err) => {
|
||||
writeln!(stderr, "{err:?}")?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn apply_changes_from_apply_patch(action: &ApplyPatchAction) -> anyhow::Result<AffectedPaths> {
|
||||
let mut added: Vec<PathBuf> = Vec::new();
|
||||
let mut modified: Vec<PathBuf> = Vec::new();
|
||||
let mut deleted: Vec<PathBuf> = Vec::new();
|
||||
|
||||
let changes = action.changes();
|
||||
for (path, change) in changes {
|
||||
match change {
|
||||
ApplyPatchFileChange::Add { content } => {
|
||||
if let Some(parent) = path.parent() {
|
||||
if !parent.as_os_str().is_empty() {
|
||||
std::fs::create_dir_all(parent).with_context(|| {
|
||||
format!("Failed to create parent directories for {}", path.display())
|
||||
})?;
|
||||
}
|
||||
}
|
||||
std::fs::write(path, content)
|
||||
.with_context(|| format!("Failed to write file {}", path.display()))?;
|
||||
added.push(path.clone());
|
||||
}
|
||||
ApplyPatchFileChange::Delete => {
|
||||
std::fs::remove_file(path)
|
||||
.with_context(|| format!("Failed to delete file {}", path.display()))?;
|
||||
deleted.push(path.clone());
|
||||
}
|
||||
ApplyPatchFileChange::Update {
|
||||
unified_diff: _unified_diff,
|
||||
move_path,
|
||||
new_content,
|
||||
} => {
|
||||
if let Some(move_path) = move_path {
|
||||
if let Some(parent) = move_path.parent() {
|
||||
if !parent.as_os_str().is_empty() {
|
||||
std::fs::create_dir_all(parent).with_context(|| {
|
||||
format!(
|
||||
"Failed to create parent directories for {}",
|
||||
move_path.display()
|
||||
)
|
||||
})?;
|
||||
}
|
||||
}
|
||||
|
||||
std::fs::rename(path, move_path)
|
||||
.with_context(|| format!("Failed to rename file {}", path.display()))?;
|
||||
std::fs::write(move_path, new_content)?;
|
||||
modified.push(move_path.clone());
|
||||
deleted.push(path.clone());
|
||||
} else {
|
||||
std::fs::write(path, new_content)?;
|
||||
modified.push(path.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(AffectedPaths {
|
||||
added,
|
||||
modified,
|
||||
deleted,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn get_writable_roots(cwd: &Path) -> Vec<PathBuf> {
|
||||
let mut writable_roots = Vec::new();
|
||||
if cfg!(target_os = "macos") {
|
||||
// On macOS, $TMPDIR is private to the user.
|
||||
writable_roots.push(std::env::temp_dir());
|
||||
|
||||
// Allow pyenv to update its shims directory. Without this, any tool
|
||||
// that happens to be managed by `pyenv` will fail with an error like:
|
||||
//
|
||||
// pyenv: cannot rehash: $HOME/.pyenv/shims isn't writable
|
||||
//
|
||||
// which is emitted every time `pyenv` tries to run `rehash` (for
|
||||
// example, after installing a new Python package that drops an entry
|
||||
// point). Although the sandbox is intentionally read‑only by default,
|
||||
// writing to the user's local `pyenv` directory is safe because it
|
||||
// is already user‑writable and scoped to the current user account.
|
||||
if let Ok(home_dir) = std::env::var("HOME") {
|
||||
let pyenv_dir = PathBuf::from(home_dir).join(".pyenv");
|
||||
writable_roots.push(pyenv_dir);
|
||||
}
|
||||
}
|
||||
|
||||
writable_roots.push(cwd.to_path_buf());
|
||||
|
||||
writable_roots
|
||||
}
|
||||
219  codex-rs/core/src/bash.rs (Normal file)
@@ -0,0 +1,219 @@
|
||||
use tree_sitter::Parser;
|
||||
use tree_sitter::Tree;
|
||||
use tree_sitter_bash::LANGUAGE as BASH;
|
||||
|
||||
/// Parse the provided bash source using tree-sitter-bash, returning a Tree on
|
||||
/// success or None if parsing failed.
|
||||
pub fn try_parse_bash(bash_lc_arg: &str) -> Option<Tree> {
|
||||
let lang = BASH.into();
|
||||
let mut parser = Parser::new();
|
||||
#[expect(clippy::expect_used)]
|
||||
parser.set_language(&lang).expect("load bash grammar");
|
||||
let old_tree: Option<&Tree> = None;
|
||||
parser.parse(bash_lc_arg, old_tree)
|
||||
}
|
||||
|
||||
/// Parse a script which may contain multiple simple commands joined only by
|
||||
/// the safe logical/pipe/sequencing operators: `&&`, `||`, `;`, `|`.
|
||||
///
|
||||
/// Returns `Some(Vec<command_words>)` if every command is a plain word‑only
|
||||
/// command and the parse tree does not contain disallowed constructs
|
||||
/// (parentheses, redirections, substitutions, control flow, etc.). Otherwise
|
||||
/// returns `None`.
|
||||
pub fn try_parse_word_only_commands_sequence(tree: &Tree, src: &str) -> Option<Vec<Vec<String>>> {
|
||||
if tree.root_node().has_error() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// List of allowed (named) node kinds for a "word only commands sequence".
|
||||
// If we encounter a named node that is not in this list we reject.
|
||||
const ALLOWED_KINDS: &[&str] = &[
|
||||
// top level containers
|
||||
"program",
|
||||
"list",
|
||||
"pipeline",
|
||||
// commands & words
|
||||
"command",
|
||||
"command_name",
|
||||
"word",
|
||||
"string",
|
||||
"string_content",
"raw_string",
"number",
];
// Allow only safe punctuation / operator tokens; anything else causes reject.
const ALLOWED_PUNCT_TOKENS: &[&str] = &["&&", "||", ";", "|", "\"", "'"];

let root = tree.root_node();
let mut cursor = root.walk();
let mut stack = vec![root];
let mut command_nodes = Vec::new();
while let Some(node) = stack.pop() {
let kind = node.kind();
if node.is_named() {
if !ALLOWED_KINDS.contains(&kind) {
return None;
}
if kind == "command" {
command_nodes.push(node);
}
} else {
// Reject any punctuation / operator tokens that are not explicitly allowed.
if kind.chars().any(|c| "&;|".contains(c)) && !ALLOWED_PUNCT_TOKENS.contains(&kind) {
return None;
}
if !(ALLOWED_PUNCT_TOKENS.contains(&kind) || kind.trim().is_empty()) {
// If it's a quote token or operator it's allowed above; we also allow whitespace tokens.
// Any other punctuation like parentheses, braces, redirects, backticks, etc are rejected.
return None;
}
}
for child in node.children(&mut cursor) {
stack.push(child);
}
}

let mut commands = Vec::new();
for node in command_nodes {
if let Some(words) = parse_plain_command_from_node(node, src) {
commands.push(words);
} else {
return None;
}
}
Some(commands)
}

fn parse_plain_command_from_node(cmd: tree_sitter::Node, src: &str) -> Option<Vec<String>> {
if cmd.kind() != "command" {
return None;
}
let mut words = Vec::new();
let mut cursor = cmd.walk();
for child in cmd.named_children(&mut cursor) {
match child.kind() {
"command_name" => {
let word_node = child.named_child(0)?;
if word_node.kind() != "word" {
return None;
}
words.push(word_node.utf8_text(src.as_bytes()).ok()?.to_owned());
}
"word" | "number" => {
words.push(child.utf8_text(src.as_bytes()).ok()?.to_owned());
}
"string" => {
if child.child_count() == 3
&& child.child(0)?.kind() == "\""
&& child.child(1)?.kind() == "string_content"
&& child.child(2)?.kind() == "\""
{
words.push(child.child(1)?.utf8_text(src.as_bytes()).ok()?.to_owned());
} else {
return None;
}
}
"raw_string" => {
let raw_string = child.utf8_text(src.as_bytes()).ok()?;
let stripped = raw_string
.strip_prefix('\'')
.and_then(|s| s.strip_suffix('\''));
if let Some(s) = stripped {
words.push(s.to_owned());
} else {
return None;
}
}
_ => return None,
}
}
Some(words)
}

#[cfg(test)]
mod tests {
#![allow(clippy::unwrap_used)]
use super::*;

fn parse_seq(src: &str) -> Option<Vec<Vec<String>>> {
let tree = try_parse_bash(src)?;
try_parse_word_only_commands_sequence(&tree, src)
}

#[test]
fn accepts_single_simple_command() {
let cmds = parse_seq("ls -1").unwrap();
assert_eq!(cmds, vec![vec!["ls".to_string(), "-1".to_string()]]);
}

#[test]
fn accepts_multiple_commands_with_allowed_operators() {
let src = "ls && pwd; echo 'hi there' | wc -l";
let cmds = parse_seq(src).unwrap();
let expected: Vec<Vec<String>> = vec![
vec!["wc".to_string(), "-l".to_string()],
vec!["echo".to_string(), "hi there".to_string()],
vec!["pwd".to_string()],
vec!["ls".to_string()],
];
assert_eq!(cmds, expected);
}

#[test]
fn extracts_double_and_single_quoted_strings() {
let cmds = parse_seq("echo \"hello world\"").unwrap();
assert_eq!(
cmds,
vec![vec!["echo".to_string(), "hello world".to_string()]]
);

let cmds2 = parse_seq("echo 'hi there'").unwrap();
assert_eq!(
cmds2,
vec![vec!["echo".to_string(), "hi there".to_string()]]
);
}

#[test]
fn accepts_numbers_as_words() {
let cmds = parse_seq("echo 123 456").unwrap();
assert_eq!(
cmds,
vec![vec![
"echo".to_string(),
"123".to_string(),
"456".to_string()
]]
);
}

#[test]
fn rejects_parentheses_and_subshells() {
assert!(parse_seq("(ls)").is_none());
assert!(parse_seq("ls || (pwd && echo hi)").is_none());
}

#[test]
fn rejects_redirections_and_unsupported_operators() {
assert!(parse_seq("ls > out.txt").is_none());
assert!(parse_seq("echo hi & echo bye").is_none());
}

#[test]
fn rejects_command_and_process_substitutions_and_expansions() {
assert!(parse_seq("echo $(pwd)").is_none());
assert!(parse_seq("echo `pwd`").is_none());
assert!(parse_seq("echo $HOME").is_none());
assert!(parse_seq("echo \"hi $USER\"").is_none());
}

#[test]
fn rejects_variable_assignment_prefix() {
assert!(parse_seq("FOO=bar ls").is_none());
}

#[test]
fn rejects_trailing_operator_parse_error() {
assert!(parse_seq("ls &&").is_none());
}
}
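A minimal sketch of how a caller might use these helpers together to vet a shell command, mirroring the parse_seq helper in the tests above; the wrapper name is_word_only_command is hypothetical and not part of the diff:

    // Hypothetical wrapper around the two helpers shown above.
    // Returns the parsed word-only commands when `src` contains nothing beyond
    // plain words, quoted strings, numbers and the allowed operators (&&, ||, ;, |).
    fn is_word_only_command(src: &str) -> Option<Vec<Vec<String>>> {
        let tree = try_parse_bash(src)?;
        try_parse_word_only_commands_sequence(&tree, src)
    }

    // Example: "ls && pwd" parses; anything with substitutions is rejected.
    assert!(is_word_only_command("ls && pwd").is_some());
    assert!(is_word_only_command("echo $(pwd)").is_none());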
@@ -41,7 +41,7 @@ pub(crate) async fn stream_chat_completions(

for item in &prompt.input {
match item {
ResponseItem::Message { role, content } => {
ResponseItem::Message { role, content, .. } => {
let mut text = String::new();
for c in content {
match c {
@@ -58,6 +58,7 @@ pub(crate) async fn stream_chat_completions(
name,
arguments,
call_id,
..
} => {
messages.push(json!({
"role": "assistant",
@@ -259,6 +260,7 @@ async fn process_chat_sse<S>(
content: vec![ContentItem::OutputText {
text: content.to_string(),
}],
id: None,
};

let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
@@ -300,6 +302,7 @@ async fn process_chat_sse<S>(
"tool_calls" if fn_call_state.active => {
// Build the FunctionCall response item.
let item = ResponseItem::FunctionCall {
id: None,
name: fn_call_state.name.clone().unwrap_or_else(|| "".to_string()),
arguments: fn_call_state.arguments.clone(),
call_id: fn_call_state.call_id.clone().unwrap_or_else(String::new),
@@ -402,6 +405,7 @@ where
}))) => {
if !this.cumulative.is_empty() {
let aggregated_item = crate::models::ResponseItem::Message {
id: None,
role: "assistant".to_string(),
content: vec![crate::models::ContentItem::OutputText {
text: std::mem::take(&mut this.cumulative),

@@ -117,6 +117,15 @@ impl ModelClient {
let full_instructions = prompt.get_full_instructions(&self.config.model);
let tools_json = create_tools_json_for_responses_api(prompt, &self.config.model)?;
let reasoning = create_reasoning_param_for_request(&self.config, self.effort, self.summary);

// Request encrypted COT if we are not storing responses,
// otherwise reasoning items will be referenced by ID
let include = if !prompt.store && reasoning.is_some() {
vec!["reasoning.encrypted_content".to_string()]
} else {
vec![]
};

let payload = ResponsesApiRequest {
model: &self.config.model,
instructions: &full_instructions,
@@ -125,10 +134,10 @@ impl ModelClient {
tool_choice: "auto",
parallel_tool_calls: false,
reasoning,
previous_response_id: prompt.prev_id.clone(),
store: prompt.store,
// TODO: make this configurable
stream: true,
include,
};

trace!(
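The include field above only requests encrypted chain-of-thought when responses are not stored server-side and a reasoning parameter is present. A standalone sketch of that decision, using an illustrative free function that is not part of the diff:

    // Illustrative only: mirrors the branch in ModelClient above.
    fn encrypted_cot_include(store: bool, has_reasoning: bool) -> Vec<String> {
        if !store && has_reasoning {
            vec!["reasoning.encrypted_content".to_string()]
        } else {
            Vec::new()
        }
    }

    assert_eq!(
        encrypted_cot_include(false, true),
        vec!["reasoning.encrypted_content".to_string()]
    );
    assert!(encrypted_cot_include(true, true).is_empty());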
@@ -22,8 +22,6 @@ const BASE_INSTRUCTIONS: &str = include_str!("../prompt.md");
pub struct Prompt {
/// Conversation context input items.
pub input: Vec<ResponseItem>,
/// Optional previous response ID (when storage is enabled).
pub prev_id: Option<String>,
/// Optional instructions from the user to amend to the built-in agent
/// instructions.
pub user_instructions: Option<String>,
@@ -133,11 +131,10 @@ pub(crate) struct ResponsesApiRequest<'a> {
pub(crate) tool_choice: &'static str,
pub(crate) parallel_tool_calls: bool,
pub(crate) reasoning: Option<Reasoning>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) previous_response_id: Option<String>,
/// true when using the Responses API.
pub(crate) store: bool,
pub(crate) stream: bool,
pub(crate) include: Vec<String>,
}

use crate::config::Config;

@@ -4,22 +4,17 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::atomic::AtomicU64;
use std::time::Duration;

use anyhow::Context;
use async_channel::Receiver;
use async_channel::Sender;
use codex_apply_patch::AffectedPaths;
use codex_apply_patch::ApplyPatchAction;
use codex_apply_patch::ApplyPatchFileChange;
use codex_apply_patch::MaybeApplyPatchVerified;
use codex_apply_patch::maybe_parse_apply_patch_verified;
use codex_apply_patch::print_summary;
use futures::prelude::*;
use mcp_types::CallToolResult;
use serde::Serialize;
@@ -34,7 +29,9 @@ use tracing::trace;
use tracing::warn;
use uuid::Uuid;

use crate::WireApi;
use crate::apply_patch::convert_apply_patch_to_protocol;
use crate::apply_patch::get_writable_roots;
use crate::apply_patch::{self};
use crate::client::ModelClient;
use crate::client_common::Prompt;
use crate::client_common::ResponseEvent;
@@ -72,11 +69,8 @@ use crate::protocol::EventMsg;
use crate::protocol::ExecApprovalRequestEvent;
use crate::protocol::ExecCommandBeginEvent;
use crate::protocol::ExecCommandEndEvent;
use crate::protocol::FileChange;
use crate::protocol::InputItem;
use crate::protocol::Op;
use crate::protocol::PatchApplyBeginEvent;
use crate::protocol::PatchApplyEndEvent;
use crate::protocol::ReviewDecision;
use crate::protocol::SandboxPolicy;
use crate::protocol::SessionConfiguredEvent;
@@ -85,7 +79,7 @@ use crate::protocol::TaskCompleteEvent;
use crate::rollout::RolloutRecorder;
use crate::safety::SafetyCheck;
use crate::safety::assess_command_safety;
use crate::safety::assess_patch_safety;
use crate::shell;
use crate::user_notification::UserNotification;
use crate::util::backoff;

@@ -97,11 +91,18 @@ pub struct Codex {
rx_event: Receiver<Event>,
}

/// Wrapper returned by [`Codex::spawn`] containing the spawned [`Codex`],
/// the submission id for the initial `ConfigureSession` request and the
/// unique session id.
pub struct CodexSpawnOk {
pub codex: Codex,
pub init_id: String,
pub session_id: Uuid,
}

impl Codex {
/// Spawn a new [`Codex`] and initialize the session. Returns the instance
/// of `Codex` and the ID of the `SessionInitialized` event that was
/// submitted to start the session.
pub async fn spawn(config: Config, ctrl_c: Arc<Notify>) -> CodexResult<(Codex, String, Uuid)> {
/// Spawn a new [`Codex`] and initialize the session.
pub async fn spawn(config: Config, ctrl_c: Arc<Notify>) -> CodexResult<CodexSpawnOk> {
// experimental resume path (undocumented)
let resume_path = config.experimental_resume.clone();
info!("resume_path: {resume_path:?}");
@@ -139,7 +140,11 @@ impl Codex {
};
let init_id = codex.submit(configure_session).await?;

Ok((codex, init_id, session_id))
Ok(CodexSpawnOk {
codex,
init_id,
session_id,
})
}
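Callers of Codex::spawn migrate from the old three-tuple to destructuring the new CodexSpawnOk; the same pattern appears in the codex_wrapper diff further down:

    // Before:
    // let (codex, init_id, session_id) = Codex::spawn(config, ctrl_c.clone()).await?;
    // After:
    let CodexSpawnOk {
        codex,
        init_id,
        session_id,
    } = Codex::spawn(config, ctrl_c.clone()).await?;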
/// Submit the `op` wrapped in a `Submission` with a unique ID.
@@ -178,19 +183,20 @@ impl Codex {
/// A session has at most 1 running task at a time, and can be interrupted by user input.
pub(crate) struct Session {
client: ModelClient,
tx_event: Sender<Event>,
pub(crate) tx_event: Sender<Event>,
ctrl_c: Arc<Notify>,

/// The session's current working directory. All relative paths provided by
/// the model as well as sandbox policies are resolved against this path
/// instead of `std::env::current_dir()`.
cwd: PathBuf,
pub(crate) cwd: PathBuf,
base_instructions: Option<String>,
user_instructions: Option<String>,
approval_policy: AskForApproval,
pub(crate) approval_policy: AskForApproval,
sandbox_policy: SandboxPolicy,
shell_environment_policy: ShellEnvironmentPolicy,
writable_roots: Mutex<Vec<PathBuf>>,
pub(crate) writable_roots: Mutex<Vec<PathBuf>>,
disable_response_storage: bool,

/// Manager for external MCP servers/tools.
mcp_connection_manager: McpConnectionManager,
@@ -204,6 +210,7 @@ pub(crate) struct Session {
rollout: Mutex<Option<RolloutRecorder>>,
state: Mutex<State>,
codex_linux_sandbox_exe: Option<PathBuf>,
user_shell: shell::Shell,
}

impl Session {
@@ -212,6 +219,23 @@ impl Session {
.map(PathBuf::from)
.map_or_else(|| self.cwd.clone(), |p| self.cwd.join(p))
}
/// Erases all previous messages from the conversation history (zdr_transcript), if present.
pub fn erase_conversation_history(&self) {
let mut state = self.state.lock().unwrap();
if let Some(transcript) = state.zdr_transcript.as_mut() {
transcript.clear();
}

// When using the experimental OpenAI Responses API with server-side
// storage enabled, `previous_response_id` is used to let the model
// access the earlier part of the conversation **without** having to
// resend the full transcript. To truly wipe all historical context
// we must drop this identifier as well, otherwise the backend will
// still be able to retrieve the prior messages via the ID even
// though our local transcript has been cleared. See
// https://platform.openai.com/docs/guides/responses for details.
state.previous_response_id = None;
}
}

/// Mutable state of the agent
@@ -219,13 +243,9 @@ impl Session {
struct State {
approved_commands: HashSet<Vec<String>>,
current_task: Option<AgentTask>,
/// Call IDs that have been sent from the Responses API but have not been sent back yet.
/// You CANNOT send a Responses API follow-up message unless you have sent back the output for all pending calls or else it will 400.
pending_call_ids: HashSet<String>,
previous_response_id: Option<String>,
pending_approvals: HashMap<String, oneshot::Sender<ReviewDecision>>,
pending_input: Vec<ResponseInputItem>,
zdr_transcript: Option<ConversationHistory>,
history: ConversationHistory,
}

impl Session {
@@ -257,6 +277,7 @@ impl Session {
pub async fn request_command_approval(
&self,
sub_id: String,
call_id: String,
command: Vec<String>,
cwd: PathBuf,
reason: Option<String>,
@@ -265,6 +286,7 @@ impl Session {
let event = Event {
id: sub_id.clone(),
msg: EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent {
call_id,
command,
cwd,
reason,
@@ -281,6 +303,7 @@ impl Session {
pub async fn request_patch_approval(
&self,
sub_id: String,
call_id: String,
action: &ApplyPatchAction,
reason: Option<String>,
grant_root: Option<PathBuf>,
@@ -289,6 +312,7 @@ impl Session {
let event = Event {
id: sub_id.clone(),
msg: EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
call_id,
changes: convert_apply_patch_to_protocol(action),
reason,
grant_root,
@@ -320,18 +344,11 @@ impl Session {
debug!("Recording items for conversation: {items:?}");
self.record_state_snapshot(items).await;

if let Some(transcript) = self.state.lock().unwrap().zdr_transcript.as_mut() {
transcript.record_items(items);
}
self.state.lock().unwrap().history.record_items(items);
}

async fn record_state_snapshot(&self, items: &[ResponseItem]) {
let snapshot = {
let state = self.state.lock().unwrap();
crate::rollout::SessionStateSnapshot {
previous_response_id: state.previous_response_id.clone(),
}
};
let snapshot = { crate::rollout::SessionStateSnapshot {} };

let recorder = {
let guard = self.rollout.lock().unwrap();
@@ -433,8 +450,6 @@ impl Session {
pub fn abort(&self) {
info!("Aborting existing session");
let mut state = self.state.lock().unwrap();
// Don't clear pending_call_ids because we need to keep track of them to ensure we don't 400 on the next turn.
// We will generate a synthetic aborted response for each pending call id.
state.pending_approvals.clear();
state.pending_input.clear();
if let Some(task) = state.current_task.take() {
@@ -479,15 +494,10 @@ impl Drop for Session {
}

impl State {
pub fn partial_clone(&self, retain_zdr_transcript: bool) -> Self {
pub fn partial_clone(&self) -> Self {
Self {
approved_commands: self.approved_commands.clone(),
previous_response_id: self.previous_response_id.clone(),
zdr_transcript: if retain_zdr_transcript {
self.zdr_transcript.clone()
} else {
None
},
history: self.history.clone(),
..Default::default()
}
}
@@ -565,6 +575,11 @@ async fn submission_loop(

debug!(?sub, "Submission");
match sub.op {
Op::EraseConversationHistory => {
if let Some(sess) = sess.as_ref() {
sess.erase_conversation_history();
}
}
Op::Interrupt => {
let sess = match sess.as_ref() {
Some(sess) => sess,
@@ -606,13 +621,11 @@ async fn submission_loop(
}
// Optionally resume an existing rollout.
let mut restored_items: Option<Vec<ResponseItem>> = None;
let mut restored_prev_id: Option<String> = None;
let rollout_recorder: Option<RolloutRecorder> =
if let Some(path) = resume_path.as_ref() {
match RolloutRecorder::resume(path).await {
match RolloutRecorder::resume(path, cwd.clone()).await {
Ok((rec, saved)) => {
session_id = saved.session_id;
restored_prev_id = saved.state.previous_response_id;
if !saved.items.is_empty() {
restored_items = Some(saved.items);
}
@@ -651,22 +664,13 @@ async fn submission_loop(
);

// abort any current running session and clone its state
let retain_zdr_transcript =
record_conversation_history(disable_response_storage, provider.wire_api);
let state = match sess.take() {
Some(sess) => {
sess.abort();
sess.state
.lock()
.unwrap()
.partial_clone(retain_zdr_transcript)
sess.state.lock().unwrap().partial_clone()
}
None => State {
zdr_transcript: if retain_zdr_transcript {
Some(ConversationHistory::new())
} else {
None
},
history: ConversationHistory::new(),
..Default::default()
},
};
@@ -701,6 +705,7 @@ async fn submission_loop(
});
}
}
let default_shell = shell::default_user_shell().await;
sess = Some(Arc::new(Session {
client,
tx_event: tx_event.clone(),
@@ -717,18 +722,15 @@ async fn submission_loop(
state: Mutex::new(state),
rollout: Mutex::new(rollout_recorder),
codex_linux_sandbox_exe: config.codex_linux_sandbox_exe.clone(),
disable_response_storage,
user_shell: default_shell,
}));

// Patch restored state into the newly created session.
if let Some(sess_arc) = &sess {
if restored_prev_id.is_some() || restored_items.is_some() {
if restored_items.is_some() {
let mut st = sess_arc.state.lock().unwrap();
st.previous_response_id = restored_prev_id;
if let (Some(hist), Some(items)) =
(st.zdr_transcript.as_mut(), restored_items.as_ref())
{
hist.record_items(items.iter());
}
st.history.record_items(restored_items.unwrap().iter());
}
}

@@ -841,6 +843,37 @@ async fn submission_loop(
}
});
}
Op::Shutdown => {
info!("Shutting down Codex instance");

// Gracefully flush and shutdown rollout recorder on session end so tests
// that inspect the rollout file do not race with the background writer.
if let Some(sess_arc) = sess {
let recorder_opt = sess_arc.rollout.lock().unwrap().take();
if let Some(rec) = recorder_opt {
if let Err(e) = rec.shutdown().await {
warn!("failed to shutdown rollout recorder: {e}");
let event = Event {
id: sub.id.clone(),
msg: EventMsg::Error(ErrorEvent {
message: "Failed to shutdown rollout recorder".to_string(),
}),
};
if let Err(e) = tx_event.send(event).await {
warn!("failed to send error message: {e:?}");
}
}
}
}
let event = Event {
id: sub.id.clone(),
msg: EventMsg::ShutdownComplete,
};
if let Err(e) = tx_event.send(event).await {
warn!("failed to send Shutdown event: {e}");
}
break;
}
}
}
debug!("Agent loop exited");
@@ -875,14 +908,8 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
sess.record_conversation_items(&[initial_input_for_turn.clone().into()])
.await;

let mut input_for_next_turn: Vec<ResponseInputItem> = vec![initial_input_for_turn];
let last_agent_message: Option<String>;
loop {
let mut net_new_turn_input = input_for_next_turn
.drain(..)
.map(ResponseItem::from)
.collect::<Vec<_>>();

// Note that pending_input would be something like a message the user
// submitted through the UI while the model was running. Though the UI
// may support this, the model might not.
@@ -899,29 +926,7 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
// only record the new items that originated in this turn so that it
// represents an append-only log without duplicates.
let turn_input: Vec<ResponseItem> =
if let Some(transcript) = sess.state.lock().unwrap().zdr_transcript.as_mut() {
// If we are using Chat/ZDR, we need to send the transcript with
// every turn. By induction, `transcript` already contains:
// - The `input` that kicked off this task.
// - Each `ResponseItem` that was recorded in the previous turn.
// - Each response to a `ResponseItem` (in practice, the only
// response type we seem to have is `FunctionCallOutput`).
//
// The only thing the `transcript` does not contain is the
// `pending_input` that was injected while the model was
// running. We need to add that to the conversation history
// so that the model can see it in the next turn.
[transcript.contents(), pending_input].concat()
} else {
// In practice, net_new_turn_input should contain only:
// - User messages
// - Outputs for function calls requested by the model
net_new_turn_input.extend(pending_input);

// Responses API path – we can just send the new items and
// record the same.
net_new_turn_input
};
[sess.state.lock().unwrap().history.contents(), pending_input].concat();

let turn_input_messages: Vec<String> = turn_input
.iter()
@@ -997,8 +1002,19 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
},
);
}
(ResponseItem::Reasoning { .. }, None) => {
// Omit from conversation history.
(
ResponseItem::Reasoning {
id,
summary,
encrypted_content,
},
None,
) => {
items_to_record_in_conversation_history.push(ResponseItem::Reasoning {
id: id.clone(),
summary: summary.clone(),
encrypted_content: encrypted_content.clone(),
});
}
_ => {
warn!("Unexpected response item: {item:?} with response: {response:?}");
@@ -1027,8 +1043,6 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
});
break;
}

input_for_next_turn = responses;
}
Err(e) => {
info!("Turn error: {e:#}");
@@ -1056,26 +1070,11 @@ async fn run_turn(
sub_id: String,
input: Vec<ResponseItem>,
) -> CodexResult<Vec<ProcessedResponseItem>> {
// Decide whether to use server-side storage (previous_response_id) or disable it
let (prev_id, store) = {
let state = sess.state.lock().unwrap();
let store = state.zdr_transcript.is_none();
let prev_id = if store {
state.previous_response_id.clone()
} else {
// When using ZDR, the Responses API may send previous_response_id
// back, but trying to use it results in a 400.
None
};
(prev_id, store)
};

let extra_tools = sess.mcp_connection_manager.list_all_tools();
let prompt = Prompt {
input,
prev_id,
user_instructions: sess.user_instructions.clone(),
store,
store: !sess.disable_response_storage,
extra_tools,
base_instructions_override: sess.base_instructions.clone(),
};
@@ -1149,11 +1148,17 @@ async fn try_run_turn(
// This usually happens because the user interrupted the model before we responded to one of its tool calls
// and then the user sent a follow-up message.
let missing_calls = {
sess.state
.lock()
.unwrap()
.pending_call_ids
prompt
.input
.iter()
.filter_map(|ri| match ri {
ResponseItem::FunctionCall { call_id, .. } => Some(call_id),
ResponseItem::LocalShellCall {
call_id: Some(call_id),
..
} => Some(call_id),
_ => None,
})
.filter_map(|call_id| {
if completed_call_ids.contains(&call_id) {
None
@@ -1207,31 +1212,14 @@ async fn try_run_turn(
};

match event {
ResponseEvent::Created => {
let mut state = sess.state.lock().unwrap();
// We successfully created a new response and ensured that all pending calls were included so we can clear the pending call ids.
state.pending_call_ids.clear();
}
ResponseEvent::Created => {}
ResponseEvent::OutputItemDone(item) => {
let call_id = match &item {
ResponseItem::LocalShellCall {
call_id: Some(call_id),
..
} => Some(call_id),
ResponseItem::FunctionCall { call_id, .. } => Some(call_id),
_ => None,
};
if let Some(call_id) = call_id {
// We just got a new call id so we need to make sure to respond to it in the next turn.
let mut state = sess.state.lock().unwrap();
state.pending_call_ids.insert(call_id.clone());
}
let response = handle_response_item(sess, sub_id, item.clone()).await?;

output.push(ProcessedResponseItem { item, response });
}
ResponseEvent::Completed {
response_id,
response_id: _,
token_usage,
} => {
if let Some(token_usage) = token_usage {
@@ -1244,8 +1232,6 @@ async fn try_run_turn(
.ok();
}

let mut state = sess.state.lock().unwrap();
state.previous_response_id = Some(response_id);
return Ok(output);
}
ResponseEvent::OutputTextDelta(delta) => {
@@ -1285,7 +1271,7 @@ async fn handle_response_item(
}
None
}
ResponseItem::Reasoning { id: _, summary } => {
ResponseItem::Reasoning { summary, .. } => {
for item in summary {
let text = match item {
ReasoningItemReasoningSummary::SummaryText { text } => text,
@@ -1302,6 +1288,7 @@ async fn handle_response_item(
name,
arguments,
call_id,
..
} => {
info!("FunctionCall: {arguments}");
Some(handle_function_call(sess, sub_id.to_string(), name, arguments, call_id).await)
@@ -1427,6 +1414,18 @@ fn parse_container_exec_arguments(
}
}

fn maybe_run_with_user_profile(params: ExecParams, sess: &Session) -> ExecParams {
if sess.shell_environment_policy.use_profile {
let command = sess
.user_shell
.format_default_shell_invocation(params.command.clone());
if let Some(command) = command {
return ExecParams { command, ..params };
}
}
params
}
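A standalone sketch of the profile-wrapping idea behind maybe_run_with_user_profile. The real wrapping lives in shell::Shell::format_default_shell_invocation, which is not shown in this diff, so the stand-in function and the zsh -lc form below are assumptions, not the actual implementation:

    // Hypothetical stand-in for shell::Shell::format_default_shell_invocation.
    fn format_with_login_shell(command: &[String]) -> Option<Vec<String>> {
        // Naive join for illustration; a real implementation would quote arguments.
        Some(vec![
            "/bin/zsh".to_string(),
            "-lc".to_string(),
            command.join(" "),
        ])
    }

    fn main() {
        let original = vec!["ls".to_string(), "-la".to_string()];
        if let Some(wrapped) = format_with_login_shell(&original) {
            // In maybe_run_with_user_profile the wrapped argv replaces params.command,
            // while every other ExecParams field is kept via `..params`.
            println!("{wrapped:?}");
        }
    }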
async fn handle_container_exec_with_params(
params: ExecParams,
sess: &Session,
@@ -1436,7 +1435,7 @@ async fn handle_container_exec_with_params(
// check if this was a patch, and apply it if so
match maybe_parse_apply_patch_verified(&params.command, &params.cwd) {
MaybeApplyPatchVerified::Body(changes) => {
return apply_patch(sess, sub_id, call_id, changes).await;
return apply_patch::apply_patch(sess, sub_id, call_id, changes).await;
}
MaybeApplyPatchVerified::CorrectnessError(parse_error) => {
// It looks like an invocation of `apply_patch`, but we
@@ -1472,6 +1471,7 @@ async fn handle_container_exec_with_params(
let rx_approve = sess
.request_command_approval(
sub_id.clone(),
call_id.clone(),
params.command.clone(),
params.cwd.clone(),
None,
@@ -1512,6 +1512,7 @@ async fn handle_container_exec_with_params(
sess.notify_exec_command_begin(&sub_id, &call_id, &params)
.await;

let params = maybe_run_with_user_profile(params, sess);
let output_result = process_exec_tool_call(
params.clone(),
sandbox_type,
@@ -1599,6 +1600,7 @@ async fn handle_sandbox_error(
let rx_approve = sess
.request_command_approval(
sub_id.clone(),
call_id.clone(),
params.command.clone(),
params.cwd.clone(),
Some("command failed; retry without sandbox?".to_string()),
@@ -1616,9 +1618,7 @@ async fn handle_sandbox_error(
sess.notify_background_event(&sub_id, "retrying command without sandbox")
.await;

// Emit a fresh Begin event so progress bars reset.
let retry_call_id = format!("{call_id}-retry");
sess.notify_exec_command_begin(&sub_id, &retry_call_id, &params)
sess.notify_exec_command_begin(&sub_id, &call_id, &params)
.await;

// This is an escalated retry; the policy will not be
@@ -1641,14 +1641,8 @@ async fn handle_sandbox_error(
duration,
} = retry_output;

sess.notify_exec_command_end(
&sub_id,
&retry_call_id,
&stdout,
&stderr,
exit_code,
)
.await;
sess.notify_exec_command_end(&sub_id, &call_id, &stdout, &stderr, exit_code)
.await;

let is_success = exit_code == 0;
let content = format_exec_output(
@@ -1690,377 +1684,6 @@ async fn handle_sandbox_error(
}
}

async fn apply_patch(
sess: &Session,
sub_id: String,
call_id: String,
action: ApplyPatchAction,
) -> ResponseInputItem {
let writable_roots_snapshot = {
let guard = sess.writable_roots.lock().unwrap();
guard.clone()
};

let auto_approved = match assess_patch_safety(
&action,
sess.approval_policy,
&writable_roots_snapshot,
&sess.cwd,
) {
SafetyCheck::AutoApprove { .. } => true,
SafetyCheck::AskUser => {
// Compute a readable summary of path changes to include in the
// approval request so the user can make an informed decision.
let rx_approve = sess
.request_patch_approval(sub_id.clone(), &action, None, None)
.await;
match rx_approve.await.unwrap_or_default() {
ReviewDecision::Approved | ReviewDecision::ApprovedForSession => false,
ReviewDecision::Denied | ReviewDecision::Abort => {
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: "patch rejected by user".to_string(),
success: Some(false),
},
};
}
}
}
SafetyCheck::Reject { reason } => {
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("patch rejected: {reason}"),
success: Some(false),
},
};
}
};

// Verify write permissions before touching the filesystem.
let writable_snapshot = { sess.writable_roots.lock().unwrap().clone() };

if let Some(offending) = first_offending_path(&action, &writable_snapshot, &sess.cwd) {
let root = offending.parent().unwrap_or(&offending).to_path_buf();

let reason = Some(format!(
"grant write access to {} for this session",
root.display()
));

let rx = sess
.request_patch_approval(sub_id.clone(), &action, reason.clone(), Some(root.clone()))
.await;

if !matches!(
rx.await.unwrap_or_default(),
ReviewDecision::Approved | ReviewDecision::ApprovedForSession
) {
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: "patch rejected by user".to_string(),
success: Some(false),
},
};
}

// user approved, extend writable roots for this session
sess.writable_roots.lock().unwrap().push(root);
}

let _ = sess
.tx_event
.send(Event {
id: sub_id.clone(),
msg: EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
call_id: call_id.clone(),
auto_approved,
changes: convert_apply_patch_to_protocol(&action),
}),
})
.await;

let mut stdout = Vec::new();
let mut stderr = Vec::new();
// Enforce writable roots. If a write is blocked, collect offending root
// and prompt the user to extend permissions.
let mut result = apply_changes_from_apply_patch_and_report(&action, &mut stdout, &mut stderr);

if let Err(err) = &result {
if err.kind() == std::io::ErrorKind::PermissionDenied {
// Determine first offending path.
let offending_opt = action
.changes()
.iter()
.flat_map(|(path, change)| match change {
ApplyPatchFileChange::Add { .. } => vec![path.as_ref()],
ApplyPatchFileChange::Delete => vec![path.as_ref()],
ApplyPatchFileChange::Update {
move_path: Some(move_path),
..
} => {
vec![path.as_ref(), move_path.as_ref()]
}
ApplyPatchFileChange::Update {
move_path: None, ..
} => vec![path.as_ref()],
})
.find_map(|path: &Path| {
// ApplyPatchAction promises to guarantee absolute paths.
if !path.is_absolute() {
panic!("apply_patch invariant failed: path is not absolute: {path:?}");
}

let writable = {
let roots = sess.writable_roots.lock().unwrap();
roots.iter().any(|root| path.starts_with(root))
};
if writable {
None
} else {
Some(path.to_path_buf())
}
});

if let Some(offending) = offending_opt {
let root = offending.parent().unwrap_or(&offending).to_path_buf();

let reason = Some(format!(
"grant write access to {} for this session",
root.display()
));
let rx = sess
.request_patch_approval(
sub_id.clone(),
&action,
reason.clone(),
Some(root.clone()),
)
.await;
if matches!(
rx.await.unwrap_or_default(),
ReviewDecision::Approved | ReviewDecision::ApprovedForSession
) {
// Extend writable roots.
sess.writable_roots.lock().unwrap().push(root);
stdout.clear();
stderr.clear();
result = apply_changes_from_apply_patch_and_report(
&action,
&mut stdout,
&mut stderr,
);
}
}
}
}

// Emit PatchApplyEnd event.
let success_flag = result.is_ok();
let _ = sess
.tx_event
.send(Event {
id: sub_id.clone(),
msg: EventMsg::PatchApplyEnd(PatchApplyEndEvent {
call_id: call_id.clone(),
stdout: String::from_utf8_lossy(&stdout).to_string(),
stderr: String::from_utf8_lossy(&stderr).to_string(),
success: success_flag,
}),
})
.await;

match result {
Ok(_) => ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: String::from_utf8_lossy(&stdout).to_string(),
success: None,
},
},
Err(e) => ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("error: {e:#}, stderr: {}", String::from_utf8_lossy(&stderr)),
success: Some(false),
},
},
}
}

/// Return the first path in `hunks` that is NOT under any of the
/// `writable_roots` (after normalising). If all paths are acceptable,
/// returns None.
fn first_offending_path(
action: &ApplyPatchAction,
writable_roots: &[PathBuf],
cwd: &Path,
) -> Option<PathBuf> {
let changes = action.changes();
for (path, change) in changes {
let candidate = match change {
ApplyPatchFileChange::Add { .. } => path,
ApplyPatchFileChange::Delete => path,
ApplyPatchFileChange::Update { move_path, .. } => move_path.as_ref().unwrap_or(path),
};

let abs = if candidate.is_absolute() {
candidate.clone()
} else {
cwd.join(candidate)
};

let mut allowed = false;
for root in writable_roots {
let root_abs = if root.is_absolute() {
root.clone()
} else {
cwd.join(root)
};
if abs.starts_with(&root_abs) {
allowed = true;
break;
}
}

if !allowed {
return Some(candidate.clone());
}
}
None
}

fn convert_apply_patch_to_protocol(action: &ApplyPatchAction) -> HashMap<PathBuf, FileChange> {
let changes = action.changes();
let mut result = HashMap::with_capacity(changes.len());
for (path, change) in changes {
let protocol_change = match change {
ApplyPatchFileChange::Add { content } => FileChange::Add {
content: content.clone(),
},
ApplyPatchFileChange::Delete => FileChange::Delete,
ApplyPatchFileChange::Update {
unified_diff,
move_path,
new_content: _new_content,
} => FileChange::Update {
unified_diff: unified_diff.clone(),
move_path: move_path.clone(),
},
};
result.insert(path.clone(), protocol_change);
}
result
}

fn apply_changes_from_apply_patch_and_report(
action: &ApplyPatchAction,
stdout: &mut impl std::io::Write,
stderr: &mut impl std::io::Write,
) -> std::io::Result<()> {
match apply_changes_from_apply_patch(action) {
Ok(affected_paths) => {
print_summary(&affected_paths, stdout)?;
}
Err(err) => {
writeln!(stderr, "{err:?}")?;
}
}

Ok(())
}

fn apply_changes_from_apply_patch(action: &ApplyPatchAction) -> anyhow::Result<AffectedPaths> {
let mut added: Vec<PathBuf> = Vec::new();
let mut modified: Vec<PathBuf> = Vec::new();
let mut deleted: Vec<PathBuf> = Vec::new();

let changes = action.changes();
for (path, change) in changes {
match change {
ApplyPatchFileChange::Add { content } => {
if let Some(parent) = path.parent() {
if !parent.as_os_str().is_empty() {
std::fs::create_dir_all(parent).with_context(|| {
format!("Failed to create parent directories for {}", path.display())
})?;
}
}
std::fs::write(path, content)
.with_context(|| format!("Failed to write file {}", path.display()))?;
added.push(path.clone());
}
ApplyPatchFileChange::Delete => {
std::fs::remove_file(path)
.with_context(|| format!("Failed to delete file {}", path.display()))?;
deleted.push(path.clone());
}
ApplyPatchFileChange::Update {
unified_diff: _unified_diff,
move_path,
new_content,
} => {
if let Some(move_path) = move_path {
if let Some(parent) = move_path.parent() {
if !parent.as_os_str().is_empty() {
std::fs::create_dir_all(parent).with_context(|| {
format!(
"Failed to create parent directories for {}",
move_path.display()
)
})?;
}
}

std::fs::rename(path, move_path)
.with_context(|| format!("Failed to rename file {}", path.display()))?;
std::fs::write(move_path, new_content)?;
modified.push(move_path.clone());
deleted.push(path.clone());
} else {
std::fs::write(path, new_content)?;
modified.push(path.clone());
}
}
}
}

Ok(AffectedPaths {
added,
modified,
deleted,
})
}

fn get_writable_roots(cwd: &Path) -> Vec<PathBuf> {
let mut writable_roots = Vec::new();
if cfg!(target_os = "macos") {
// On macOS, $TMPDIR is private to the user.
writable_roots.push(std::env::temp_dir());

// Allow pyenv to update its shims directory. Without this, any tool
// that happens to be managed by `pyenv` will fail with an error like:
//
// pyenv: cannot rehash: $HOME/.pyenv/shims isn't writable
//
// which is emitted every time `pyenv` tries to run `rehash` (for
// example, after installing a new Python package that drops an entry
// point). Although the sandbox is intentionally read‑only by default,
// writing to the user's local `pyenv` directory is safe because it
// is already user‑writable and scoped to the current user account.
if let Ok(home_dir) = std::env::var("HOME") {
let pyenv_dir = PathBuf::from(home_dir).join(".pyenv");
writable_roots.push(pyenv_dir);
}
}

writable_roots.push(cwd.to_path_buf());

writable_roots
}

/// Exec output is a pre-serialized JSON payload
fn format_exec_output(output: &str, exit_code: i32, duration: Duration) -> String {
#[derive(Serialize)]
@@ -2092,7 +1715,7 @@ fn format_exec_output(output: &str, exit_code: i32, duration: Duration) -> Strin

fn get_last_assistant_message_from_turn(responses: &[ResponseItem]) -> Option<String> {
responses.iter().rev().find_map(|item| {
if let ResponseItem::Message { role, content } = item {
if let ResponseItem::Message { role, content, .. } = item {
if role == "assistant" {
content.iter().rev().find_map(|ci| {
if let ContentItem::OutputText { text } = ci {
@@ -2109,15 +1732,3 @@ fn get_last_assistant_message_from_turn(responses: &[ResponseItem]) -> Option<St
}
})
}

/// See [`ConversationHistory`] for details.
fn record_conversation_history(disable_response_storage: bool, wire_api: WireApi) -> bool {
if disable_response_storage {
return true;
}

match wire_api {
WireApi::Responses => false,
WireApi::Chat => true,
}
}

@@ -1,6 +1,7 @@
use std::sync::Arc;

use crate::Codex;
use crate::CodexSpawnOk;
use crate::config::Config;
use crate::protocol::Event;
use crate::protocol::EventMsg;
@@ -8,14 +9,27 @@ use crate::util::notify_on_sigint;
use tokio::sync::Notify;
use uuid::Uuid;

/// Represents an active Codex conversation, including the first event
/// (which is [`EventMsg::SessionConfigured`]).
pub struct CodexConversation {
pub codex: Codex,
pub session_id: Uuid,
pub session_configured: Event,
pub ctrl_c: Arc<Notify>,
}

/// Spawn a new [`Codex`] and initialize the session.
///
/// Returns the wrapped [`Codex`] **and** the `SessionInitialized` event that
/// is received as a response to the initial `ConfigureSession` submission so
/// that callers can surface the information to the UI.
pub async fn init_codex(config: Config) -> anyhow::Result<(Codex, Event, Arc<Notify>, Uuid)> {
pub async fn init_codex(config: Config) -> anyhow::Result<CodexConversation> {
let ctrl_c = notify_on_sigint();
let (codex, init_id, session_id) = Codex::spawn(config, ctrl_c.clone()).await?;
let CodexSpawnOk {
codex,
init_id,
session_id,
} = Codex::spawn(config, ctrl_c.clone()).await?;

// The first event must be `SessionInitialized`. Validate and forward it to
// the caller so that they can display it in the conversation history.
@@ -34,5 +48,10 @@ pub async fn init_codex(config: Config) -> anyhow::Result<(Codex, Event, Arc<Not
));
}

Ok((codex, event, ctrl_c, session_id))
Ok(CodexConversation {
codex,
session_id,
session_configured: event,
ctrl_c,
})
}

@@ -561,7 +561,7 @@ fn default_model() -> String {
/// function will Err if the path does not exist.
/// - If `CODEX_HOME` is not set, this function does not verify that the
/// directory exists.
fn find_codex_home() -> std::io::Result<PathBuf> {
pub fn find_codex_home() -> std::io::Result<PathBuf> {
// Honor the `CODEX_HOME` environment variable when it is set to allow users
// (and tests) to override the default location.
if let Ok(val) = std::env::var("CODEX_HOME") {

@@ -76,20 +76,7 @@ pub enum HistoryPersistence {

/// Collection of settings that are specific to the TUI.
#[derive(Deserialize, Debug, Clone, PartialEq, Default)]
pub struct Tui {
/// By default, mouse capture is enabled in the TUI so that it is possible
/// to scroll the conversation history with a mouse. This comes at the cost
/// of not being able to use the mouse to select text in the TUI.
/// (Most terminals support a modifier key to allow this. For example,
/// text selection works in iTerm if you hold down the `Option` key while
/// clicking and dragging.)
///
/// Setting this option to `true` disables mouse capture, so scrolling with
/// the mouse is not possible, though the keyboard shortcuts e.g. `b` and
/// `space` still work. This allows the user to select text in the TUI
/// using the mouse without needing to hold down a modifier key.
pub disable_mouse_capture: bool,
}
pub struct Tui {}

#[derive(Deserialize, Debug, Clone, Copy, PartialEq, Default)]
#[serde(rename_all = "kebab-case")]
@@ -143,6 +130,8 @@ pub struct ShellEnvironmentPolicyToml {

/// List of regular expressions.
pub include_only: Option<Vec<String>>,

pub experimental_use_profile: Option<bool>,
}

pub type EnvironmentVariablePattern = WildMatchPattern<'*', '?'>;
@@ -171,6 +160,9 @@ pub struct ShellEnvironmentPolicy {

/// Environment variable names to retain in the environment.
pub include_only: Vec<EnvironmentVariablePattern>,

/// If true, the shell profile will be used to run the command.
pub use_profile: bool,
}

impl From<ShellEnvironmentPolicyToml> for ShellEnvironmentPolicy {
@@ -190,6 +182,7 @@ impl From<ShellEnvironmentPolicyToml> for ShellEnvironmentPolicy {
.into_iter()
.map(|s| EnvironmentVariablePattern::new_case_insensitive(&s))
.collect();
let use_profile = toml.experimental_use_profile.unwrap_or(false);

Self {
inherit,
@@ -197,6 +190,7 @@ impl From<ShellEnvironmentPolicyToml> for ShellEnvironmentPolicy {
exclude,
r#set,
include_only,
use_profile,
}
}
}
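A short sketch of how the new experimental_use_profile flag flows through the From conversion above. Constructing the TOML struct with ..Default::default() assumes ShellEnvironmentPolicyToml derives Default, which is not visible in this hunk:

    let toml = ShellEnvironmentPolicyToml {
        experimental_use_profile: Some(true),
        ..Default::default()
    };
    let policy: ShellEnvironmentPolicy = toml.into();
    assert!(policy.use_profile); // None or a missing key falls back to false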
@@ -1,12 +1,7 @@
use crate::models::ResponseItem;

/// Transcript of conversation history that is needed:
/// - for ZDR clients for which previous_response_id is not available, so we
/// must include the transcript with every API call. This must include each
/// `function_call` and its corresponding `function_call_output`.
/// - for clients using the "chat completions" API as opposed to the
/// "responses" API.
#[derive(Debug, Clone)]
/// Transcript of conversation history
#[derive(Debug, Clone, Default)]
pub(crate) struct ConversationHistory {
/// The oldest items are at the beginning of the vector.
items: Vec<ResponseItem>,
@@ -35,6 +30,11 @@ impl ConversationHistory {
}
}
}

/// Clears the conversation history.
pub(crate) fn clear(&mut self) {
self.items.clear();
}
}

/// Anything that is not a system message or "reasoning" message is considered
@@ -44,7 +44,36 @@ fn is_api_message(message: &ResponseItem) -> bool {
ResponseItem::Message { role, .. } => role.as_str() != "system",
ResponseItem::FunctionCallOutput { .. }
| ResponseItem::FunctionCall { .. }
| ResponseItem::LocalShellCall { .. } => true,
ResponseItem::Reasoning { .. } | ResponseItem::Other => false,
| ResponseItem::LocalShellCall { .. }
| ResponseItem::Reasoning { .. } => true,
ResponseItem::Other => false,
}
}

#[cfg(test)]
mod tests {
use super::*;
use crate::models::ResponseItem;

#[test]
fn clear_removes_all_items() {
let mut hist = ConversationHistory::new();

use crate::models::ContentItem;

let items = [ResponseItem::Message {
role: "user".into(),
content: vec![ContentItem::InputText {
text: "hello".into(),
}],
}];

hist.record_items(items.iter());

assert_eq!(hist.contents().len(), 1, "sanity – item should be present");

hist.clear();

assert!(hist.contents().is_empty(), "all items should be removed");
}
}

@@ -17,6 +17,7 @@ use tokio::io::BufReader;
use tokio::process::Child;
use tokio::process::Command;
use tokio::sync::Notify;
use tracing::trace;

use crate::error::CodexErr;
use crate::error::Result;
@@ -82,7 +83,8 @@ pub async fn process_exec_tool_call(
) -> Result<ExecToolCallOutput> {
let start = Instant::now();

let raw_output_result = match sandbox_type {
let raw_output_result: std::result::Result<RawExecToolCallOutput, CodexErr> = match sandbox_type
{
SandboxType::None => exec(params, sandbox_policy, ctrl_c).await,
SandboxType::MacosSeatbelt => {
let ExecParams {
@@ -372,6 +374,10 @@ async fn spawn_child_async(
stdio_policy: StdioPolicy,
env: HashMap<String, String>,
) -> std::io::Result<Child> {
trace!(
"spawn_child_async: {program:?} {args:?} {arg0:?} {cwd:?} {sandbox_policy:?} {stdio_policy:?} {env:?}"
);

let mut cmd = Command::new(&program);
#[cfg(unix)]
cmd.arg0(arg0.map_or_else(|| program.to_string_lossy().to_string(), String::from));

codex-rs/core/src/git_info.rs
@@ -0,0 +1,307 @@
use std::path::Path;

use serde::Deserialize;
use serde::Serialize;
use tokio::process::Command;
use tokio::time::Duration as TokioDuration;
use tokio::time::timeout;

/// Timeout for git commands to prevent freezing on large repositories
const GIT_COMMAND_TIMEOUT: TokioDuration = TokioDuration::from_secs(5);

#[derive(Serialize, Deserialize, Clone)]
pub struct GitInfo {
/// Current commit hash (SHA)
#[serde(skip_serializing_if = "Option::is_none")]
pub commit_hash: Option<String>,
/// Current branch name
#[serde(skip_serializing_if = "Option::is_none")]
pub branch: Option<String>,
/// Repository URL (if available from remote)
#[serde(skip_serializing_if = "Option::is_none")]
pub repository_url: Option<String>,
}

/// Collect git repository information from the given working directory using command-line git.
/// Returns None if no git repository is found or if git operations fail.
/// Uses timeouts to prevent freezing on large repositories.
/// All git commands (except the initial repo check) run in parallel for better performance.
pub async fn collect_git_info(cwd: &Path) -> Option<GitInfo> {
// Check if we're in a git repository first
let is_git_repo = run_git_command_with_timeout(&["rev-parse", "--git-dir"], cwd)
.await?
.status
.success();

if !is_git_repo {
return None;
}

// Run all git info collection commands in parallel
let (commit_result, branch_result, url_result) = tokio::join!(
run_git_command_with_timeout(&["rev-parse", "HEAD"], cwd),
run_git_command_with_timeout(&["rev-parse", "--abbrev-ref", "HEAD"], cwd),
run_git_command_with_timeout(&["remote", "get-url", "origin"], cwd)
);

let mut git_info = GitInfo {
commit_hash: None,
branch: None,
repository_url: None,
};

// Process commit hash
if let Some(output) = commit_result {
if output.status.success() {
if let Ok(hash) = String::from_utf8(output.stdout) {
git_info.commit_hash = Some(hash.trim().to_string());
}
}
}

// Process branch name
if let Some(output) = branch_result {
if output.status.success() {
if let Ok(branch) = String::from_utf8(output.stdout) {
let branch = branch.trim();
if branch != "HEAD" {
git_info.branch = Some(branch.to_string());
}
}
}
}

// Process repository URL
if let Some(output) = url_result {
if output.status.success() {
if let Ok(url) = String::from_utf8(output.stdout) {
git_info.repository_url = Some(url.trim().to_string());
}
}
}

Some(git_info)
}

/// Run a git command with a timeout to prevent blocking on large repositories
async fn run_git_command_with_timeout(args: &[&str], cwd: &Path) -> Option<std::process::Output> {
let result = timeout(
GIT_COMMAND_TIMEOUT,
Command::new("git").args(args).current_dir(cwd).output(),
)
.await;

match result {
Ok(Ok(output)) => Some(output),
_ => None, // Timeout or error
}
}
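A small usage sketch for collect_git_info, in line with the tests below; the current-directory path is a placeholder:

    #[tokio::main]
    async fn main() {
        let cwd = std::path::Path::new(".");
        match collect_git_info(cwd).await {
            Some(info) => println!(
                "commit={:?} branch={:?} remote={:?}",
                info.commit_hash, info.branch, info.repository_url
            ),
            None => println!("not a git repository (or git timed out)"),
        }
    }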
#[cfg(test)]
mod tests {
#![allow(clippy::expect_used)]
#![allow(clippy::unwrap_used)]

use super::*;

use std::fs;
use std::path::PathBuf;
use tempfile::TempDir;

// Helper function to create a test git repository
async fn create_test_git_repo(temp_dir: &TempDir) -> PathBuf {
let repo_path = temp_dir.path().to_path_buf();

// Initialize git repo
Command::new("git")
.args(["init"])
.current_dir(&repo_path)
.output()
.await
.expect("Failed to init git repo");

// Configure git user (required for commits)
Command::new("git")
.args(["config", "user.name", "Test User"])
.current_dir(&repo_path)
.output()
.await
.expect("Failed to set git user name");

Command::new("git")
.args(["config", "user.email", "test@example.com"])
.current_dir(&repo_path)
.output()
.await
.expect("Failed to set git user email");

// Create a test file and commit it
let test_file = repo_path.join("test.txt");
fs::write(&test_file, "test content").expect("Failed to write test file");

Command::new("git")
.args(["add", "."])
.current_dir(&repo_path)
.output()
.await
.expect("Failed to add files");

Command::new("git")
.args(["commit", "-m", "Initial commit"])
.current_dir(&repo_path)
.output()
.await
.expect("Failed to commit");

repo_path
}

#[tokio::test]
async fn test_collect_git_info_non_git_directory() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let result = collect_git_info(temp_dir.path()).await;
assert!(result.is_none());
}

#[tokio::test]
async fn test_collect_git_info_git_repository() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let repo_path = create_test_git_repo(&temp_dir).await;

let git_info = collect_git_info(&repo_path)
.await
.expect("Should collect git info from repo");

// Should have commit hash
assert!(git_info.commit_hash.is_some());
let commit_hash = git_info.commit_hash.unwrap();
assert_eq!(commit_hash.len(), 40); // SHA-1 hash should be 40 characters
assert!(commit_hash.chars().all(|c| c.is_ascii_hexdigit()));

// Should have branch (likely "main" or "master")
assert!(git_info.branch.is_some());
let branch = git_info.branch.unwrap();
assert!(branch == "main" || branch == "master");

// Repository URL might be None for local repos without remote
// This is acceptable behavior
}

#[tokio::test]
async fn test_collect_git_info_with_remote() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let repo_path = create_test_git_repo(&temp_dir).await;

// Add a remote origin
Command::new("git")
.args([
"remote",
"add",
"origin",
"https://github.com/example/repo.git",
])
.current_dir(&repo_path)
.output()
.await
.expect("Failed to add remote");

let git_info = collect_git_info(&repo_path)
.await
.expect("Should collect git info from repo");

// Should have repository URL
assert_eq!(
git_info.repository_url,
Some("https://github.com/example/repo.git".to_string())
);
}

#[tokio::test]
async fn test_collect_git_info_detached_head() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let repo_path = create_test_git_repo(&temp_dir).await;

// Get the current commit hash
let output = Command::new("git")
.args(["rev-parse", "HEAD"])
.current_dir(&repo_path)
.output()
.await
.expect("Failed to get HEAD");
let commit_hash = String::from_utf8(output.stdout).unwrap().trim().to_string();

// Checkout the commit directly (detached HEAD)
Command::new("git")
.args(["checkout", &commit_hash])
.current_dir(&repo_path)
.output()
.await
.expect("Failed to checkout commit");

let git_info = collect_git_info(&repo_path)
.await
.expect("Should collect git info from repo");

// Should have commit hash
assert!(git_info.commit_hash.is_some());
// Branch should be None for detached HEAD (since rev-parse --abbrev-ref HEAD returns "HEAD")
assert!(git_info.branch.is_none());
}

#[tokio::test]
async fn test_collect_git_info_with_branch() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let repo_path = create_test_git_repo(&temp_dir).await;

// Create and checkout a new branch
Command::new("git")
.args(["checkout", "-b", "feature-branch"])
.current_dir(&repo_path)
.output()
.await
.expect("Failed to create branch");

let git_info = collect_git_info(&repo_path)
.await
.expect("Should collect git info from repo");

// Should have the new branch name
assert_eq!(git_info.branch, Some("feature-branch".to_string()));
}

#[test]
fn test_git_info_serialization() {
let git_info = GitInfo {
commit_hash: Some("abc123def456".to_string()),
branch: Some("main".to_string()),
repository_url: Some("https://github.com/example/repo.git".to_string()),
};

let json = serde_json::to_string(&git_info).expect("Should serialize GitInfo");
let parsed: serde_json::Value = serde_json::from_str(&json).expect("Should parse JSON");

assert_eq!(parsed["commit_hash"], "abc123def456");
assert_eq!(parsed["branch"], "main");
assert_eq!(
parsed["repository_url"],
"https://github.com/example/repo.git"
);
}

#[test]
fn test_git_info_serialization_with_nones() {
let git_info = GitInfo {
commit_hash: None,
branch: None,
repository_url: None,
};

let json = serde_json::to_string(&git_info).expect("Should serialize GitInfo");
let parsed: serde_json::Value = serde_json::from_str(&json).expect("Should parse JSON");

// Fields with None values should be omitted due to skip_serializing_if
|
||||
assert!(!parsed.as_object().unwrap().contains_key("commit_hash"));
|
||||
assert!(!parsed.as_object().unwrap().contains_key("branch"));
|
||||
assert!(!parsed.as_object().unwrap().contains_key("repository_url"));
|
||||
}
|
||||
}
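For context, the `GitInfo` struct itself is defined outside this hunk; judging from the serialization tests above, it is presumably shaped roughly like the sketch below, with each field skipped when `None`. This is an inference from the tests, not the verbatim definition.

// Hedged sketch inferred from the tests above; the real definition lives elsewhere in git_info.rs.
// Serialize/Deserialize are implied by the tests; the other derives are assumptions.
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitInfo {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub commit_hash: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub branch: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub repository_url: Option<String>,
}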
@@ -1,31 +1,57 @@
use tree_sitter::Parser;
use tree_sitter::Tree;
use tree_sitter_bash::LANGUAGE as BASH;
use crate::bash::try_parse_bash;
use crate::bash::try_parse_word_only_commands_sequence;

pub fn is_known_safe_command(command: &[String]) -> bool {
    if is_safe_to_call_with_exec(command) {
        return true;
    }

    // TODO(mbolin): Also support safe commands that are piped together such
    // as `cat foo | wc -l`.
    matches!(
        command,
        [bash, flag, script]
            if bash == "bash"
                && flag == "-lc"
                && try_parse_bash(script).and_then(|tree|
                    try_parse_single_word_only_command(&tree, script)).is_some_and(|parsed_bash_command| is_safe_to_call_with_exec(&parsed_bash_command))
    )
    // Support `bash -lc "..."` where the script consists solely of one or
    // more "plain" commands (only bare words / quoted strings) combined with
    // a conservative allow‑list of shell operators that themselves do not
    // introduce side effects ( "&&", "||", ";", and "|" ). If every
    // individual command in the script is itself a known‑safe command, then
    // the composite expression is considered safe.
    if let [bash, flag, script] = command {
        if bash == "bash" && flag == "-lc" {
            if let Some(tree) = try_parse_bash(script) {
                if let Some(all_commands) = try_parse_word_only_commands_sequence(&tree, script) {
                    if !all_commands.is_empty()
                        && all_commands
                            .iter()
                            .all(|cmd| is_safe_to_call_with_exec(cmd))
                    {
                        return true;
                    }
                }
            }
        }
    }

    false
}

fn is_safe_to_call_with_exec(command: &[String]) -> bool {
    let cmd0 = command.first().map(String::as_str);

    match cmd0 {
        Some("cat" | "cd" | "echo" | "grep" | "head" | "ls" | "pwd" | "tail" | "wc" | "which") => {
        #[rustfmt::skip]
        Some(
            "cat" |
            "cd" |
            "echo" |
            "false" |
            "grep" |
            "head" |
            "ls" |
            "nl" |
            "pwd" |
            "tail" |
            "true" |
            "wc" |
            "which") => {
            true
        }
        },
|
||||
|
||||
Some("find") => {
|
||||
// Certain options to `find` can delete files, write to files, or
|
||||
@@ -95,90 +121,7 @@ fn is_safe_to_call_with_exec(command: &[String]) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
fn try_parse_bash(bash_lc_arg: &str) -> Option<Tree> {
|
||||
let lang = BASH.into();
|
||||
let mut parser = Parser::new();
|
||||
#[expect(clippy::expect_used)]
|
||||
parser.set_language(&lang).expect("load bash grammar");
|
||||
|
||||
let old_tree: Option<&Tree> = None;
|
||||
parser.parse(bash_lc_arg, old_tree)
|
||||
}
|
||||
|
||||
/// If `tree` represents a single Bash command whose name and every argument is
|
||||
/// an ordinary `word`, return those words in order; otherwise, return `None`.
|
||||
///
|
||||
/// `src` must be the exact source string that was parsed into `tree`, so we can
|
||||
/// extract the text for every node.
|
||||
pub fn try_parse_single_word_only_command(tree: &Tree, src: &str) -> Option<Vec<String>> {
|
||||
// Any parse error is an immediate rejection.
|
||||
if tree.root_node().has_error() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// (program …) with exactly one statement
|
||||
let root = tree.root_node();
|
||||
if root.kind() != "program" || root.named_child_count() != 1 {
|
||||
return None;
|
||||
}
|
||||
|
||||
let cmd = root.named_child(0)?; // (command …)
|
||||
if cmd.kind() != "command" {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut words = Vec::new();
|
||||
let mut cursor = cmd.walk();
|
||||
|
||||
for child in cmd.named_children(&mut cursor) {
|
||||
match child.kind() {
|
||||
// The command name node wraps one `word` child.
|
||||
"command_name" => {
|
||||
let word_node = child.named_child(0)?; // make sure it's only a word
|
||||
if word_node.kind() != "word" {
|
||||
return None;
|
||||
}
|
||||
words.push(word_node.utf8_text(src.as_bytes()).ok()?.to_owned());
|
||||
}
|
||||
// Positional‑argument word (allowed).
|
||||
"word" | "number" => {
|
||||
words.push(child.utf8_text(src.as_bytes()).ok()?.to_owned());
|
||||
}
|
||||
"string" => {
|
||||
if child.child_count() == 3
|
||||
&& child.child(0)?.kind() == "\""
|
||||
&& child.child(1)?.kind() == "string_content"
|
||||
&& child.child(2)?.kind() == "\""
|
||||
{
|
||||
words.push(child.child(1)?.utf8_text(src.as_bytes()).ok()?.to_owned());
|
||||
} else {
|
||||
// Anything else means the command is *not* plain words.
|
||||
return None;
|
||||
}
|
||||
}
|
||||
"concatenation" => {
|
||||
// TODO: Consider things like `'ab\'a'`.
|
||||
return None;
|
||||
}
|
||||
"raw_string" => {
|
||||
// Raw string is a single word, but we need to strip the quotes.
|
||||
let raw_string = child.utf8_text(src.as_bytes()).ok()?;
|
||||
let stripped = raw_string
|
||||
.strip_prefix('\'')
|
||||
.and_then(|s| s.strip_suffix('\''));
|
||||
if let Some(stripped) = stripped {
|
||||
words.push(stripped.to_owned());
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
// Anything else means the command is *not* plain words.
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
|
||||
Some(words)
|
||||
}
|
||||
// (bash parsing helpers implemented in crate::bash)
|
||||
|
||||
/* ----------------------------------------------------------
|
||||
Example
|
||||
@@ -216,6 +159,7 @@ fn is_valid_sed_n_arg(arg: Option<&str>) -> bool {
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#![allow(clippy::unwrap_used)]
|
||||
@@ -232,6 +176,11 @@ mod tests {
|
||||
assert!(is_safe_to_call_with_exec(&vec_str(&[
|
||||
"sed", "-n", "1,5p", "file.txt"
|
||||
])));
|
||||
assert!(is_safe_to_call_with_exec(&vec_str(&[
|
||||
"nl",
|
||||
"-nrz",
|
||||
"Cargo.toml"
|
||||
])));
|
||||
|
||||
// Safe `find` command (no unsafe options).
|
||||
assert!(is_safe_to_call_with_exec(&vec_str(&[
|
||||
@@ -334,6 +283,30 @@ mod tests {
|
||||
])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bash_lc_safe_examples_with_operators() {
|
||||
assert!(is_known_safe_command(&vec_str(&[
|
||||
"bash",
|
||||
"-lc",
|
||||
"grep -R \"Cargo.toml\" -n || true"
|
||||
])));
|
||||
assert!(is_known_safe_command(&vec_str(&[
|
||||
"bash",
|
||||
"-lc",
|
||||
"ls && pwd"
|
||||
])));
|
||||
assert!(is_known_safe_command(&vec_str(&[
|
||||
"bash",
|
||||
"-lc",
|
||||
"echo 'hi' ; ls"
|
||||
])));
|
||||
assert!(is_known_safe_command(&vec_str(&[
|
||||
"bash",
|
||||
"-lc",
|
||||
"ls | wc -l"
|
||||
])));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bash_lc_unsafe_examples() {
|
||||
assert!(
|
||||
@@ -347,44 +320,29 @@ mod tests {
|
||||
|
||||
assert!(
|
||||
!is_known_safe_command(&vec_str(&["bash", "-lc", "find . -name file.txt -delete"])),
|
||||
"Unsafe find option should not be auto‑approved."
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_try_parse_single_word_only_command() {
|
||||
let script_with_single_quoted_string = "sed -n '1,5p' file.txt";
|
||||
let parsed_words = try_parse_bash(script_with_single_quoted_string)
|
||||
.and_then(|tree| {
|
||||
try_parse_single_word_only_command(&tree, script_with_single_quoted_string)
|
||||
})
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
vec![
|
||||
"sed".to_string(),
|
||||
"-n".to_string(),
|
||||
// Ensure the single quotes are properly removed.
|
||||
"1,5p".to_string(),
|
||||
"file.txt".to_string()
|
||||
],
|
||||
parsed_words,
|
||||
"Unsafe find option should not be auto-approved."
|
||||
);
|
||||
|
||||
let script_with_number_arg = "ls -1";
|
||||
let parsed_words = try_parse_bash(script_with_number_arg)
|
||||
.and_then(|tree| try_parse_single_word_only_command(&tree, script_with_number_arg))
|
||||
.unwrap();
|
||||
assert_eq!(vec!["ls", "-1"], parsed_words,);
|
||||
// Disallowed because of unsafe command in sequence.
|
||||
assert!(
|
||||
!is_known_safe_command(&vec_str(&["bash", "-lc", "ls && rm -rf /"])),
|
||||
"Sequence containing unsafe command must be rejected"
|
||||
);
|
||||
|
||||
let script_with_double_quoted_string_with_no_funny_stuff_arg = "grep -R \"Cargo.toml\" -n";
|
||||
let parsed_words = try_parse_bash(script_with_double_quoted_string_with_no_funny_stuff_arg)
|
||||
.and_then(|tree| {
|
||||
try_parse_single_word_only_command(
|
||||
&tree,
|
||||
script_with_double_quoted_string_with_no_funny_stuff_arg,
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
assert_eq!(vec!["grep", "-R", "Cargo.toml", "-n"], parsed_words);
|
||||
// Disallowed because of parentheses / subshell.
|
||||
assert!(
|
||||
!is_known_safe_command(&vec_str(&["bash", "-lc", "(ls)"])),
|
||||
"Parentheses (subshell) are not provably safe with the current parser"
|
||||
);
|
||||
assert!(
|
||||
!is_known_safe_command(&vec_str(&["bash", "-lc", "ls || (pwd && echo hi)"])),
|
||||
"Nested parentheses are not provably safe with the current parser"
|
||||
);
|
||||
|
||||
// Disallowed redirection.
|
||||
assert!(
|
||||
!is_known_safe_command(&vec_str(&["bash", "-lc", "ls > out.txt"])),
|
||||
"> redirection should be rejected"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,11 +5,14 @@
|
||||
// the TUI or the tracing stack).
|
||||
#![deny(clippy::print_stdout, clippy::print_stderr)]
|
||||
|
||||
mod apply_patch;
|
||||
mod bash;
|
||||
mod chat_completions;
|
||||
mod client;
|
||||
mod client_common;
|
||||
pub mod codex;
|
||||
pub use codex::Codex;
|
||||
pub use codex::CodexSpawnOk;
|
||||
pub mod codex_wrapper;
|
||||
pub mod config;
|
||||
pub mod config_profile;
|
||||
@@ -19,6 +22,7 @@ pub mod error;
|
||||
pub mod exec;
|
||||
pub mod exec_env;
|
||||
mod flags;
|
||||
pub mod git_info;
|
||||
mod is_safe_command;
|
||||
mod mcp_connection_manager;
|
||||
mod mcp_tool_call;
|
||||
@@ -34,6 +38,7 @@ mod project_doc;
|
||||
pub mod protocol;
|
||||
mod rollout;
|
||||
mod safety;
|
||||
pub mod shell;
|
||||
mod user_notification;
|
||||
pub mod util;
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::ffi::OsString;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context;
|
||||
@@ -127,7 +128,12 @@ impl McpConnectionManager {
|
||||
|
||||
join_set.spawn(async move {
|
||||
let McpServerConfig { command, args, env } = cfg;
|
||||
let client_res = McpClient::new_stdio_client(command, args, env).await;
|
||||
let client_res = McpClient::new_stdio_client(
|
||||
command.into(),
|
||||
args.into_iter().map(OsString::from).collect(),
|
||||
env,
|
||||
)
|
||||
.await;
|
||||
match client_res {
|
||||
Ok(client) => {
|
||||
// Initialize the client.
|
||||
|
||||
@@ -3,6 +3,7 @@ use std::collections::HashMap;
|
||||
use base64::Engine;
|
||||
use mcp_types::CallToolResult;
|
||||
use serde::Deserialize;
|
||||
use serde::Deserializer;
|
||||
use serde::Serialize;
|
||||
use serde::ser::Serializer;
|
||||
|
||||
@@ -37,12 +38,14 @@ pub enum ContentItem {
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum ResponseItem {
|
||||
Message {
|
||||
id: Option<String>,
|
||||
role: String,
|
||||
content: Vec<ContentItem>,
|
||||
},
|
||||
Reasoning {
|
||||
id: String,
|
||||
summary: Vec<ReasoningItemReasoningSummary>,
|
||||
encrypted_content: Option<String>,
|
||||
},
|
||||
LocalShellCall {
|
||||
/// Set when using the chat completions API.
|
||||
@@ -53,6 +56,7 @@ pub enum ResponseItem {
|
||||
action: LocalShellAction,
|
||||
},
|
||||
FunctionCall {
|
||||
id: Option<String>,
|
||||
name: String,
|
||||
// The Responses API returns the function call arguments as a *string* that contains
|
||||
// JSON, not as an already‑parsed object. We keep it as a raw string here and let
|
||||
@@ -78,7 +82,11 @@ pub enum ResponseItem {
|
||||
impl From<ResponseInputItem> for ResponseItem {
|
||||
fn from(item: ResponseInputItem) -> Self {
|
||||
match item {
|
||||
ResponseInputItem::Message { role, content } => Self::Message { role, content },
|
||||
ResponseInputItem::Message { role, content } => Self::Message {
|
||||
role,
|
||||
content,
|
||||
id: None,
|
||||
},
|
||||
ResponseInputItem::FunctionCallOutput { call_id, output } => {
|
||||
Self::FunctionCallOutput { call_id, output }
|
||||
}
|
||||
@@ -177,7 +185,7 @@ pub struct ShellToolCallParams {
|
||||
pub timeout_ms: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FunctionCallOutputPayload {
|
||||
pub content: String,
|
||||
#[expect(dead_code)]
|
||||
@@ -205,6 +213,19 @@ impl Serialize for FunctionCallOutputPayload {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for FunctionCallOutputPayload {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Ok(FunctionCallOutputPayload {
            content: s,
            success: None,
        })
    }
}

// Implement Display so callers can treat the payload like a plain string when logging or doing
// trivial substring checks in tests (existing tests call `.contains()` on the output). Display
// returns the raw `content` field.
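The `Display` implementation the comment refers to is not included in this hunk; a minimal version consistent with the description would look like the sketch below (illustrative, not the verbatim impl from the repo).

// Hedged sketch matching the comment above.
impl std::fmt::Display for FunctionCallOutputPayload {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Display is just the raw `content`, so tests can call `.contains()` on it.
        f.write_str(&self.content)
    }
}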
|
||||
|
||||
@@ -4,13 +4,15 @@
|
||||
//! between user and agent.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fmt;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::str::FromStr; // Added for FinalOutput Display implementation
|
||||
|
||||
use mcp_types::CallToolResult;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use strum_macros::Display;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::config_types::ReasoningEffort as ReasoningEffortConfig;
|
||||
@@ -33,6 +35,8 @@ pub struct Submission {
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[non_exhaustive]
|
||||
pub enum Op {
|
||||
/// Erase all conversation history for the current session.
|
||||
EraseConversationHistory,
|
||||
/// Configure the model session.
|
||||
ConfigureSession {
|
||||
/// Provider identifier ("openai", "openrouter", ...).
|
||||
@@ -116,18 +120,23 @@ pub enum Op {
|
||||
|
||||
/// Request a single history entry identified by `log_id` + `offset`.
|
||||
GetHistoryEntryRequest { offset: usize, log_id: u64 },
|
||||
|
||||
/// Request to shut down codex instance.
|
||||
Shutdown,
|
||||
}
|
||||
|
||||
/// Determines the conditions under which the user is consulted to approve
|
||||
/// running the command proposed by Codex.
|
||||
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash, Serialize, Deserialize, Display)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
#[strum(serialize_all = "kebab-case")]
|
||||
pub enum AskForApproval {
|
||||
/// Under this policy, only "known safe" commands—as determined by
|
||||
/// `is_safe_command()`—that **only read files** are auto‑approved.
|
||||
/// Everything else will ask the user to approve.
|
||||
#[default]
|
||||
#[serde(rename = "untrusted")]
|
||||
#[strum(serialize = "untrusted")]
|
||||
UnlessTrusted,
|
||||
|
||||
/// *All* commands are auto‑approved, but they are expected to run inside a
|
||||
@@ -271,8 +280,9 @@ pub struct Event {
|
||||
}
|
||||
|
||||
/// Response event from the agent
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, Display)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
#[strum(serialize_all = "snake_case")]
|
||||
pub enum EventMsg {
|
||||
/// Error while executing a submission
|
||||
Error(ErrorEvent),
|
||||
@@ -326,6 +336,9 @@ pub enum EventMsg {
|
||||
|
||||
/// Response to GetHistoryEntryRequest.
|
||||
GetHistoryEntryResponse(GetHistoryEntryResponseEvent),
|
||||
|
||||
/// Notification that the agent is shutting down.
|
||||
ShutdownComplete,
|
||||
}
|
||||
|
||||
// Individual event payload types matching each `EventMsg` variant.
|
||||
@@ -349,6 +362,36 @@ pub struct TokenUsage {
|
||||
pub total_tokens: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct FinalOutput {
    pub token_usage: TokenUsage,
}

impl From<TokenUsage> for FinalOutput {
    fn from(token_usage: TokenUsage) -> Self {
        Self { token_usage }
    }
}

impl fmt::Display for FinalOutput {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let u = &self.token_usage;
        write!(
            f,
            "Token usage: total={} input={}{} output={}{}",
            u.total_tokens,
            u.input_tokens,
            u.cached_input_tokens
                .map(|c| format!(" (cached {c})"))
                .unwrap_or_default(),
            u.output_tokens,
            u.reasoning_output_tokens
                .map(|r| format!(" (reasoning {r})"))
                .unwrap_or_default()
        )
    }
}
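As a quick illustration of the format string above: assuming `TokenUsage` has only the fields referenced in this impl (the full struct is cut off by the hunk), a value with cached input but no reasoning tokens renders as shown in the trailing comment.

// Illustrative only; the exact TokenUsage field set is an assumption based on this impl.
let usage = TokenUsage {
    input_tokens: 1200,
    cached_input_tokens: Some(800),
    output_tokens: 300,
    reasoning_output_tokens: None,
    total_tokens: 1500,
};
println!("{}", FinalOutput::from(usage));
// Prints: Token usage: total=1500 input=1200 (cached 800) output=300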
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct AgentMessageEvent {
|
||||
pub message: String,
|
||||
@@ -422,6 +465,8 @@ pub struct ExecCommandEndEvent {
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct ExecApprovalRequestEvent {
|
||||
/// Identifier for the associated exec call, if available.
|
||||
pub call_id: String,
|
||||
/// The command to be executed.
|
||||
pub command: Vec<String>,
|
||||
/// The command's working directory.
|
||||
@@ -433,6 +478,8 @@ pub struct ExecApprovalRequestEvent {
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct ApplyPatchApprovalRequestEvent {
|
||||
/// Responses API call id for the associated patch apply call, if available.
|
||||
pub call_id: String,
|
||||
pub changes: HashMap<PathBuf, FileChange>,
|
||||
/// Optional explanatory reason (e.g. request for extra write access).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
|
||||
@@ -14,10 +14,14 @@ use time::macros::format_description;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::sync::mpsc::Sender;
|
||||
use tokio::sync::mpsc::{self};
|
||||
use tokio::sync::oneshot;
|
||||
use tracing::info;
|
||||
use tracing::warn;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::git_info::GitInfo;
|
||||
use crate::git_info::collect_git_info;
|
||||
use crate::models::ResponseItem;
|
||||
|
||||
const SESSIONS_SUBDIR: &str = "sessions";
|
||||
@@ -29,11 +33,17 @@ pub struct SessionMeta {
|
||||
pub instructions: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Clone)]
|
||||
pub struct SessionStateSnapshot {
|
||||
pub previous_response_id: Option<String>,
|
||||
#[derive(Serialize)]
|
||||
struct SessionMetaWithGit {
|
||||
#[serde(flatten)]
|
||||
meta: SessionMeta,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
git: Option<GitInfo>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Clone)]
|
||||
pub struct SessionStateSnapshot {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Clone)]
|
||||
pub struct SavedSession {
|
||||
pub session: SessionMeta,
|
||||
@@ -58,10 +68,10 @@ pub(crate) struct RolloutRecorder {
|
||||
tx: Sender<RolloutCmd>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum RolloutCmd {
|
||||
AddItems(Vec<ResponseItem>),
|
||||
UpdateState(SessionStateSnapshot),
|
||||
Shutdown { ack: oneshot::Sender<()> },
|
||||
}
|
||||
|
||||
impl RolloutRecorder {
|
||||
@@ -86,15 +96,12 @@ impl RolloutRecorder {
|
||||
.format(timestamp_format)
|
||||
.map_err(|e| IoError::other(format!("failed to format timestamp: {e}")))?;
|
||||
|
||||
let meta = SessionMeta {
|
||||
timestamp,
|
||||
id: session_id,
|
||||
instructions,
|
||||
};
|
||||
// Clone the cwd for the spawned task to collect git info asynchronously
|
||||
let cwd = config.cwd.clone();
|
||||
|
||||
// A reasonably-sized bounded channel. If the buffer fills up the send
|
||||
// future will yield, which is fine – we only need to ensure we do not
|
||||
// perform *blocking* I/O on the caller’s thread.
|
||||
// perform *blocking* I/O on the caller's thread.
|
||||
let (tx, rx) = mpsc::channel::<RolloutCmd>(256);
|
||||
|
||||
// Spawn a Tokio task that owns the file handle and performs async
|
||||
@@ -103,7 +110,12 @@ impl RolloutRecorder {
|
||||
tokio::task::spawn(rollout_writer(
|
||||
tokio::fs::File::from_std(file),
|
||||
rx,
|
||||
Some(meta),
|
||||
Some(SessionMeta {
|
||||
timestamp,
|
||||
id: session_id,
|
||||
instructions,
|
||||
}),
|
||||
cwd,
|
||||
));
|
||||
|
||||
Ok(Self { tx })
|
||||
@@ -119,8 +131,9 @@ impl RolloutRecorder {
|
||||
ResponseItem::Message { .. }
|
||||
| ResponseItem::LocalShellCall { .. }
|
||||
| ResponseItem::FunctionCall { .. }
|
||||
| ResponseItem::FunctionCallOutput { .. } => filtered.push(item.clone()),
|
||||
ResponseItem::Reasoning { .. } | ResponseItem::Other => {
|
||||
| ResponseItem::FunctionCallOutput { .. }
|
||||
| ResponseItem::Reasoning { .. } => filtered.push(item.clone()),
|
||||
ResponseItem::Other => {
|
||||
// These should never be serialized.
|
||||
continue;
|
||||
}
|
||||
@@ -142,7 +155,10 @@ impl RolloutRecorder {
|
||||
.map_err(|e| IoError::other(format!("failed to queue rollout state: {e}")))
|
||||
}
|
||||
|
||||
pub async fn resume(path: &Path) -> std::io::Result<(Self, SavedSession)> {
|
||||
pub async fn resume(
|
||||
path: &Path,
|
||||
cwd: std::path::PathBuf,
|
||||
) -> std::io::Result<(Self, SavedSession)> {
|
||||
info!("Resuming rollout from {path:?}");
|
||||
let text = tokio::fs::read_to_string(path).await?;
|
||||
let mut lines = text.lines();
|
||||
@@ -172,13 +188,17 @@ impl RolloutRecorder {
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if let Ok(item) = serde_json::from_value::<ResponseItem>(v.clone()) {
|
||||
match item {
|
||||
match serde_json::from_value::<ResponseItem>(v.clone()) {
|
||||
Ok(item) => match item {
|
||||
ResponseItem::Message { .. }
|
||||
| ResponseItem::LocalShellCall { .. }
|
||||
| ResponseItem::FunctionCall { .. }
|
||||
| ResponseItem::FunctionCallOutput { .. } => items.push(item),
|
||||
ResponseItem::Reasoning { .. } | ResponseItem::Other => {}
|
||||
| ResponseItem::FunctionCallOutput { .. }
|
||||
| ResponseItem::Reasoning { .. } => items.push(item),
|
||||
ResponseItem::Other => {}
|
||||
},
|
||||
Err(e) => {
|
||||
warn!("failed to parse item: {v:?}, error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -196,10 +216,30 @@ impl RolloutRecorder {
|
||||
.open(path)?;
|
||||
|
||||
let (tx, rx) = mpsc::channel::<RolloutCmd>(256);
|
||||
tokio::task::spawn(rollout_writer(tokio::fs::File::from_std(file), rx, None));
|
||||
tokio::task::spawn(rollout_writer(
|
||||
tokio::fs::File::from_std(file),
|
||||
rx,
|
||||
None,
|
||||
cwd,
|
||||
));
|
||||
info!("Resumed rollout successfully from {path:?}");
|
||||
Ok((Self { tx }, saved))
|
||||
}
|
||||
|
||||
pub async fn shutdown(&self) -> std::io::Result<()> {
|
||||
let (tx_done, rx_done) = oneshot::channel();
|
||||
match self.tx.send(RolloutCmd::Shutdown { ack: tx_done }).await {
|
||||
Ok(_) => rx_done
|
||||
.await
|
||||
.map_err(|e| IoError::other(format!("failed waiting for rollout shutdown: {e}"))),
|
||||
Err(e) => {
|
||||
warn!("failed to send rollout shutdown command: {e}");
|
||||
Err(IoError::other(format!(
|
||||
"failed to send rollout shutdown command: {e}"
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct LogFileInfo {
|
||||
@@ -248,17 +288,26 @@ fn create_log_file(config: &Config, session_id: Uuid) -> std::io::Result<LogFile
|
||||
}
|
||||
|
||||
async fn rollout_writer(
|
||||
mut file: tokio::fs::File,
|
||||
file: tokio::fs::File,
|
||||
mut rx: mpsc::Receiver<RolloutCmd>,
|
||||
meta: Option<SessionMeta>,
|
||||
) {
|
||||
if let Some(meta) = meta {
|
||||
if let Ok(json) = serde_json::to_string(&meta) {
|
||||
let _ = file.write_all(json.as_bytes()).await;
|
||||
let _ = file.write_all(b"\n").await;
|
||||
let _ = file.flush().await;
|
||||
}
|
||||
mut meta: Option<SessionMeta>,
|
||||
cwd: std::path::PathBuf,
|
||||
) -> std::io::Result<()> {
|
||||
let mut writer = JsonlWriter { file };
|
||||
|
||||
// If we have a meta, collect git info asynchronously and write meta first
|
||||
if let Some(session_meta) = meta.take() {
|
||||
let git_info = collect_git_info(&cwd).await;
|
||||
let session_meta_with_git = SessionMetaWithGit {
|
||||
meta: session_meta,
|
||||
git: git_info,
|
||||
};
|
||||
|
||||
// Write the SessionMeta as the first item in the file
|
||||
writer.write_line(&session_meta_with_git).await?;
|
||||
}
|
||||
|
||||
// Process rollout commands
|
||||
while let Some(cmd) = rx.recv().await {
|
||||
match cmd {
|
||||
RolloutCmd::AddItems(items) => {
|
||||
@@ -267,16 +316,13 @@ async fn rollout_writer(
|
||||
ResponseItem::Message { .. }
|
||||
| ResponseItem::LocalShellCall { .. }
|
||||
| ResponseItem::FunctionCall { .. }
|
||||
| ResponseItem::FunctionCallOutput { .. } => {
|
||||
if let Ok(json) = serde_json::to_string(&item) {
|
||||
let _ = file.write_all(json.as_bytes()).await;
|
||||
let _ = file.write_all(b"\n").await;
|
||||
}
|
||||
| ResponseItem::FunctionCallOutput { .. }
|
||||
| ResponseItem::Reasoning { .. } => {
|
||||
writer.write_line(&item).await?;
|
||||
}
|
||||
ResponseItem::Reasoning { .. } | ResponseItem::Other => {}
|
||||
ResponseItem::Other => {}
|
||||
}
|
||||
}
|
||||
let _ = file.flush().await;
|
||||
}
|
||||
RolloutCmd::UpdateState(state) => {
|
||||
#[derive(Serialize)]
|
||||
@@ -285,15 +331,32 @@ async fn rollout_writer(
|
||||
#[serde(flatten)]
|
||||
state: &'a SessionStateSnapshot,
|
||||
}
|
||||
if let Ok(json) = serde_json::to_string(&StateLine {
|
||||
record_type: "state",
|
||||
state: &state,
|
||||
}) {
|
||||
let _ = file.write_all(json.as_bytes()).await;
|
||||
let _ = file.write_all(b"\n").await;
|
||||
let _ = file.flush().await;
|
||||
}
|
||||
writer
|
||||
.write_line(&StateLine {
|
||||
record_type: "state",
|
||||
state: &state,
|
||||
})
|
||||
.await?;
|
||||
}
|
||||
RolloutCmd::Shutdown { ack } => {
|
||||
let _ = ack.send(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct JsonlWriter {
    file: tokio::fs::File,
}

impl JsonlWriter {
    async fn write_line(&mut self, item: &impl serde::Serialize) -> std::io::Result<()> {
        let mut json = serde_json::to_string(item)?;
        json.push('\n');
        let _ = self.file.write_all(json.as_bytes()).await;
        self.file.flush().await?;
        Ok(())
    }
}
|
||||
|
||||
204
codex-rs/core/src/shell.rs
Normal file
@@ -0,0 +1,204 @@
use shlex;

#[derive(Debug, PartialEq, Eq)]
pub struct ZshShell {
    shell_path: String,
    zshrc_path: String,
}

#[derive(Debug, PartialEq, Eq)]
pub enum Shell {
    Zsh(ZshShell),
    Unknown,
}

impl Shell {
    pub fn format_default_shell_invocation(&self, command: Vec<String>) -> Option<Vec<String>> {
        match self {
            Shell::Zsh(zsh) => {
                if !std::path::Path::new(&zsh.zshrc_path).exists() {
                    return None;
                }

                let mut result = vec![zsh.shell_path.clone(), "-c".to_string()];
                if let Ok(joined) = shlex::try_join(command.iter().map(|s| s.as_str())) {
                    result.push(format!("source {} && ({joined})", zsh.zshrc_path));
                } else {
                    return None;
                }
                Some(result)
            }
            Shell::Unknown => None,
        }
    }
}
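Concretely, for a zsh user with an existing rc file, the method above wraps the original argv in a single `zsh -c` string that sources the rc file first. A short illustration (the home path below is hypothetical):

// Illustrative usage; the zshrc path is hypothetical and must exist for Some(..) to be returned.
let shell = Shell::Zsh(ZshShell {
    shell_path: "/bin/zsh".to_string(),
    zshrc_path: "/Users/alice/.zshrc".to_string(),
});
let argv = shell.format_default_shell_invocation(vec!["ls".to_string(), "-la".to_string()]);
// With an existing zshrc this yields:
//   Some(["/bin/zsh", "-c", "source /Users/alice/.zshrc && (ls -la)"])
// and None when the rc file is missing or the command cannot be shell-joined.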
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub async fn default_user_shell() -> Shell {
|
||||
use tokio::process::Command;
|
||||
use whoami;
|
||||
|
||||
let user = whoami::username();
|
||||
let home = format!("/Users/{user}");
|
||||
let output = Command::new("dscl")
|
||||
.args([".", "-read", &home, "UserShell"])
|
||||
.output()
|
||||
.await
|
||||
.ok();
|
||||
match output {
|
||||
Some(o) => {
|
||||
if !o.status.success() {
|
||||
return Shell::Unknown;
|
||||
}
|
||||
let stdout = String::from_utf8_lossy(&o.stdout);
|
||||
for line in stdout.lines() {
|
||||
if let Some(shell_path) = line.strip_prefix("UserShell: ") {
|
||||
if shell_path.ends_with("/zsh") {
|
||||
return Shell::Zsh(ZshShell {
|
||||
shell_path: shell_path.to_string(),
|
||||
zshrc_path: format!("{home}/.zshrc"),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Shell::Unknown
|
||||
}
|
||||
_ => Shell::Unknown,
|
||||
}
|
||||
}
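For reference, the parsing loop above keys off a single record in the `dscl` output. A hedged, self-contained illustration of that line format (the sample text is assumed from the `strip_prefix` call, not captured from a real machine):

// Sample of the record `dscl . -read /Users/<user> UserShell` is expected to print;
// the function keeps the path only when it ends in "/zsh".
let sample_output = "UserShell: /bin/zsh\n";
let shell_path = sample_output
    .lines()
    .find_map(|line| line.strip_prefix("UserShell: "));
assert_eq!(shell_path, Some("/bin/zsh"));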
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
pub async fn default_user_shell() -> Shell {
|
||||
Shell::Unknown
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[cfg(target_os = "macos")]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::process::Command;
|
||||
|
||||
#[tokio::test]
|
||||
#[expect(clippy::unwrap_used)]
|
||||
async fn test_current_shell_detects_zsh() {
|
||||
let shell = Command::new("sh")
|
||||
.arg("-c")
|
||||
.arg("echo $SHELL")
|
||||
.output()
|
||||
.unwrap();
|
||||
|
||||
let home = std::env::var("HOME").unwrap();
|
||||
let shell_path = String::from_utf8_lossy(&shell.stdout).trim().to_string();
|
||||
if shell_path.ends_with("/zsh") {
|
||||
assert_eq!(
|
||||
default_user_shell().await,
|
||||
Shell::Zsh(ZshShell {
|
||||
shell_path: shell_path.to_string(),
|
||||
zshrc_path: format!("{home}/.zshrc",),
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_run_with_profile_zshrc_not_exists() {
|
||||
let shell = Shell::Zsh(ZshShell {
|
||||
shell_path: "/bin/zsh".to_string(),
|
||||
zshrc_path: "/does/not/exist/.zshrc".to_string(),
|
||||
});
|
||||
let actual_cmd = shell.format_default_shell_invocation(vec!["myecho".to_string()]);
|
||||
assert_eq!(actual_cmd, None);
|
||||
}
|
||||
|
||||
#[expect(clippy::unwrap_used)]
|
||||
#[tokio::test]
|
||||
async fn test_run_with_profile_escaping_and_execution() {
|
||||
let shell_path = "/bin/zsh";
|
||||
|
||||
let cases = vec![
|
||||
(
|
||||
vec!["myecho"],
|
||||
vec![shell_path, "-c", "source ZSHRC_PATH && (myecho)"],
|
||||
Some("It works!\n"),
|
||||
),
|
||||
(
|
||||
vec!["bash", "-lc", "echo 'single' \"double\""],
|
||||
vec![
|
||||
shell_path,
|
||||
"-c",
|
||||
"source ZSHRC_PATH && (bash -lc \"echo 'single' \\\"double\\\"\")",
|
||||
],
|
||||
Some("single double\n"),
|
||||
),
|
||||
];
|
||||
for (input, expected_cmd, expected_output) in cases {
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use tokio::sync::Notify;
|
||||
|
||||
use crate::exec::ExecParams;
|
||||
use crate::exec::SandboxType;
|
||||
use crate::exec::process_exec_tool_call;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
|
||||
// create a temp directory with a zshrc file in it
|
||||
let temp_home = tempfile::tempdir().unwrap();
|
||||
let zshrc_path = temp_home.path().join(".zshrc");
|
||||
std::fs::write(
|
||||
&zshrc_path,
|
||||
r#"
|
||||
set -x
|
||||
function myecho {
|
||||
echo 'It works!'
|
||||
}
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
let shell = Shell::Zsh(ZshShell {
|
||||
shell_path: shell_path.to_string(),
|
||||
zshrc_path: zshrc_path.to_str().unwrap().to_string(),
|
||||
});
|
||||
|
||||
let actual_cmd = shell
|
||||
.format_default_shell_invocation(input.iter().map(|s| s.to_string()).collect());
|
||||
let expected_cmd = expected_cmd
|
||||
.iter()
|
||||
.map(|s| {
|
||||
s.replace("ZSHRC_PATH", zshrc_path.to_str().unwrap())
|
||||
.to_string()
|
||||
})
|
||||
.collect();
|
||||
|
||||
assert_eq!(actual_cmd, Some(expected_cmd));
|
||||
// Actually run the command and check output/exit code
|
||||
let output = process_exec_tool_call(
|
||||
ExecParams {
|
||||
command: actual_cmd.unwrap(),
|
||||
cwd: PathBuf::from(temp_home.path()),
|
||||
timeout_ms: None,
|
||||
env: HashMap::from([(
|
||||
"HOME".to_string(),
|
||||
temp_home.path().to_str().unwrap().to_string(),
|
||||
)]),
|
||||
},
|
||||
SandboxType::None,
|
||||
Arc::new(Notify::new()),
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
&None,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(output.exit_code, 0, "input: {input:?} output: {output:?}");
|
||||
if let Some(expected) = expected_output {
|
||||
assert_eq!(
|
||||
output.stdout, expected,
|
||||
"input: {input:?} output: {output:?}"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -329,6 +329,7 @@ async fn integration_creates_and_checks_session_file() {
|
||||
.env("OPENAI_API_KEY", "dummy")
|
||||
.env("CODEX_RS_SSE_FIXTURE", &fixture)
|
||||
.env("OPENAI_BASE_URL", "http://unused.local");
|
||||
|
||||
let output2 = cmd2.output().unwrap();
|
||||
assert!(output2.status.success(), "resume codex-cli run failed");
|
||||
|
||||
@@ -359,3 +360,125 @@ async fn integration_creates_and_checks_session_file() {
|
||||
"rollout missing resumed marker"
|
||||
);
|
||||
}
|
||||
|
||||
/// Integration test to verify git info is collected and recorded in session files.
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn integration_git_info_unit_test() {
|
||||
// This test verifies git info collection works independently
|
||||
// without depending on the full CLI integration
|
||||
|
||||
// 1. Create temp directory for git repo
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let git_repo = temp_dir.path().to_path_buf();
|
||||
|
||||
// 2. Initialize a git repository with some content
|
||||
let init_output = std::process::Command::new("git")
|
||||
.args(["init"])
|
||||
.current_dir(&git_repo)
|
||||
.output()
|
||||
.unwrap();
|
||||
assert!(init_output.status.success(), "git init failed");
|
||||
|
||||
// Configure git user (required for commits)
|
||||
std::process::Command::new("git")
|
||||
.args(["config", "user.name", "Integration Test"])
|
||||
.current_dir(&git_repo)
|
||||
.output()
|
||||
.unwrap();
|
||||
|
||||
std::process::Command::new("git")
|
||||
.args(["config", "user.email", "test@example.com"])
|
||||
.current_dir(&git_repo)
|
||||
.output()
|
||||
.unwrap();
|
||||
|
||||
// Create a test file and commit it
|
||||
let test_file = git_repo.join("test.txt");
|
||||
std::fs::write(&test_file, "integration test content").unwrap();
|
||||
|
||||
std::process::Command::new("git")
|
||||
.args(["add", "."])
|
||||
.current_dir(&git_repo)
|
||||
.output()
|
||||
.unwrap();
|
||||
|
||||
let commit_output = std::process::Command::new("git")
|
||||
.args(["commit", "-m", "Integration test commit"])
|
||||
.current_dir(&git_repo)
|
||||
.output()
|
||||
.unwrap();
|
||||
assert!(commit_output.status.success(), "git commit failed");
|
||||
|
||||
// Create a branch to test branch detection
|
||||
std::process::Command::new("git")
|
||||
.args(["checkout", "-b", "integration-test-branch"])
|
||||
.current_dir(&git_repo)
|
||||
.output()
|
||||
.unwrap();
|
||||
|
||||
// Add a remote to test repository URL detection
|
||||
std::process::Command::new("git")
|
||||
.args([
|
||||
"remote",
|
||||
"add",
|
||||
"origin",
|
||||
"https://github.com/example/integration-test.git",
|
||||
])
|
||||
.current_dir(&git_repo)
|
||||
.output()
|
||||
.unwrap();
|
||||
|
||||
// 3. Test git info collection directly
|
||||
let git_info = codex_core::git_info::collect_git_info(&git_repo).await;
|
||||
|
||||
// 4. Verify git info is present and contains expected data
|
||||
assert!(git_info.is_some(), "Git info should be collected");
|
||||
|
||||
let git_info = git_info.unwrap();
|
||||
|
||||
// Check that we have a commit hash
|
||||
assert!(
|
||||
git_info.commit_hash.is_some(),
|
||||
"Git info should contain commit_hash"
|
||||
);
|
||||
let commit_hash = git_info.commit_hash.as_ref().unwrap();
|
||||
assert_eq!(commit_hash.len(), 40, "Commit hash should be 40 characters");
|
||||
assert!(
|
||||
commit_hash.chars().all(|c| c.is_ascii_hexdigit()),
|
||||
"Commit hash should be hexadecimal"
|
||||
);
|
||||
|
||||
// Check that we have the correct branch
|
||||
assert!(git_info.branch.is_some(), "Git info should contain branch");
|
||||
let branch = git_info.branch.as_ref().unwrap();
|
||||
assert_eq!(
|
||||
branch, "integration-test-branch",
|
||||
"Branch should match what we created"
|
||||
);
|
||||
|
||||
// Check that we have the repository URL
|
||||
assert!(
|
||||
git_info.repository_url.is_some(),
|
||||
"Git info should contain repository_url"
|
||||
);
|
||||
let repo_url = git_info.repository_url.as_ref().unwrap();
|
||||
assert_eq!(
|
||||
repo_url, "https://github.com/example/integration-test.git",
|
||||
"Repository URL should match what we configured"
|
||||
);
|
||||
|
||||
println!("✅ Git info collection test passed!");
|
||||
println!(" Commit: {commit_hash}");
|
||||
println!(" Branch: {branch}");
|
||||
println!(" Repo: {repo_url}");
|
||||
|
||||
// 5. Test serialization to ensure it works in SessionMeta
|
||||
let serialized = serde_json::to_string(&git_info).unwrap();
|
||||
let deserialized: codex_core::git_info::GitInfo = serde_json::from_str(&serialized).unwrap();
|
||||
|
||||
assert_eq!(git_info.commit_hash, deserialized.commit_hash);
|
||||
assert_eq!(git_info.branch, deserialized.branch);
|
||||
assert_eq!(git_info.repository_url, deserialized.repository_url);
|
||||
|
||||
println!("✅ Git info serialization test passed!");
|
||||
}
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
use codex_core::Codex;
|
||||
use codex_core::CodexSpawnOk;
|
||||
use codex_core::ModelProviderInfo;
|
||||
use codex_core::exec::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::InputItem;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::SessionConfiguredEvent;
|
||||
mod test_support;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::load_sse_fixture_with_id;
|
||||
use core_test_support::wait_for_event;
|
||||
use tempfile::TempDir;
|
||||
use test_support::load_default_config_for_test;
|
||||
use test_support::load_sse_fixture_with_id;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
@@ -72,7 +73,7 @@ async fn includes_session_id_and_model_headers_in_request() {
|
||||
let mut config = load_default_config_for_test(&codex_home);
|
||||
config.model_provider = model_provider;
|
||||
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
|
||||
let (codex, _init_id, _session_id) = Codex::spawn(config, ctrl_c.clone()).await.unwrap();
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(config, ctrl_c.clone()).await.unwrap();
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -84,14 +85,13 @@ async fn includes_session_id_and_model_headers_in_request() {
|
||||
.unwrap();
|
||||
|
||||
let EventMsg::SessionConfigured(SessionConfiguredEvent { session_id, .. }) =
|
||||
test_support::wait_for_event(&codex, |ev| matches!(ev, EventMsg::SessionConfigured(_)))
|
||||
.await
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::SessionConfigured(_))).await
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
let current_session_id = Some(session_id.to_string());
|
||||
test_support::wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
// get request from the server
|
||||
let request = &server.received_requests().await.unwrap()[0];
|
||||
@@ -149,7 +149,7 @@ async fn includes_base_instructions_override_in_request() {
|
||||
config.model_provider = model_provider;
|
||||
|
||||
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
|
||||
let (codex, ..) = Codex::spawn(config, ctrl_c.clone()).await.unwrap();
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(config, ctrl_c.clone()).await.unwrap();
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -160,7 +160,7 @@ async fn includes_base_instructions_override_in_request() {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
test_support::wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let request = &server.received_requests().await.unwrap()[0];
|
||||
let request_body = request.body_json::<serde_json::Value>().unwrap();
|
||||
|
||||
13
codex-rs/core/tests/common/Cargo.toml
Normal file
@@ -0,0 +1,13 @@
|
||||
[package]
|
||||
name = "core_test_support"
|
||||
version = { workspace = true }
|
||||
edition = "2024"
|
||||
|
||||
[lib]
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
codex-core = { path = "../.." }
|
||||
serde_json = "1"
|
||||
tempfile = "3"
|
||||
tokio = { version = "1", features = ["time"] }
|
||||
@@ -1,9 +1,5 @@
|
||||
#![allow(clippy::expect_used)]
|
||||
|
||||
// Helpers shared by the integration tests. These are located inside the
|
||||
// `tests/` tree on purpose so they never become part of the public API surface
|
||||
// of the `codex-core` crate.
|
||||
|
||||
use tempfile::TempDir;
|
||||
|
||||
use codex_core::config::Config;
|
||||
@@ -30,7 +26,6 @@ pub fn load_default_config_for_test(codex_home: &TempDir) -> Config {
|
||||
/// with only a `type` field results in an event with no `data:` section. This
|
||||
/// makes it trivial to extend the fixtures as OpenAI adds new event kinds or
|
||||
/// fields.
|
||||
#[allow(dead_code)]
|
||||
pub fn load_sse_fixture(path: impl AsRef<std::path::Path>) -> String {
|
||||
let events: Vec<serde_json::Value> =
|
||||
serde_json::from_reader(std::fs::File::open(path).expect("read fixture"))
|
||||
@@ -55,7 +50,6 @@ pub fn load_sse_fixture(path: impl AsRef<std::path::Path>) -> String {
|
||||
/// fixture template with the supplied identifier before parsing. This lets a
|
||||
/// single JSON template be reused by multiple tests that each need a unique
|
||||
/// `response_id`.
|
||||
#[allow(dead_code)]
|
||||
pub fn load_sse_fixture_with_id(path: impl AsRef<std::path::Path>, id: &str) -> String {
|
||||
let raw = std::fs::read_to_string(path).expect("read fixture template");
|
||||
let replaced = raw.replace("__ID__", id);
|
||||
@@ -77,7 +71,6 @@ pub fn load_sse_fixture_with_id(path: impl AsRef<std::path::Path>, id: &str) ->
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub async fn wait_for_event<F>(
|
||||
codex: &codex_core::Codex,
|
||||
mut predicate: F,
|
||||
@@ -20,15 +20,15 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use codex_core::Codex;
|
||||
use codex_core::CodexSpawnOk;
|
||||
use codex_core::error::CodexErr;
|
||||
use codex_core::protocol::AgentMessageEvent;
|
||||
use codex_core::protocol::ErrorEvent;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::InputItem;
|
||||
use codex_core::protocol::Op;
|
||||
mod test_support;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use tempfile::TempDir;
|
||||
use test_support::load_default_config_for_test;
|
||||
use tokio::sync::Notify;
|
||||
use tokio::time::timeout;
|
||||
|
||||
@@ -49,7 +49,7 @@ async fn spawn_codex() -> Result<Codex, CodexErr> {
|
||||
let mut config = load_default_config_for_test(&codex_home);
|
||||
config.model_provider.request_max_retries = Some(2);
|
||||
config.model_provider.stream_max_retries = Some(2);
|
||||
let (agent, _init_id, _session_id) =
|
||||
let CodexSpawnOk { codex: agent, .. } =
|
||||
Codex::spawn(config, std::sync::Arc::new(Notify::new())).await?;
|
||||
|
||||
Ok(agent)
|
||||
|
||||
@@ -1,165 +0,0 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use codex_core::Codex;
|
||||
use codex_core::ModelProviderInfo;
|
||||
use codex_core::exec::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_core::protocol::ErrorEvent;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::InputItem;
|
||||
use codex_core::protocol::Op;
|
||||
mod test_support;
|
||||
use serde_json::Value;
|
||||
use tempfile::TempDir;
|
||||
use test_support::load_default_config_for_test;
|
||||
use test_support::load_sse_fixture_with_id;
|
||||
use tokio::time::timeout;
|
||||
use wiremock::Match;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::Request;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
|
||||
/// Matcher asserting that JSON body has NO `previous_response_id` field.
|
||||
struct NoPrevId;
|
||||
|
||||
impl Match for NoPrevId {
|
||||
fn matches(&self, req: &Request) -> bool {
|
||||
serde_json::from_slice::<Value>(&req.body)
|
||||
.map(|v| v.get("previous_response_id").is_none())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
/// Matcher asserting that JSON body HAS a `previous_response_id` field.
|
||||
struct HasPrevId;
|
||||
|
||||
impl Match for HasPrevId {
|
||||
fn matches(&self, req: &Request) -> bool {
|
||||
serde_json::from_slice::<Value>(&req.body)
|
||||
.map(|v| v.get("previous_response_id").is_some())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
/// Build minimal SSE stream with completed marker using the JSON fixture.
|
||||
fn sse_completed(id: &str) -> String {
|
||||
load_sse_fixture_with_id("tests/fixtures/completed_template.json", id)
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn keeps_previous_response_id_between_tasks() {
|
||||
#![allow(clippy::unwrap_used)]
|
||||
|
||||
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!(
|
||||
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Mock server
|
||||
let server = MockServer::start().await;
|
||||
|
||||
// First request – must NOT include `previous_response_id`.
|
||||
let first = ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "text/event-stream")
|
||||
.set_body_raw(sse_completed("resp1"), "text/event-stream");
|
||||
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/v1/responses"))
|
||||
.and(NoPrevId)
|
||||
.respond_with(first)
|
||||
.expect(1)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
// Second request – MUST include `previous_response_id`.
|
||||
let second = ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "text/event-stream")
|
||||
.set_body_raw(sse_completed("resp2"), "text/event-stream");
|
||||
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/v1/responses"))
|
||||
.and(HasPrevId)
|
||||
.respond_with(second)
|
||||
.expect(1)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
// Configure retry behavior explicitly to avoid mutating process-wide
|
||||
// environment variables.
|
||||
let model_provider = ModelProviderInfo {
|
||||
name: "openai".into(),
|
||||
base_url: format!("{}/v1", server.uri()),
|
||||
// Environment variable that should exist in the test environment.
|
||||
// ModelClient will return an error if the environment variable for the
|
||||
// provider is not set.
|
||||
env_key: Some("PATH".into()),
|
||||
env_key_instructions: None,
|
||||
wire_api: codex_core::WireApi::Responses,
|
||||
query_params: None,
|
||||
http_headers: None,
|
||||
env_http_headers: None,
|
||||
// disable retries so we don't get duplicate calls in this test
|
||||
request_max_retries: Some(0),
|
||||
stream_max_retries: Some(0),
|
||||
stream_idle_timeout_ms: None,
|
||||
};
|
||||
|
||||
// Init session
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home);
|
||||
config.model_provider = model_provider;
|
||||
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
|
||||
let (codex, _init_id, _session_id) = Codex::spawn(config, ctrl_c.clone()).await.unwrap();
|
||||
|
||||
// Task 1 – triggers first request (no previous_response_id)
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: "hello".into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Wait for TaskComplete
|
||||
loop {
|
||||
let ev = timeout(Duration::from_secs(1), codex.next_event())
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
if matches!(ev.msg, EventMsg::TaskComplete(_)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Task 2 – should include `previous_response_id` (triggers second request)
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: "again".into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Wait for TaskComplete or error
|
||||
loop {
|
||||
let ev = timeout(Duration::from_secs(1), codex.next_event())
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
match ev.msg {
|
||||
EventMsg::TaskComplete(_) => break,
|
||||
EventMsg::Error(ErrorEvent { message }) => {
|
||||
panic!("unexpected error: {message}")
|
||||
}
|
||||
_ => {
|
||||
// Ignore other events.
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4,16 +4,16 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use codex_core::Codex;
|
||||
use codex_core::CodexSpawnOk;
|
||||
use codex_core::ModelProviderInfo;
|
||||
use codex_core::exec::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::InputItem;
|
||||
use codex_core::protocol::Op;
|
||||
mod test_support;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::load_sse_fixture;
|
||||
use core_test_support::load_sse_fixture_with_id;
|
||||
use tempfile::TempDir;
|
||||
use test_support::load_default_config_for_test;
|
||||
use test_support::load_sse_fixture;
|
||||
use test_support::load_sse_fixture_with_id;
|
||||
use tokio::time::timeout;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
@@ -95,7 +95,7 @@ async fn retries_on_early_close() {
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home);
|
||||
config.model_provider = model_provider;
|
||||
let (codex, _init_id, _session_id) = Codex::spawn(config, ctrl_c).await.unwrap();
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(config, ctrl_c).await.unwrap();
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
|
||||
@@ -18,13 +18,13 @@ workspace = true
|
||||
anyhow = "1"
|
||||
chrono = "0.4.40"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
codex-arg0 = { path = "../arg0" }
|
||||
codex-core = { path = "../core" }
|
||||
codex-common = { path = "../common", features = [
|
||||
"cli",
|
||||
"elapsed",
|
||||
"sandbox_summary",
|
||||
] }
|
||||
codex-linux-sandbox = { path = "../linux-sandbox" }
|
||||
owo-colors = "4.2.0"
|
||||
serde_json = "1"
|
||||
shlex = "1.3.0"
|
||||
@@ -37,3 +37,8 @@ tokio = { version = "1", features = [
|
||||
] }
|
||||
tracing = { version = "0.1.41", features = ["log"] }
|
||||
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2"
|
||||
predicates = "3"
|
||||
tempfile = "3.13.0"
|
||||
|
||||
@@ -1,15 +1,23 @@
use std::path::Path;

use codex_common::summarize_sandbox_policy;
use codex_core::WireApi;
use codex_core::config::Config;
use codex_core::model_supports_reasoning_summaries;
use codex_core::protocol::Event;

pub(crate) enum CodexStatus {
Running,
InitiateShutdown,
Shutdown,
}

pub(crate) trait EventProcessor {
/// Print summary of effective configuration and user prompt.
fn print_config_summary(&mut self, config: &Config, prompt: &str);

/// Handle a single event emitted by the agent.
fn process_event(&mut self, event: Event);
fn process_event(&mut self, event: Event) -> CodexStatus;
}

pub(crate) fn create_config_summary_entries(config: &Config) -> Vec<(&'static str, String)> {
@@ -17,7 +25,7 @@ pub(crate) fn create_config_summary_entries(config: &Config) -> Vec<(&'static st
("workdir", config.cwd.display().to_string()),
("model", config.model.clone()),
("provider", config.model_provider_id.clone()),
("approval", format!("{:?}", config.approval_policy)),
("approval", config.approval_policy.to_string()),
("sandbox", summarize_sandbox_policy(&config.sandbox_policy)),
];
if config.model_provider.wire_api == WireApi::Responses
@@ -35,3 +43,28 @@ pub(crate) fn create_config_summary_entries(config: &Config) -> Vec<(&'static st

entries
}

pub(crate) fn handle_last_message(
last_agent_message: Option<&str>,
last_message_path: Option<&Path>,
) {
match (last_message_path, last_agent_message) {
(Some(path), Some(msg)) => write_last_message_file(msg, Some(path)),
(Some(path), None) => {
write_last_message_file("", Some(path));
eprintln!(
"Warning: no last agent message; wrote empty content to {}",
path.display()
);
}
(None, _) => eprintln!("Warning: no file to write last message to."),
}
}

fn write_last_message_file(contents: &str, last_message_path: Option<&Path>) {
if let Some(path) = last_message_path {
if let Err(e) = std::fs::write(path, contents) {
eprintln!("Failed to write last message file {path:?}: {e}");
}
}
}

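The hunks above change `EventProcessor::process_event` to return a `CodexStatus` so that callers drive shutdown instead of the processor. A minimal sketch of an implementation against the revised trait, assuming it lives alongside the trait in event_processor.rs; the `QuietEventProcessor` name is invented for illustration:

use codex_core::config::Config;
use codex_core::protocol::Event;
use codex_core::protocol::EventMsg;

struct QuietEventProcessor;

impl EventProcessor for QuietEventProcessor {
    fn print_config_summary(&mut self, config: &Config, prompt: &str) {
        for (key, value) in create_config_summary_entries(config) {
            println!("{key}: {value}");
        }
        println!("prompt: {prompt}");
    }

    fn process_event(&mut self, event: Event) -> CodexStatus {
        match event.msg {
            // Task finished: ask the caller to submit Op::Shutdown.
            EventMsg::TaskComplete(_) => CodexStatus::InitiateShutdown,
            // Shutdown acknowledged: the caller can exit its event loop.
            EventMsg::ShutdownComplete => CodexStatus::Shutdown,
            // Everything else keeps the session running.
            _ => CodexStatus::Running,
        }
    }
}

The caller reacts to `InitiateShutdown` by submitting `Op::Shutdown` and to `Shutdown` by breaking out of its receive loop, as the run_main hunk further down in this diff shows.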
@@ -15,16 +15,20 @@ use codex_core::protocol::McpToolCallEndEvent;
|
||||
use codex_core::protocol::PatchApplyBeginEvent;
|
||||
use codex_core::protocol::PatchApplyEndEvent;
|
||||
use codex_core::protocol::SessionConfiguredEvent;
|
||||
use codex_core::protocol::TaskCompleteEvent;
|
||||
use codex_core::protocol::TokenUsage;
|
||||
use owo_colors::OwoColorize;
|
||||
use owo_colors::Style;
|
||||
use shlex::try_join;
|
||||
use std::collections::HashMap;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Instant;
|
||||
|
||||
use crate::event_processor::CodexStatus;
|
||||
use crate::event_processor::EventProcessor;
|
||||
use crate::event_processor::create_config_summary_entries;
|
||||
use crate::event_processor::handle_last_message;
|
||||
|
||||
/// This should be configurable. When used in CI, users may not want to impose
|
||||
/// a limit so they can see the full transcript.
|
||||
@@ -54,10 +58,15 @@ pub(crate) struct EventProcessorWithHumanOutput {
|
||||
show_agent_reasoning: bool,
|
||||
answer_started: bool,
|
||||
reasoning_started: bool,
|
||||
last_message_path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl EventProcessorWithHumanOutput {
|
||||
pub(crate) fn create_with_ansi(with_ansi: bool, config: &Config) -> Self {
|
||||
pub(crate) fn create_with_ansi(
|
||||
with_ansi: bool,
|
||||
config: &Config,
|
||||
last_message_path: Option<PathBuf>,
|
||||
) -> Self {
|
||||
let call_id_to_command = HashMap::new();
|
||||
let call_id_to_patch = HashMap::new();
|
||||
let call_id_to_tool_call = HashMap::new();
|
||||
@@ -77,6 +86,7 @@ impl EventProcessorWithHumanOutput {
|
||||
show_agent_reasoning: !config.hide_agent_reasoning,
|
||||
answer_started: false,
|
||||
reasoning_started: false,
|
||||
last_message_path,
|
||||
}
|
||||
} else {
|
||||
Self {
|
||||
@@ -93,6 +103,7 @@ impl EventProcessorWithHumanOutput {
|
||||
show_agent_reasoning: !config.hide_agent_reasoning,
|
||||
answer_started: false,
|
||||
reasoning_started: false,
|
||||
last_message_path,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -158,7 +169,7 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
);
|
||||
}
|
||||
|
||||
fn process_event(&mut self, event: Event) {
|
||||
fn process_event(&mut self, event: Event) -> CodexStatus {
|
||||
let Event { id: _, msg } = event;
|
||||
match msg {
|
||||
EventMsg::Error(ErrorEvent { message }) => {
|
||||
@@ -168,9 +179,16 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
EventMsg::BackgroundEvent(BackgroundEventEvent { message }) => {
|
||||
ts_println!(self, "{}", message.style(self.dimmed));
|
||||
}
|
||||
EventMsg::TaskStarted | EventMsg::TaskComplete(_) => {
|
||||
EventMsg::TaskStarted => {
|
||||
// Ignore.
|
||||
}
|
||||
EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
|
||||
handle_last_message(
|
||||
last_agent_message.as_deref(),
|
||||
self.last_message_path.as_deref(),
|
||||
);
|
||||
return CodexStatus::InitiateShutdown;
|
||||
}
|
||||
EventMsg::TokenCount(TokenUsage { total_tokens, .. }) => {
|
||||
ts_println!(self, "tokens used: {total_tokens}");
|
||||
}
|
||||
@@ -185,7 +203,7 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
}
|
||||
EventMsg::AgentReasoningDelta(AgentReasoningDeltaEvent { delta }) => {
|
||||
if !self.show_agent_reasoning {
|
||||
return;
|
||||
return CodexStatus::Running;
|
||||
}
|
||||
if !self.reasoning_started {
|
||||
ts_println!(
|
||||
@@ -498,7 +516,9 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
EventMsg::GetHistoryEntryResponse(_) => {
|
||||
// Currently ignored in exec output.
|
||||
}
|
||||
EventMsg::ShutdownComplete => return CodexStatus::Shutdown,
|
||||
}
|
||||
CodexStatus::Running
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,18 +1,24 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::protocol::Event;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::TaskCompleteEvent;
|
||||
use serde_json::json;
|
||||
|
||||
use crate::event_processor::CodexStatus;
|
||||
use crate::event_processor::EventProcessor;
|
||||
use crate::event_processor::create_config_summary_entries;
|
||||
use crate::event_processor::handle_last_message;
|
||||
|
||||
pub(crate) struct EventProcessorWithJsonOutput;
|
||||
pub(crate) struct EventProcessorWithJsonOutput {
|
||||
last_message_path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl EventProcessorWithJsonOutput {
|
||||
pub fn new() -> Self {
|
||||
Self {}
|
||||
pub fn new(last_message_path: Option<PathBuf>) -> Self {
|
||||
Self { last_message_path }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,15 +39,25 @@ impl EventProcessor for EventProcessorWithJsonOutput {
|
||||
println!("{prompt_json}");
|
||||
}
|
||||
|
||||
fn process_event(&mut self, event: Event) {
|
||||
fn process_event(&mut self, event: Event) -> CodexStatus {
|
||||
match event.msg {
|
||||
EventMsg::AgentMessageDelta(_) | EventMsg::AgentReasoningDelta(_) => {
|
||||
// Suppress streaming events in JSON mode.
|
||||
CodexStatus::Running
|
||||
}
|
||||
EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
|
||||
handle_last_message(
|
||||
last_agent_message.as_deref(),
|
||||
self.last_message_path.as_deref(),
|
||||
);
|
||||
CodexStatus::InitiateShutdown
|
||||
}
|
||||
EventMsg::ShutdownComplete => CodexStatus::Shutdown,
|
||||
_ => {
|
||||
if let Ok(line) = serde_json::to_string(&event) {
|
||||
println!("{line}");
|
||||
}
|
||||
CodexStatus::Running
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,12 +5,12 @@ mod event_processor_with_json_output;
|
||||
|
||||
use std::io::IsTerminal;
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use cli::Cli;
|
||||
use codex_core::codex_wrapper;
|
||||
use codex_core::codex_wrapper::CodexConversation;
|
||||
use codex_core::codex_wrapper::{self};
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::config_types::SandboxMode;
|
||||
@@ -28,6 +28,7 @@ use tracing::error;
|
||||
use tracing::info;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
use crate::event_processor::CodexStatus;
|
||||
use crate::event_processor::EventProcessor;
|
||||
|
||||
pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
|
||||
@@ -123,11 +124,12 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
|
||||
let config = Config::load_with_cli_overrides(cli_kv_overrides, overrides)?;
|
||||
let mut event_processor: Box<dyn EventProcessor> = if json_mode {
|
||||
Box::new(EventProcessorWithJsonOutput::new())
|
||||
Box::new(EventProcessorWithJsonOutput::new(last_message_file.clone()))
|
||||
} else {
|
||||
Box::new(EventProcessorWithHumanOutput::create_with_ansi(
|
||||
stdout_with_ansi,
|
||||
&config,
|
||||
last_message_file.clone(),
|
||||
))
|
||||
};
|
||||
|
||||
@@ -154,9 +156,14 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
.with_writer(std::io::stderr)
|
||||
.try_init();
|
||||
|
||||
let (codex_wrapper, event, ctrl_c, _session_id) = codex_wrapper::init_codex(config).await?;
|
||||
let CodexConversation {
|
||||
codex: codex_wrapper,
|
||||
session_configured,
|
||||
ctrl_c,
|
||||
..
|
||||
} = codex_wrapper::init_codex(config).await?;
|
||||
let codex = Arc::new(codex_wrapper);
|
||||
info!("Codex initialized with event: {event:?}");
|
||||
info!("Codex initialized with event: {session_configured:?}");
|
||||
|
||||
let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::<Event>();
|
||||
{
|
||||
@@ -224,40 +231,17 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
|
||||
// Run the loop until the task is complete.
|
||||
while let Some(event) = rx.recv().await {
|
||||
let (is_last_event, last_assistant_message) = match &event.msg {
|
||||
EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
|
||||
(true, last_agent_message.clone())
|
||||
let shutdown: CodexStatus = event_processor.process_event(event);
|
||||
match shutdown {
|
||||
CodexStatus::Running => continue,
|
||||
CodexStatus::InitiateShutdown => {
|
||||
codex.submit(Op::Shutdown).await?;
|
||||
}
|
||||
CodexStatus::Shutdown => {
|
||||
break;
|
||||
}
|
||||
_ => (false, None),
|
||||
};
|
||||
event_processor.process_event(event);
|
||||
if is_last_event {
|
||||
handle_last_message(last_assistant_message, last_message_file.as_deref())?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_last_message(
|
||||
last_agent_message: Option<String>,
|
||||
last_message_file: Option<&Path>,
|
||||
) -> std::io::Result<()> {
|
||||
match (last_agent_message, last_message_file) {
|
||||
(Some(last_agent_message), Some(last_message_file)) => {
|
||||
// Last message and a file to write to.
|
||||
std::fs::write(last_message_file, last_agent_message)?;
|
||||
}
|
||||
(None, Some(last_message_file)) => {
|
||||
eprintln!(
|
||||
"Warning: No last message to write to file: {}",
|
||||
last_message_file.to_string_lossy()
|
||||
);
|
||||
}
|
||||
(_, None) => {
|
||||
// No last message and no file to write to.
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
//! This allows us to ship a completely separate set of functionality as part
|
||||
//! of the `codex-exec` binary.
|
||||
use clap::Parser;
|
||||
use codex_arg0::arg0_dispatch_or_else;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_exec::Cli;
|
||||
use codex_exec::run_main;
|
||||
@@ -24,7 +25,7 @@ struct TopCli {
|
||||
}
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
codex_linux_sandbox::run_with_sandbox(|codex_linux_sandbox_exe| async move {
|
||||
arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
|
||||
let top_cli = TopCli::parse();
|
||||
// Merge root-level overrides into inner CLI struct so downstream logic remains unchanged.
|
||||
let mut inner = top_cli.inner;
|
||||
|
||||
codex-rs/exec/tests/apply_patch.rs (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use std::fs;
|
||||
use std::process::Command;
|
||||
use tempfile::tempdir;
|
||||
|
||||
/// While we may add an `apply-patch` subcommand to the `codex` CLI multitool
|
||||
/// at some point, we must ensure that the smaller `codex-exec` CLI can still
|
||||
/// emulate the `apply_patch` CLI.
|
||||
#[test]
|
||||
fn test_standalone_exec_cli_can_use_apply_patch() -> anyhow::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let relative_path = "source.txt";
|
||||
let absolute_path = tmp.path().join(relative_path);
|
||||
fs::write(&absolute_path, "original content\n")?;
|
||||
|
||||
Command::cargo_bin("codex-exec")
|
||||
.context("should find binary for codex-exec")?
|
||||
.arg("--codex-run-as-apply-patch")
|
||||
.arg(
|
||||
r#"*** Begin Patch
|
||||
*** Update File: source.txt
|
||||
@@
|
||||
-original content
|
||||
+modified by apply_patch
|
||||
*** End Patch"#,
|
||||
)
|
||||
.current_dir(tmp.path())
|
||||
.assert()
|
||||
.success()
|
||||
.stdout("Success. Updated the following files:\nM source.txt\n")
|
||||
.stderr(predicates::str::is_empty());
|
||||
assert_eq!(
|
||||
fs::read_to_string(absolute_path)?,
|
||||
"modified by apply_patch\n"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
@@ -14,13 +14,16 @@ path = "src/lib.rs"
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
anyhow = "1"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
codex-common = { path = "../common", features = ["cli"] }
|
||||
codex-core = { path = "../core" }
|
||||
tokio = { version = "1", features = ["rt-multi-thread"] }
|
||||
libc = "0.2.172"
|
||||
landlock = "0.4.1"
|
||||
seccompiler = "0.5.0"
|
||||
|
||||
[dev-dependencies]
|
||||
[target.'cfg(target_os = "linux")'.dev-dependencies]
|
||||
tempfile = "3"
|
||||
tokio = { version = "1", features = [
|
||||
"io-std",
|
||||
@@ -29,8 +32,3 @@ tokio = { version = "1", features = [
|
||||
"rt-multi-thread",
|
||||
"signal",
|
||||
] }
|
||||
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
libc = "0.2.172"
|
||||
landlock = "0.4.1"
|
||||
seccompiler = "0.5.0"
|
||||
|
||||
@@ -4,57 +4,8 @@ mod landlock;
|
||||
mod linux_run_main;
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
pub use linux_run_main::run_main;
|
||||
|
||||
use std::future::Future;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Helper that consolidates the common boilerplate found in several Codex
|
||||
/// binaries (`codex`, `codex-exec`, `codex-tui`) around dispatching to the
|
||||
/// `codex-linux-sandbox` sub-command.
|
||||
///
|
||||
/// When the current executable is invoked through the hard-link or alias
|
||||
/// named `codex-linux-sandbox` we *directly* execute [`run_main`](crate::run_main)
|
||||
/// (which never returns). Otherwise we:
|
||||
/// 1. Construct a Tokio multi-thread runtime.
|
||||
/// 2. Derive the path to the current executable (so children can re-invoke
|
||||
/// the sandbox) when running on Linux.
|
||||
/// 3. Execute the provided async `main_fn` inside that runtime, forwarding
|
||||
/// any error.
|
||||
///
|
||||
/// This function eliminates duplicated code across the various `main.rs`
|
||||
/// entry-points.
|
||||
pub fn run_with_sandbox<F, Fut>(main_fn: F) -> anyhow::Result<()>
|
||||
where
|
||||
F: FnOnce(Option<PathBuf>) -> Fut,
|
||||
Fut: Future<Output = anyhow::Result<()>>,
|
||||
{
|
||||
use std::path::Path;
|
||||
|
||||
// Determine if we were invoked via the special alias.
|
||||
let argv0 = std::env::args().next().unwrap_or_default();
|
||||
let exe_name = Path::new(&argv0)
|
||||
.file_name()
|
||||
.and_then(|s| s.to_str())
|
||||
.unwrap_or("");
|
||||
|
||||
if exe_name == "codex-linux-sandbox" {
|
||||
// Safety: [`run_main`] never returns.
|
||||
crate::run_main();
|
||||
}
|
||||
|
||||
// Regular invocation – create a Tokio runtime and execute the provided
|
||||
// async entry-point.
|
||||
let runtime = tokio::runtime::Runtime::new()?;
|
||||
runtime.block_on(async move {
|
||||
let codex_linux_sandbox_exe: Option<PathBuf> = if cfg!(target_os = "linux") {
|
||||
std::env::current_exe().ok()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
main_fn(codex_linux_sandbox_exe).await
|
||||
})
|
||||
pub fn run_main() -> ! {
|
||||
linux_run_main::run_main();
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
|
||||
@@ -10,6 +10,7 @@
//! program. The utility connects, issues a `tools/list` request and prints the
//! server's response as pretty JSON.

use std::ffi::OsString;
use std::time::Duration;

use anyhow::Context;
@@ -37,7 +38,7 @@ async fn main() -> Result<()> {
.try_init();

// Collect command-line arguments excluding the program name itself.
let mut args: Vec<String> = std::env::args().skip(1).collect();
let mut args: Vec<OsString> = std::env::args_os().skip(1).collect();

if args.is_empty() || args[0] == "--help" || args[0] == "-h" {
eprintln!("Usage: mcp-client <program> [args..]\n\nExample: mcp-client codex-mcp-server");

@@ -12,6 +12,7 @@
//! issue requests and receive strongly-typed results.

use std::collections::HashMap;
use std::ffi::OsString;
use std::sync::Arc;
use std::sync::atomic::AtomicI64;
use std::sync::atomic::Ordering;
@@ -82,8 +83,8 @@ impl McpClient {
/// Caller is responsible for sending the `initialize` request. See
/// [`initialize`](Self::initialize) for details.
pub async fn new_stdio_client(
program: String,
args: Vec<String>,
program: OsString,
args: Vec<OsString>,
env: Option<HashMap<String, String>>,
) -> std::io::Result<Self> {
let mut child = Command::new(program)

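The hunk above switches `McpClient::new_stdio_client` to `OsString` program and argument types. A minimal usage sketch, assuming only the signature shown here; the `McpClient` import path is omitted, and the server binary name follows the usage string above:

use std::collections::HashMap;
use std::ffi::OsString;

async fn connect() -> std::io::Result<McpClient> {
    let program = OsString::from("codex-mcp-server");
    let args: Vec<OsString> = Vec::new();
    // No additional environment overrides.
    let env: Option<HashMap<String, String>> = None;
    // The caller is still responsible for sending the `initialize` request afterwards.
    McpClient::new_stdio_client(program, args, env).await
}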
@@ -16,8 +16,8 @@ workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
codex-arg0 = { path = "../arg0" }
|
||||
codex-core = { path = "../core" }
|
||||
codex-linux-sandbox = { path = "../linux-sandbox" }
|
||||
mcp-types = { path = "../mcp-types" }
|
||||
schemars = "0.8.22"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
@@ -37,6 +37,7 @@ uuid = { version = "1", features = ["serde", "v4"] }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2"
|
||||
mcp_test_support = { path = "tests/common" }
|
||||
pretty_assertions = "1.4.1"
|
||||
tempfile = "3"
|
||||
tokio-test = "0.4"
|
||||
|
||||
@@ -168,7 +168,7 @@ impl CodexToolCallParam {
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub(crate) struct CodexToolCallReplyParam {
|
||||
pub struct CodexToolCallReplyParam {
|
||||
/// The *session id* for this conversation.
|
||||
pub session_id: String,
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
|
||||
use codex_core::Codex;
|
||||
use codex_core::codex_wrapper::CodexConversation;
|
||||
use codex_core::codex_wrapper::init_codex;
|
||||
use codex_core::config::Config as CodexConfig;
|
||||
use codex_core::protocol::AgentMessageEvent;
|
||||
@@ -20,11 +21,13 @@ use mcp_types::CallToolResult;
|
||||
use mcp_types::ContentBlock;
|
||||
use mcp_types::RequestId;
|
||||
use mcp_types::TextContent;
|
||||
use serde_json::json;
|
||||
use tokio::sync::Mutex;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::exec_approval::handle_exec_approval_request;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use crate::outgoing_message::OutgoingNotificationMeta;
|
||||
use crate::patch_approval::handle_patch_approval_request;
|
||||
|
||||
pub(crate) const INVALID_PARAMS_ERROR_CODE: i64 = -32602;
|
||||
@@ -39,8 +42,14 @@ pub async fn run_codex_tool_session(
|
||||
config: CodexConfig,
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
session_map: Arc<Mutex<HashMap<Uuid, Arc<Codex>>>>,
|
||||
running_requests_id_to_codex_uuid: Arc<Mutex<HashMap<RequestId, Uuid>>>,
|
||||
) {
|
||||
let (codex, first_event, _ctrl_c, session_id) = match init_codex(config).await {
|
||||
let CodexConversation {
|
||||
codex,
|
||||
session_configured,
|
||||
session_id,
|
||||
..
|
||||
} = match init_codex(config).await {
|
||||
Ok(res) => res,
|
||||
Err(e) => {
|
||||
let result = CallToolResult {
|
||||
@@ -63,8 +72,12 @@ pub async fn run_codex_tool_session(
|
||||
session_map.lock().await.insert(session_id, codex.clone());
|
||||
drop(session_map);
|
||||
|
||||
// Send initial SessionConfigured event.
|
||||
outgoing.send_event_as_notification(&first_event).await;
|
||||
outgoing
|
||||
.send_event_as_notification(
|
||||
&session_configured,
|
||||
Some(OutgoingNotificationMeta::new(Some(id.clone()))),
|
||||
)
|
||||
.await;
|
||||
|
||||
// Use the original MCP request ID as the `sub_id` for the Codex submission so that
|
||||
// any events emitted for this tool-call can be correlated with the
|
||||
@@ -73,7 +86,10 @@ pub async fn run_codex_tool_session(
|
||||
RequestId::String(s) => s.clone(),
|
||||
RequestId::Integer(n) => n.to_string(),
|
||||
};
|
||||
|
||||
running_requests_id_to_codex_uuid
|
||||
.lock()
|
||||
.await
|
||||
.insert(id.clone(), session_id);
|
||||
let submission = Submission {
|
||||
id: sub_id.clone(),
|
||||
op: Op::UserInput {
|
||||
@@ -85,9 +101,12 @@ pub async fn run_codex_tool_session(
|
||||
|
||||
if let Err(e) = codex.submit_with_id(submission).await {
|
||||
tracing::error!("Failed to submit initial prompt: {e}");
|
||||
// unregister the id so we don't keep it in the map
|
||||
running_requests_id_to_codex_uuid.lock().await.remove(&id);
|
||||
return;
|
||||
}
|
||||
|
||||
run_codex_tool_session_inner(codex, outgoing, id).await;
|
||||
run_codex_tool_session_inner(codex, outgoing, id, running_requests_id_to_codex_uuid).await;
|
||||
}
|
||||
|
||||
pub async fn run_codex_tool_session_reply(
|
||||
@@ -95,7 +114,13 @@ pub async fn run_codex_tool_session_reply(
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
request_id: RequestId,
|
||||
prompt: String,
|
||||
running_requests_id_to_codex_uuid: Arc<Mutex<HashMap<RequestId, Uuid>>>,
|
||||
session_id: Uuid,
|
||||
) {
|
||||
running_requests_id_to_codex_uuid
|
||||
.lock()
|
||||
.await
|
||||
.insert(request_id.clone(), session_id);
|
||||
if let Err(e) = codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text { text: prompt }],
|
||||
@@ -103,15 +128,28 @@ pub async fn run_codex_tool_session_reply(
|
||||
.await
|
||||
{
|
||||
tracing::error!("Failed to submit user input: {e}");
|
||||
// unregister the id so we don't keep it in the map
|
||||
running_requests_id_to_codex_uuid
|
||||
.lock()
|
||||
.await
|
||||
.remove(&request_id);
|
||||
return;
|
||||
}
|
||||
|
||||
run_codex_tool_session_inner(codex, outgoing, request_id).await;
|
||||
run_codex_tool_session_inner(
|
||||
codex,
|
||||
outgoing,
|
||||
request_id,
|
||||
running_requests_id_to_codex_uuid,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn run_codex_tool_session_inner(
|
||||
codex: Arc<Codex>,
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
request_id: RequestId,
|
||||
running_requests_id_to_codex_uuid: Arc<Mutex<HashMap<RequestId, Uuid>>>,
|
||||
) {
|
||||
let request_id_str = match &request_id {
|
||||
RequestId::String(s) => s.clone(),
|
||||
@@ -123,12 +161,18 @@ async fn run_codex_tool_session_inner(
|
||||
loop {
|
||||
match codex.next_event().await {
|
||||
Ok(event) => {
|
||||
outgoing.send_event_as_notification(&event).await;
|
||||
outgoing
|
||||
.send_event_as_notification(
|
||||
&event,
|
||||
Some(OutgoingNotificationMeta::new(Some(request_id.clone()))),
|
||||
)
|
||||
.await;
|
||||
|
||||
match event.msg {
|
||||
EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent {
|
||||
command,
|
||||
cwd,
|
||||
call_id,
|
||||
reason: _,
|
||||
}) => {
|
||||
handle_exec_approval_request(
|
||||
@@ -139,16 +183,27 @@ async fn run_codex_tool_session_inner(
|
||||
request_id.clone(),
|
||||
request_id_str.clone(),
|
||||
event.id.clone(),
|
||||
call_id,
|
||||
)
|
||||
.await;
|
||||
continue;
|
||||
}
|
||||
EventMsg::Error(err_event) => {
|
||||
// Return a response to conclude the tool call when the Codex session reports an error (e.g., interruption).
|
||||
let result = json!({
|
||||
"error": err_event.message,
|
||||
});
|
||||
outgoing.send_response(request_id.clone(), result).await;
|
||||
break;
|
||||
}
|
||||
EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
}) => {
|
||||
handle_patch_approval_request(
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
@@ -178,6 +233,11 @@ async fn run_codex_tool_session_inner(
|
||||
outgoing
|
||||
.send_response(request_id.clone(), result.into())
|
||||
.await;
|
||||
// unregister the id so we don't keep it in the map
|
||||
running_requests_id_to_codex_uuid
|
||||
.lock()
|
||||
.await
|
||||
.remove(&request_id);
|
||||
break;
|
||||
}
|
||||
EventMsg::SessionConfigured(_) => {
|
||||
@@ -192,8 +252,7 @@ async fn run_codex_tool_session_inner(
|
||||
EventMsg::AgentMessage(AgentMessageEvent { .. }) => {
|
||||
// TODO: think how we want to support this in the MCP
|
||||
}
|
||||
EventMsg::Error(_)
|
||||
| EventMsg::TaskStarted
|
||||
EventMsg::TaskStarted
|
||||
| EventMsg::TokenCount(_)
|
||||
| EventMsg::AgentReasoning(_)
|
||||
| EventMsg::McpToolCallBegin(_)
|
||||
@@ -203,7 +262,8 @@ async fn run_codex_tool_session_inner(
|
||||
| EventMsg::BackgroundEvent(_)
|
||||
| EventMsg::PatchApplyBegin(_)
|
||||
| EventMsg::PatchApplyEnd(_)
|
||||
| EventMsg::GetHistoryEntryResponse(_) => {
|
||||
| EventMsg::GetHistoryEntryResponse(_)
|
||||
| EventMsg::ShutdownComplete => {
|
||||
// For now, we do not do anything extra for these
|
||||
// events. Note that
|
||||
// send(codex_event_to_notification(&event)) above has
|
||||
|
||||
@@ -32,6 +32,7 @@ pub struct ExecApprovalElicitRequestParams {
|
||||
pub codex_elicitation: String,
|
||||
pub codex_mcp_tool_call_id: String,
|
||||
pub codex_event_id: String,
|
||||
pub codex_call_id: String,
|
||||
pub codex_command: Vec<String>,
|
||||
pub codex_cwd: PathBuf,
|
||||
}
|
||||
@@ -45,6 +46,7 @@ pub struct ExecApprovalResponse {
|
||||
pub decision: ReviewDecision,
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub(crate) async fn handle_exec_approval_request(
|
||||
command: Vec<String>,
|
||||
cwd: PathBuf,
|
||||
@@ -53,6 +55,7 @@ pub(crate) async fn handle_exec_approval_request(
|
||||
request_id: RequestId,
|
||||
tool_call_id: String,
|
||||
event_id: String,
|
||||
call_id: String,
|
||||
) {
|
||||
let escaped_command =
|
||||
shlex::try_join(command.iter().map(|s| s.as_str())).unwrap_or_else(|_| command.join(" "));
|
||||
@@ -71,6 +74,7 @@ pub(crate) async fn handle_exec_approval_request(
|
||||
codex_elicitation: "exec-approval".to_string(),
|
||||
codex_mcp_tool_call_id: tool_call_id.clone(),
|
||||
codex_event_id: event_id.clone(),
|
||||
codex_call_id: call_id,
|
||||
codex_command: command,
|
||||
codex_cwd: cwd,
|
||||
};
|
||||
|
||||
@@ -13,6 +13,7 @@ use tokio::sync::mpsc;
|
||||
use tracing::debug;
|
||||
use tracing::error;
|
||||
use tracing::info;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
mod codex_tool_config;
|
||||
mod codex_tool_runner;
|
||||
@@ -27,6 +28,7 @@ use crate::outgoing_message::OutgoingMessage;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
|
||||
pub use crate::codex_tool_config::CodexToolCallParam;
|
||||
pub use crate::codex_tool_config::CodexToolCallReplyParam;
|
||||
pub use crate::exec_approval::ExecApprovalElicitRequestParams;
|
||||
pub use crate::exec_approval::ExecApprovalResponse;
|
||||
pub use crate::patch_approval::PatchApprovalElicitRequestParams;
|
||||
@@ -42,6 +44,7 @@ pub async fn run_main(codex_linux_sandbox_exe: Option<PathBuf>) -> IoResult<()>
|
||||
// control the log level with `RUST_LOG`.
|
||||
tracing_subscriber::fmt()
|
||||
.with_writer(std::io::stderr)
|
||||
.with_env_filter(EnvFilter::from_default_env())
|
||||
.init();
|
||||
|
||||
// Set up channels.
|
||||
@@ -81,7 +84,7 @@ pub async fn run_main(codex_linux_sandbox_exe: Option<PathBuf>) -> IoResult<()>
|
||||
match msg {
|
||||
JSONRPCMessage::Request(r) => processor.process_request(r).await,
|
||||
JSONRPCMessage::Response(r) => processor.process_response(r).await,
|
||||
JSONRPCMessage::Notification(n) => processor.process_notification(n),
|
||||
JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
|
||||
JSONRPCMessage::Error(e) => processor.process_error(e),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use codex_arg0::arg0_dispatch_or_else;
|
||||
use codex_mcp_server::run_main;
|
||||
|
||||
fn main() -> anyhow::Result<()> {
|
||||
codex_linux_sandbox::run_with_sandbox(|codex_linux_sandbox_exe| async move {
|
||||
arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
|
||||
run_main(codex_linux_sandbox_exe).await?;
|
||||
Ok(())
|
||||
})
|
||||
|
||||
@@ -10,6 +10,7 @@ use crate::outgoing_message::OutgoingMessageSender;
|
||||
|
||||
use codex_core::Codex;
|
||||
use codex_core::config::Config as CodexConfig;
|
||||
use codex_core::protocol::Submission;
|
||||
use mcp_types::CallToolRequestParams;
|
||||
use mcp_types::CallToolResult;
|
||||
use mcp_types::ClientRequest;
|
||||
@@ -35,6 +36,7 @@ pub(crate) struct MessageProcessor {
|
||||
initialized: bool,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
session_map: Arc<Mutex<HashMap<Uuid, Arc<Codex>>>>,
|
||||
running_requests_id_to_codex_uuid: Arc<Mutex<HashMap<RequestId, Uuid>>>,
|
||||
}
|
||||
|
||||
impl MessageProcessor {
|
||||
@@ -49,6 +51,7 @@ impl MessageProcessor {
|
||||
initialized: false,
|
||||
codex_linux_sandbox_exe,
|
||||
session_map: Arc::new(Mutex::new(HashMap::new())),
|
||||
running_requests_id_to_codex_uuid: Arc::new(Mutex::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -116,7 +119,7 @@ impl MessageProcessor {
|
||||
}
|
||||
|
||||
/// Handle a fire-and-forget JSON-RPC notification.
|
||||
pub(crate) fn process_notification(&mut self, notification: JSONRPCNotification) {
|
||||
pub(crate) async fn process_notification(&mut self, notification: JSONRPCNotification) {
|
||||
let server_notification = match ServerNotification::try_from(notification) {
|
||||
Ok(n) => n,
|
||||
Err(e) => {
|
||||
@@ -129,7 +132,7 @@ impl MessageProcessor {
|
||||
// handler so additional logic can be implemented incrementally.
|
||||
match server_notification {
|
||||
ServerNotification::CancelledNotification(params) => {
|
||||
self.handle_cancelled_notification(params);
|
||||
self.handle_cancelled_notification(params).await;
|
||||
}
|
||||
ServerNotification::ProgressNotification(params) => {
|
||||
self.handle_progress_notification(params);
|
||||
@@ -379,6 +382,7 @@ impl MessageProcessor {
|
||||
// Clone outgoing and session map to move into async task.
|
||||
let outgoing = self.outgoing.clone();
|
||||
let session_map = self.session_map.clone();
|
||||
let running_requests_id_to_codex_uuid = self.running_requests_id_to_codex_uuid.clone();
|
||||
|
||||
// Spawn an async task to handle the Codex session so that we do not
|
||||
// block the synchronous message-processing loop.
|
||||
@@ -390,6 +394,7 @@ impl MessageProcessor {
|
||||
config,
|
||||
outgoing,
|
||||
session_map,
|
||||
running_requests_id_to_codex_uuid,
|
||||
)
|
||||
.await;
|
||||
});
|
||||
@@ -464,13 +469,12 @@ impl MessageProcessor {
|
||||
|
||||
// Clone outgoing and session map to move into async task.
|
||||
let outgoing = self.outgoing.clone();
|
||||
let running_requests_id_to_codex_uuid = self.running_requests_id_to_codex_uuid.clone();
|
||||
|
||||
// Spawn an async task to handle the Codex session so that we do not
|
||||
// block the synchronous message-processing loop.
|
||||
task::spawn(async move {
|
||||
let codex = {
|
||||
let session_map = session_map_mutex.lock().await;
|
||||
let codex = match session_map.get(&session_id) {
|
||||
Some(codex) => codex,
|
||||
match session_map.get(&session_id).cloned() {
|
||||
Some(c) => c,
|
||||
None => {
|
||||
tracing::warn!("Session not found for session_id: {session_id}");
|
||||
let result = CallToolResult {
|
||||
@@ -482,21 +486,32 @@ impl MessageProcessor {
|
||||
is_error: Some(true),
|
||||
structured_content: None,
|
||||
};
|
||||
// unwrap_or_default is fine here because we know the result is valid JSON
|
||||
outgoing
|
||||
.send_response(request_id, serde_json::to_value(result).unwrap_or_default())
|
||||
.await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
crate::codex_tool_runner::run_codex_tool_session_reply(
|
||||
codex.clone(),
|
||||
outgoing,
|
||||
request_id,
|
||||
prompt.clone(),
|
||||
)
|
||||
.await;
|
||||
// Spawn the long-running reply handler.
|
||||
tokio::spawn({
|
||||
let codex = codex.clone();
|
||||
let outgoing = outgoing.clone();
|
||||
let prompt = prompt.clone();
|
||||
let running_requests_id_to_codex_uuid = running_requests_id_to_codex_uuid.clone();
|
||||
|
||||
async move {
|
||||
crate::codex_tool_runner::run_codex_tool_session_reply(
|
||||
codex,
|
||||
outgoing,
|
||||
request_id,
|
||||
prompt,
|
||||
running_requests_id_to_codex_uuid,
|
||||
session_id,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -518,11 +533,58 @@ impl MessageProcessor {
|
||||
// Notification handlers
|
||||
// ---------------------------------------------------------------------
|
||||
|
||||
fn handle_cancelled_notification(
|
||||
async fn handle_cancelled_notification(
|
||||
&self,
|
||||
params: <mcp_types::CancelledNotification as mcp_types::ModelContextProtocolNotification>::Params,
|
||||
) {
|
||||
tracing::info!("notifications/cancelled -> params: {:?}", params);
|
||||
let request_id = params.request_id;
|
||||
// Create a stable string form early for logging and submission id.
|
||||
let request_id_string = match &request_id {
|
||||
RequestId::String(s) => s.clone(),
|
||||
RequestId::Integer(i) => i.to_string(),
|
||||
};
|
||||
|
||||
// Obtain the session_id while holding the first lock, then release.
|
||||
let session_id = {
|
||||
let map_guard = self.running_requests_id_to_codex_uuid.lock().await;
|
||||
match map_guard.get(&request_id) {
|
||||
Some(id) => *id, // Uuid is Copy
|
||||
None => {
|
||||
tracing::warn!("Session not found for request_id: {}", request_id_string);
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
tracing::info!("session_id: {session_id}");
|
||||
|
||||
// Obtain the Codex Arc while holding the session_map lock, then release.
|
||||
let codex_arc = {
|
||||
let sessions_guard = self.session_map.lock().await;
|
||||
match sessions_guard.get(&session_id) {
|
||||
Some(codex) => Arc::clone(codex),
|
||||
None => {
|
||||
tracing::warn!("Session not found for session_id: {session_id}");
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Submit interrupt to Codex.
|
||||
let err = codex_arc
|
||||
.submit_with_id(Submission {
|
||||
id: request_id_string,
|
||||
op: codex_core::protocol::Op::Interrupt,
|
||||
})
|
||||
.await;
|
||||
if let Err(e) = err {
|
||||
tracing::error!("Failed to submit interrupt to Codex: {e}");
|
||||
return;
|
||||
}
|
||||
// unregister the id so we don't keep it in the map
|
||||
self.running_requests_id_to_codex_uuid
|
||||
.lock()
|
||||
.await
|
||||
.remove(&request_id);
|
||||
}
|
||||
|
||||
fn handle_progress_notification(
|
||||
|
||||
@@ -18,6 +18,7 @@ use tokio::sync::mpsc;
|
||||
use tokio::sync::oneshot;
|
||||
use tracing::warn;
|
||||
|
||||
/// Sends messages to the client and manages request callbacks.
|
||||
pub(crate) struct OutgoingMessageSender {
|
||||
next_request_id: AtomicI64,
|
||||
sender: mpsc::Sender<OutgoingMessage>,
|
||||
@@ -78,16 +79,47 @@ impl OutgoingMessageSender {
|
||||
let _ = self.sender.send(outgoing_message).await;
|
||||
}
|
||||
|
||||
pub(crate) async fn send_event_as_notification(&self, event: &Event) {
|
||||
#[expect(clippy::expect_used)]
|
||||
let params = Some(serde_json::to_value(event).expect("Event must serialize"));
|
||||
pub(crate) async fn send_event_as_notification(
|
||||
&self,
|
||||
event: &Event,
|
||||
meta: Option<OutgoingNotificationMeta>,
|
||||
) {
|
||||
#[allow(clippy::expect_used)]
|
||||
let event_json = serde_json::to_value(event).expect("Event must serialize");
|
||||
|
||||
let params = if let Ok(params) = serde_json::to_value(OutgoingNotificationParams {
|
||||
meta,
|
||||
event: event_json.clone(),
|
||||
}) {
|
||||
params
|
||||
} else {
|
||||
warn!("Failed to serialize event as OutgoingNotificationParams");
|
||||
event_json
|
||||
};
|
||||
|
||||
let outgoing_message = OutgoingMessage::Notification(OutgoingNotification {
|
||||
method: "codex/event".to_string(),
|
||||
params: Some(params.clone()),
|
||||
});
|
||||
let _ = self.sender.send(outgoing_message).await;
|
||||
|
||||
self.send_event_as_notification_new_schema(event, Some(params.clone()))
|
||||
.await;
|
||||
}
|
||||
|
||||
// should be backwards compatible.
|
||||
// it will replace send_event_as_notification eventually.
|
||||
async fn send_event_as_notification_new_schema(
|
||||
&self,
|
||||
event: &Event,
|
||||
params: Option<serde_json::Value>,
|
||||
) {
|
||||
let outgoing_message = OutgoingMessage::Notification(OutgoingNotification {
|
||||
method: event.msg.to_string(),
|
||||
params,
|
||||
});
|
||||
let _ = self.sender.send(outgoing_message).await;
|
||||
}
|
||||
|
||||
pub(crate) async fn send_error(&self, id: RequestId, error: JSONRPCErrorError) {
|
||||
let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error });
|
||||
let _ = self.sender.send(outgoing_message).await;
|
||||
@@ -152,6 +184,30 @@ pub(crate) struct OutgoingNotification {
|
||||
pub params: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize)]
|
||||
pub(crate) struct OutgoingNotificationParams {
|
||||
#[serde(rename = "_meta", default, skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<OutgoingNotificationMeta>,
|
||||
|
||||
#[serde(flatten)]
|
||||
pub event: serde_json::Value,
|
||||
}
|
||||
|
||||
// Additional mcp-specific data to be added to a [`codex_core::protocol::Event`] as notification.params._meta
|
||||
// MCP Spec: https://modelcontextprotocol.io/specification/2025-06-18/basic#meta
|
||||
// Typescript Schema: https://github.com/modelcontextprotocol/modelcontextprotocol/blob/0695a497eb50a804fc0e88c18a93a21a675d6b3e/schema/2025-06-18/schema.ts
|
||||
#[derive(Debug, Clone, PartialEq, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub(crate) struct OutgoingNotificationMeta {
|
||||
pub request_id: Option<RequestId>,
|
||||
}
|
||||
|
||||
impl OutgoingNotificationMeta {
|
||||
pub(crate) fn new(request_id: Option<RequestId>) -> Self {
|
||||
Self { request_id }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize)]
|
||||
pub(crate) struct OutgoingResponse {
|
||||
pub id: RequestId,
|
||||
@@ -163,3 +219,113 @@ pub(crate) struct OutgoingError {
|
||||
pub error: JSONRPCErrorError,
|
||||
pub id: RequestId,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#![allow(clippy::unwrap_used)]
|
||||
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::SessionConfiguredEvent;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_send_event_as_notification() {
|
||||
let (outgoing_tx, mut outgoing_rx) = mpsc::channel::<OutgoingMessage>(2);
|
||||
let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
|
||||
|
||||
let event = Event {
|
||||
id: "1".to_string(),
|
||||
msg: EventMsg::SessionConfigured(SessionConfiguredEvent {
|
||||
session_id: Uuid::new_v4(),
|
||||
model: "gpt-4o".to_string(),
|
||||
history_log_id: 1,
|
||||
history_entry_count: 1000,
|
||||
}),
|
||||
};
|
||||
|
||||
outgoing_message_sender
|
||||
.send_event_as_notification(&event, None)
|
||||
.await;
|
||||
|
||||
let result = outgoing_rx.recv().await.unwrap();
|
||||
let OutgoingMessage::Notification(OutgoingNotification { method, params }) = result else {
|
||||
panic!("expected Notification for first message");
|
||||
};
|
||||
assert_eq!(method, "codex/event");
|
||||
|
||||
let Ok(expected_params) = serde_json::to_value(&event) else {
|
||||
panic!("Event must serialize");
|
||||
};
|
||||
assert_eq!(params, Some(expected_params.clone()));
|
||||
|
||||
let result2 = outgoing_rx.recv().await.unwrap();
|
||||
let OutgoingMessage::Notification(OutgoingNotification {
|
||||
method: method2,
|
||||
params: params2,
|
||||
}) = result2
|
||||
else {
|
||||
panic!("expected Notification for second message");
|
||||
};
|
||||
assert_eq!(method2, event.msg.to_string());
|
||||
assert_eq!(params2, Some(expected_params));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_send_event_as_notification_with_meta() {
|
||||
let (outgoing_tx, mut outgoing_rx) = mpsc::channel::<OutgoingMessage>(2);
|
||||
let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
|
||||
|
||||
let session_configured_event = SessionConfiguredEvent {
|
||||
session_id: Uuid::new_v4(),
|
||||
model: "gpt-4o".to_string(),
|
||||
history_log_id: 1,
|
||||
history_entry_count: 1000,
|
||||
};
|
||||
let event = Event {
|
||||
id: "1".to_string(),
|
||||
msg: EventMsg::SessionConfigured(session_configured_event.clone()),
|
||||
};
|
||||
let meta = OutgoingNotificationMeta {
|
||||
request_id: Some(RequestId::String("123".to_string())),
|
||||
};
|
||||
|
||||
outgoing_message_sender
|
||||
.send_event_as_notification(&event, Some(meta))
|
||||
.await;
|
||||
|
||||
let result = outgoing_rx.recv().await.unwrap();
|
||||
let OutgoingMessage::Notification(OutgoingNotification { method, params }) = result else {
|
||||
panic!("expected Notification for first message");
|
||||
};
|
||||
assert_eq!(method, "codex/event");
|
||||
let expected_params = json!({
|
||||
"_meta": {
|
||||
"requestId": "123",
|
||||
},
|
||||
"id": "1",
|
||||
"msg": {
|
||||
"session_id": session_configured_event.session_id,
|
||||
"model": session_configured_event.model,
|
||||
"history_log_id": session_configured_event.history_log_id,
|
||||
"history_entry_count": session_configured_event.history_entry_count,
|
||||
"type": "session_configured",
|
||||
}
|
||||
});
|
||||
assert_eq!(params.unwrap(), expected_params);
|
||||
|
||||
let result2 = outgoing_rx.recv().await.unwrap();
|
||||
let OutgoingMessage::Notification(OutgoingNotification {
|
||||
method: method2,
|
||||
params: params2,
|
||||
}) = result2
|
||||
else {
|
||||
panic!("expected Notification for second message");
|
||||
};
|
||||
assert_eq!(method2, event.msg.to_string());
|
||||
assert_eq!(params2.unwrap(), expected_params);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -27,6 +27,7 @@ pub struct PatchApprovalElicitRequestParams {
|
||||
pub codex_elicitation: String,
|
||||
pub codex_mcp_tool_call_id: String,
|
||||
pub codex_event_id: String,
|
||||
pub codex_call_id: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub codex_reason: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
@@ -41,6 +42,7 @@ pub struct PatchApprovalResponse {
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub(crate) async fn handle_patch_approval_request(
|
||||
call_id: String,
|
||||
reason: Option<String>,
|
||||
grant_root: Option<PathBuf>,
|
||||
changes: HashMap<PathBuf, FileChange>,
|
||||
@@ -66,6 +68,7 @@ pub(crate) async fn handle_patch_approval_request(
|
||||
codex_elicitation: "patch-approval".to_string(),
|
||||
codex_mcp_tool_call_id: tool_call_id.clone(),
|
||||
codex_event_id: event_id.clone(),
|
||||
codex_call_id: call_id,
|
||||
codex_reason: reason,
|
||||
codex_grant_root: grant_root,
|
||||
codex_changes: changes,
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
mod common;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
use std::path::Path;
|
||||
@@ -26,11 +24,11 @@ use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
use wiremock::MockServer;
|
||||
|
||||
use crate::common::McpProcess;
|
||||
use crate::common::create_apply_patch_sse_response;
|
||||
use crate::common::create_final_assistant_message_sse_response;
|
||||
use crate::common::create_mock_chat_completions_server;
|
||||
use crate::common::create_shell_sse_response;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_apply_patch_sse_response;
|
||||
use mcp_test_support::create_final_assistant_message_sse_response;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_test_support::create_shell_sse_response;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
@@ -171,6 +169,7 @@ fn create_expected_elicitation_request(
|
||||
codex_event_id,
|
||||
codex_command: command,
|
||||
codex_cwd: workdir.to_path_buf(),
|
||||
codex_call_id: "call1234".to_string(),
|
||||
})?),
|
||||
})
|
||||
}
|
||||
@@ -384,6 +383,7 @@ fn create_expected_patch_approval_elicitation_request(
|
||||
codex_reason: reason,
|
||||
codex_grant_root: grant_root,
|
||||
codex_changes: changes,
|
||||
codex_call_id: "call1234".to_string(),
|
||||
})?),
|
||||
})
|
||||
}
|
||||
|
||||
codex-rs/mcp-server/tests/common/Cargo.toml (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
[package]
|
||||
name = "mcp_test_support"
|
||||
version = { workspace = true }
|
||||
edition = "2024"
|
||||
|
||||
[lib]
|
||||
path = "lib.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
assert_cmd = "2"
|
||||
codex-mcp-server = { path = "../.." }
|
||||
mcp-types = { path = "../../../mcp-types" }
|
||||
pretty_assertions = "1.4.1"
|
||||
serde_json = "1"
|
||||
shlex = "1.3.0"
|
||||
tempfile = "3"
|
||||
tokio = { version = "1", features = [
|
||||
"io-std",
|
||||
"macros",
|
||||
"process",
|
||||
"rt-multi-thread",
|
||||
] }
|
||||
wiremock = "0.6"
|
||||
@@ -12,6 +12,7 @@ use tokio::process::ChildStdout;
|
||||
use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use codex_mcp_server::CodexToolCallParam;
|
||||
use codex_mcp_server::CodexToolCallReplyParam;
|
||||
use mcp_types::CallToolRequestParams;
|
||||
use mcp_types::ClientCapabilities;
|
||||
use mcp_types::Implementation;
|
||||
@@ -154,6 +155,25 @@ impl McpProcess {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn send_codex_reply_tool_call(
|
||||
&mut self,
|
||||
session_id: &str,
|
||||
prompt: &str,
|
||||
) -> anyhow::Result<i64> {
|
||||
let codex_tool_call_params = CallToolRequestParams {
|
||||
name: "codex-reply".to_string(),
|
||||
arguments: Some(serde_json::to_value(CodexToolCallReplyParam {
|
||||
prompt: prompt.to_string(),
|
||||
session_id: session_id.to_string(),
|
||||
})?),
|
||||
};
|
||||
self.send_request(
|
||||
mcp_types::CallToolRequest::METHOD,
|
||||
Some(serde_json::to_value(codex_tool_call_params)?),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn send_request(
|
||||
&mut self,
|
||||
method: &str,
|
||||
@@ -198,7 +218,6 @@ impl McpProcess {
|
||||
let message = serde_json::from_str::<JSONRPCMessage>(&line)?;
|
||||
Ok(message)
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_request_message(&mut self) -> anyhow::Result<JSONRPCRequest> {
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
@@ -247,4 +266,78 @@ impl McpProcess {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_configured_response_message(
|
||||
&mut self,
|
||||
) -> anyhow::Result<String> {
|
||||
let mut sid_old: Option<String> = None;
|
||||
let mut sid_new: Option<String> = None;
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
eprint!("message: {message:?}");
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
if let Some(params) = notification.params {
|
||||
// Back-compat schema: method == "codex/event" and msg.type == "session_configured"
|
||||
if notification.method == "codex/event" {
|
||||
if let Some(msg) = params.get("msg") {
|
||||
if msg.get("type").and_then(|v| v.as_str())
|
||||
== Some("session_configured")
|
||||
{
|
||||
if let Some(session_id) =
|
||||
msg.get("session_id").and_then(|v| v.as_str())
|
||||
{
|
||||
sid_old = Some(session_id.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// New schema: method is the Display of EventMsg::SessionConfigured => "SessionConfigured"
|
||||
if notification.method == "session_configured" {
|
||||
if let Some(msg) = params.get("msg") {
|
||||
if let Some(session_id) =
|
||||
msg.get("session_id").and_then(|v| v.as_str())
|
||||
{
|
||||
sid_new = Some(session_id.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if sid_old.is_some() && sid_new.is_some() {
|
||||
// Both seen, they must match
|
||||
assert_eq!(
|
||||
sid_old.as_ref().unwrap(),
|
||||
sid_new.as_ref().unwrap(),
|
||||
"session_id mismatch between old and new schema"
|
||||
);
|
||||
return Ok(sid_old.unwrap());
|
||||
}
|
||||
}
|
||||
JSONRPCMessage::Request(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Error(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_notification(
|
||||
&mut self,
|
||||
method: &str,
|
||||
params: Option<serde_json::Value>,
|
||||
) -> anyhow::Result<()> {
|
||||
self.send_jsonrpc_message(JSONRPCMessage::Notification(JSONRPCNotification {
|
||||
jsonrpc: JSONRPC_VERSION.into(),
|
||||
method: method.to_string(),
|
||||
params,
|
||||
}))
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
codex-rs/mcp-server/tests/interrupt.rs (new file, 176 lines)
@@ -0,0 +1,176 @@
|
||||
#![cfg(unix)]
|
||||
// Support code lives in the `mcp_test_support` crate under tests/common.
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use codex_core::exec::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_mcp_server::CodexToolCallParam;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_test_support::create_shell_sse_response;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_shell_command_interruption() {
|
||||
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!(
|
||||
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if let Err(err) = shell_command_interruption().await {
|
||||
panic!("failure: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
async fn shell_command_interruption() -> anyhow::Result<()> {
|
||||
// Use a cross-platform blocking command. On Windows plain `sleep` is not guaranteed to exist
|
||||
// (MSYS/GNU coreutils may be absent) and the failure causes the tool call to finish immediately,
|
||||
// which triggers a second model request before the test sends the explicit follow-up. That
|
||||
// prematurely consumes the second mocked SSE response and leads to a third POST (panic: no response for 2).
|
||||
// Powershell Start-Sleep is always available on Windows runners. On Unix we keep using `sleep`.
|
||||
#[cfg(target_os = "windows")]
|
||||
let shell_command = vec![
|
||||
"powershell".to_string(),
|
||||
"-Command".to_string(),
|
||||
"Start-Sleep -Seconds 60".to_string(),
|
||||
];
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
let shell_command = vec!["sleep".to_string(), "60".to_string()];
|
||||
let workdir_for_shell_function_call = TempDir::new()?;
|
||||
|
||||
// Create mock server with a single SSE response: the long sleep command
|
||||
let server = create_mock_chat_completions_server(vec![
|
||||
create_shell_sse_response(
|
||||
shell_command.clone(),
|
||||
Some(workdir_for_shell_function_call.path()),
|
||||
Some(60_000), // 60 seconds timeout in ms
|
||||
"call_sleep",
|
||||
)?,
|
||||
create_shell_sse_response(
|
||||
shell_command.clone(),
|
||||
Some(workdir_for_shell_function_call.path()),
|
||||
Some(60_000), // 60 seconds timeout in ms
|
||||
"call_sleep",
|
||||
)?,
|
||||
])
|
||||
.await;
|
||||
|
||||
// Create Codex configuration
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), server.uri())?;
|
||||
let mut mcp_process = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp_process.initialize()).await??;
|
||||
|
||||
// Send codex tool call that triggers "sleep 60"
|
||||
let codex_request_id = mcp_process
|
||||
.send_codex_tool_call(CodexToolCallParam {
|
||||
cwd: None,
|
||||
prompt: "First Run: run `sleep 60`".to_string(),
|
||||
model: None,
|
||||
profile: None,
|
||||
approval_policy: None,
|
||||
sandbox: None,
|
||||
config: None,
|
||||
base_instructions: None,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let session_id = mcp_process
|
||||
.read_stream_until_configured_response_message()
|
||||
.await?;
|
||||
|
||||
// Give the command a moment to start
|
||||
tokio::time::sleep(std::time::Duration::from_secs(1)).await;
|
||||
|
||||
// Send interrupt notification
|
||||
mcp_process
|
||||
.send_notification(
|
||||
"notifications/cancelled",
|
||||
Some(json!({ "requestId": codex_request_id })),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Expect Codex to return an error or interruption response
|
||||
let codex_response: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp_process.read_stream_until_response_message(RequestId::Integer(codex_request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert!(
|
||||
codex_response
|
||||
.result
|
||||
.as_object()
|
||||
.map(|o| o.contains_key("error"))
|
||||
.unwrap_or(false),
|
||||
"Expected an interruption or error result, got: {codex_response:?}"
|
||||
);
|
||||
|
||||
let codex_reply_request_id = mcp_process
|
||||
.send_codex_reply_tool_call(&session_id, "Second Run: run `sleep 60`")
|
||||
.await?;
|
||||
|
||||
// Give the command a moment to start
|
||||
tokio::time::sleep(std::time::Duration::from_secs(1)).await;
|
||||
|
||||
// Send interrupt notification
|
||||
mcp_process
|
||||
.send_notification(
|
||||
"notifications/cancelled",
|
||||
Some(json!({ "requestId": codex_reply_request_id })),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Expect Codex to return an error or interruption response
|
||||
let codex_response: JSONRPCResponse = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp_process.read_stream_until_response_message(RequestId::Integer(codex_reply_request_id)),
|
||||
)
|
||||
.await??;
|
||||
|
||||
assert!(
|
||||
codex_response
|
||||
.result
|
||||
.as_object()
|
||||
.map(|o| o.contains_key("error"))
|
||||
.unwrap_or(false),
|
||||
"Expected an interruption or error result, got: {codex_response:?}"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn create_config_toml(codex_home: &Path, server_uri: String) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "danger-full-access"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -19,6 +19,7 @@ anyhow = "1"
base64 = "0.22.1"
clap = { version = "4", features = ["derive"] }
codex-ansi-escape = { path = "../ansi-escape" }
codex-arg0 = { path = "../arg0" }
codex-core = { path = "../core" }
codex-common = { path = "../common", features = [
    "cli",
@@ -26,7 +27,6 @@ codex-common = { path = "../common", features = [
    "sandbox_summary",
] }
codex-file-search = { path = "../file-search" }
codex-linux-sandbox = { path = "../linux-sandbox" }
codex-login = { path = "../login" }
color-eyre = "0.6.3"
crossterm = { version = "0.28.1", features = ["bracketed-paste"] }
@@ -35,15 +35,16 @@ lazy_static = "1"
mcp-types = { path = "../mcp-types" }
path-clean = "1.0.1"
ratatui = { version = "0.29.0", features = [
    "unstable-widget-ref",
    "scrolling-regions",
    "unstable-rendered-line-info",
    "unstable-widget-ref",
] }
ratatui-image = "8.0.0"
regex-lite = "0.1"
serde_json = { version = "1", features = ["preserve_order"] }
shlex = "1.3.0"
strum = "0.27.1"
strum_macros = "0.27.1"
strum = "0.27.2"
strum_macros = "0.27.2"
tokio = { version = "1", features = [
    "io-std",
    "macros",
@@ -58,7 +59,10 @@ tui-input = "0.14.0"
tui-markdown = "0.3.3"
tui-textarea = "0.7.0"
unicode-segmentation = "1.12.0"
unicode-width = "0.1"
uuid = "1"
reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1", features = ["derive"] }

[dev-dependencies]
insta = "1.43.1"

@@ -6,7 +6,6 @@ use crate::get_git_diff::get_git_diff;
|
||||
use crate::git_warning_screen::GitWarningOutcome;
|
||||
use crate::git_warning_screen::GitWarningScreen;
|
||||
use crate::login_screen::LoginScreen;
|
||||
use crate::mouse_capture::MouseCapture;
|
||||
use crate::scroll_event_helper::ScrollEventHelper;
|
||||
use crate::slash_command::SlashCommand;
|
||||
use crate::tui;
|
||||
@@ -89,32 +88,51 @@ impl App<'_> {
|
||||
{
|
||||
let app_event_tx = app_event_tx.clone();
|
||||
std::thread::spawn(move || {
|
||||
while let Ok(event) = crossterm::event::read() {
|
||||
match event {
|
||||
crossterm::event::Event::Key(key_event) => {
|
||||
app_event_tx.send(AppEvent::KeyEvent(key_event));
|
||||
}
|
||||
crossterm::event::Event::Resize(_, _) => {
|
||||
app_event_tx.send(AppEvent::RequestRedraw);
|
||||
}
|
||||
crossterm::event::Event::Mouse(MouseEvent {
|
||||
kind: MouseEventKind::ScrollUp,
|
||||
..
|
||||
}) => {
|
||||
scroll_event_helper.scroll_up();
|
||||
}
|
||||
crossterm::event::Event::Mouse(MouseEvent {
|
||||
kind: MouseEventKind::ScrollDown,
|
||||
..
|
||||
}) => {
|
||||
scroll_event_helper.scroll_down();
|
||||
}
|
||||
crossterm::event::Event::Paste(pasted) => {
|
||||
app_event_tx.send(AppEvent::Paste(pasted));
|
||||
}
|
||||
_ => {
|
||||
// Ignore any other events.
|
||||
loop {
|
||||
// This timeout is necessary to avoid holding the event lock
|
||||
// that crossterm::event::read() acquires. In particular,
|
||||
// reading the cursor position (crossterm::cursor::position())
|
||||
// needs to acquire the event lock, and so will fail if it
|
||||
// can't acquire it within 2 sec. Resizing the terminal
|
||||
// crashes the app if the cursor position can't be read.
|
||||
if let Ok(true) = crossterm::event::poll(Duration::from_millis(100)) {
|
||||
if let Ok(event) = crossterm::event::read() {
|
||||
match event {
|
||||
crossterm::event::Event::Key(key_event) => {
|
||||
app_event_tx.send(AppEvent::KeyEvent(key_event));
|
||||
}
|
||||
crossterm::event::Event::Resize(_, _) => {
|
||||
app_event_tx.send(AppEvent::RequestRedraw);
|
||||
}
|
||||
crossterm::event::Event::Mouse(MouseEvent {
|
||||
kind: MouseEventKind::ScrollUp,
|
||||
..
|
||||
}) => {
|
||||
scroll_event_helper.scroll_up();
|
||||
}
|
||||
crossterm::event::Event::Mouse(MouseEvent {
|
||||
kind: MouseEventKind::ScrollDown,
|
||||
..
|
||||
}) => {
|
||||
scroll_event_helper.scroll_down();
|
||||
}
|
||||
crossterm::event::Event::Paste(pasted) => {
|
||||
// Many terminals convert newlines to \r when
|
||||
// pasting, e.g. [iTerm2][]. But [tui-textarea
|
||||
// expects \n][tui-textarea]. This seems like a bug
|
||||
// in tui-textarea IMO, but work around it for now.
|
||||
// [tui-textarea]: https://github.com/rhysd/tui-textarea/blob/4d18622eeac13b309e0ff6a55a46ac6706da68cf/src/textarea.rs#L782-L783
|
||||
// [iTerm2]: https://github.com/gnachman/iTerm2/blob/5d0c0d9f68523cbd0494dad5422998964a2ecd8d/sources/iTermPasteHelper.m#L206-L216
|
||||
let pasted = pasted.replace("\r", "\n");
|
||||
app_event_tx.send(AppEvent::Paste(pasted));
|
||||
}
|
||||
_ => {
|
||||
// Ignore any other events.
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Timeout expired, no `Event` is available
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -197,17 +215,17 @@ impl App<'_> {
|
||||
});
|
||||
}
|
||||
|
||||
pub(crate) fn run(
|
||||
&mut self,
|
||||
terminal: &mut tui::Tui,
|
||||
mouse_capture: &mut MouseCapture,
|
||||
) -> Result<()> {
|
||||
pub(crate) fn run(&mut self, terminal: &mut tui::Tui) -> Result<()> {
|
||||
// Insert an event to trigger the first render.
|
||||
let app_event_tx = self.app_event_tx.clone();
|
||||
app_event_tx.send(AppEvent::RequestRedraw);
|
||||
|
||||
while let Ok(event) = self.app_event_rx.recv() {
|
||||
match event {
|
||||
AppEvent::InsertHistory(lines) => {
|
||||
crate::insert_history::insert_history_lines(terminal, lines);
|
||||
self.app_event_tx.send(AppEvent::RequestRedraw);
|
||||
}
|
||||
AppEvent::RequestRedraw => {
|
||||
self.schedule_redraw();
|
||||
}
|
||||
@@ -223,9 +241,7 @@ impl App<'_> {
|
||||
} => {
|
||||
match &mut self.app_state {
|
||||
AppState::Chat { widget } => {
|
||||
if widget.on_ctrl_c() {
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
}
|
||||
widget.on_ctrl_c();
|
||||
}
|
||||
AppState::Login { .. } | AppState::GitWarning { .. } => {
|
||||
// No-op.
|
||||
@@ -289,11 +305,6 @@ impl App<'_> {
|
||||
self.app_state = AppState::Chat { widget: new_widget };
|
||||
self.app_event_tx.send(AppEvent::RequestRedraw);
|
||||
}
|
||||
SlashCommand::ToggleMouseMode => {
|
||||
if let Err(e) = mouse_capture.toggle() {
|
||||
tracing::error!("Failed to toggle mouse mode: {e}");
|
||||
}
|
||||
}
|
||||
SlashCommand::Quit => {
|
||||
break;
|
||||
}
|
||||
@@ -318,6 +329,11 @@ impl App<'_> {
|
||||
widget.add_diff_output(text);
|
||||
}
|
||||
}
|
||||
SlashCommand::Compact => {
|
||||
if let AppState::Chat { widget } = &mut self.app_state {
|
||||
widget.start_compact();
|
||||
}
|
||||
}
|
||||
},
|
||||
AppEvent::StartFileSearch(query) => {
|
||||
self.file_search.on_user_query(query);
|
||||
@@ -327,6 +343,11 @@ impl App<'_> {
|
||||
widget.apply_file_search_result(query, matches);
|
||||
}
|
||||
}
|
||||
AppEvent::CompactComplete(result) => {
|
||||
if let AppState::Chat { widget } = &mut self.app_state {
|
||||
widget.apply_compact_summary(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
terminal.clear()?;
|
||||
@@ -334,6 +355,15 @@ impl App<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn token_usage(&self) -> codex_core::protocol::TokenUsage {
|
||||
match &self.app_state {
|
||||
AppState::Chat { widget } => widget.token_usage().clone(),
|
||||
AppState::Login { .. } | AppState::GitWarning { .. } => {
|
||||
codex_core::protocol::TokenUsage::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn draw_next_frame(&mut self, terminal: &mut tui::Tui) -> Result<()> {
|
||||
// TODO: add a throttle to avoid redrawing too often
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use codex_core::protocol::Event;
|
||||
use codex_file_search::FileMatch;
|
||||
use crossterm::event::KeyEvent;
|
||||
use ratatui::text::Line;
|
||||
|
||||
use crate::slash_command::SlashCommand;
|
||||
|
||||
@@ -49,4 +50,11 @@ pub(crate) enum AppEvent {
|
||||
query: String,
|
||||
matches: Vec<FileMatch>,
|
||||
},
|
||||
|
||||
/// Result of the asynchronous `/compact` summarization.
|
||||
CompactComplete(Result<String, String>),
|
||||
|
||||
/// Insert the most recently appended history entry directly into the
|
||||
/// terminal scrollback. Carries already formatted lines.
|
||||
InsertHistory(Vec<Line<'static>>),
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ use crate::user_approval_widget::UserApprovalWidget;
|
||||
|
||||
use super::BottomPane;
|
||||
use super::BottomPaneView;
|
||||
use super::CancellationEvent;
|
||||
|
||||
/// Modal overlay asking the user to approve/deny a sequence of requests.
|
||||
pub(crate) struct ApprovalModalView<'a> {
|
||||
@@ -46,12 +47,14 @@ impl<'a> BottomPaneView<'a> for ApprovalModalView<'a> {
|
||||
self.maybe_advance();
|
||||
}
|
||||
|
||||
fn is_complete(&self) -> bool {
|
||||
self.current.is_complete() && self.queue.is_empty()
|
||||
fn on_ctrl_c(&mut self, _pane: &mut BottomPane<'a>) -> CancellationEvent {
|
||||
self.current.on_ctrl_c();
|
||||
self.queue.clear();
|
||||
CancellationEvent::Handled
|
||||
}
|
||||
|
||||
fn calculate_required_height(&self, area: &Rect) -> u16 {
|
||||
self.current.get_height(area)
|
||||
fn is_complete(&self) -> bool {
|
||||
self.current.is_complete() && self.queue.is_empty()
|
||||
}
|
||||
|
||||
fn render(&self, area: Rect, buf: &mut Buffer) {
|
||||
@@ -63,3 +66,39 @@ impl<'a> BottomPaneView<'a> for ApprovalModalView<'a> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::app_event::AppEvent;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::mpsc::channel;
|
||||
|
||||
fn make_exec_request() -> ApprovalRequest {
|
||||
ApprovalRequest::Exec {
|
||||
id: "test".to_string(),
|
||||
command: vec!["echo".to_string(), "hi".to_string()],
|
||||
cwd: PathBuf::from("/tmp"),
|
||||
reason: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ctrl_c_aborts_and_clears_queue() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let first = make_exec_request();
|
||||
let mut view = ApprovalModalView::new(first, tx);
|
||||
view.enqueue_request(make_exec_request());
|
||||
|
||||
let (tx_raw2, _rx2) = channel::<AppEvent>();
|
||||
let mut pane = BottomPane::new(super::super::BottomPaneParams {
|
||||
app_event_tx: AppEventSender::new(tx_raw2),
|
||||
has_input_focus: true,
|
||||
});
|
||||
assert_eq!(CancellationEvent::Handled, view.on_ctrl_c(&mut pane));
|
||||
assert!(view.queue.is_empty());
|
||||
assert!(view.current.is_complete());
|
||||
assert!(view.is_complete());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ use ratatui::buffer::Buffer;
|
||||
use ratatui::layout::Rect;
|
||||
|
||||
use super::BottomPane;
|
||||
use super::CancellationEvent;
|
||||
|
||||
/// Type to use for a method that may require a redraw of the UI.
|
||||
pub(crate) enum ConditionalUpdate {
|
||||
@@ -22,8 +23,10 @@ pub(crate) trait BottomPaneView<'a> {
|
||||
false
|
||||
}
|
||||
|
||||
/// Height required to render the view.
|
||||
fn calculate_required_height(&self, area: &Rect) -> u16;
|
||||
/// Handle Ctrl-C while this view is active.
|
||||
fn on_ctrl_c(&mut self, _pane: &mut BottomPane<'a>) -> CancellationEvent {
|
||||
CancellationEvent::Ignored
|
||||
}
|
||||
|
||||
/// Render the view: this will be displayed in place of the composer.
|
||||
fn render(&self, area: Rect, buf: &mut Buffer);
|
||||
|
||||
@@ -22,11 +22,6 @@ use crate::app_event::AppEvent;
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
use codex_file_search::FileMatch;
|
||||
|
||||
/// Minimum number of visible text rows inside the textarea.
|
||||
const MIN_TEXTAREA_ROWS: usize = 1;
|
||||
/// Rows consumed by the border.
|
||||
const BORDER_LINES: u16 = 2;
|
||||
|
||||
const BASE_PLACEHOLDER_TEXT: &str = "send a message";
|
||||
/// If the pasted content exceeds this number of characters, replace it with a
|
||||
/// placeholder in the UI.
|
||||
@@ -132,10 +127,6 @@ impl ChatComposer<'_> {
|
||||
.on_entry_response(log_id, offset, entry, &mut self.textarea)
|
||||
}
|
||||
|
||||
pub fn set_input_focus(&mut self, has_focus: bool) {
|
||||
self.update_border(has_focus);
|
||||
}
|
||||
|
||||
pub fn handle_paste(&mut self, pasted: String) -> bool {
|
||||
let char_count = pasted.chars().count();
|
||||
if char_count > LARGE_PASTE_CHAR_THRESHOLD {
|
||||
@@ -609,17 +600,6 @@ impl ChatComposer<'_> {
|
||||
self.dismissed_file_popup_token = None;
|
||||
}
|
||||
|
||||
pub fn calculate_required_height(&self, area: &Rect) -> u16 {
|
||||
let rows = self.textarea.lines().len().max(MIN_TEXTAREA_ROWS);
|
||||
let num_popup_rows = match &self.active_popup {
|
||||
ActivePopup::Command(popup) => popup.calculate_required_height(area),
|
||||
ActivePopup::File(popup) => popup.calculate_required_height(area),
|
||||
ActivePopup::None => 0,
|
||||
};
|
||||
|
||||
rows as u16 + BORDER_LINES + num_popup_rows
|
||||
}
|
||||
|
||||
fn update_border(&mut self, has_focus: bool) {
|
||||
struct BlockState {
|
||||
right_title: Line<'static>,
|
||||
@@ -654,13 +634,6 @@ impl ChatComposer<'_> {
|
||||
.border_style(bs.border_style),
|
||||
);
|
||||
}
|
||||
|
||||
pub(crate) fn is_popup_visible(&self) -> bool {
|
||||
match self.active_popup {
|
||||
ActivePopup::Command(_) | ActivePopup::File(_) => true,
|
||||
ActivePopup::None => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl WidgetRef for &ChatComposer<'_> {
|
||||
|
||||
@@ -20,6 +20,12 @@ mod command_popup;
|
||||
mod file_search_popup;
|
||||
mod status_indicator_view;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub(crate) enum CancellationEvent {
|
||||
Ignored,
|
||||
Handled,
|
||||
}
|
||||
|
||||
pub(crate) use chat_composer::ChatComposer;
|
||||
pub(crate) use chat_composer::InputResult;
|
||||
|
||||
@@ -65,10 +71,8 @@ impl BottomPane<'_> {
|
||||
if !view.is_complete() {
|
||||
self.active_view = Some(view);
|
||||
} else if self.is_task_running {
|
||||
let height = self.composer.calculate_required_height(&Rect::default());
|
||||
self.active_view = Some(Box::new(StatusIndicatorView::new(
|
||||
self.app_event_tx.clone(),
|
||||
height,
|
||||
)));
|
||||
}
|
||||
self.request_redraw();
|
||||
@@ -82,6 +86,33 @@ impl BottomPane<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Handle Ctrl-C in the bottom pane. If a modal view is active it gets a
|
||||
/// chance to consume the event (e.g. to dismiss itself).
|
||||
pub(crate) fn on_ctrl_c(&mut self) -> CancellationEvent {
|
||||
let mut view = match self.active_view.take() {
|
||||
Some(view) => view,
|
||||
None => return CancellationEvent::Ignored,
|
||||
};
|
||||
|
||||
let event = view.on_ctrl_c(self);
|
||||
match event {
|
||||
CancellationEvent::Handled => {
|
||||
if !view.is_complete() {
|
||||
self.active_view = Some(view);
|
||||
} else if self.is_task_running {
|
||||
self.active_view = Some(Box::new(StatusIndicatorView::new(
|
||||
self.app_event_tx.clone(),
|
||||
)));
|
||||
}
|
||||
self.show_ctrl_c_quit_hint();
|
||||
}
|
||||
CancellationEvent::Ignored => {
|
||||
self.active_view = Some(view);
|
||||
}
|
||||
}
|
||||
event
|
||||
}
|
||||
|
||||
pub fn handle_paste(&mut self, pasted: String) {
|
||||
if self.active_view.is_none() {
|
||||
let needs_redraw = self.composer.handle_paste(pasted);
|
||||
@@ -106,12 +137,6 @@ impl BottomPane<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Update the UI to reflect whether this `BottomPane` has input focus.
|
||||
pub(crate) fn set_input_focus(&mut self, has_focus: bool) {
|
||||
self.has_input_focus = has_focus;
|
||||
self.composer.set_input_focus(has_focus);
|
||||
}
|
||||
|
||||
pub(crate) fn show_ctrl_c_quit_hint(&mut self) {
|
||||
self.ctrl_c_quit_hint = true;
|
||||
self.composer
|
||||
@@ -138,10 +163,8 @@ impl BottomPane<'_> {
|
||||
match (running, self.active_view.is_some()) {
|
||||
(true, false) => {
|
||||
// Show status indicator overlay.
|
||||
let height = self.composer.calculate_required_height(&Rect::default());
|
||||
self.active_view = Some(Box::new(StatusIndicatorView::new(
|
||||
self.app_event_tx.clone(),
|
||||
height,
|
||||
)));
|
||||
self.request_redraw();
|
||||
}
|
||||
@@ -203,23 +226,10 @@ impl BottomPane<'_> {
|
||||
}
|
||||
|
||||
/// Height (terminal rows) required by the current bottom pane.
|
||||
pub fn calculate_required_height(&self, area: &Rect) -> u16 {
|
||||
if let Some(view) = &self.active_view {
|
||||
view.calculate_required_height(area)
|
||||
} else {
|
||||
self.composer.calculate_required_height(area)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn request_redraw(&self) {
|
||||
self.app_event_tx.send(AppEvent::RequestRedraw)
|
||||
}
|
||||
|
||||
/// Returns true when a popup inside the composer is visible.
|
||||
pub(crate) fn is_popup_visible(&self) -> bool {
|
||||
self.active_view.is_none() && self.composer.is_popup_visible()
|
||||
}
|
||||
|
||||
// --- History helpers ---
|
||||
|
||||
pub(crate) fn set_history_metadata(&mut self, log_id: u64, entry_count: usize) {
|
||||
@@ -257,3 +267,34 @@ impl WidgetRef for &BottomPane<'_> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::app_event::AppEvent;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::mpsc::channel;
|
||||
|
||||
fn exec_request() -> ApprovalRequest {
|
||||
ApprovalRequest::Exec {
|
||||
id: "1".to_string(),
|
||||
command: vec!["echo".into(), "ok".into()],
|
||||
cwd: PathBuf::from("."),
|
||||
reason: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ctrl_c_on_modal_consumes_and_shows_quit_hint() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut pane = BottomPane::new(BottomPaneParams {
|
||||
app_event_tx: tx,
|
||||
has_input_focus: true,
|
||||
});
|
||||
pane.push_approval_request(exec_request());
|
||||
assert_eq!(CancellationEvent::Handled, pane.on_ctrl_c());
|
||||
assert!(pane.ctrl_c_quit_hint_visible());
|
||||
assert_eq!(CancellationEvent::Ignored, pane.on_ctrl_c());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use ratatui::buffer::Buffer;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::widgets::WidgetRef;
|
||||
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
@@ -13,9 +12,9 @@ pub(crate) struct StatusIndicatorView {
|
||||
}
|
||||
|
||||
impl StatusIndicatorView {
|
||||
pub fn new(app_event_tx: AppEventSender, height: u16) -> Self {
|
||||
pub fn new(app_event_tx: AppEventSender) -> Self {
|
||||
Self {
|
||||
view: StatusIndicatorWidget::new(app_event_tx, height),
|
||||
view: StatusIndicatorWidget::new(app_event_tx),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,11 +33,7 @@ impl BottomPaneView<'_> for StatusIndicatorView {
|
||||
true
|
||||
}
|
||||
|
||||
fn calculate_required_height(&self, _area: &Rect) -> u16 {
|
||||
self.view.get_height()
|
||||
}
|
||||
|
||||
fn render(&self, area: Rect, buf: &mut Buffer) {
|
||||
fn render(&self, area: ratatui::layout::Rect, buf: &mut Buffer) {
|
||||
self.view.render_ref(area, buf);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use codex_core::codex_wrapper::CodexConversation;
|
||||
use codex_core::codex_wrapper::init_codex;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::protocol::AgentMessageDeltaEvent;
|
||||
@@ -23,9 +24,6 @@ use codex_core::protocol::TaskCompleteEvent;
|
||||
use codex_core::protocol::TokenUsage;
|
||||
use crossterm::event::KeyEvent;
|
||||
use ratatui::buffer::Buffer;
|
||||
use ratatui::layout::Constraint;
|
||||
use ratatui::layout::Direction;
|
||||
use ratatui::layout::Layout;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::widgets::Widget;
|
||||
use ratatui::widgets::WidgetRef;
|
||||
@@ -36,8 +34,13 @@ use crate::app_event::AppEvent;
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
use crate::bottom_pane::BottomPane;
|
||||
use crate::bottom_pane::BottomPaneParams;
|
||||
use crate::bottom_pane::CancellationEvent;
|
||||
use crate::bottom_pane::InputResult;
|
||||
use crate::compact::Role;
|
||||
use crate::compact::TranscriptEntry;
|
||||
use crate::compact::generate_compact_summary;
|
||||
use crate::conversation_history_widget::ConversationHistoryWidget;
|
||||
use crate::exec_command::strip_bash_lc_and_escape;
|
||||
use crate::history_cell::PatchEventType;
|
||||
use crate::user_approval_widget::ApprovalRequest;
|
||||
use codex_file_search::FileMatch;
|
||||
@@ -47,18 +50,15 @@ pub(crate) struct ChatWidget<'a> {
|
||||
codex_op_tx: UnboundedSender<Op>,
|
||||
conversation_history: ConversationHistoryWidget,
|
||||
bottom_pane: BottomPane<'a>,
|
||||
input_focus: InputFocus,
|
||||
config: Config,
|
||||
initial_user_message: Option<UserMessage>,
|
||||
token_usage: TokenUsage,
|
||||
// Buffer for streaming assistant reasoning text; emitted on final event.
|
||||
reasoning_buffer: String,
|
||||
// Buffer for streaming assistant answer text; emitted on final event.
|
||||
answer_buffer: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Eq, PartialEq)]
|
||||
enum InputFocus {
|
||||
HistoryPane,
|
||||
BottomPane,
|
||||
// Transcript of chat for `/compact` summarization.
|
||||
transcript: Vec<TranscriptEntry>,
|
||||
}
|
||||
|
||||
struct UserMessage {
|
||||
@@ -96,19 +96,22 @@ impl ChatWidget<'_> {
|
||||
// Create the Codex asynchronously so the UI loads as quickly as possible.
|
||||
let config_for_agent_loop = config.clone();
|
||||
tokio::spawn(async move {
|
||||
let (codex, session_event, _ctrl_c, _session_id) =
|
||||
match init_codex(config_for_agent_loop).await {
|
||||
Ok(vals) => vals,
|
||||
Err(e) => {
|
||||
// TODO: surface this error to the user.
|
||||
tracing::error!("failed to initialize codex: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
let CodexConversation {
|
||||
codex,
|
||||
session_configured,
|
||||
..
|
||||
} = match init_codex(config_for_agent_loop).await {
|
||||
Ok(vals) => vals,
|
||||
Err(e) => {
|
||||
// TODO: surface this error to the user.
|
||||
tracing::error!("failed to initialize codex: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
// Forward the captured `SessionInitialized` event that was consumed
|
||||
// inside `init_codex()` so it can be rendered in the UI.
|
||||
app_event_tx_clone.send(AppEvent::CodexEvent(session_event.clone()));
|
||||
app_event_tx_clone.send(AppEvent::CodexEvent(session_configured.clone()));
|
||||
let codex = Arc::new(codex);
|
||||
let codex_clone = codex.clone();
|
||||
tokio::spawn(async move {
|
||||
@@ -133,7 +136,6 @@ impl ChatWidget<'_> {
|
||||
app_event_tx,
|
||||
has_input_focus: true,
|
||||
}),
|
||||
input_focus: InputFocus::BottomPane,
|
||||
config,
|
||||
initial_user_message: create_initial_user_message(
|
||||
initial_prompt.unwrap_or_default(),
|
||||
@@ -142,48 +144,29 @@ impl ChatWidget<'_> {
|
||||
token_usage: TokenUsage::default(),
|
||||
reasoning_buffer: String::new(),
|
||||
answer_buffer: String::new(),
|
||||
transcript: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn handle_key_event(&mut self, key_event: KeyEvent) {
|
||||
self.bottom_pane.clear_ctrl_c_quit_hint();
|
||||
// Special-case <Tab>: normally toggles focus between history and bottom panes.
|
||||
// However, when the slash-command popup is visible we forward the key
|
||||
// to the bottom pane so it can handle auto-completion.
|
||||
if matches!(key_event.code, crossterm::event::KeyCode::Tab)
|
||||
&& !self.bottom_pane.is_popup_visible()
|
||||
{
|
||||
self.input_focus = match self.input_focus {
|
||||
InputFocus::HistoryPane => InputFocus::BottomPane,
|
||||
InputFocus::BottomPane => InputFocus::HistoryPane,
|
||||
};
|
||||
self.conversation_history
|
||||
.set_input_focus(self.input_focus == InputFocus::HistoryPane);
|
||||
self.bottom_pane
|
||||
.set_input_focus(self.input_focus == InputFocus::BottomPane);
|
||||
self.request_redraw();
|
||||
return;
|
||||
}
|
||||
|
||||
match self.input_focus {
|
||||
InputFocus::HistoryPane => {
|
||||
let needs_redraw = self.conversation_history.handle_key_event(key_event);
|
||||
if needs_redraw {
|
||||
self.request_redraw();
|
||||
}
|
||||
match self.bottom_pane.handle_key_event(key_event) {
|
||||
InputResult::Submitted(text) => {
|
||||
self.submit_user_message(text.into());
|
||||
}
|
||||
InputFocus::BottomPane => match self.bottom_pane.handle_key_event(key_event) {
|
||||
InputResult::Submitted(text) => {
|
||||
self.submit_user_message(text.into());
|
||||
}
|
||||
InputResult::None => {}
|
||||
},
|
||||
InputResult::None => {}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn handle_paste(&mut self, text: String) {
|
||||
if matches!(self.input_focus, InputFocus::BottomPane) {
|
||||
self.bottom_pane.handle_paste(text);
|
||||
self.bottom_pane.handle_paste(text);
|
||||
}
|
||||
|
||||
/// Emits the last entry's plain lines from conversation_history, if any.
|
||||
fn emit_last_history_entry(&mut self) {
|
||||
if let Some(lines) = self.conversation_history.last_entry_plain_lines() {
|
||||
self.app_event_tx.send(AppEvent::InsertHistory(lines));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -220,7 +203,14 @@ impl ChatWidget<'_> {
|
||||
|
||||
// Only show text portion in conversation history for now.
|
||||
if !text.is_empty() {
|
||||
self.conversation_history.add_user_message(text);
|
||||
// Forward a copy for history and emit into scrollback.
|
||||
self.conversation_history.add_user_message(text.clone());
|
||||
self.emit_last_history_entry();
|
||||
// Record in transcript for `/compact`.
|
||||
self.transcript.push(TranscriptEntry {
|
||||
role: Role::User,
|
||||
text,
|
||||
});
|
||||
}
|
||||
self.conversation_history.scroll_to_bottom();
|
||||
}
|
||||
@@ -232,6 +222,10 @@ impl ChatWidget<'_> {
|
||||
// Record session information at the top of the conversation.
|
||||
self.conversation_history
|
||||
.add_session_info(&self.config, event.clone());
|
||||
// Immediately surface the session banner / settings summary in
|
||||
// scrollback so the user can review configuration (model,
|
||||
// sandbox, approvals, etc.) before interacting.
|
||||
self.emit_last_history_entry();
|
||||
|
||||
// Forward history metadata to the bottom pane so the chat
|
||||
// composer can navigate through past messages.
|
||||
@@ -247,50 +241,52 @@ impl ChatWidget<'_> {
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentMessage(AgentMessageEvent { message }) => {
|
||||
// if the answer buffer is empty, this means we haven't received any
|
||||
// delta. Thus, we need to print the message as a new answer.
|
||||
if self.answer_buffer.is_empty() {
|
||||
self.conversation_history
|
||||
.add_agent_message(&self.config, message);
|
||||
// Final assistant answer. Prefer the fully provided message.
|
||||
let full = if message.is_empty() {
|
||||
std::mem::take(&mut self.answer_buffer)
|
||||
} else {
|
||||
self.answer_buffer.clear();
|
||||
message
|
||||
};
|
||||
if !full.is_empty() {
|
||||
self.conversation_history
|
||||
.replace_prev_agent_message(&self.config, message);
|
||||
.add_agent_message(&self.config, full.clone());
|
||||
self.emit_last_history_entry();
|
||||
// Record final answer in transcript for `/compact`.
|
||||
self.transcript.push(TranscriptEntry {
|
||||
role: Role::Assistant,
|
||||
text: full,
|
||||
});
|
||||
}
|
||||
self.answer_buffer.clear();
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentMessageDelta(AgentMessageDeltaEvent { delta }) => {
|
||||
if self.answer_buffer.is_empty() {
|
||||
self.conversation_history
|
||||
.add_agent_message(&self.config, "".to_string());
|
||||
}
|
||||
self.answer_buffer.push_str(&delta.clone());
|
||||
self.conversation_history
|
||||
.replace_prev_agent_message(&self.config, self.answer_buffer.clone());
|
||||
self.request_redraw();
|
||||
// Buffer only – do not emit partial lines. This avoids cases
|
||||
// where long responses appear truncated if the terminal
|
||||
// wrapped early. The full message is emitted on
|
||||
// AgentMessage.
|
||||
self.answer_buffer.push_str(&delta);
|
||||
}
|
||||
EventMsg::AgentReasoningDelta(AgentReasoningDeltaEvent { delta }) => {
|
||||
if self.reasoning_buffer.is_empty() {
|
||||
self.conversation_history
|
||||
.add_agent_reasoning(&self.config, "".to_string());
|
||||
}
|
||||
self.reasoning_buffer.push_str(&delta.clone());
|
||||
self.conversation_history
|
||||
.replace_prev_agent_reasoning(&self.config, self.reasoning_buffer.clone());
|
||||
self.request_redraw();
|
||||
// Buffer only – disable incremental reasoning streaming so we
|
||||
// avoid truncated intermediate lines. Full text emitted on
|
||||
// AgentReasoning.
|
||||
self.reasoning_buffer.push_str(&delta);
|
||||
}
|
||||
EventMsg::AgentReasoning(AgentReasoningEvent { text }) => {
|
||||
// if the reasoning buffer is empty, this means we haven't received any
|
||||
// delta. Thus, we need to print the message as a new reasoning.
|
||||
if self.reasoning_buffer.is_empty() {
|
||||
self.conversation_history
|
||||
.add_agent_reasoning(&self.config, "".to_string());
|
||||
// Emit full reasoning text once. Some providers might send
|
||||
// final event with empty text if only deltas were used.
|
||||
let full = if text.is_empty() {
|
||||
std::mem::take(&mut self.reasoning_buffer)
|
||||
} else {
|
||||
// else, we rerender one last time.
|
||||
self.reasoning_buffer.clear();
|
||||
text
|
||||
};
|
||||
if !full.is_empty() {
|
||||
self.conversation_history
|
||||
.replace_prev_agent_reasoning(&self.config, text);
|
||||
.add_agent_reasoning(&self.config, full);
|
||||
self.emit_last_history_entry();
|
||||
}
|
||||
self.reasoning_buffer.clear();
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::TaskStarted => {
|
||||
@@ -310,14 +306,30 @@ impl ChatWidget<'_> {
|
||||
.set_token_usage(self.token_usage.clone(), self.config.model_context_window);
|
||||
}
|
||||
EventMsg::Error(ErrorEvent { message }) => {
|
||||
self.conversation_history.add_error(message);
|
||||
self.conversation_history.add_error(message.clone());
|
||||
self.emit_last_history_entry();
|
||||
self.bottom_pane.set_task_running(false);
|
||||
}
|
||||
EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent {
|
||||
call_id: _,
|
||||
command,
|
||||
cwd,
|
||||
reason,
|
||||
}) => {
|
||||
// Print the command to the history so it is visible in the
|
||||
// transcript *before* the modal asks for approval.
|
||||
let cmdline = strip_bash_lc_and_escape(&command);
|
||||
let text = format!(
|
||||
"command requires approval:\n$ {cmdline}{reason}",
|
||||
reason = reason
|
||||
.as_ref()
|
||||
.map(|r| format!("\n{r}"))
|
||||
.unwrap_or_default()
|
||||
);
|
||||
self.conversation_history.add_background_event(text);
|
||||
self.emit_last_history_entry();
|
||||
self.conversation_history.scroll_to_bottom();
|
||||
|
||||
let request = ApprovalRequest::Exec {
|
||||
id,
|
||||
command,
|
||||
@@ -325,8 +337,10 @@ impl ChatWidget<'_> {
|
||||
reason,
|
||||
};
|
||||
self.bottom_pane.push_approval_request(request);
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
|
||||
call_id: _,
|
||||
changes,
|
||||
reason,
|
||||
grant_root,
|
||||
@@ -344,6 +358,7 @@ impl ChatWidget<'_> {
|
||||
|
||||
self.conversation_history
|
||||
.add_patch_event(PatchEventType::ApprovalRequest, changes);
|
||||
self.emit_last_history_entry();
|
||||
|
||||
self.conversation_history.scroll_to_bottom();
|
||||
|
||||
@@ -363,6 +378,7 @@ impl ChatWidget<'_> {
|
||||
}) => {
|
||||
self.conversation_history
|
||||
.add_active_exec_command(call_id, command);
|
||||
self.emit_last_history_entry();
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
|
||||
@@ -374,6 +390,7 @@ impl ChatWidget<'_> {
|
||||
// summary so the user can follow along.
|
||||
self.conversation_history
|
||||
.add_patch_event(PatchEventType::ApplyBegin { auto_approved }, changes);
|
||||
self.emit_last_history_entry();
|
||||
if !auto_approved {
|
||||
self.conversation_history.scroll_to_bottom();
|
||||
}
|
||||
@@ -397,6 +414,7 @@ impl ChatWidget<'_> {
|
||||
}) => {
|
||||
self.conversation_history
|
||||
.add_active_mcp_tool_call(call_id, server, tool, arguments);
|
||||
self.emit_last_history_entry();
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::McpToolCallEnd(mcp_tool_call_end_event) => {
|
||||
@@ -417,9 +435,13 @@ impl ChatWidget<'_> {
|
||||
self.bottom_pane
|
||||
.on_history_entry_response(log_id, offset, entry.map(|e| e.text));
|
||||
}
|
||||
EventMsg::ShutdownComplete => {
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
}
|
||||
event => {
|
||||
self.conversation_history
|
||||
.add_background_event(format!("{event:?}"));
|
||||
self.emit_last_history_entry();
|
||||
self.request_redraw();
|
||||
}
|
||||
}
|
||||
@@ -436,7 +458,9 @@ impl ChatWidget<'_> {
|
||||
}
|
||||
|
||||
pub(crate) fn add_diff_output(&mut self, diff_output: String) {
|
||||
self.conversation_history.add_diff_output(diff_output);
|
||||
self.conversation_history
|
||||
.add_diff_output(diff_output.clone());
|
||||
self.emit_last_history_entry();
|
||||
self.request_redraw();
|
||||
}
|
||||
|
||||
@@ -458,21 +482,108 @@ impl ChatWidget<'_> {
|
||||
self.bottom_pane.on_file_search_result(query, matches);
|
||||
}
|
||||
|
||||
// (removed deprecated synchronous `compact` implementation)
|
||||
|
||||
/// Kick off an asynchronous summarization of the current transcript.
|
||||
/// Returns immediately so the UI stays responsive.
|
||||
pub(crate) fn start_compact(&mut self) {
|
||||
// Show status indicator immediately.
|
||||
self.bottom_pane.set_task_running(true);
|
||||
self.bottom_pane
|
||||
.update_status_text("Summarizing context…".to_string());
|
||||
self.request_redraw();
|
||||
|
||||
// Clone data required for the background task.
|
||||
let transcript = self.transcript.clone();
|
||||
let model = self.config.model.clone();
|
||||
let config_clone = self.config.clone();
|
||||
let app_event_tx = self.app_event_tx.clone();
|
||||
|
||||
// Spawn the summarization on a blocking thread to avoid CPU-bound work
|
||||
// stalling the async runtime (and thus the UI).
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let rt = tokio::runtime::Handle::current();
|
||||
rt.block_on(async move {
|
||||
let result = generate_compact_summary(&transcript, &model, &config_clone).await;
|
||||
let evt = match result {
|
||||
Ok(summary) => AppEvent::CompactComplete(Ok(summary)),
|
||||
Err(e) => AppEvent::CompactComplete(Err(format!("{e}"))),
|
||||
};
|
||||
app_event_tx.send(evt);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/// Apply the completed summary returned by the background task.
|
||||
pub(crate) fn apply_compact_summary(&mut self, result: Result<String, String>) {
|
||||
match result {
|
||||
Ok(summary) => {
|
||||
self.conversation_history.clear_agent_history();
|
||||
self.transcript.clear();
|
||||
// clear session history in backend
|
||||
self.submit_op(Op::EraseConversationHistory);
|
||||
self.conversation_history
|
||||
.add_agent_message(&self.config, summary.clone());
|
||||
self.transcript = vec![TranscriptEntry {
|
||||
role: Role::Assistant,
|
||||
text: summary,
|
||||
}];
|
||||
|
||||
// Re-configure the Codex session so that the backend agent starts with
|
||||
// a clean conversation context.
|
||||
let op = Op::ConfigureSession {
|
||||
provider: self.config.model_provider.clone(),
|
||||
model: self.config.model.clone(),
|
||||
model_reasoning_effort: self.config.model_reasoning_effort,
|
||||
model_reasoning_summary: self.config.model_reasoning_summary,
|
||||
user_instructions: self.config.user_instructions.clone(),
|
||||
base_instructions: self.config.base_instructions.clone(),
|
||||
approval_policy: self.config.approval_policy,
|
||||
sandbox_policy: self.config.sandbox_policy.clone(),
|
||||
disable_response_storage: self.config.disable_response_storage,
|
||||
notify: self.config.notify.clone(),
|
||||
cwd: self.config.cwd.clone(),
|
||||
resume_path: None,
|
||||
};
|
||||
self.submit_op(op);
|
||||
|
||||
// Reset the recorded token usage because we start a fresh
|
||||
// conversation context. This ensures the *context remaining*
|
||||
// indicator in the composer is updated immediately.
|
||||
self.token_usage = TokenUsage::default();
|
||||
self.bottom_pane
|
||||
.set_token_usage(self.token_usage.clone(), self.config.model_context_window);
|
||||
}
|
||||
Err(msg) => {
|
||||
self.conversation_history.add_error(msg);
|
||||
}
|
||||
}
|
||||
|
||||
// Hide status indicator and refresh UI.
|
||||
self.bottom_pane.set_task_running(false);
|
||||
self.request_redraw();
|
||||
}
|
||||
|
||||
/// Handle Ctrl-C key press.
|
||||
/// Returns true if the key press was handled, false if it was not.
|
||||
/// If the key press was not handled, the caller should handle it (likely by exiting the process).
|
||||
pub(crate) fn on_ctrl_c(&mut self) -> bool {
|
||||
/// Returns CancellationEvent::Handled if the event was consumed by the UI, or
|
||||
/// CancellationEvent::Ignored if the caller should handle it (e.g. exit).
|
||||
pub(crate) fn on_ctrl_c(&mut self) -> CancellationEvent {
|
||||
match self.bottom_pane.on_ctrl_c() {
|
||||
CancellationEvent::Handled => return CancellationEvent::Handled,
|
||||
CancellationEvent::Ignored => {}
|
||||
}
|
||||
if self.bottom_pane.is_task_running() {
|
||||
self.bottom_pane.clear_ctrl_c_quit_hint();
|
||||
self.submit_op(Op::Interrupt);
|
||||
self.answer_buffer.clear();
|
||||
self.reasoning_buffer.clear();
|
||||
false
|
||||
CancellationEvent::Ignored
|
||||
} else if self.bottom_pane.ctrl_c_quit_hint_visible() {
|
||||
true
|
||||
self.submit_op(Op::Shutdown);
|
||||
CancellationEvent::Handled
|
||||
} else {
|
||||
self.bottom_pane.show_ctrl_c_quit_hint();
|
||||
false
|
||||
CancellationEvent::Ignored
|
||||
}
|
||||
}
|
||||
|
||||
@@ -486,19 +597,18 @@ impl ChatWidget<'_> {
|
||||
tracing::error!("failed to submit op: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn token_usage(&self) -> &TokenUsage {
|
||||
&self.token_usage
|
||||
}
|
||||
}
|
||||
|
||||
impl WidgetRef for &ChatWidget<'_> {
|
||||
fn render_ref(&self, area: Rect, buf: &mut Buffer) {
|
||||
let bottom_height = self.bottom_pane.calculate_required_height(&area);
|
||||
|
||||
let chunks = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints([Constraint::Min(0), Constraint::Length(bottom_height)])
|
||||
.split(area);
|
||||
|
||||
self.conversation_history.render(chunks[0], buf);
|
||||
(&self.bottom_pane).render(chunks[1], buf);
|
||||
// In the hybrid inline viewport mode we only draw the interactive
|
||||
// bottom pane; history entries are injected directly into scrollback
|
||||
// via `Terminal::insert_before`.
|
||||
(&self.bottom_pane).render(area, buf);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
codex-rs/tui/src/compact.rs (new file, 91 lines)
@@ -0,0 +1,91 @@
use anyhow::Result;
use anyhow::anyhow;
use codex_core::config::Config;
use codex_core::openai_api_key::get_openai_api_key;
use serde::Serialize;

#[derive(Clone)]
pub enum Role {
    User,
    Assistant,
}

#[derive(Clone)]
pub struct TranscriptEntry {
    pub role: Role,
    pub text: String,
}

impl TranscriptEntry {
    fn role_str(&self) -> &'static str {
        match self.role {
            Role::User => "user",
            Role::Assistant => "assistant",
        }
    }
}

#[derive(Serialize)]
struct Message<'a> {
    role: &'a str,
    content: String,
}

#[derive(Serialize)]
struct Payload<'a> {
    model: &'a str,
    messages: Vec<Message<'a>>,
}

/// Generate a concise summary of the provided transcript using the OpenAI chat
/// completions API.
pub async fn generate_compact_summary(
    transcript: &[TranscriptEntry],
    model: &str,
    config: &Config,
) -> Result<String> {
    let conversation_text = transcript
        .iter()
        .map(|e| format!("{}: {}", e.role_str(), e.text))
        .collect::<Vec<_>>()
        .join("\n");

    let messages = vec![
        Message {
            role: "assistant",
            content: "You are an expert coding assistant. Your goal is to generate a concise, structured summary of the conversation below that captures all essential information needed to continue development after context replacement. Include tasks performed, code areas modified or reviewed, key decisions or assumptions, test results or errors, and outstanding tasks or next steps.".to_string(),
        },
        Message {
            role: "user",
            content: format!(
                "Here is the conversation so far:\n{conversation_text}\n\nPlease summarize this conversation, covering:\n1. Tasks performed and outcomes\n2. Code files, modules, or functions modified or examined\n3. Important decisions or assumptions made\n4. Errors encountered and test or build results\n5. Remaining tasks, open questions, or next steps\nProvide the summary in a clear, concise format."
            ),
        },
    ];

    let api_key = get_openai_api_key().ok_or_else(|| anyhow!("OpenAI API key not set"))?;
    let client = reqwest::Client::new();
    let base = config.model_provider.base_url.trim_end_matches('/');
    let url = format!("{}/chat/completions", base);

    let payload = Payload { model, messages };
    let res = client
        .post(url)
        .bearer_auth(api_key)
        .json(&payload)
        .send()
        .await?;

    let body: serde_json::Value = res.json().await?;
    if let Some(summary) = body
        .get("choices")
        .and_then(|c| c.get(0))
        .and_then(|c| c.get("message"))
        .and_then(|m| m.get("content"))
        .and_then(|v| v.as_str())
    {
        Ok(summary.to_string())
    } else {
        Ok("Unable to generate summary.".to_string())
    }
}
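A minimal call-site sketch for generate_compact_summary (assumed, not part of this diff; the real caller is ChatWidget::start_compact shown earlier): the transcript is built from prior user/assistant turns and the summary is awaited on a background task.

    // Sketch only: `config` is a loaded codex_core::config::Config.
    let transcript = vec![
        TranscriptEntry { role: Role::User, text: "add a /compact command".into() },
        TranscriptEntry { role: Role::Assistant, text: "Added start_compact() to ChatWidget.".into() },
    ];
    let summary = generate_compact_summary(&transcript, &config.model, &config).await?;
    // The summary then replaces the visible history and seeds the fresh session context.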
@@ -5,8 +5,6 @@ use crate::history_cell::PatchEventType;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::protocol::FileChange;
|
||||
use codex_core::protocol::SessionConfiguredEvent;
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use ratatui::prelude::*;
|
||||
use ratatui::style::Style;
|
||||
use ratatui::widgets::*;
|
||||
@@ -47,33 +45,6 @@ impl ConversationHistoryWidget {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn set_input_focus(&mut self, has_input_focus: bool) {
|
||||
self.has_input_focus = has_input_focus;
|
||||
}
|
||||
|
||||
/// Returns true if it needs a redraw.
|
||||
pub(crate) fn handle_key_event(&mut self, key_event: KeyEvent) -> bool {
|
||||
match key_event.code {
|
||||
KeyCode::Up | KeyCode::Char('k') => {
|
||||
self.scroll_up(1);
|
||||
true
|
||||
}
|
||||
KeyCode::Down | KeyCode::Char('j') => {
|
||||
self.scroll_down(1);
|
||||
true
|
||||
}
|
||||
KeyCode::PageUp | KeyCode::Char('b') => {
|
||||
self.scroll_page_up();
|
||||
true
|
||||
}
|
||||
KeyCode::PageDown | KeyCode::Char(' ') => {
|
||||
self.scroll_page_down();
|
||||
true
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Negative delta scrolls up; positive delta scrolls down.
|
||||
pub(crate) fn scroll(&mut self, delta: i32) {
|
||||
match delta.cmp(&0) {
|
||||
@@ -122,53 +93,6 @@ impl ConversationHistoryWidget {
|
||||
}
|
||||
}
|
||||
|
||||
/// Scroll up by one full viewport height (Page Up).
|
||||
fn scroll_page_up(&mut self) {
|
||||
let viewport_height = self.last_viewport_height.get().max(1);
|
||||
|
||||
// If we are currently in the "stick to bottom" mode, first convert the
|
||||
// implicit scroll position (`usize::MAX`) into an explicit offset that
|
||||
// represents the very bottom of the scroll region. This mirrors the
|
||||
// logic from `scroll_up()`.
|
||||
if self.scroll_position == usize::MAX {
|
||||
self.scroll_position = self
|
||||
.num_rendered_lines
|
||||
.get()
|
||||
.saturating_sub(viewport_height);
|
||||
}
|
||||
|
||||
// Move up by a full page.
|
||||
self.scroll_position = self.scroll_position.saturating_sub(viewport_height);
|
||||
}
|
||||
|
||||
/// Scroll down by one full viewport height (Page Down).
|
||||
fn scroll_page_down(&mut self) {
|
||||
// Nothing to do if we're already stuck to the bottom.
|
||||
if self.scroll_position == usize::MAX {
|
||||
return;
|
||||
}
|
||||
|
||||
let viewport_height = self.last_viewport_height.get().max(1);
|
||||
let num_lines = self.num_rendered_lines.get();
|
||||
|
||||
// Calculate the maximum explicit scroll offset that is still within
|
||||
// range. This matches the logic in `scroll_down()` and the render
|
||||
// method.
|
||||
let max_scroll = num_lines.saturating_sub(viewport_height);
|
||||
|
||||
// Attempt to move down by a full page.
|
||||
let new_pos = self.scroll_position.saturating_add(viewport_height);
|
||||
|
||||
if new_pos >= max_scroll {
|
||||
// We have reached (or passed) the bottom – switch back to
|
||||
// automatic stick‑to‑bottom mode so that subsequent output keeps
|
||||
// the viewport pinned.
|
||||
self.scroll_position = usize::MAX;
|
||||
} else {
|
||||
self.scroll_position = new_pos;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn scroll_to_bottom(&mut self) {
|
||||
self.scroll_position = usize::MAX;
|
||||
}
|
||||
@@ -198,18 +122,14 @@ impl ConversationHistoryWidget {
|
||||
self.add_to_history(HistoryCell::new_agent_message(config, message));
|
||||
}
|
||||
|
||||
pub fn clear_agent_history(&mut self) {
|
||||
self.clear_all();
|
||||
}
|
||||
|
||||
pub fn add_agent_reasoning(&mut self, config: &Config, text: String) {
|
||||
self.add_to_history(HistoryCell::new_agent_reasoning(config, text));
|
||||
}
|
||||
|
||||
pub fn replace_prev_agent_reasoning(&mut self, config: &Config, text: String) {
|
||||
self.replace_last_agent_reasoning(config, text);
|
||||
}
|
||||
|
||||
pub fn replace_prev_agent_message(&mut self, config: &Config, text: String) {
|
||||
self.replace_last_agent_message(config, text);
|
||||
}
|
||||
|
||||
pub fn add_background_event(&mut self, message: String) {
|
||||
self.add_to_history(HistoryCell::new_background_event(message));
|
||||
}
|
||||
@@ -257,40 +177,14 @@ impl ConversationHistoryWidget {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn replace_last_agent_reasoning(&mut self, config: &Config, text: String) {
|
||||
if let Some(idx) = self
|
||||
.entries
|
||||
.iter()
|
||||
.rposition(|entry| matches!(entry.cell, HistoryCell::AgentReasoning { .. }))
|
||||
{
|
||||
let width = self.cached_width.get();
|
||||
let entry = &mut self.entries[idx];
|
||||
entry.cell = HistoryCell::new_agent_reasoning(config, text);
|
||||
let height = if width > 0 {
|
||||
entry.cell.height(width)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
entry.line_count.set(height);
|
||||
}
|
||||
fn clear_all(&mut self) {
|
||||
self.entries.clear();
|
||||
}
|
||||
|
||||
pub fn replace_last_agent_message(&mut self, config: &Config, text: String) {
|
||||
if let Some(idx) = self
|
||||
.entries
|
||||
.iter()
|
||||
.rposition(|entry| matches!(entry.cell, HistoryCell::AgentMessage { .. }))
|
||||
{
|
||||
let width = self.cached_width.get();
|
||||
let entry = &mut self.entries[idx];
|
||||
entry.cell = HistoryCell::new_agent_message(config, text);
|
||||
let height = if width > 0 {
|
||||
entry.cell.height(width)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
entry.line_count.set(height);
|
||||
}
|
||||
/// Return the lines for the most recently appended entry (if any) so the
|
||||
/// parent widget can surface them via the new scrollback insertion path.
|
||||
pub(crate) fn last_entry_plain_lines(&self) -> Option<Vec<Line<'static>>> {
|
||||
self.entries.last().map(|e| e.cell.plain_lines())
|
||||
}
|
||||
|
||||
pub fn record_completed_exec_command(
|
||||
|
||||
@@ -123,6 +123,30 @@ pub(crate) enum HistoryCell {
|
||||
const TOOL_CALL_MAX_LINES: usize = 5;
|
||||
|
||||
impl HistoryCell {
|
||||
/// Return a cloned, plain representation of the cell's lines suitable for
|
||||
/// one‑shot insertion into the terminal scrollback. Image cells are
|
||||
/// represented with a simple placeholder for now.
|
||||
pub(crate) fn plain_lines(&self) -> Vec<Line<'static>> {
|
||||
match self {
|
||||
HistoryCell::WelcomeMessage { view }
|
||||
| HistoryCell::UserPrompt { view }
|
||||
| HistoryCell::AgentMessage { view }
|
||||
| HistoryCell::AgentReasoning { view }
|
||||
| HistoryCell::BackgroundEvent { view }
|
||||
| HistoryCell::GitDiffOutput { view }
|
||||
| HistoryCell::ErrorEvent { view }
|
||||
| HistoryCell::SessionInfo { view }
|
||||
| HistoryCell::CompletedExecCommand { view }
|
||||
| HistoryCell::CompletedMcpToolCall { view }
|
||||
| HistoryCell::PendingPatch { view }
|
||||
| HistoryCell::ActiveExecCommand { view, .. }
|
||||
| HistoryCell::ActiveMcpToolCall { view, .. } => view.lines.clone(),
|
||||
HistoryCell::CompletedMcpToolCallWithImageOutput { .. } => vec![
|
||||
Line::from("tool result (image output omitted)"),
|
||||
Line::from(""),
|
||||
],
|
||||
}
|
||||
}
|
||||
pub(crate) fn new_session_info(
|
||||
config: &Config,
|
||||
event: SessionConfiguredEvent,
|
||||
@@ -156,7 +180,7 @@ impl HistoryCell {
|
||||
("workdir", config.cwd.display().to_string()),
|
||||
("model", config.model.clone()),
|
||||
("provider", config.model_provider_id.clone()),
|
||||
("approval", format!("{:?}", config.approval_policy)),
|
||||
("approval", config.approval_policy.to_string()),
|
||||
("sandbox", summarize_sandbox_policy(&config.sandbox_policy)),
|
||||
];
|
||||
if config.model_provider.wire_api == WireApi::Responses
|
||||
|
||||
codex-rs/tui/src/insert_history.rs (new file, 245 lines)
@@ -0,0 +1,245 @@
|
||||
use std::fmt;
|
||||
use std::io;
|
||||
use std::io::Write;
|
||||
|
||||
use crate::tui;
|
||||
use crossterm::Command;
|
||||
use crossterm::queue;
|
||||
use crossterm::style::Color as CColor;
|
||||
use crossterm::style::Colors;
|
||||
use crossterm::style::Print;
|
||||
use crossterm::style::SetAttribute;
|
||||
use crossterm::style::SetBackgroundColor;
|
||||
use crossterm::style::SetColors;
|
||||
use crossterm::style::SetForegroundColor;
|
||||
use ratatui::layout::Position;
|
||||
use ratatui::layout::Size;
|
||||
use ratatui::prelude::Backend;
|
||||
use ratatui::style::Color;
|
||||
use ratatui::style::Modifier;
|
||||
use ratatui::text::Line;
|
||||
use ratatui::text::Span;
|
||||
|
||||
/// Insert `lines` above the viewport.
|
||||
pub(crate) fn insert_history_lines(terminal: &mut tui::Tui, lines: Vec<Line<'static>>) {
|
||||
let screen_size = terminal.backend().size().unwrap_or(Size::new(0, 0));
|
||||
|
||||
let mut area = terminal.get_frame().area();
|
||||
|
||||
let wrapped_lines = wrapped_line_count(&lines, area.width);
|
||||
let cursor_top = if area.bottom() < screen_size.height {
|
||||
// If the viewport is not at the bottom of the screen, scroll it down to make room.
|
||||
// Don't scroll it past the bottom of the screen.
|
||||
let scroll_amount = wrapped_lines.min(screen_size.height - area.bottom());
|
||||
terminal
|
||||
.backend_mut()
|
||||
.scroll_region_down(area.top()..screen_size.height, scroll_amount)
|
||||
.ok();
|
||||
let cursor_top = area.top() - 1;
|
||||
area.y += scroll_amount;
|
||||
terminal.set_viewport_area(area);
|
||||
cursor_top
|
||||
} else {
|
||||
area.top() - 1
|
||||
};
|
||||
|
||||
// Limit the scroll region to the lines from the top of the screen to the
|
||||
// top of the viewport. With this in place, when we add lines inside this
|
||||
// area, only the lines in this area will be scrolled. We place the cursor
|
||||
// at the end of the scroll region, and add lines starting there.
|
||||
//
|
||||
// ┌─Screen───────────────────────┐
|
||||
// │┌╌Scroll region╌╌╌╌╌╌╌╌╌╌╌╌╌╌┐│
|
||||
// │┆ ┆│
|
||||
// │┆ ┆│
|
||||
// │┆ ┆│
|
||||
// │█╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┘│
|
||||
// │╭─Viewport───────────────────╮│
|
||||
// ││ ││
|
||||
// │╰────────────────────────────╯│
|
||||
// └──────────────────────────────┘
|
||||
queue!(std::io::stdout(), SetScrollRegion(1..area.top())).ok();
|
||||
|
||||
terminal
|
||||
.set_cursor_position(Position::new(0, cursor_top))
|
||||
.ok();
|
||||
|
||||
for line in lines {
|
||||
queue!(std::io::stdout(), Print("\r\n")).ok();
|
||||
write_spans(&mut std::io::stdout(), line.iter()).ok();
|
||||
}
|
||||
|
||||
queue!(std::io::stdout(), ResetScrollRegion).ok();
|
||||
}
|
||||
|
||||
fn wrapped_line_count(lines: &[Line], width: u16) -> u16 {
    let mut count = 0;
    for line in lines {
        count += line_height(line, width);
    }
    count
}

fn line_height(line: &Line, width: u16) -> u16 {
    use unicode_width::UnicodeWidthStr;
    // get the total display width of the line, accounting for double-width chars
    let total_width = line
        .spans
        .iter()
        .map(|span| span.content.width())
        .sum::<usize>();
    // divide by width to get the number of lines, rounding up
    if width == 0 {
        1
    } else {
        (total_width as u16).div_ceil(width).max(1)
    }
}
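A quick check on the rounding behaviour: a line whose spans total 100 display cells wraps onto ceil(100 / 80) = 2 rows in an 80-column terminal, and an empty line still occupies 1 row because of the trailing .max(1). A hypothetical unit test (not part of this diff) would pin that down:

    #[test]
    fn line_height_rounds_up_and_never_returns_zero() {
        let line = Line::from("x".repeat(100)); // 100 single-width cells
        assert_eq!(line_height(&line, 80), 2); // 100 / 80, rounded up
        assert_eq!(line_height(&Line::from(""), 80), 1); // empty line still takes one row
    }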
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SetScrollRegion(pub std::ops::Range<u16>);

impl Command for SetScrollRegion {
    fn write_ansi(&self, f: &mut impl fmt::Write) -> fmt::Result {
        write!(f, "\x1b[{};{}r", self.0.start, self.0.end)
    }

    #[cfg(windows)]
    fn execute_winapi(&self) -> std::io::Result<()> {
        panic!("tried to execute SetScrollRegion command using WinAPI, use ANSI instead");
    }

    #[cfg(windows)]
    fn is_ansi_code_supported(&self) -> bool {
        // TODO(nornagon): is this supported on Windows?
        true
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ResetScrollRegion;

impl Command for ResetScrollRegion {
    fn write_ansi(&self, f: &mut impl fmt::Write) -> fmt::Result {
        write!(f, "\x1b[r")
    }

    #[cfg(windows)]
    fn execute_winapi(&self) -> std::io::Result<()> {
        panic!("tried to execute ResetScrollRegion command using WinAPI, use ANSI instead");
    }

    #[cfg(windows)]
    fn is_ansi_code_supported(&self) -> bool {
        // TODO(nornagon): is this supported on Windows?
        true
    }
}
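These two commands emit the standard DECSTBM escape sequences: ESC [ top ; bottom r to confine scrolling to a row range, and ESC [ r to restore the full screen. A small hypothetical assertion (not part of this diff) makes the wire format explicit:

    let mut out = String::new();
    SetScrollRegion(1..10).write_ansi(&mut out).unwrap(); // String implements fmt::Write
    assert_eq!(out, "\x1b[1;10r");

    let mut reset = String::new();
    ResetScrollRegion.write_ansi(&mut reset).unwrap();
    assert_eq!(reset, "\x1b[r");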
|
||||
|
||||
struct ModifierDiff {
|
||||
pub from: Modifier,
|
||||
pub to: Modifier,
|
||||
}
|
||||
|
||||
impl ModifierDiff {
|
||||
fn queue<W>(self, mut w: W) -> io::Result<()>
|
||||
where
|
||||
W: io::Write,
|
||||
{
|
||||
use crossterm::style::Attribute as CAttribute;
|
||||
let removed = self.from - self.to;
|
||||
if removed.contains(Modifier::REVERSED) {
|
||||
queue!(w, SetAttribute(CAttribute::NoReverse))?;
|
||||
}
|
||||
if removed.contains(Modifier::BOLD) {
|
||||
queue!(w, SetAttribute(CAttribute::NormalIntensity))?;
|
||||
if self.to.contains(Modifier::DIM) {
|
||||
queue!(w, SetAttribute(CAttribute::Dim))?;
|
||||
}
|
||||
}
|
||||
if removed.contains(Modifier::ITALIC) {
|
||||
queue!(w, SetAttribute(CAttribute::NoItalic))?;
|
||||
}
|
||||
if removed.contains(Modifier::UNDERLINED) {
|
||||
queue!(w, SetAttribute(CAttribute::NoUnderline))?;
|
||||
}
|
||||
if removed.contains(Modifier::DIM) {
|
||||
queue!(w, SetAttribute(CAttribute::NormalIntensity))?;
|
||||
}
|
||||
if removed.contains(Modifier::CROSSED_OUT) {
|
||||
queue!(w, SetAttribute(CAttribute::NotCrossedOut))?;
|
||||
}
|
||||
if removed.contains(Modifier::SLOW_BLINK) || removed.contains(Modifier::RAPID_BLINK) {
|
||||
queue!(w, SetAttribute(CAttribute::NoBlink))?;
|
||||
}
|
||||
|
||||
let added = self.to - self.from;
|
||||
if added.contains(Modifier::REVERSED) {
|
||||
queue!(w, SetAttribute(CAttribute::Reverse))?;
|
||||
}
|
||||
if added.contains(Modifier::BOLD) {
|
||||
queue!(w, SetAttribute(CAttribute::Bold))?;
|
||||
}
|
||||
if added.contains(Modifier::ITALIC) {
|
||||
queue!(w, SetAttribute(CAttribute::Italic))?;
|
||||
}
|
||||
if added.contains(Modifier::UNDERLINED) {
|
||||
queue!(w, SetAttribute(CAttribute::Underlined))?;
|
||||
}
|
||||
if added.contains(Modifier::DIM) {
|
||||
queue!(w, SetAttribute(CAttribute::Dim))?;
|
||||
}
|
||||
if added.contains(Modifier::CROSSED_OUT) {
|
||||
queue!(w, SetAttribute(CAttribute::CrossedOut))?;
|
||||
}
|
||||
if added.contains(Modifier::SLOW_BLINK) {
|
||||
queue!(w, SetAttribute(CAttribute::SlowBlink))?;
|
||||
}
|
||||
if added.contains(Modifier::RAPID_BLINK) {
|
||||
queue!(w, SetAttribute(CAttribute::RapidBlink))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn write_spans<'a, I>(mut writer: &mut impl Write, content: I) -> io::Result<()>
|
||||
where
|
||||
I: Iterator<Item = &'a Span<'a>>,
|
||||
{
|
||||
let mut fg = Color::Reset;
|
||||
let mut bg = Color::Reset;
|
||||
let mut modifier = Modifier::empty();
|
||||
for span in content {
|
||||
let mut next_modifier = modifier;
|
||||
next_modifier.insert(span.style.add_modifier);
|
||||
next_modifier.remove(span.style.sub_modifier);
|
||||
if next_modifier != modifier {
|
||||
let diff = ModifierDiff {
|
||||
from: modifier,
|
||||
to: next_modifier,
|
||||
};
|
||||
diff.queue(&mut writer)?;
|
||||
modifier = next_modifier;
|
||||
}
|
||||
let next_fg = span.style.fg.unwrap_or(Color::Reset);
|
||||
let next_bg = span.style.bg.unwrap_or(Color::Reset);
|
||||
if next_fg != fg || next_bg != bg {
|
||||
queue!(
|
||||
writer,
|
||||
SetColors(Colors::new(next_fg.into(), next_bg.into()))
|
||||
)?;
|
||||
fg = next_fg;
|
||||
bg = next_bg;
|
||||
}
|
||||
|
||||
queue!(writer, Print(span.content.clone()))?;
|
||||
}
|
||||
|
||||
queue!(
|
||||
writer,
|
||||
SetForegroundColor(CColor::Reset),
|
||||
SetBackgroundColor(CColor::Reset),
|
||||
SetAttribute(crossterm::style::Attribute::Reset),
|
||||
)
|
||||
}
@@ -27,16 +27,17 @@ mod cell_widget;
mod chatwidget;
mod citation_regex;
mod cli;
mod compact;
mod conversation_history_widget;
mod exec_command;
mod file_search;
mod get_git_diff;
mod git_warning_screen;
mod history_cell;
mod insert_history;
mod log_layer;
mod login_screen;
mod markdown;
mod mouse_capture;
mod scroll_event_helper;
mod slash_command;
mod status_indicator_widget;
@@ -47,7 +48,10 @@ mod user_approval_widget;

pub use cli::Cli;

pub fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> std::io::Result<()> {
pub fn run_main(
    cli: Cli,
    codex_linux_sandbox_exe: Option<PathBuf>,
) -> std::io::Result<codex_core::protocol::TokenUsage> {
    let (sandbox_mode, approval_policy) = if cli.full_auto {
        (
            Some(SandboxMode::WorkspaceWrite),
@@ -147,24 +151,8 @@ pub fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> std::io::
    // `--allow-no-git-exec` flag.
    let show_git_warning = !cli.skip_git_repo_check && !is_inside_git_repo(&config);

    try_run_ratatui_app(cli, config, show_login_screen, show_git_warning, log_rx);
    Ok(())
}

#[expect(
    clippy::print_stderr,
    reason = "Resort to stderr in exceptional situations."
)]
fn try_run_ratatui_app(
    cli: Cli,
    config: Config,
    show_login_screen: bool,
    show_git_warning: bool,
    log_rx: tokio::sync::mpsc::UnboundedReceiver<String>,
) {
    if let Err(report) = run_ratatui_app(cli, config, show_login_screen, show_git_warning, log_rx) {
        eprintln!("Error: {report:?}");
    }
    run_ratatui_app(cli, config, show_login_screen, show_git_warning, log_rx)
        .map_err(|err| std::io::Error::other(err.to_string()))
}

fn run_ratatui_app(
@@ -173,16 +161,15 @@ fn run_ratatui_app(
    show_login_screen: bool,
    show_git_warning: bool,
    mut log_rx: tokio::sync::mpsc::UnboundedReceiver<String>,
) -> color_eyre::Result<()> {
) -> color_eyre::Result<codex_core::protocol::TokenUsage> {
    color_eyre::install()?;

    // Forward panic reports through the tracing stack so that they appear in
    // the status indicator instead of breaking the alternate screen – the
    // normal colour‑eyre hook writes to stderr which would corrupt the UI.
    // Forward panic reports through tracing so they appear in the UI status
    // line instead of interleaving raw panic output with the interface.
    std::panic::set_hook(Box::new(|info| {
        tracing::error!("panic: {info}");
    }));
    let (mut terminal, mut mouse_capture) = tui::init(&config)?;
    let mut terminal = tui::init(&config)?;
    terminal.clear()?;

    let Cli { prompt, images, .. } = cli;
@@ -204,10 +191,12 @@ fn run_ratatui_app(
        });
    }

    let app_result = app.run(&mut terminal, &mut mouse_capture);
    let app_result = app.run(&mut terminal);
    let usage = app.token_usage();

    restore();
    app_result
    // ignore error when collecting usage – report underlying error instead
    app_result.map(|_| usage)
}

#[expect(

@@ -1,4 +1,5 @@
use clap::Parser;
use codex_arg0::arg0_dispatch_or_else;
use codex_common::CliConfigOverrides;
use codex_tui::Cli;
use codex_tui::run_main;
@@ -13,14 +14,15 @@ struct TopCli {
}

fn main() -> anyhow::Result<()> {
    codex_linux_sandbox::run_with_sandbox(|codex_linux_sandbox_exe| async move {
    arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
        let top_cli = TopCli::parse();
        let mut inner = top_cli.inner;
        inner
            .config_overrides
            .raw_overrides
            .splice(0..0, top_cli.config_overrides.raw_overrides);
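        // `splice(0..0, …)` prepends the root-level `-c` overrides ahead of the
        // subcommand's own raw overrides.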
        run_main(inner, codex_linux_sandbox_exe)?;
        let usage = run_main(inner, codex_linux_sandbox_exe)?;
        println!("{}", codex_core::protocol::FinalOutput::from(usage));
        Ok(())
    })
}

@@ -1,69 +0,0 @@
use crossterm::event::DisableMouseCapture;
use crossterm::event::EnableMouseCapture;
use ratatui::crossterm::execute;
use std::io::Result;
use std::io::stdout;

pub(crate) struct MouseCapture {
    mouse_capture_is_active: bool,
}

impl MouseCapture {
    pub(crate) fn new_with_capture(mouse_capture_is_active: bool) -> Result<Self> {
        if mouse_capture_is_active {
            enable_capture()?;
        }

        Ok(Self {
            mouse_capture_is_active,
        })
    }
}

impl MouseCapture {
    /// Idempotent method to set the mouse capture state.
    pub fn set_active(&mut self, is_active: bool) -> Result<()> {
        match (self.mouse_capture_is_active, is_active) {
            (true, true) => {}
            (false, false) => {}
            (true, false) => {
                disable_capture()?;
                self.mouse_capture_is_active = false;
            }
            (false, true) => {
                enable_capture()?;
                self.mouse_capture_is_active = true;
            }
        }
        Ok(())
    }

    pub(crate) fn toggle(&mut self) -> Result<()> {
        self.set_active(!self.mouse_capture_is_active)
    }

    pub(crate) fn disable(&mut self) -> Result<()> {
        if self.mouse_capture_is_active {
            disable_capture()?;
            self.mouse_capture_is_active = false;
        }
        Ok(())
    }
}

impl Drop for MouseCapture {
    fn drop(&mut self) {
        if self.disable().is_err() {
            // The user is likely shutting down, so ignore any errors so the
            // shutdown process can complete.
        }
    }
}

fn enable_capture() -> Result<()> {
    execute!(stdout(), EnableMouseCapture)
}

fn disable_capture() -> Result<()> {
    execute!(stdout(), DisableMouseCapture)
}
@@ -14,8 +14,8 @@ pub enum SlashCommand {
    // more frequently used commands should be listed first.
    New,
    Diff,
    Compact,
    Quit,
    ToggleMouseMode,
}

impl SlashCommand {
@@ -23,13 +23,11 @@ impl SlashCommand {
    pub fn description(self) -> &'static str {
        match self {
            SlashCommand::New => "Start a new chat.",
            SlashCommand::ToggleMouseMode => {
                "Toggle mouse mode (enable for scrolling, disable for text selection)"
            }
            SlashCommand::Quit => "Exit the application.",
            SlashCommand::Diff => {
                "Show git diff of the working directory (including untracked files)"
            }
            SlashCommand::Compact => "Condense context into a summary.",
        }
    }

@@ -34,11 +34,6 @@ pub(crate) struct StatusIndicatorWidget {
    /// time).
    text: String,

    /// Height in terminal rows – matches the height of the textarea at the
    /// moment the task started so the UI does not jump when we toggle between
    /// input mode and loading mode.
    height: u16,

    frame_idx: Arc<AtomicUsize>,
    running: Arc<AtomicBool>,
    // Keep one sender alive to prevent the channel from closing while the
@@ -50,7 +45,7 @@ pub(crate) struct StatusIndicatorWidget {

impl StatusIndicatorWidget {
    /// Create a new status indicator and start the animation timer.
    pub(crate) fn new(app_event_tx: AppEventSender, height: u16) -> Self {
    pub(crate) fn new(app_event_tx: AppEventSender) -> Self {
        let frame_idx = Arc::new(AtomicUsize::new(0));
        let running = Arc::new(AtomicBool::new(true));

@@ -72,18 +67,12 @@ impl StatusIndicatorWidget {

        Self {
            text: String::from("waiting for logs…"),
            height: height.max(3),
            frame_idx,
            running,
            _app_event_tx: app_event_tx,
        }
    }

    /// Preferred height in terminal rows.
    pub(crate) fn get_height(&self) -> u16 {
        self.height
    }

    /// Update the line that is displayed in the widget.
    pub(crate) fn update_text(&mut self, text: String) {
        self.text = text.replace(['\n', '\r'], " ");

@@ -4,31 +4,39 @@ use std::io::stdout;

use codex_core::config::Config;
use crossterm::event::DisableBracketedPaste;
use crossterm::event::DisableMouseCapture;
use crossterm::event::EnableBracketedPaste;
use ratatui::Terminal;
use ratatui::TerminalOptions;
use ratatui::Viewport;
use ratatui::backend::CrosstermBackend;
use ratatui::crossterm::execute;
use ratatui::crossterm::terminal::EnterAlternateScreen;
use ratatui::crossterm::terminal::LeaveAlternateScreen;
use ratatui::crossterm::terminal::disable_raw_mode;
use ratatui::crossterm::terminal::enable_raw_mode;

use crate::mouse_capture::MouseCapture;

/// A type alias for the terminal type used in this application
pub type Tui = Terminal<CrosstermBackend<Stdout>>;

/// Initialize the terminal
pub fn init(config: &Config) -> Result<(Tui, MouseCapture)> {
    execute!(stdout(), EnterAlternateScreen)?;
/// Initialize the terminal (inline viewport; history stays in normal scrollback)
pub fn init(_config: &Config) -> Result<Tui> {
    execute!(stdout(), EnableBracketedPaste)?;
    let mouse_capture = MouseCapture::new_with_capture(!config.tui.disable_mouse_capture)?;

    enable_raw_mode()?;
    set_panic_hook();
    let tui = Terminal::new(CrosstermBackend::new(stdout()))?;
    Ok((tui, mouse_capture))

    // Reserve a fixed number of lines for the interactive viewport (composer,
    // status, popups). History is injected above using `insert_before`. This
    // is an initial step of the refactor – later the height can become
    // dynamic. For now a conservative default keeps enough room for the
    // multi‑line composer while not occupying the whole screen.
    const BOTTOM_VIEWPORT_HEIGHT: u16 = 8;
    let backend = CrosstermBackend::new(stdout());
    let tui = Terminal::with_options(
        backend,
        TerminalOptions {
            viewport: Viewport::Inline(BOTTOM_VIEWPORT_HEIGHT),
        },
    )?;
    Ok(tui)
}
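
Instead of entering the alternate screen, the new `init` reserves only the bottom rows of the terminal and lets finished output accumulate in the regular scrollback above them. A minimal sketch of that pattern, assuming a ratatui release that provides `Viewport::Inline` and `Terminal::insert_before` (the widget text and the 8-row height are placeholders):

use std::io::stdout;

use ratatui::Terminal;
use ratatui::TerminalOptions;
use ratatui::Viewport;
use ratatui::backend::CrosstermBackend;
use ratatui::widgets::Paragraph;
use ratatui::widgets::Widget;

fn main() -> std::io::Result<()> {
    // The bottom 8 rows stay under ratatui's control; everything above is
    // ordinary terminal scrollback.
    let mut terminal = Terminal::with_options(
        CrosstermBackend::new(stdout()),
        TerminalOptions {
            viewport: Viewport::Inline(8),
        },
    )?;

    // One line of "history" is emitted above the viewport and scrolls naturally.
    terminal.insert_before(1, |buf| {
        Paragraph::new("history line").render(buf.area, buf);
    })?;

    Ok(())
}

With history living in the real scrollback, native terminal scrolling and text selection keep working without mouse capture, which lines up with the removal of `mouse_capture.rs` earlier in this diff.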

fn set_panic_hook() {
@@ -41,14 +49,7 @@ fn set_panic_hook() {

/// Restore the terminal to its original state
pub fn restore() -> Result<()> {
    // We are shutting down, and we cannot reference the `MouseCapture`, so we
    // categorically disable mouse capture just to be safe.
    if execute!(stdout(), DisableMouseCapture).is_err() {
        // It is possible that `DisableMouseCapture` is written more than once
        // on shutdown, so ignore the error in this case.
    }
    execute!(stdout(), DisableBracketedPaste)?;
    execute!(stdout(), LeaveAlternateScreen)?;
    disable_raw_mode()?;
    Ok(())
}

@@ -116,10 +116,6 @@ pub(crate) struct UserApprovalWidget<'a> {
    done: bool,
}

// Number of lines automatically added by ratatui’s [`Block`] when
// borders are enabled (one at the top, one at the bottom).
const BORDER_LINES: u16 = 2;

impl UserApprovalWidget<'_> {
    pub(crate) fn new(approval_request: ApprovalRequest, app_event_tx: AppEventSender) -> Self {
        let input = Input::default();
@@ -190,28 +186,6 @@ impl UserApprovalWidget<'_> {
        }
    }

    pub(crate) fn get_height(&self, area: &Rect) -> u16 {
        let confirmation_prompt_height =
            self.get_confirmation_prompt_height(area.width - BORDER_LINES);

        match self.mode {
            Mode::Select => {
                let num_option_lines = SELECT_OPTIONS.len() as u16;
                confirmation_prompt_height + num_option_lines + BORDER_LINES
            }
            Mode::Input => {
                // 1. "Give the model feedback ..." prompt
                // 2. A single‑line input field (we allocate exactly one row;
                //    the `tui-input` widget will scroll horizontally if the
                //    text exceeds the width).
                const INPUT_PROMPT_LINES: u16 = 1;
                const INPUT_FIELD_LINES: u16 = 1;

                confirmation_prompt_height + INPUT_PROMPT_LINES + INPUT_FIELD_LINES + BORDER_LINES
            }
        }
    }

    fn get_confirmation_prompt_height(&self, width: u16) -> u16 {
        // Should cache this for last value of width.
        self.confirmation_prompt.line_count(width) as u16
@@ -229,6 +203,12 @@ impl UserApprovalWidget<'_> {
        }
    }

    /// Handle Ctrl-C pressed by the user while the modal is visible.
    /// Behaves like pressing Escape: abort the request and close the modal.
    pub(crate) fn on_ctrl_c(&mut self) {
        self.send_decision(ReviewDecision::Abort);
    }

    fn handle_select_key(&mut self, key_event: KeyEvent) {
        match key_event.code {
            KeyCode::Up => {
@@ -291,7 +271,28 @@ impl UserApprovalWidget<'_> {
        self.send_decision_with_feedback(decision, String::new())
    }

    fn send_decision_with_feedback(&mut self, decision: ReviewDecision, _feedback: String) {
    fn send_decision_with_feedback(&mut self, decision: ReviewDecision, feedback: String) {
        let mut lines: Vec<Line<'static>> = Vec::new();
        match &self.approval_request {
            ApprovalRequest::Exec { command, .. } => {
                let cmd = strip_bash_lc_and_escape(command);
                lines.push(Line::from("approval decision"));
                lines.push(Line::from(format!("$ {cmd}")));
                lines.push(Line::from(format!("decision: {decision:?}")));
            }
            ApprovalRequest::ApplyPatch { .. } => {
                lines.push(Line::from(format!("patch approval decision: {decision:?}")));
            }
        }
        if !feedback.trim().is_empty() {
            lines.push(Line::from("feedback:"));
            for l in feedback.lines() {
                lines.push(Line::from(l.to_string()));
            }
        }
        lines.push(Line::from(""));
        self.app_event_tx.send(AppEvent::InsertHistory(lines));

        let op = match &self.approval_request {
            ApprovalRequest::Exec { id, .. } => Op::ExecApproval {
                id: id.clone(),
@@ -303,12 +304,6 @@ impl UserApprovalWidget<'_> {
            },
        };

        // Ignore feedback for now – the current `Op` variants do not carry it.

        // Forward the Op to the agent. The caller (ChatWidget) will trigger a
        // redraw after it processes the resulting state change, so we avoid
        // issuing an extra Redraw here to prevent a transient frame where the
        // modal is still visible.
        self.app_event_tx.send(AppEvent::CodexOp(op));
        self.done = true;
    }
@@ -333,7 +328,32 @@ impl WidgetRef for &UserApprovalWidget<'_> {
            .borders(Borders::ALL)
            .border_type(BorderType::Rounded);
        let inner = outer.inner(area);
        let prompt_height = self.get_confirmation_prompt_height(inner.width);

        // Determine how many rows we can allocate for the static confirmation
        // prompt while *always* keeping enough space for the interactive
        // response area (select list or input field). When the full prompt
        // would exceed the available height we truncate it so the response
        // options never get pushed out of view. This keeps the approval modal
        // usable even when the overall bottom viewport is small.

        // Full height of the prompt (may be larger than the available area).
        let full_prompt_height = self.get_confirmation_prompt_height(inner.width);

        // Minimum rows that must remain for the interactive section.
        let min_response_rows = match self.mode {
            Mode::Select => SELECT_OPTIONS.len() as u16,
            // In input mode we need exactly two rows: one for the guidance
            // prompt and one for the single-line input field.
            Mode::Input => 2,
        };

        // Clamp prompt height so confirmation + response never exceed the
        // available space. `saturating_sub` avoids underflow when the area is
        // too small even for the minimal layout – in this unlikely case we
        // fall back to zero-height prompt so at least the options are
        // visible.
        let prompt_height = full_prompt_height.min(inner.height.saturating_sub(min_response_rows));
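        // e.g. with inner.height = 8, a 10-row prompt and a 4-option select list:
        // prompt_height = 10.min(8 - 4) = 4, so the options stay visible.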

        let chunks = Layout::default()
            .direction(Direction::Vertical)
            .constraints([Constraint::Length(prompt_height), Constraint::Min(0)])
@@ -342,8 +362,7 @@ impl WidgetRef for &UserApprovalWidget<'_> {
        let response_chunk = chunks[1];

        // Build the inner lines based on the mode. Collect them into a List of
        // non-wrapping lines rather than a Paragraph because get_height(Rect)
        // depends on this behavior for its calculation.
        // non-wrapping lines rather than a Paragraph for predictable layout.
        let lines = match self.mode {
            Mode::Select => SELECT_OPTIONS
                .iter()