Mirror of https://github.com/openai/codex.git (synced 2026-02-08 09:53:39 +00:00)

Compare commits: codex-conc ... stream-con (68 commits)
| SHA1 |
|---|
| 3b90ca959b |
| c6cfdf705c |
| 622a84f4ba |
| 34704ff055 |
| e0303dbac0 |
| d31e149cb1 |
| 136b3ee5bf |
| 0bf33f7359 |
| 0c9d8f13e5 |
| 4259e5787f |
| fcdb1c4b4d |
| 906d449760 |
| c85c6dfccd |
| c1e9083cbd |
| 063083af15 |
| f58401e203 |
| 84bcadb8d9 |
| e38ce39c51 |
| 1a33de34b0 |
| bd171e5206 |
| 3f13ebce10 |
| f68bf94db1 |
| e054715bea |
| c72fe752cc |
| 985c97985b |
| 7dec04ae4f |
| 7279080edd |
| c182126bca |
| 89ab5c3f74 |
| 6db597ec0c |
| 2899817c94 |
| a362ad00ce |
| c515d2869e |
| bfbe523f81 |
| 95423b26d7 |
| 64cfbbd3c8 |
| a6139aa003 |
| 5bab2bd2f8 |
| dc15a5cf0b |
| 1f3318c1c5 |
| 1294def888 |
| ab70497539 |
| e3565a3f43 |
| 2576fadc74 |
| 78a1d49fac |
| d62b703a21 |
| 4c9f7b6bcc |
| 75eecb656e |
| 2a40d07a06 |
| 2e07f4b033 |
| 324926e240 |
| 9805ad1fbc |
| 792efc990c |
| ec6a4f9e2a |
| c01b9d2d2a |
| d5efc45869 |
| dbcb9e7ca6 |
| 8d413194f3 |
| 19d3e17572 |
| a5b3c151ac |
| 0110749efa |
| bea4a5358a |
| 4c13829e8b |
| 5ccd02b0fe |
| 21c334ae54 |
| 66ea94f723 |
| ae6becc58d |
| 3a456c1fbb |
.github/actions/codex/bun.lock (vendored): 4 changes

@@ -11,7 +11,7 @@
"@types/bun": "^1.2.19",
"@types/node": "^24.1.0",
"prettier": "^3.6.2",
"typescript": "^5.8.3",
"typescript": "^5.9.2",
},
},
},

@@ -68,7 +68,7 @@

"tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="],

"typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],

"undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="],
.github/actions/codex/package.json (vendored): 2 changes

@@ -16,6 +16,6 @@
"@types/bun": "^1.2.19",
"@types/node": "^24.1.0",
"prettier": "^3.6.2",
"typescript": "^5.8.3"
"typescript": "^5.9.2"
}
}
.vscode/settings.json (vendored): 4 changes

@@ -11,6 +11,8 @@
"editor.defaultFormatter": "tamasfe.even-better-toml",
"editor.formatOnSave": true,
},
"evenBetterToml.formatter.reorderArrays": true,
// Array order for options in ~/.codex/config.toml such as `notify` and the
// `args` for an MCP server is significant, so we disable reordering.
"evenBetterToml.formatter.reorderArrays": false,
"evenBetterToml.formatter.reorderKeys": true,
}
@@ -147,4 +147,8 @@ const READ_ONLY_SEATBELT_POLICY = `
(sysctl-name "kern.version")
(sysctl-name "sysctl.proc_cputype")
(sysctl-name-prefix "hw.perflevel")
)`.trim();
)

; Added on top of Chrome profile
; Needed for python multiprocessing on MacOS for the SemLock
(allow ipc-posix-sem)`.trim();
codex-rs/Cargo.lock (generated): 133 changes

The generated lockfile hunks (checksums and registry boilerplate not reproduced here) record the following dependency changes:

- toml bumped 0.9.2 -> 0.9.4, both as a dependency reference and in its own package entry
- tokio bumped 1.46.1 -> 1.47.1
- serde_json bumped 1.0.141 -> 1.0.142
- unicode-width references moved from 0.2.0 to 0.2.1 (ratatui, tui-input, and the unicode-width package entry itself)
- socket2 consolidated onto a single version: the socket2 0.5.10 package entry is dropped and dependents (hyper/tokio dependency lists) now reference plain "socket2"
- several windows-sys dependency references shuffled between 0.52.0, 0.59.0, and 0.60.2
- ratatui (git branch nornagon-v0.29.0-patch) pinned to a new revision: bca287ddc5d38fe088c79e2eda22422b96226f2e -> 9b2ad1298408c45918ee9f8241a6f95498cdbed2
- new package entries: is_ci 1.2.0, smawk 0.3.2, supports-color 3.0.2, textwrap 0.16.2, unicode-linebreak 0.1.5, vt100 0.16.2, vte 0.15.0
- removed package entry: tui-textarea 0.7.0
- dependency-list additions for the codex crates: strum_macros 0.27.2 and tokio-util
- the TUI crate's dependency list now includes ratatui-image, supports-color, textwrap 0.16.2, and vt100, and drops tui-textarea
- starlark now references textwrap 0.11.0 explicitly (two textwrap versions coexist in the lockfile)
@@ -483,6 +483,19 @@ Setting `hide_agent_reasoning` to `true` suppresses these events in **both** the
hide_agent_reasoning = true # defaults to false
```

## show_raw_agent_reasoning

Surfaces the model’s raw chain-of-thought ("raw reasoning content") when available.

Notes:
- Only takes effect if the selected model/provider actually emits raw reasoning content. Many models do not. When unsupported, this option has no visible effect.
- Raw reasoning may include intermediate thoughts or sensitive context. Enable only if acceptable for your workflow.

Example:
```toml
show_raw_agent_reasoning = true # defaults to false
```

## model_context_window

The size of the context window for the model, in tokens.
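The options documented in this hunk live in `~/.codex/config.toml`. A minimal illustrative sketch combining them (the context-window value below is a hypothetical example, not a default):

```toml
# Surface raw chain-of-thought when the model/provider emits it (defaults to false).
show_raw_agent_reasoning = true

# Hypothetical value; set this to your model's actual context window, in tokens.
model_context_window = 200000
```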
@@ -34,6 +34,7 @@ serde_json = "1"
serde_bytes = "0.11"
sha1 = "0.10.6"
shlex = "1.3.0"
similar = "2.7.0"
strum_macros = "0.27.2"
thiserror = "2.0.12"
time = { version = "0.3", features = ["formatting", "local-offset", "macros"] }

@@ -45,7 +46,7 @@ tokio = { version = "1", features = [
"signal",
] }
tokio-util = "0.7.14"
toml = "0.9.2"
toml = "0.9.4"
tracing = { version = "0.1.41", features = ["log"] }
tree-sitter = "0.25.8"
tree-sitter-bash = "0.25.0"
@@ -1,16 +1,31 @@
Please resolve the user's task by editing and testing the code files in your current code execution session.
You are a deployed coding agent.
Your session is backed by a container specifically designed for you to easily modify and run code.
The repo(s) are already cloned in your working directory, and you must fully solve the problem for your answer to be considered correct.
You are operating as and within the Codex CLI, an open-source, terminal-based agentic coding assistant built by OpenAI. It wraps OpenAI models to enable natural language interaction with a local codebase. You are expected to be precise, safe, and helpful.

Your capabilities:
- Receive user prompts, project context, and files.
- Stream responses and emit function calls (e.g., shell commands, code edits).
- Run commands, like apply_patch, and manage user approvals based on policy.
- Work inside a workspace with sandboxing instructions specified by the policy described in (## Sandbox environment and approval instructions)

Within this context, Codex refers to the open-source agentic coding interface (not the old Codex language model built by OpenAI).

## General guidelines
As a deployed coding agent, please continue working on the user's task until their query is resolved, before ending your turn and yielding back to the user. Only terminate your turn when you are sure that the task is solved. If you are not sure about file content or codebase structure pertaining to the user's request, use your tools to read files and gather the relevant information. Do NOT guess or make up an answer.

After a user sends their first message, you should immediately provide a brief message acknowledging their request to set the tone and expectation of future work to be done (no more than 8-10 words). This should be done before performing work like exploring the codebase, writing or reading files, or other tool calls needed to complete the task. Use a natural, collaborative tone similar to how a teammate would receive a task during a pair programming session.

Please resolve the user's task by editing the code files in your current code execution session. Your session allows for you to modify and run code. The repo(s) are already cloned in your working directory, and you must fully solve the problem for your answer to be considered correct.

### Task execution
You MUST adhere to the following criteria when executing the task:

- Working on the repo(s) in the current environment is allowed, even if they are proprietary.
- Analyzing code for vulnerabilities is allowed.
- Showing user code and tool call details is allowed.
- User instructions may overwrite the _CODING GUIDELINES_ section in this developer message.
- `user_instructions` are not part of the user's request, but guidance for how to complete the task.
- Do not cite `user_instructions` back to the user unless a specific piece is relevant.
- Do not use \`ls -R\`, \`find\`, or \`grep\` - these are slow in large repos. Use \`rg\` and \`rg --files\`.
- Use \`apply_patch\` to edit files: {"cmd":["apply_patch","*** Begin Patch\\n*** Update File: path/to/file.py\\n@@ def example():\\n- pass\\n+ return 123\\n*** End Patch"]}
- Use the \`apply_patch\` shell command to edit files: {"command":["apply_patch","*** Begin Patch\\n*** Update File: path/to/file.py\\n@@ def example():\\n- pass\\n+ return 123\\n*** End Patch"]}
- If completing the user's task requires writing or modifying files:
- Your code and final answer should follow these _CODING GUIDELINES_:
- Fix the problem at the root cause rather than applying surface-level patches, when possible.

@@ -33,23 +48,22 @@ You MUST adhere to the following criteria when executing the task:
- If completing the user's task DOES NOT require writing or modifying files (e.g., the user asks a question about the code base):
- Respond in a friendly tune as a remote teammate, who is knowledgeable, capable and eager to help with coding.
- When your task involves writing or modifying files:
- Do NOT tell the user to "save the file" or "copy the code into a file" if you already created or modified the file using \`apply_patch\`. Instead, reference the file as already saved.
- Do NOT tell the user to "save the file" or "copy the code into a file" if you already created or modified the file using the `apply_patch` shell command. Instead, reference the file as already saved.
- Do NOT show the full contents of large files you have already written, unless the user explicitly asks for them.

§ `apply-patch` Specification
## Using the shell command `apply_patch` to edit files
`apply_patch` is a shell command for editing files. Your patch language is a stripped‑down, file‑oriented diff format designed to be easy to parse and safe to apply. You can think of it as a high‑level envelope:

Your patch language is a stripped‑down, file‑oriented diff format designed to be easy to parse and safe to apply. You can think of it as a high‑level envelope:

**_ Begin Patch
*** Begin Patch
[ one or more file sections ]
_** End Patch
*** End Patch

Within that envelope, you get a sequence of file operations.
You MUST include a header to specify the action you are taking.
Each operation starts with one of three headers:

**_ Add File: <path> - create a new file. Every following line is a + line (the initial contents).
_** Delete File: <path> - remove an existing file. Nothing follows.
*** Add File: <path> - create a new file. Every following line is a + line (the initial contents).
*** Delete File: <path> - remove an existing file. Nothing follows.
\*\*\* Update File: <path> - patch an existing file in place (optionally with a rename).

May be immediately followed by \*\*\* Move to: <new path> if you want to rename the file.

@@ -63,45 +77,60 @@ Within a hunk each line starts with:
At the end of a truncated hunk you can emit \*\*\* End of File.

Patch := Begin { FileOp } End
Begin := "**_ Begin Patch" NEWLINE
End := "_** End Patch" NEWLINE
Begin := "*** Begin Patch" NEWLINE
End := "*** End Patch" NEWLINE
FileOp := AddFile | DeleteFile | UpdateFile
AddFile := "**_ Add File: " path NEWLINE { "+" line NEWLINE }
DeleteFile := "_** Delete File: " path NEWLINE
UpdateFile := "**_ Update File: " path NEWLINE [ MoveTo ] { Hunk }
MoveTo := "_** Move to: " newPath NEWLINE
AddFile := "*** Add File: " path NEWLINE { "+" line NEWLINE }
DeleteFile := "*** Delete File: " path NEWLINE
UpdateFile := "*** Update File: " path NEWLINE [ MoveTo ] { Hunk }
MoveTo := "*** Move to: " newPath NEWLINE
Hunk := "@@" [ header ] NEWLINE { HunkLine } [ "*** End of File" NEWLINE ]
HunkLine := (" " | "-" | "+") text NEWLINE

A full patch can combine several operations:

**_ Begin Patch
_** Add File: hello.txt
*** Begin Patch
*** Add File: hello.txt
+Hello world
**_ Update File: src/app.py
_** Move to: src/main.py
*** Update File: src/app.py
*** Move to: src/main.py
@@ def greet():
-print("Hi")
+print("Hello, world!")
**_ Delete File: obsolete.txt
_** End Patch
*** Delete File: obsolete.txt
*** End Patch

It is important to remember:

- You must include a header with your intended action (Add/Delete/Update)
- You must prefix new lines with `+` even when creating a new file
- You must follow this schema exactly when providing a patch

You can invoke apply_patch like:
You can invoke apply_patch with the following shell command:

```
shell {"command":["apply_patch","*** Begin Patch\n*** Add File: hello.txt\n+Hello, world!\n*** End Patch\n"]}
```

Plan updates
## Sandbox environment and approval instructions

You are running in a sandboxed workspace backed by version control. The sandbox might be configured by the user to restrict certain behaviors, like accessing the internet or writing to files outside the current directory.

Commands that are blocked by sandbox settings will be automatically sent to the user for approval. The result of the request will be returned (i.e. the command result, or the request denial).
The user also has an opportunity to approve the same command for the rest of the session.

Guidance on running within the sandbox:
- When running commands that will likely require approval, attempt to use simple, precise commands, to reduce frequency of approval requests.
- When approval is denied or a command fails due to a permission error, do not retry the exact command in a different way. Move on and continue trying to address the user's request.

## Tools available
### Plan updates

A tool named `update_plan` is available. Use it to keep an up‑to‑date, step‑by‑step plan for the task so you can follow your progress. When making your plans, keep in mind that you are a deployed coding agent - `update_plan` calls should not involve doing anything that you aren't capable of doing. For example, `update_plan` calls should NEVER contain tasks to merge your own pull requests. Only stop to ask the user if you genuinely need their feedback on a change.

- At the start of the task, call `update_plan` with an initial plan: a short list of 1‑sentence steps with a `status` for each step (`pending`, `in_progress`, or `completed`). There should always be exactly one `in_progress` step until everything is done.
- At the start of any nontrivial task, call `update_plan` with an initial plan: a short list of 1‑sentence steps with a `status` for each step (`pending`, `in_progress`, or `completed`). There should always be exactly one `in_progress` step until everything is done.
- Whenever you finish a step, call `update_plan` again, marking the finished step as `completed` and the next step as `in_progress`.
- If your plan needs to change, call `update_plan` with the revised steps and include an `explanation` describing the change.
- When all steps are complete, make a final `update_plan` call with all steps marked `completed`.
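The plan-tool guidance above describes `update_plan` calls as carrying a short list of one-sentence steps, each with a `status`, plus an optional `explanation`. A plausible sketch of such a call, mirroring the invocation style of the apply_patch example earlier (the exact argument schema is an assumption inferred from that description, and the step names are hypothetical):

```
update_plan {"explanation":"Split the work into smaller steps","plan":[{"step":"Add a streaming mode to the chat aggregation stream","status":"completed"},{"step":"Wire show_raw_agent_reasoning through the model client","status":"in_progress"},{"step":"Surface raw reasoning deltas in the UI","status":"pending"}]}
```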
@@ -21,7 +21,9 @@ use crate::client_common::ResponseEvent;
use crate::client_common::ResponseStream;
use crate::error::CodexErr;
use crate::error::Result;
use crate::model_family::ModelFamily;
use crate::models::ContentItem;
use crate::models::ReasoningItemContent;
use crate::models::ResponseItem;
use crate::openai_tools::create_tools_json_for_chat_completions_api;
use crate::util::backoff;

@@ -29,7 +31,7 @@ use crate::util::backoff;
/// Implementation for the classic Chat Completions API.
pub(crate) async fn stream_chat_completions(
prompt: &Prompt,
model: &str,
model_family: &ModelFamily,
include_plan_tool: bool,
client: &reqwest::Client,
provider: &ModelProviderInfo,

@@ -37,10 +39,10 @@ pub(crate) async fn stream_chat_completions(
// Build messages array
let mut messages = Vec::<serde_json::Value>::new();

let full_instructions = prompt.get_full_instructions(model);
let full_instructions = prompt.get_full_instructions(model_family);
messages.push(json!({"role": "system", "content": full_instructions}));

if let Some(instr) = &prompt.user_instructions {
if let Some(instr) = &prompt.get_formatted_user_instructions() {
messages.push(json!({"role": "user", "content": instr}));
}

@@ -110,9 +112,10 @@ pub(crate) async fn stream_chat_completions(
}
}

let tools_json = create_tools_json_for_chat_completions_api(prompt, model, include_plan_tool)?;
let tools_json =
create_tools_json_for_chat_completions_api(prompt, model_family, include_plan_tool)?;
let payload = json!({
"model": model,
"model": model_family.slug,
"messages": messages,
"stream": true,
"tools": tools_json,

@@ -120,7 +123,7 @@ pub(crate) async fn stream_chat_completions(

debug!(
"POST to {}: {}",
provider.get_full_url(),
provider.get_full_url(&None),
serde_json::to_string_pretty(&payload).unwrap_or_default()
);

@@ -129,7 +132,7 @@ pub(crate) async fn stream_chat_completions(
loop {
attempt += 1;

let req_builder = provider.create_request_builder(client)?;
let req_builder = provider.create_request_builder(client, &None).await?;

let res = req_builder
.header(reqwest::header::ACCEPT, "text/event-stream")

@@ -207,6 +210,8 @@ async fn process_chat_sse<S>(
}

let mut fn_call_state = FunctionCallState::default();
let mut assistant_text = String::new();
let mut reasoning_text = String::new();

loop {
let sse = match timeout(idle_timeout, stream.next()).await {

@@ -235,6 +240,31 @@

// OpenAI Chat streaming sends a literal string "[DONE]" when finished.
if sse.data.trim() == "[DONE]" {
// Emit any finalized items before closing so downstream consumers receive
// terminal events for both assistant content and raw reasoning.
if !assistant_text.is_empty() {
let item = ResponseItem::Message {
role: "assistant".to_string(),
content: vec![ContentItem::OutputText {
text: std::mem::take(&mut assistant_text),
}],
id: None,
};
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}

if !reasoning_text.is_empty() {
let item = ResponseItem::Reasoning {
id: String::new(),
summary: Vec::new(),
content: Some(vec![ReasoningItemContent::ReasoningText {
text: std::mem::take(&mut reasoning_text),
}]),
encrypted_content: None,
};
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}

let _ = tx_event
.send(Ok(ResponseEvent::Completed {
response_id: String::new(),

@@ -254,21 +284,47 @@
let choice_opt = chunk.get("choices").and_then(|c| c.get(0));

if let Some(choice) = choice_opt {
// Handle assistant content tokens.
// Handle assistant content tokens as streaming deltas.
if let Some(content) = choice
.get("delta")
.and_then(|d| d.get("content"))
.and_then(|c| c.as_str())
{
let item = ResponseItem::Message {
role: "assistant".to_string(),
content: vec![ContentItem::OutputText {
text: content.to_string(),
}],
id: None,
};
if !content.is_empty() {
assistant_text.push_str(content);
let _ = tx_event
.send(Ok(ResponseEvent::OutputTextDelta(content.to_string())))
.await;
}
}

let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
// Forward any reasoning/thinking deltas if present.
// Some providers stream `reasoning` as a plain string while others
// nest the text under an object (e.g. `{ "reasoning": { "text": "…" } }`).
if let Some(reasoning_val) = choice.get("delta").and_then(|d| d.get("reasoning")) {
let mut maybe_text = reasoning_val.as_str().map(|s| s.to_string());

if maybe_text.is_none() && reasoning_val.is_object() {
if let Some(s) = reasoning_val
.get("text")
.and_then(|t| t.as_str())
.filter(|s| !s.is_empty())
{
maybe_text = Some(s.to_string());
} else if let Some(s) = reasoning_val
.get("content")
.and_then(|t| t.as_str())
.filter(|s| !s.is_empty())
{
maybe_text = Some(s.to_string());
}
}

if let Some(reasoning) = maybe_text {
let _ = tx_event
.send(Ok(ResponseEvent::ReasoningContentDelta(reasoning)))
.await;
}
}

// Handle streaming function / tool calls.

@@ -305,7 +361,21 @@
if let Some(finish_reason) = choice.get("finish_reason").and_then(|v| v.as_str()) {
match finish_reason {
"tool_calls" if fn_call_state.active => {
// Build the FunctionCall response item.
// First, flush the terminal raw reasoning so UIs can finalize
// the reasoning stream before any exec/tool events begin.
if !reasoning_text.is_empty() {
let item = ResponseItem::Reasoning {
id: String::new(),
summary: Vec::new(),
content: Some(vec![ReasoningItemContent::ReasoningText {
text: std::mem::take(&mut reasoning_text),
}]),
encrypted_content: None,
};
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}

// Then emit the FunctionCall response item.
let item = ResponseItem::FunctionCall {
id: None,
name: fn_call_state.name.clone().unwrap_or_else(|| "".to_string()),

@@ -313,11 +383,33 @@
call_id: fn_call_state.call_id.clone().unwrap_or_else(String::new),
};

// Emit it downstream.
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
"stop" => {
// Regular turn without tool-call.
// Regular turn without tool-call. Emit the final assistant message
// as a single OutputItemDone so non-delta consumers see the result.
if !assistant_text.is_empty() {
let item = ResponseItem::Message {
role: "assistant".to_string(),
content: vec![ContentItem::OutputText {
text: std::mem::take(&mut assistant_text),
}],
id: None,
};
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
// Also emit a terminal Reasoning item so UIs can finalize raw reasoning.
if !reasoning_text.is_empty() {
let item = ResponseItem::Reasoning {
id: String::new(),
summary: Vec::new(),
content: Some(vec![ReasoningItemContent::ReasoningText {
text: std::mem::take(&mut reasoning_text),
}]),
encrypted_content: None,
};
let _ = tx_event.send(Ok(ResponseEvent::OutputItemDone(item))).await;
}
}
_ => {}
}

@@ -355,10 +447,17 @@
/// The adapter is intentionally *lossless*: callers who do **not** opt in via
/// [`AggregateStreamExt::aggregate()`] keep receiving the original unmodified
/// events.
#[derive(Copy, Clone, Eq, PartialEq)]
enum AggregateMode {
AggregatedOnly,
Streaming,
}
pub(crate) struct AggregatedChatStream<S> {
inner: S,
cumulative: String,
pending_completed: Option<ResponseEvent>,
cumulative_reasoning: String,
pending: std::collections::VecDeque<ResponseEvent>,
mode: AggregateMode,
}

impl<S> Stream for AggregatedChatStream<S>

@@ -370,8 +469,8 @@ where
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.get_mut();

// First, flush any buffered Completed event from the previous call.
if let Some(ev) = this.pending_completed.take() {
// First, flush any buffered events from the previous call.
if let Some(ev) = this.pending.pop_front() {
return Poll::Ready(Some(Ok(ev)));
}

@@ -388,16 +487,21 @@
let is_assistant_delta = matches!(&item, crate::models::ResponseItem::Message { role, .. } if role == "assistant");

if is_assistant_delta {
if let crate::models::ResponseItem::Message { content, .. } = &item {
if let Some(text) = content.iter().find_map(|c| match c {
crate::models::ContentItem::OutputText { text } => Some(text),
_ => None,
}) {
this.cumulative.push_str(text);
// Only use the final assistant message if we have not
// seen any deltas; otherwise, deltas already built the
// cumulative text and this would duplicate it.
if this.cumulative.is_empty() {
if let crate::models::ResponseItem::Message { content, .. } = &item {
if let Some(text) = content.iter().find_map(|c| match c {
crate::models::ContentItem::OutputText { text } => Some(text),
_ => None,
}) {
this.cumulative.push_str(text);
}
}
}

// Swallow partial assistant chunk; keep polling.
// Swallow assistant message here; emit on Completed.
continue;
}

@@ -408,24 +512,50 @@
response_id,
token_usage,
}))) => {
// Build any aggregated items in the correct order: Reasoning first, then Message.
let mut emitted_any = false;

if !this.cumulative_reasoning.is_empty()
&& matches!(this.mode, AggregateMode::AggregatedOnly)
{
let aggregated_reasoning = crate::models::ResponseItem::Reasoning {
id: String::new(),
summary: Vec::new(),
content: Some(vec![
crate::models::ReasoningItemContent::ReasoningText {
text: std::mem::take(&mut this.cumulative_reasoning),
},
]),
encrypted_content: None,
};
this.pending
.push_back(ResponseEvent::OutputItemDone(aggregated_reasoning));
emitted_any = true;
}

if !this.cumulative.is_empty() {
let aggregated_item = crate::models::ResponseItem::Message {
let aggregated_message = crate::models::ResponseItem::Message {
id: None,
role: "assistant".to_string(),
content: vec![crate::models::ContentItem::OutputText {
text: std::mem::take(&mut this.cumulative),
}],
};
this.pending
.push_back(ResponseEvent::OutputItemDone(aggregated_message));
emitted_any = true;
}

// Buffer Completed so it is returned *after* the aggregated message.
this.pending_completed = Some(ResponseEvent::Completed {
response_id,
token_usage,
// Always emit Completed last when anything was aggregated.
if emitted_any {
this.pending.push_back(ResponseEvent::Completed {
response_id: response_id.clone(),
token_usage: token_usage.clone(),
});

return Poll::Ready(Some(Ok(ResponseEvent::OutputItemDone(
aggregated_item,
))));
// Return the first pending event now.
if let Some(ev) = this.pending.pop_front() {
return Poll::Ready(Some(Ok(ev)));
}
}

// Nothing aggregated – forward Completed directly.

@@ -439,10 +569,27 @@
// will never appear in a Chat Completions stream.
continue;
}
Poll::Ready(Some(Ok(ResponseEvent::OutputTextDelta(_))))
| Poll::Ready(Some(Ok(ResponseEvent::ReasoningSummaryDelta(_)))) => {
// Deltas are ignored here since aggregation waits for the
// final OutputItemDone.
Poll::Ready(Some(Ok(ResponseEvent::OutputTextDelta(delta)))) => {
// Always accumulate deltas so we can emit a final OutputItemDone at Completed.
this.cumulative.push_str(&delta);
if matches!(this.mode, AggregateMode::Streaming) {
// In streaming mode, also forward the delta immediately.
return Poll::Ready(Some(Ok(ResponseEvent::OutputTextDelta(delta))));
} else {
continue;
}
}
Poll::Ready(Some(Ok(ResponseEvent::ReasoningContentDelta(delta)))) => {
// Always accumulate reasoning deltas so we can emit a final Reasoning item at Completed.
this.cumulative_reasoning.push_str(&delta);
if matches!(this.mode, AggregateMode::Streaming) {
// In streaming mode, also forward the delta immediately.
return Poll::Ready(Some(Ok(ResponseEvent::ReasoningContentDelta(delta))));
} else {
continue;
}
}
Poll::Ready(Some(Ok(ResponseEvent::ReasoningSummaryDelta(_)))) => {
continue;
}
}

@@ -472,12 +619,24 @@ pub(crate) trait AggregateStreamExt: Stream<Item = Result<ResponseEvent>> + Sized {
/// }
/// ```
fn aggregate(self) -> AggregatedChatStream<Self> {
AggregatedChatStream {
inner: self,
cumulative: String::new(),
pending_completed: None,
}
AggregatedChatStream::new(self, AggregateMode::AggregatedOnly)
}
}

impl<T> AggregateStreamExt for T where T: Stream<Item = Result<ResponseEvent>> + Sized {}

impl<S> AggregatedChatStream<S> {
fn new(inner: S, mode: AggregateMode) -> Self {
AggregatedChatStream {
inner,
cumulative: String::new(),
cumulative_reasoning: String::new(),
pending: std::collections::VecDeque::new(),
mode,
}
}

pub(crate) fn streaming_mode(inner: S) -> Self {
Self::new(inner, AggregateMode::Streaming)
}
}
@@ -30,7 +30,6 @@ use crate::config::Config;
use crate::config_types::ReasoningEffort as ReasoningEffortConfig;
use crate::config_types::ReasoningSummary as ReasoningSummaryConfig;
use crate::error::CodexErr;
use crate::error::EnvVarError;
use crate::error::Result;
use crate::flags::CODEX_RS_SSE_FIXTURE;
use crate::model_provider_info::ModelProviderInfo;

@@ -83,7 +82,7 @@ impl ModelClient {
// Create the raw streaming connection first.
let response_stream = stream_chat_completions(
prompt,
&self.config.model,
&self.config.model_family,
self.config.include_plan_tool,
&self.client,
&self.provider,

@@ -93,7 +92,11 @@ impl ModelClient {
// Wrap it with the aggregation adapter so callers see *only*
// the final assistant message per turn (matching the
// behaviour of the Responses API).
let mut aggregated = response_stream.aggregate();
let mut aggregated = if self.config.show_raw_agent_reasoning {
crate::chat_completions::AggregatedChatStream::streaming_mode(response_stream)
} else {
response_stream.aggregate()
};

// Bridge the aggregated stream back into a standard
// `ResponseStream` by forwarding events through a channel.

@@ -122,32 +125,23 @@ impl ModelClient {
return stream_from_fixture(path, self.provider.clone()).await;
}

let auth = self.auth.as_ref().ok_or_else(|| {
CodexErr::EnvVar(EnvVarError {
var: "OPENAI_API_KEY".to_string(),
instructions: Some("Create an API key (https://platform.openai.com) and export it as an environment variable.".to_string()),
})
})?;
let auth = self.auth.clone();

let store = prompt.store && auth.mode != AuthMode::ChatGPT;
let auth_mode = auth.as_ref().map(|a| a.mode);

let base_url = match self.provider.base_url.clone() {
Some(url) => url,
None => match auth.mode {
AuthMode::ChatGPT => "https://chatgpt.com/backend-api/codex".to_string(),
AuthMode::ApiKey => "https://api.openai.com/v1".to_string(),
},
};
let store = prompt.store && auth_mode != Some(AuthMode::ChatGPT);

let token = auth.get_token().await?;

let full_instructions = prompt.get_full_instructions(&self.config.model);
let full_instructions = prompt.get_full_instructions(&self.config.model_family);
let tools_json = create_tools_json_for_responses_api(
prompt,
&self.config.model,
&self.config.model_family,
self.config.include_plan_tool,
)?;
let reasoning = create_reasoning_param_for_request(&self.config, self.effort, self.summary);
let reasoning = create_reasoning_param_for_request(
&self.config.model_family,
self.effort,
self.summary,
);

// Request encrypted COT if we are not storing responses,
// otherwise reasoning items will be referenced by ID

@@ -158,11 +152,11 @@ impl ModelClient {
};

let mut input_with_instructions = Vec::with_capacity(prompt.input.len() + 1);
if let Some(ui) = &prompt.user_instructions {
if let Some(ui) = prompt.get_formatted_user_instructions() {
input_with_instructions.push(ResponseItem::Message {
id: None,
role: "user".to_string(),
content: vec![ContentItem::InputText { text: ui.clone() }],
content: vec![ContentItem::InputText { text: ui }],
});
}
input_with_instructions.extend(prompt.input.clone());

@@ -180,35 +174,36 @@ impl ModelClient {
include,
};

trace!(
"POST to {}: {}",
self.provider.get_full_url(),
serde_json::to_string(&payload)?
);

let mut attempt = 0;
let max_retries = self.provider.request_max_retries();

trace!(
"POST to {}: {}",
self.provider.get_full_url(&auth),
serde_json::to_string(&payload)?
);

loop {
attempt += 1;

let mut req_builder = self
.client
.post(format!("{base_url}/responses"))
.provider
.create_request_builder(&self.client, &auth)
.await?;

req_builder = req_builder
.header("OpenAI-Beta", "responses=experimental")
.header("session_id", self.session_id.to_string())
.bearer_auth(&token)
.header(reqwest::header::ACCEPT, "text/event-stream")
.json(&payload);

if auth.mode == AuthMode::ChatGPT {
if let Some(account_id) = auth.get_account_id().await {
req_builder = req_builder.header("chatgpt-account-id", account_id);
}
if let Some(auth) = auth.as_ref()
&& auth.mode == AuthMode::ChatGPT
&& let Some(account_id) = auth.get_account_id().await
{
req_builder = req_builder.header("chatgpt-account-id", account_id);
}

req_builder = self.provider.apply_http_headers(req_builder);

let originator = self
.config
.internal_originator

@@ -446,6 +441,14 @@ async fn process_sse<S>(
}
}
}
"response.reasoning_text.delta" => {
if let Some(delta) = event.delta {
let event = ResponseEvent::ReasoningContentDelta(delta);
if tx_event.send(Ok(event)).await.is_err() {
return;
}
}
}
"response.created" => {
if event.response.is_some() {
let _ = tx_event.send(Ok(ResponseEvent::Created {})).await;
@@ -1,6 +1,7 @@
use crate::config_types::ReasoningEffort as ReasoningEffortConfig;
use crate::config_types::ReasoningSummary as ReasoningSummaryConfig;
use crate::error::Result;
use crate::model_family::ModelFamily;
use crate::models::ResponseItem;
use crate::protocol::TokenUsage;
use codex_apply_patch::APPLY_PATCH_TOOL_INSTRUCTIONS;

@@ -17,6 +18,10 @@ use tokio::sync::mpsc;
/// with this content.
const BASE_INSTRUCTIONS: &str = include_str!("../prompt.md");

/// wraps user instructions message in a tag for the model to parse more easily.
const USER_INSTRUCTIONS_START: &str = "<user_instructions>\n\n";
const USER_INSTRUCTIONS_END: &str = "\n\n</user_instructions>";

/// API request payload for a single model turn.
#[derive(Default, Debug, Clone)]
pub struct Prompt {

@@ -38,17 +43,23 @@ pub struct Prompt {
}

impl Prompt {
pub(crate) fn get_full_instructions(&self, model: &str) -> Cow<'_, str> {
pub(crate) fn get_full_instructions(&self, model: &ModelFamily) -> Cow<'_, str> {
let base = self
.base_instructions_override
.as_deref()
.unwrap_or(BASE_INSTRUCTIONS);
let mut sections: Vec<&str> = vec![base];
if model.starts_with("gpt-4.1") {
if model.needs_special_apply_patch_instructions {
sections.push(APPLY_PATCH_TOOL_INSTRUCTIONS);
}
Cow::Owned(sections.join("\n"))
}

pub(crate) fn get_formatted_user_instructions(&self) -> Option<String> {
self.user_instructions
.as_ref()
.map(|ui| format!("{USER_INSTRUCTIONS_START}{ui}{USER_INSTRUCTIONS_END}"))
}
}

#[derive(Debug)]

@@ -61,6 +72,7 @@ pub enum ResponseEvent {
},
OutputTextDelta(String),
ReasoningSummaryDelta(String),
ReasoningContentDelta(String),
}

#[derive(Debug, Serialize)]

@@ -134,14 +146,12 @@ pub(crate) struct ResponsesApiRequest<'a> {
pub(crate) include: Vec<String>,
}

use crate::config::Config;

pub(crate) fn create_reasoning_param_for_request(
config: &Config,
model_family: &ModelFamily,
effort: ReasoningEffortConfig,
summary: ReasoningSummaryConfig,
) -> Option<Reasoning> {
if model_supports_reasoning_summaries(config) {
if model_family.supports_reasoning_summaries {
let effort: Option<OpenAiReasoningEffort> = effort.into();
let effort = effort?;
Some(Reasoning {

@@ -153,27 +163,6 @@ pub(crate) fn create_reasoning_param_for_request(
}
}

pub fn model_supports_reasoning_summaries(config: &Config) -> bool {
// Currently, we hardcode this rule to decide whether to enable reasoning.
// We expect reasoning to apply only to OpenAI models, but we do not want
// users to have to mess with their config to disable reasoning for models
// that do not support it, such as `gpt-4.1`.
//
// Though if a user is using Codex with non-OpenAI models that, say, happen
// to start with "o", then they can set `model_reasoning_effort = "none"` in
// config.toml to disable reasoning.
//
// Converseley, if a user has a non-OpenAI provider that supports reasoning,
// they can set the top-level `model_supports_reasoning_summaries = true`
// config option to enable reasoning.
if config.model_supports_reasoning_summaries {
return true;
}

let model = &config.model;
model.starts_with("o") || model.starts_with("codex")
}

pub(crate) struct ResponseStream {
pub(crate) rx_event: mpsc::Receiver<Result<ResponseEvent>>,
}

@@ -188,6 +177,9 @@ impl Stream for ResponseStream {

#[cfg(test)]
mod tests {
#![allow(clippy::expect_used)]
use crate::model_family::find_family_for_model;

use super::*;

#[test]

@@ -197,7 +189,8 @@ mod tests {
..Default::default()
};
let expected = format!("{BASE_INSTRUCTIONS}\n{APPLY_PATCH_TOOL_INSTRUCTIONS}");
let full = prompt.get_full_instructions("gpt-4.1");
let model_family = find_family_for_model("gpt-4.1").expect("known model slug");
let full = prompt.get_full_instructions(&model_family);
assert_eq!(full, expected);
}
}
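The new `get_formatted_user_instructions` helper shown above wraps the configured user instructions in the `USER_INSTRUCTIONS_START`/`USER_INSTRUCTIONS_END` tags before they are sent as a user message. For hypothetical user instructions of "Always run the test suite before committing.", the formatted message would be:

```
<user_instructions>

Always run the test suite before committing.

</user_instructions>
```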
@@ -56,6 +56,7 @@ use crate::mcp_tool_call::handle_mcp_tool_call;
use crate::models::ContentItem;
use crate::models::FunctionCallOutputPayload;
use crate::models::LocalShellAction;
use crate::models::ReasoningItemContent;
use crate::models::ReasoningItemReasoningSummary;
use crate::models::ResponseInputItem;
use crate::models::ResponseItem;

@@ -66,6 +67,8 @@ use crate::protocol::AgentMessageDeltaEvent;
use crate::protocol::AgentMessageEvent;
use crate::protocol::AgentReasoningDeltaEvent;
use crate::protocol::AgentReasoningEvent;
use crate::protocol::AgentReasoningRawContentDeltaEvent;
use crate::protocol::AgentReasoningRawContentEvent;
use crate::protocol::ApplyPatchApprovalRequestEvent;
use crate::protocol::AskForApproval;
use crate::protocol::BackgroundEventEvent;

@@ -85,11 +88,13 @@ use crate::protocol::SandboxPolicy;
use crate::protocol::SessionConfiguredEvent;
use crate::protocol::Submission;
use crate::protocol::TaskCompleteEvent;
use crate::protocol::TurnDiffEvent;
use crate::rollout::RolloutRecorder;
use crate::safety::SafetyCheck;
use crate::safety::assess_command_safety;
use crate::safety::assess_safety_for_untrusted_command;
use crate::shell;
use crate::turn_diff_tracker::TurnDiffTracker;
use crate::user_notification::UserNotification;
use crate::util::backoff;

@@ -121,7 +126,7 @@ impl Codex {
let resume_path = config.experimental_resume.clone();
info!("resume_path: {resume_path:?}");
let (tx_sub, rx_sub) = async_channel::bounded(64);
let (tx_event, rx_event) = async_channel::bounded(1600);
let (tx_event, rx_event) = async_channel::unbounded();

let user_instructions = get_user_instructions(&config).await;

@@ -225,6 +230,7 @@ pub(crate) struct Session {
state: Mutex<State>,
codex_linux_sandbox_exe: Option<PathBuf>,
user_shell: shell::Shell,
show_raw_agent_reasoning: bool,
}

impl Session {

@@ -362,7 +368,11 @@ impl Session {
}
}

async fn notify_exec_command_begin(&self, exec_command_context: ExecCommandContext) {
async fn on_exec_command_begin(
&self,
turn_diff_tracker: &mut TurnDiffTracker,
exec_command_context: ExecCommandContext,
) {
let ExecCommandContext {
sub_id,
call_id,

@@ -374,11 +384,15 @@ impl Session {
Some(ApplyPatchCommandContext {
user_explicitly_approved_this_action,
changes,
}) => EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
call_id,
auto_approved: !user_explicitly_approved_this_action,
changes,
}),
}) => {
turn_diff_tracker.on_patch_begin(&changes);

EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
call_id,
auto_approved: !user_explicitly_approved_this_action,
changes,
})
}
None => EventMsg::ExecCommandBegin(ExecCommandBeginEvent {
call_id,
command: command_for_display.clone(),

@@ -392,15 +406,21 @@ impl Session {
let _ = self.tx_event.send(event).await;
}

async fn notify_exec_command_end(
#[allow(clippy::too_many_arguments)]
async fn on_exec_command_end(
&self,
turn_diff_tracker: &mut TurnDiffTracker,
sub_id: &str,
call_id: &str,
stdout: &str,
stderr: &str,
exit_code: i32,
output: &ExecToolCallOutput,
is_apply_patch: bool,
) {
let ExecToolCallOutput {
stdout,
stderr,
duration,
exit_code,
} = output;
// Because stdout and stderr could each be up to 100 KiB, we send
// truncated versions.
const MAX_STREAM_OUTPUT: usize = 5 * 1024; // 5KiB

@@ -412,14 +432,15 @@ impl Session {
call_id: call_id.to_string(),
stdout,
stderr,
success: exit_code == 0,
success: *exit_code == 0,
})
} else {
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
call_id: call_id.to_string(),
stdout,
stderr,
exit_code,
duration: *duration,
exit_code: *exit_code,
})
};

@@ -428,6 +449,20 @@ impl Session {
msg,
};
let _ = self.tx_event.send(event).await;

// If this is an apply_patch, after we emit the end patch, emit a second event
// with the full turn diff if there is one.
if is_apply_patch {
let unified_diff = turn_diff_tracker.get_unified_diff();
if let Ok(Some(unified_diff)) = unified_diff {
let msg = EventMsg::TurnDiff(TurnDiffEvent { unified_diff });
let event = Event {
id: sub_id.into(),
msg,
};
let _ = self.tx_event.send(event).await;
}
}
}

/// Helper that emits a BackgroundEvent with the given message. This keeps

@@ -670,7 +705,7 @@ async fn submission_loop(
cwd,
resume_path,
} => {
info!(
debug!(
"Configuring session: model={model}; provider={provider:?}; resume={resume_path:?}"
);
if !cwd.is_absolute() {

@@ -791,6 +826,7 @@ async fn submission_loop(
codex_linux_sandbox_exe: config.codex_linux_sandbox_exe.clone(),
disable_response_storage,
user_shell: default_shell,
show_raw_agent_reasoning: config.show_raw_agent_reasoning,
}));

// Patch restored state into the newly created session.

@@ -1001,6 +1037,10 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
.await;

let last_agent_message: Option<String>;
// Although from the perspective of codex.rs, TurnDiffTracker has the lifecycle of a Task which contains
// many turns, from the perspective of the user, it is a single turn.
let mut turn_diff_tracker = TurnDiffTracker::new();

loop {
// Note that pending_input would be something like a message the user
// submitted through the UI while the model was running. Though the UI

@@ -1032,7 +1072,7 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
})
})
.collect();
match run_turn(&sess, sub_id.clone(), turn_input).await {
match run_turn(&sess, &mut turn_diff_tracker, sub_id.clone(), turn_input).await {
Ok(turn_output) => {
let mut items_to_record_in_conversation_history = Vec::<ResponseItem>::new();
let mut responses = Vec::<ResponseInputItem>::new();

@@ -1097,6 +1137,7 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
ResponseItem::Reasoning {
id,
summary,
content,
encrypted_content,
},
None,

@@ -1104,6 +1145,7 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {
items_to_record_in_conversation_history.push(ResponseItem::Reasoning {
id: id.clone(),
summary: summary.clone(),
content: content.clone(),
encrypted_content: encrypted_content.clone(),
});
}

@@ -1158,6 +1200,7 @@ async fn run_task(sess: Arc<Session>, sub_id: String, input: Vec<InputItem>) {

async fn run_turn(
sess: &Session,
turn_diff_tracker: &mut TurnDiffTracker,
sub_id: String,
input: Vec<ResponseItem>,
) -> CodexResult<Vec<ProcessedResponseItem>> {

@@ -1172,7 +1215,7 @@ async fn run_turn(

let mut retries = 0;
loop {
match try_run_turn(sess, &sub_id, &prompt).await {
match try_run_turn(sess, turn_diff_tracker, &sub_id, &prompt).await {
|
||||
Ok(output) => return Ok(output),
|
||||
Err(CodexErr::Interrupted) => return Err(CodexErr::Interrupted),
|
||||
Err(CodexErr::EnvVar(var)) => return Err(CodexErr::EnvVar(var)),
|
||||
@@ -1218,6 +1261,7 @@ struct ProcessedResponseItem {
|
||||
|
||||
async fn try_run_turn(
|
||||
sess: &Session,
|
||||
turn_diff_tracker: &mut TurnDiffTracker,
|
||||
sub_id: &str,
|
||||
prompt: &Prompt,
|
||||
) -> CodexResult<Vec<ProcessedResponseItem>> {
|
||||
@@ -1305,7 +1349,8 @@ async fn try_run_turn(
|
||||
match event {
|
||||
ResponseEvent::Created => {}
|
||||
ResponseEvent::OutputItemDone(item) => {
|
||||
let response = handle_response_item(sess, sub_id, item.clone()).await?;
|
||||
let response =
|
||||
handle_response_item(sess, turn_diff_tracker, sub_id, item.clone()).await?;
|
||||
|
||||
output.push(ProcessedResponseItem { item, response });
|
||||
}
|
||||
@@ -1323,9 +1368,24 @@ async fn try_run_turn(
|
||||
.ok();
|
||||
}
|
||||
|
||||
let unified_diff = turn_diff_tracker.get_unified_diff();
|
||||
if let Ok(Some(unified_diff)) = unified_diff {
|
||||
let msg = EventMsg::TurnDiff(TurnDiffEvent { unified_diff });
|
||||
let event = Event {
|
||||
id: sub_id.to_string(),
|
||||
msg,
|
||||
};
|
||||
let _ = sess.tx_event.send(event).await;
|
||||
}
|
||||
|
||||
return Ok(output);
|
||||
}
|
||||
ResponseEvent::OutputTextDelta(delta) => {
|
||||
{
|
||||
let mut st = sess.state.lock().unwrap();
|
||||
st.history.append_assistant_text(&delta);
|
||||
}
|
||||
|
||||
let event = Event {
|
||||
id: sub_id.to_string(),
|
||||
msg: EventMsg::AgentMessageDelta(AgentMessageDeltaEvent { delta }),
|
||||
@@ -1339,6 +1399,17 @@ async fn try_run_turn(
|
||||
};
|
||||
sess.tx_event.send(event).await.ok();
|
||||
}
|
||||
ResponseEvent::ReasoningContentDelta(delta) => {
|
||||
if sess.show_raw_agent_reasoning {
|
||||
let event = Event {
|
||||
id: sub_id.to_string(),
|
||||
msg: EventMsg::AgentReasoningRawContentDelta(
|
||||
AgentReasoningRawContentDeltaEvent { delta },
|
||||
),
|
||||
};
|
||||
sess.tx_event.send(event).await.ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1427,6 +1498,7 @@ async fn run_compact_task(
|
||||
|
||||
async fn handle_response_item(
|
||||
sess: &Session,
|
||||
turn_diff_tracker: &mut TurnDiffTracker,
|
||||
sub_id: &str,
|
||||
item: ResponseItem,
|
||||
) -> CodexResult<Option<ResponseInputItem>> {
|
||||
@@ -1444,7 +1516,12 @@ async fn handle_response_item(
|
||||
}
|
||||
None
|
||||
}
|
||||
ResponseItem::Reasoning { summary, .. } => {
|
||||
ResponseItem::Reasoning {
|
||||
id: _,
|
||||
summary,
|
||||
content,
|
||||
encrypted_content: _,
|
||||
} => {
|
||||
for item in summary {
|
||||
let text = match item {
|
||||
ReasoningItemReasoningSummary::SummaryText { text } => text,
|
||||
@@ -1455,6 +1532,21 @@ async fn handle_response_item(
|
||||
};
|
||||
sess.tx_event.send(event).await.ok();
|
||||
}
|
||||
if sess.show_raw_agent_reasoning && content.is_some() {
|
||||
let content = content.unwrap();
|
||||
for item in content {
|
||||
let text = match item {
|
||||
ReasoningItemContent::ReasoningText { text } => text,
|
||||
};
|
||||
let event = Event {
|
||||
id: sub_id.to_string(),
|
||||
msg: EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
|
||||
text,
|
||||
}),
|
||||
};
|
||||
sess.tx_event.send(event).await.ok();
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
ResponseItem::FunctionCall {
|
||||
@@ -1464,7 +1556,17 @@ async fn handle_response_item(
|
||||
..
|
||||
} => {
|
||||
info!("FunctionCall: {arguments}");
|
||||
Some(handle_function_call(sess, sub_id.to_string(), name, arguments, call_id).await)
|
||||
Some(
|
||||
handle_function_call(
|
||||
sess,
|
||||
turn_diff_tracker,
|
||||
sub_id.to_string(),
|
||||
name,
|
||||
arguments,
|
||||
call_id,
|
||||
)
|
||||
.await,
|
||||
)
|
||||
}
|
||||
ResponseItem::LocalShellCall {
|
||||
id,
|
||||
@@ -1499,6 +1601,7 @@ async fn handle_response_item(
|
||||
handle_container_exec_with_params(
|
||||
exec_params,
|
||||
sess,
|
||||
turn_diff_tracker,
|
||||
sub_id.to_string(),
|
||||
effective_call_id,
|
||||
)
|
||||
@@ -1516,6 +1619,7 @@ async fn handle_response_item(
|
||||
|
||||
async fn handle_function_call(
|
||||
sess: &Session,
|
||||
turn_diff_tracker: &mut TurnDiffTracker,
|
||||
sub_id: String,
|
||||
name: String,
|
||||
arguments: String,
|
||||
@@ -1529,7 +1633,8 @@ async fn handle_function_call(
|
||||
return *output;
|
||||
}
|
||||
};
|
||||
handle_container_exec_with_params(params, sess, sub_id, call_id).await
|
||||
handle_container_exec_with_params(params, sess, turn_diff_tracker, sub_id, call_id)
|
||||
.await
|
||||
}
|
||||
"update_plan" => handle_update_plan(sess, arguments, sub_id, call_id).await,
|
||||
_ => {
|
||||
@@ -1603,6 +1708,7 @@ fn maybe_run_with_user_profile(params: ExecParams, sess: &Session) -> ExecParams
|
||||
async fn handle_container_exec_with_params(
|
||||
params: ExecParams,
|
||||
sess: &Session,
|
||||
turn_diff_tracker: &mut TurnDiffTracker,
|
||||
sub_id: String,
|
||||
call_id: String,
|
||||
) -> ResponseInputItem {
|
||||
@@ -1750,7 +1856,7 @@ async fn handle_container_exec_with_params(
|
||||
},
|
||||
),
|
||||
};
|
||||
sess.notify_exec_command_begin(exec_command_context.clone())
|
||||
sess.on_exec_command_begin(turn_diff_tracker, exec_command_context.clone())
|
||||
.await;
|
||||
|
||||
let params = maybe_run_with_user_profile(params, sess);
|
||||
@@ -1775,23 +1881,22 @@ async fn handle_container_exec_with_params(
|
||||
stdout,
|
||||
stderr,
|
||||
duration,
|
||||
} = output;
|
||||
} = &output;
|
||||
|
||||
sess.notify_exec_command_end(
|
||||
sess.on_exec_command_end(
|
||||
turn_diff_tracker,
|
||||
&sub_id,
|
||||
&call_id,
|
||||
&stdout,
|
||||
&stderr,
|
||||
exit_code,
|
||||
&output,
|
||||
exec_command_context.apply_patch.is_some(),
|
||||
)
|
||||
.await;
|
||||
|
||||
let is_success = exit_code == 0;
|
||||
let is_success = *exit_code == 0;
|
||||
let content = format_exec_output(
|
||||
if is_success { &stdout } else { &stderr },
|
||||
exit_code,
|
||||
duration,
|
||||
if is_success { stdout } else { stderr },
|
||||
*exit_code,
|
||||
*duration,
|
||||
);
|
||||
|
||||
ResponseInputItem::FunctionCallOutput {
|
||||
@@ -1803,7 +1908,15 @@ async fn handle_container_exec_with_params(
|
||||
}
|
||||
}
|
||||
Err(CodexErr::Sandbox(error)) => {
|
||||
handle_sandbox_error(params, exec_command_context, error, sandbox_type, sess).await
|
||||
handle_sandbox_error(
|
||||
turn_diff_tracker,
|
||||
params,
|
||||
exec_command_context,
|
||||
error,
|
||||
sandbox_type,
|
||||
sess,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Err(e) => {
|
||||
// Handle non-sandbox errors
|
||||
@@ -1819,6 +1932,7 @@ async fn handle_container_exec_with_params(
|
||||
}
|
||||
|
||||
async fn handle_sandbox_error(
|
||||
turn_diff_tracker: &mut TurnDiffTracker,
|
||||
params: ExecParams,
|
||||
exec_command_context: ExecCommandContext,
|
||||
error: SandboxErr,
|
||||
@@ -1850,7 +1964,8 @@ async fn handle_sandbox_error(
|
||||
// include additional metadata on the command to indicate whether non-zero
|
||||
// exit codes merit a retry.
|
||||
|
||||
// For now, we categorically ask the user to retry without sandbox.
|
||||
// For now, we categorically ask the user to retry without sandbox and
|
||||
// emit the raw error as a background event.
|
||||
sess.notify_background_event(&sub_id, format!("Execution failed: {error}"))
|
||||
.await;
|
||||
|
||||
@@ -1875,7 +1990,8 @@ async fn handle_sandbox_error(
|
||||
sess.notify_background_event(&sub_id, "retrying command without sandbox")
|
||||
.await;
|
||||
|
||||
sess.notify_exec_command_begin(exec_command_context).await;
|
||||
sess.on_exec_command_begin(turn_diff_tracker, exec_command_context)
|
||||
.await;
|
||||
|
||||
// This is an escalated retry; the policy will not be
|
||||
// examined and the sandbox has been set to `None`.
|
||||
@@ -1900,23 +2016,22 @@ async fn handle_sandbox_error(
|
||||
stdout,
|
||||
stderr,
|
||||
duration,
|
||||
} = retry_output;
|
||||
} = &retry_output;
|
||||
|
||||
sess.notify_exec_command_end(
|
||||
sess.on_exec_command_end(
|
||||
turn_diff_tracker,
|
||||
&sub_id,
|
||||
&call_id,
|
||||
&stdout,
|
||||
&stderr,
|
||||
exit_code,
|
||||
&retry_output,
|
||||
is_apply_patch,
|
||||
)
|
||||
.await;
|
||||
|
||||
let is_success = exit_code == 0;
|
||||
let is_success = *exit_code == 0;
|
||||
let content = format_exec_output(
|
||||
if is_success { &stdout } else { &stderr },
|
||||
exit_code,
|
||||
duration,
|
||||
if is_success { stdout } else { stderr },
|
||||
*exit_code,
|
||||
*duration,
|
||||
);
|
||||
|
||||
ResponseInputItem::FunctionCallOutput {
|
||||
|
||||
@@ -10,6 +10,8 @@ use crate::config_types::ShellEnvironmentPolicyToml;
|
||||
use crate::config_types::Tui;
|
||||
use crate::config_types::UriBasedFileOpener;
|
||||
use crate::flags::OPENAI_DEFAULT_MODEL;
|
||||
use crate::model_family::ModelFamily;
|
||||
use crate::model_family::find_family_for_model;
|
||||
use crate::model_provider_info::ModelProviderInfo;
|
||||
use crate::model_provider_info::built_in_model_providers;
|
||||
use crate::openai_model_info::get_model_info;
|
||||
@@ -33,6 +35,8 @@ pub struct Config {
|
||||
/// Optional override of model selection.
|
||||
pub model: String,
|
||||
|
||||
pub model_family: ModelFamily,
|
||||
|
||||
/// Size of the context window for the model, in tokens.
|
||||
pub model_context_window: Option<u64>,
|
||||
|
||||
@@ -57,6 +61,10 @@ pub struct Config {
|
||||
/// users are only interested in the final agent responses.
|
||||
pub hide_agent_reasoning: bool,
|
||||
|
||||
/// When set to `true`, `AgentReasoningRawContentEvent` events will be shown in the UI/output.
|
||||
/// Defaults to `false`.
|
||||
pub show_raw_agent_reasoning: bool,
|
||||
|
||||
/// Disable server-side response storage (sends the full conversation
|
||||
/// context with every request). Currently necessary for OpenAI customers
|
||||
/// who have opted into Zero Data Retention (ZDR).
|
||||
@@ -134,10 +142,6 @@ pub struct Config {
|
||||
/// request using the Responses API.
|
||||
pub model_reasoning_summary: ReasoningSummary,
|
||||
|
||||
/// When set to `true`, overrides the default heuristic and forces
|
||||
/// `model_supports_reasoning_summaries()` to return `true`.
|
||||
pub model_supports_reasoning_summaries: bool,
|
||||
|
||||
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
|
||||
pub chatgpt_base_url: String,
|
||||
|
||||
@@ -325,6 +329,10 @@ pub struct ConfigToml {
|
||||
/// UI/output. Defaults to `false`.
|
||||
pub hide_agent_reasoning: Option<bool>,
|
||||
|
||||
/// When set to `true`, `AgentReasoningRawContentEvent` events will be shown in the UI/output.
|
||||
/// Defaults to `false`.
|
||||
pub show_raw_agent_reasoning: Option<bool>,
|
||||
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
|
||||
@@ -465,7 +473,19 @@ impl Config {
|
||||
.or(config_profile.model)
|
||||
.or(cfg.model)
|
||||
.unwrap_or_else(default_model);
|
||||
let openai_model_info = get_model_info(&model);
|
||||
let model_family = find_family_for_model(&model).unwrap_or_else(|| {
|
||||
let supports_reasoning_summaries =
|
||||
cfg.model_supports_reasoning_summaries.unwrap_or(false);
|
||||
ModelFamily {
|
||||
slug: model.clone(),
|
||||
family: model.clone(),
|
||||
needs_special_apply_patch_instructions: false,
|
||||
supports_reasoning_summaries,
|
||||
uses_local_shell_tool: false,
|
||||
}
|
||||
});
|
||||
|
||||
let openai_model_info = get_model_info(&model_family);
|
||||
let model_context_window = cfg
|
||||
.model_context_window
|
||||
.or_else(|| openai_model_info.as_ref().map(|info| info.context_window));
|
||||
@@ -480,14 +500,17 @@ impl Config {
|
||||
// Load base instructions override from a file if specified. If the
|
||||
// path is relative, resolve it against the effective cwd so the
|
||||
// behaviour matches other path-like config values.
|
||||
let file_base_instructions = Self::get_base_instructions(
|
||||
cfg.experimental_instructions_file.as_ref(),
|
||||
&resolved_cwd,
|
||||
)?;
|
||||
let experimental_instructions_path = config_profile
|
||||
.experimental_instructions_file
|
||||
.as_ref()
|
||||
.or(cfg.experimental_instructions_file.as_ref());
|
||||
let file_base_instructions =
|
||||
Self::get_base_instructions(experimental_instructions_path, &resolved_cwd)?;
|
||||
let base_instructions = base_instructions.or(file_base_instructions);
|
||||
|
||||
let config = Self {
|
||||
model,
|
||||
model_family,
|
||||
model_context_window,
|
||||
model_max_output_tokens,
|
||||
model_provider_id,
|
||||
@@ -516,6 +539,7 @@ impl Config {
|
||||
codex_linux_sandbox_exe,
|
||||
|
||||
hide_agent_reasoning: cfg.hide_agent_reasoning.unwrap_or(false),
|
||||
show_raw_agent_reasoning: cfg.show_raw_agent_reasoning.unwrap_or(false),
|
||||
model_reasoning_effort: config_profile
|
||||
.model_reasoning_effort
|
||||
.or(cfg.model_reasoning_effort)
|
||||
@@ -525,10 +549,6 @@ impl Config {
|
||||
.or(cfg.model_reasoning_summary)
|
||||
.unwrap_or_default(),
|
||||
|
||||
model_supports_reasoning_summaries: cfg
|
||||
.model_supports_reasoning_summaries
|
||||
.unwrap_or(false),
|
||||
|
||||
chatgpt_base_url: config_profile
|
||||
.chatgpt_base_url
|
||||
.or(cfg.chatgpt_base_url)
|
||||
@@ -869,6 +889,7 @@ disable_response_storage = true
|
||||
assert_eq!(
|
||||
Config {
|
||||
model: "o3".to_string(),
|
||||
model_family: find_family_for_model("o3").expect("known model slug"),
|
||||
model_context_window: Some(200_000),
|
||||
model_max_output_tokens: Some(100_000),
|
||||
model_provider_id: "openai".to_string(),
|
||||
@@ -889,9 +910,9 @@ disable_response_storage = true
|
||||
tui: Tui::default(),
|
||||
codex_linux_sandbox_exe: None,
|
||||
hide_agent_reasoning: false,
|
||||
show_raw_agent_reasoning: false,
|
||||
model_reasoning_effort: ReasoningEffort::High,
|
||||
model_reasoning_summary: ReasoningSummary::Detailed,
|
||||
model_supports_reasoning_summaries: false,
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
experimental_resume: None,
|
||||
base_instructions: None,
|
||||
@@ -919,6 +940,7 @@ disable_response_storage = true
|
||||
)?;
|
||||
let expected_gpt3_profile_config = Config {
|
||||
model: "gpt-3.5-turbo".to_string(),
|
||||
model_family: find_family_for_model("gpt-3.5-turbo").expect("known model slug"),
|
||||
model_context_window: Some(16_385),
|
||||
model_max_output_tokens: Some(4_096),
|
||||
model_provider_id: "openai-chat-completions".to_string(),
|
||||
@@ -939,9 +961,9 @@ disable_response_storage = true
|
||||
tui: Tui::default(),
|
||||
codex_linux_sandbox_exe: None,
|
||||
hide_agent_reasoning: false,
|
||||
show_raw_agent_reasoning: false,
|
||||
model_reasoning_effort: ReasoningEffort::default(),
|
||||
model_reasoning_summary: ReasoningSummary::default(),
|
||||
model_supports_reasoning_summaries: false,
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
experimental_resume: None,
|
||||
base_instructions: None,
|
||||
@@ -984,6 +1006,7 @@ disable_response_storage = true
|
||||
)?;
|
||||
let expected_zdr_profile_config = Config {
|
||||
model: "o3".to_string(),
|
||||
model_family: find_family_for_model("o3").expect("known model slug"),
|
||||
model_context_window: Some(200_000),
|
||||
model_max_output_tokens: Some(100_000),
|
||||
model_provider_id: "openai".to_string(),
|
||||
@@ -1004,9 +1027,9 @@ disable_response_storage = true
|
||||
tui: Tui::default(),
|
||||
codex_linux_sandbox_exe: None,
|
||||
hide_agent_reasoning: false,
|
||||
show_raw_agent_reasoning: false,
|
||||
model_reasoning_effort: ReasoningEffort::default(),
|
||||
model_reasoning_summary: ReasoningSummary::default(),
|
||||
model_supports_reasoning_summaries: false,
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
experimental_resume: None,
|
||||
base_instructions: None,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use serde::Deserialize;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::config_types::ReasoningEffort;
|
||||
use crate::config_types::ReasoningSummary;
|
||||
@@ -17,4 +18,5 @@ pub struct ConfigProfile {
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub chatgpt_base_url: Option<String>,
|
||||
pub experimental_instructions_file: Option<PathBuf>,
|
||||
}
|
||||
|
||||
@@ -24,9 +24,52 @@ impl ConversationHistory {
|
||||
I::Item: std::ops::Deref<Target = ResponseItem>,
|
||||
{
|
||||
for item in items {
|
||||
if is_api_message(&item) {
|
||||
// Note agent-loop.ts also does filtering on some of the fields.
|
||||
self.items.push(item.clone());
|
||||
if !is_api_message(&item) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Merge adjacent assistant messages into a single history entry.
|
||||
// This prevents duplicates when a partial assistant message was
|
||||
// streamed into history earlier in the turn and the final full
|
||||
// message is recorded at turn end.
|
||||
match (&*item, self.items.last_mut()) {
|
||||
(
|
||||
ResponseItem::Message {
|
||||
role: new_role,
|
||||
content: new_content,
|
||||
..
|
||||
},
|
||||
Some(ResponseItem::Message {
|
||||
role: last_role,
|
||||
content: last_content,
|
||||
..
|
||||
}),
|
||||
) if new_role == "assistant" && last_role == "assistant" => {
|
||||
append_text_content(last_content, new_content);
|
||||
}
|
||||
_ => {
|
||||
self.items.push(item.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Append a text `delta` to the latest assistant message, creating a new
|
||||
/// assistant entry if none exists yet (e.g. first delta for this turn).
|
||||
pub(crate) fn append_assistant_text(&mut self, delta: &str) {
|
||||
match self.items.last_mut() {
|
||||
Some(ResponseItem::Message { role, content, .. }) if role == "assistant" => {
|
||||
append_text_delta(content, delta);
|
||||
}
|
||||
_ => {
|
||||
// Start a new assistant message with the delta.
|
||||
self.items.push(ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![crate::models::ContentItem::OutputText {
|
||||
text: delta.to_string(),
|
||||
}],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -72,3 +115,140 @@ fn is_api_message(message: &ResponseItem) -> bool {
|
||||
ResponseItem::Other => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper to append the textual content from `src` into `dst` in place.
|
||||
fn append_text_content(
|
||||
dst: &mut Vec<crate::models::ContentItem>,
|
||||
src: &Vec<crate::models::ContentItem>,
|
||||
) {
|
||||
for c in src {
|
||||
if let crate::models::ContentItem::OutputText { text } = c {
|
||||
append_text_delta(dst, text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Append a single text delta to the last OutputText item in `content`, or
|
||||
/// push a new OutputText item if none exists.
|
||||
fn append_text_delta(content: &mut Vec<crate::models::ContentItem>, delta: &str) {
|
||||
if let Some(crate::models::ContentItem::OutputText { text }) = content
|
||||
.iter_mut()
|
||||
.rev()
|
||||
.find(|c| matches!(c, crate::models::ContentItem::OutputText { .. }))
|
||||
{
|
||||
text.push_str(delta);
|
||||
} else {
|
||||
content.push(crate::models::ContentItem::OutputText {
|
||||
text: delta.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::models::ContentItem;
|
||||
|
||||
fn assistant_msg(text: &str) -> ResponseItem {
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: text.to_string(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
fn user_msg(text: &str) -> ResponseItem {
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: text.to_string(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn merges_adjacent_assistant_messages() {
|
||||
let mut h = ConversationHistory::default();
|
||||
let a1 = assistant_msg("Hello");
|
||||
let a2 = assistant_msg(", world!");
|
||||
h.record_items([&a1, &a2]);
|
||||
|
||||
let items = h.contents();
|
||||
assert_eq!(
|
||||
items,
|
||||
vec![ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: "Hello, world!".to_string()
|
||||
}]
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn append_assistant_text_creates_and_appends() {
|
||||
let mut h = ConversationHistory::default();
|
||||
h.append_assistant_text("Hello");
|
||||
h.append_assistant_text(", world");
|
||||
|
||||
// Now record a final full assistant message and verify it merges.
|
||||
let final_msg = assistant_msg("!");
|
||||
h.record_items([&final_msg]);
|
||||
|
||||
let items = h.contents();
|
||||
assert_eq!(
|
||||
items,
|
||||
vec![ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: "Hello, world!".to_string()
|
||||
}]
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filters_non_api_messages() {
|
||||
let mut h = ConversationHistory::default();
|
||||
// System message is not an API message; Other is ignored.
|
||||
let system = ResponseItem::Message {
|
||||
id: None,
|
||||
role: "system".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: "ignored".to_string(),
|
||||
}],
|
||||
};
|
||||
h.record_items([&system, &ResponseItem::Other]);
|
||||
|
||||
// User and assistant should be retained.
|
||||
let u = user_msg("hi");
|
||||
let a = assistant_msg("hello");
|
||||
h.record_items([&u, &a]);
|
||||
|
||||
let items = h.contents();
|
||||
assert_eq!(
|
||||
items,
|
||||
vec![
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: "hi".to_string()
|
||||
}]
|
||||
},
|
||||
ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: "hello".to_string()
|
||||
}]
|
||||
}
|
||||
]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -140,11 +140,7 @@ pub async fn process_exec_tool_call(
|
||||
|
||||
let exit_code = raw_output.exit_status.code().unwrap_or(-1);
|
||||
|
||||
// NOTE(ragona): This is much less restrictive than the previous check. If we exec
|
||||
// a command, and it returns anything other than success, we assume that it may have
|
||||
// been a sandboxing error and allow the user to retry. (The user of course may choose
|
||||
// not to retry, or in a non-interactive mode, would automatically reject the approval.)
|
||||
if exit_code != 0 && sandbox_type != SandboxType::None {
|
||||
if exit_code != 0 && is_likely_sandbox_denied(sandbox_type, exit_code) {
|
||||
return Err(CodexErr::Sandbox(SandboxErr::Denied(
|
||||
exit_code, stdout, stderr,
|
||||
)));
|
||||
@@ -223,6 +219,26 @@ fn create_linux_sandbox_command_args(
|
||||
linux_cmd
|
||||
}
|
||||
|
||||
/// We don't have a fully deterministic way to tell if our command failed
|
||||
/// because of the sandbox: a command in the user's zshrc file might hit an
/// error, and the command itself might fail or succeed for reasons unrelated to the sandbox.
|
||||
/// For now, we conservatively check for 'command not found' (exit code 127),
|
||||
/// and can add additional cases as necessary.
|
||||
fn is_likely_sandbox_denied(sandbox_type: SandboxType, exit_code: i32) -> bool {
|
||||
if sandbox_type == SandboxType::None {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Quick rejects: well-known non-sandbox shell exit codes
|
||||
// 127: command not found (others, e.g. 2: misuse of shell builtins, could be added later)
|
||||
if exit_code == 127 {
|
||||
return false;
|
||||
}
|
||||
|
||||
// For all other cases, we assume the sandbox is the cause
|
||||
true
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct RawExecToolCallOutput {
|
||||
pub exit_status: ExitStatus,
|
||||
|
||||
@@ -31,6 +31,7 @@ mod model_provider_info;
|
||||
pub use model_provider_info::ModelProviderInfo;
|
||||
pub use model_provider_info::WireApi;
|
||||
pub use model_provider_info::built_in_model_providers;
|
||||
pub mod model_family;
|
||||
mod models;
|
||||
mod openai_model_info;
|
||||
mod openai_tools;
|
||||
@@ -38,12 +39,13 @@ pub mod plan_tool;
|
||||
mod project_doc;
|
||||
pub mod protocol;
|
||||
mod rollout;
|
||||
mod safety;
|
||||
pub(crate) mod safety;
|
||||
pub mod seatbelt;
|
||||
pub mod shell;
|
||||
pub mod spawn;
|
||||
pub mod turn_diff_tracker;
|
||||
mod user_notification;
|
||||
pub mod util;
|
||||
|
||||
pub use apply_patch::CODEX_APPLY_PATCH_ARG1;
|
||||
pub use client_common::model_supports_reasoning_summaries;
|
||||
pub use safety::get_platform_sandbox;
|
||||
|
||||
codex-rs/core/src/model_family.rs (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
/// A model family is a group of models that share certain characteristics.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct ModelFamily {
|
||||
/// The full model slug used to derive this model family, e.g.
|
||||
/// "gpt-4.1-2025-04-14".
|
||||
pub slug: String,
|
||||
|
||||
/// The model family name, e.g. "gpt-4.1". Note this should be able to be used
|
||||
/// with [`crate::openai_model_info::get_model_info`].
|
||||
pub family: String,
|
||||
|
||||
/// True if the model needs additional instructions on how to use the
|
||||
/// "virtual" `apply_patch` CLI.
|
||||
pub needs_special_apply_patch_instructions: bool,
|
||||
|
||||
// Whether the `reasoning` field can be set when making a request to this
|
||||
// model family. Note it has `effort` and `summary` subfields (though
|
||||
// `summary` is optional).
|
||||
pub supports_reasoning_summaries: bool,
|
||||
|
||||
// This should be set to true when the model expects a tool named
|
||||
// "local_shell" to be provided. Its contract must be understood natively by
|
||||
// the model such that its description can be omitted.
|
||||
// See https://platform.openai.com/docs/guides/tools-local-shell
|
||||
pub uses_local_shell_tool: bool,
|
||||
}
|
||||
|
||||
macro_rules! model_family {
|
||||
(
|
||||
$slug:expr, $family:expr $(, $key:ident : $value:expr )* $(,)?
|
||||
) => {{
|
||||
// defaults
|
||||
let mut mf = ModelFamily {
|
||||
slug: $slug.to_string(),
|
||||
family: $family.to_string(),
|
||||
needs_special_apply_patch_instructions: false,
|
||||
supports_reasoning_summaries: false,
|
||||
uses_local_shell_tool: false,
|
||||
};
|
||||
// apply overrides
|
||||
$(
|
||||
mf.$key = $value;
|
||||
)*
|
||||
Some(mf)
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! simple_model_family {
|
||||
(
|
||||
$slug:expr, $family:expr
|
||||
) => {{
|
||||
Some(ModelFamily {
|
||||
slug: $slug.to_string(),
|
||||
family: $family.to_string(),
|
||||
needs_special_apply_patch_instructions: false,
|
||||
supports_reasoning_summaries: false,
|
||||
uses_local_shell_tool: false,
|
||||
})
|
||||
}};
|
||||
}
|
||||
|
||||
/// Returns a `ModelFamily` for the given model slug, or `None` if the slug
|
||||
/// does not match any known model family.
|
||||
pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
|
||||
if slug.starts_with("o3") {
|
||||
model_family!(
|
||||
slug, "o3",
|
||||
supports_reasoning_summaries: true,
|
||||
)
|
||||
} else if slug.starts_with("o4-mini") {
|
||||
model_family!(
|
||||
slug, "o4-mini",
|
||||
supports_reasoning_summaries: true,
|
||||
)
|
||||
} else if slug.starts_with("codex-mini-latest") {
|
||||
model_family!(
|
||||
slug, "codex-mini-latest",
|
||||
supports_reasoning_summaries: true,
|
||||
uses_local_shell_tool: true,
|
||||
)
|
||||
} else if slug.starts_with("gpt-4.1") {
|
||||
model_family!(
|
||||
slug, "gpt-4.1",
|
||||
needs_special_apply_patch_instructions: true,
|
||||
)
|
||||
} else if slug.starts_with("gpt-4o") {
|
||||
simple_model_family!(slug, "gpt-4o")
|
||||
} else if slug.starts_with("gpt-3.5") {
|
||||
simple_model_family!(slug, "gpt-3.5")
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
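A minimal sketch (hypothetical test module, not part of the diff) of how the prefix matching above resolves the dated slug used as an example in the struct docs:

#[cfg(test)]
mod family_sketch {
    use super::find_family_for_model;

    #[test]
    fn dated_slug_maps_to_gpt_4_1_family() {
        // "gpt-4.1-2025-04-14" matches the `gpt-4.1` branch above.
        let fam = find_family_for_model("gpt-4.1-2025-04-14").expect("known prefix");
        assert_eq!(fam.family, "gpt-4.1");
        assert!(fam.needs_special_apply_patch_instructions);
        assert!(!fam.uses_local_shell_tool);
    }
}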
@@ -5,8 +5,11 @@
|
||||
//! 2. User-defined entries inside `~/.codex/config.toml` under the `model_providers`
|
||||
//! key. These override or extend the defaults at runtime.
|
||||
|
||||
use codex_login::AuthMode;
|
||||
use codex_login::CodexAuth;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::env::VarError;
|
||||
use std::time::Duration;
|
||||
@@ -88,25 +91,30 @@ impl ModelProviderInfo {
|
||||
/// When `require_api_key` is true and the provider declares an `env_key`
|
||||
/// but the variable is missing/empty, returns an [`Err`] identical to the
|
||||
/// one produced by [`ModelProviderInfo::api_key`].
|
||||
pub fn create_request_builder<'a>(
|
||||
pub async fn create_request_builder<'a>(
|
||||
&'a self,
|
||||
client: &'a reqwest::Client,
|
||||
auth: &Option<CodexAuth>,
|
||||
) -> crate::error::Result<reqwest::RequestBuilder> {
|
||||
let url = self.get_full_url();
|
||||
let auth: Cow<'_, Option<CodexAuth>> = if auth.is_some() {
|
||||
Cow::Borrowed(auth)
|
||||
} else {
|
||||
Cow::Owned(self.get_fallback_auth()?)
|
||||
};
|
||||
|
||||
let url = self.get_full_url(&auth);
|
||||
|
||||
let mut builder = client.post(url);
|
||||
|
||||
let api_key = self.api_key()?;
|
||||
if let Some(key) = api_key {
|
||||
builder = builder.bearer_auth(key);
|
||||
if let Some(auth) = auth.as_ref() {
|
||||
builder = builder.bearer_auth(auth.get_token().await?);
|
||||
}
|
||||
|
||||
Ok(self.apply_http_headers(builder))
|
||||
}
|
||||
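A sketch of the new async call shape (hypothetical caller; `provider` and `auth` are assumed to be in scope, not part of the diff):

// create_request_builder is now async and takes an Option<CodexAuth>; when
// auth is None it falls back to the provider's env_key-based API key.
let client = reqwest::Client::new();
let builder = provider.create_request_builder(&client, &auth).await?;
// The builder already carries the bearer token and any provider HTTP headers.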
|
||||
pub(crate) fn get_full_url(&self) -> String {
|
||||
let query_string = self
|
||||
.query_params
|
||||
fn get_query_string(&self) -> String {
|
||||
self.query_params
|
||||
.as_ref()
|
||||
.map_or_else(String::new, |params| {
|
||||
let full_params = params
|
||||
@@ -115,16 +123,29 @@ impl ModelProviderInfo {
|
||||
.collect::<Vec<_>>()
|
||||
.join("&");
|
||||
format!("?{full_params}")
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn get_full_url(&self, auth: &Option<CodexAuth>) -> String {
|
||||
let default_base_url = if matches!(
|
||||
auth,
|
||||
Some(CodexAuth {
|
||||
mode: AuthMode::ChatGPT,
|
||||
..
|
||||
})
|
||||
) {
|
||||
"https://chatgpt.com/backend-api/codex"
|
||||
} else {
|
||||
"https://api.openai.com/v1"
|
||||
};
|
||||
let query_string = self.get_query_string();
|
||||
let base_url = self
|
||||
.base_url
|
||||
.clone()
|
||||
.unwrap_or("https://api.openai.com/v1".to_string());
|
||||
.unwrap_or(default_base_url.to_string());
|
||||
|
||||
match self.wire_api {
|
||||
WireApi::Responses => {
|
||||
format!("{base_url}/responses{query_string}")
|
||||
}
|
||||
WireApi::Responses => format!("{base_url}/responses{query_string}"),
|
||||
WireApi::Chat => format!("{base_url}/chat/completions{query_string}"),
|
||||
}
|
||||
}
|
||||
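Working through the URL selection above (assuming no `base_url` override and no query params; not part of the diff):

// Some(CodexAuth { mode: AuthMode::ChatGPT, .. }) + WireApi::Responses
//   -> https://chatgpt.com/backend-api/codex/responses
// API-key (or no) auth + WireApi::Responses
//   -> https://api.openai.com/v1/responses
// API-key (or no) auth + WireApi::Chat
//   -> https://api.openai.com/v1/chat/completions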
@@ -132,10 +153,7 @@ impl ModelProviderInfo {
|
||||
/// Apply provider-specific HTTP headers (both static and environment-based)
|
||||
/// onto an existing `reqwest::RequestBuilder` and return the updated
|
||||
/// builder.
|
||||
pub fn apply_http_headers(
|
||||
&self,
|
||||
mut builder: reqwest::RequestBuilder,
|
||||
) -> reqwest::RequestBuilder {
|
||||
fn apply_http_headers(&self, mut builder: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
|
||||
if let Some(extra) = &self.http_headers {
|
||||
for (k, v) in extra {
|
||||
builder = builder.header(k, v);
|
||||
@@ -157,7 +175,7 @@ impl ModelProviderInfo {
|
||||
/// If `env_key` is Some, returns the API key for this provider if present
|
||||
/// (and non-empty) in the environment. If `env_key` is required but
|
||||
/// cannot be found, returns an error.
|
||||
fn api_key(&self) -> crate::error::Result<Option<String>> {
|
||||
pub fn api_key(&self) -> crate::error::Result<Option<String>> {
|
||||
match &self.env_key {
|
||||
Some(env_key) => {
|
||||
let env_value = std::env::var(env_key);
|
||||
@@ -198,6 +216,14 @@ impl ModelProviderInfo {
|
||||
.map(Duration::from_millis)
|
||||
.unwrap_or(Duration::from_millis(DEFAULT_STREAM_IDLE_TIMEOUT_MS))
|
||||
}
|
||||
|
||||
fn get_fallback_auth(&self) -> crate::error::Result<Option<CodexAuth>> {
|
||||
let api_key = self.api_key()?;
|
||||
if let Some(api_key) = api_key {
|
||||
return Ok(Some(CodexAuth::from_api_key(api_key)));
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Built-in default provider list.
|
||||
|
||||
@@ -9,7 +9,7 @@ use serde::ser::Serializer;
|
||||
|
||||
use crate::protocol::InputItem;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum ResponseInputItem {
|
||||
Message {
|
||||
@@ -26,7 +26,7 @@ pub enum ResponseInputItem {
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum ContentItem {
|
||||
InputText { text: String },
|
||||
@@ -34,7 +34,7 @@ pub enum ContentItem {
|
||||
OutputText { text: String },
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum ResponseItem {
|
||||
Message {
|
||||
@@ -45,6 +45,8 @@ pub enum ResponseItem {
|
||||
Reasoning {
|
||||
id: String,
|
||||
summary: Vec<ReasoningItemReasoningSummary>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
content: Option<Vec<ReasoningItemContent>>,
|
||||
encrypted_content: Option<String>,
|
||||
},
|
||||
LocalShellCall {
|
||||
@@ -107,7 +109,7 @@ impl From<ResponseInputItem> for ResponseItem {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum LocalShellStatus {
|
||||
Completed,
|
||||
@@ -115,13 +117,13 @@ pub enum LocalShellStatus {
|
||||
Incomplete,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum LocalShellAction {
|
||||
Exec(LocalShellExecAction),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
pub struct LocalShellExecAction {
|
||||
pub command: Vec<String>,
|
||||
pub timeout_ms: Option<u64>,
|
||||
@@ -130,12 +132,18 @@ pub struct LocalShellExecAction {
|
||||
pub user: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum ReasoningItemReasoningSummary {
|
||||
SummaryText { text: String },
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum ReasoningItemContent {
|
||||
ReasoningText { text: String },
|
||||
}
|
||||
|
||||
impl From<Vec<InputItem>> for ResponseInputItem {
|
||||
fn from(items: Vec<InputItem>) -> Self {
|
||||
Self::Message {
|
||||
@@ -185,10 +193,9 @@ pub struct ShellToolCallParams {
|
||||
pub timeout_ms: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct FunctionCallOutputPayload {
|
||||
pub content: String,
|
||||
#[expect(dead_code)]
|
||||
pub success: Option<bool>,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use crate::model_family::ModelFamily;
|
||||
|
||||
/// Metadata about a model, particularly OpenAI models.
|
||||
/// We may want to consider including details like the pricing for
|
||||
/// input tokens, output tokens, etc., though users will need to be able to
|
||||
@@ -14,8 +16,8 @@ pub(crate) struct ModelInfo {
|
||||
|
||||
/// Note details such as what a model like gpt-4o is aliased to may be out of
|
||||
/// date.
|
||||
pub(crate) fn get_model_info(name: &str) -> Option<ModelInfo> {
|
||||
match name {
|
||||
pub(crate) fn get_model_info(model_family: &ModelFamily) -> Option<ModelInfo> {
|
||||
match model_family.slug.as_str() {
|
||||
// https://platform.openai.com/docs/models/o3
|
||||
"o3" => Some(ModelInfo {
|
||||
context_window: 200_000,
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use serde::Serialize;
|
||||
use serde_json::json;
|
||||
use std::collections::BTreeMap;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use crate::client_common::Prompt;
|
||||
use crate::model_family::ModelFamily;
|
||||
use crate::plan_tool::PLAN_TOOL;
|
||||
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
@@ -42,8 +42,7 @@ pub(crate) enum JsonSchema {
|
||||
},
|
||||
}
|
||||
|
||||
/// Tool usage specification
|
||||
static DEFAULT_TOOLS: LazyLock<Vec<OpenAiTool>> = LazyLock::new(|| {
|
||||
fn create_shell_tool() -> OpenAiTool {
|
||||
let mut properties = BTreeMap::new();
|
||||
properties.insert(
|
||||
"command".to_string(),
|
||||
@@ -54,38 +53,35 @@ static DEFAULT_TOOLS: LazyLock<Vec<OpenAiTool>> = LazyLock::new(|| {
|
||||
properties.insert("workdir".to_string(), JsonSchema::String);
|
||||
properties.insert("timeout".to_string(), JsonSchema::Number);
|
||||
|
||||
vec![OpenAiTool::Function(ResponsesApiTool {
|
||||
OpenAiTool::Function(ResponsesApiTool {
|
||||
name: "shell",
|
||||
description: "Runs a shell command, and returns its output.",
|
||||
description: "Runs a shell command and returns its output",
|
||||
strict: false,
|
||||
parameters: JsonSchema::Object {
|
||||
properties,
|
||||
required: &["command"],
|
||||
additional_properties: false,
|
||||
},
|
||||
})]
|
||||
});
|
||||
|
||||
static DEFAULT_CODEX_MODEL_TOOLS: LazyLock<Vec<OpenAiTool>> =
|
||||
LazyLock::new(|| vec![OpenAiTool::LocalShell {}]);
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns JSON values that are compatible with Function Calling in the
|
||||
/// Responses API:
|
||||
/// https://platform.openai.com/docs/guides/function-calling?api-mode=responses
|
||||
pub(crate) fn create_tools_json_for_responses_api(
|
||||
prompt: &Prompt,
|
||||
model: &str,
|
||||
model_family: &ModelFamily,
|
||||
include_plan_tool: bool,
|
||||
) -> crate::error::Result<Vec<serde_json::Value>> {
|
||||
// Assemble tool list: built-in tools + any extra tools from the prompt.
|
||||
let default_tools = if model.starts_with("codex") {
|
||||
&DEFAULT_CODEX_MODEL_TOOLS
|
||||
} else {
|
||||
&DEFAULT_TOOLS
|
||||
};
|
||||
let mut tools_json = Vec::with_capacity(default_tools.len() + prompt.extra_tools.len());
|
||||
for t in default_tools.iter() {
|
||||
tools_json.push(serde_json::to_value(t)?);
|
||||
let mut openai_tools = vec![create_shell_tool()];
|
||||
if model_family.uses_local_shell_tool {
|
||||
openai_tools.push(OpenAiTool::LocalShell {});
|
||||
}
|
||||
|
||||
let mut tools_json = Vec::with_capacity(openai_tools.len() + prompt.extra_tools.len() + 1);
|
||||
for tool in openai_tools.iter() {
|
||||
tools_json.push(serde_json::to_value(tool)?);
|
||||
}
|
||||
tools_json.extend(
|
||||
prompt
|
||||
@@ -107,13 +103,13 @@ pub(crate) fn create_tools_json_for_responses_api(
|
||||
/// https://platform.openai.com/docs/guides/function-calling?api-mode=chat
|
||||
pub(crate) fn create_tools_json_for_chat_completions_api(
|
||||
prompt: &Prompt,
|
||||
model: &str,
|
||||
model_family: &ModelFamily,
|
||||
include_plan_tool: bool,
|
||||
) -> crate::error::Result<Vec<serde_json::Value>> {
|
||||
// We start with the JSON for the Responses API and then rewrite it to match
|
||||
// the chat completions tool call format.
|
||||
let responses_api_tools_json =
|
||||
create_tools_json_for_responses_api(prompt, model, include_plan_tool)?;
|
||||
create_tools_json_for_responses_api(prompt, model_family, include_plan_tool)?;
|
||||
let tools_json = responses_api_tools_json
|
||||
.into_iter()
|
||||
.filter_map(|mut tool| {
|
||||
|
||||
@@ -359,6 +359,12 @@ pub enum EventMsg {
|
||||
/// Agent reasoning delta event from agent.
|
||||
AgentReasoningDelta(AgentReasoningDeltaEvent),
|
||||
|
||||
/// Raw chain-of-thought from agent.
|
||||
AgentReasoningRawContent(AgentReasoningRawContentEvent),
|
||||
|
||||
/// Agent reasoning content delta event from agent.
|
||||
AgentReasoningRawContentDelta(AgentReasoningRawContentDeltaEvent),
|
||||
|
||||
/// Ack the client's configure message.
|
||||
SessionConfigured(SessionConfiguredEvent),
|
||||
|
||||
@@ -387,6 +393,8 @@ pub enum EventMsg {
|
||||
/// Notification that a patch application has finished.
|
||||
PatchApplyEnd(PatchApplyEndEvent),
|
||||
|
||||
TurnDiff(TurnDiffEvent),
|
||||
|
||||
/// Response to GetHistoryEntryRequest.
|
||||
GetHistoryEntryResponse(GetHistoryEntryResponseEvent),
|
||||
|
||||
@@ -462,6 +470,16 @@ pub struct AgentReasoningEvent {
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct AgentReasoningRawContentEvent {
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct AgentReasoningRawContentDeltaEvent {
|
||||
pub delta: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct AgentReasoningDeltaEvent {
|
||||
pub delta: String,
|
||||
@@ -523,6 +541,8 @@ pub struct ExecCommandEndEvent {
|
||||
pub stderr: String,
|
||||
/// The command's exit code.
|
||||
pub exit_code: i32,
|
||||
/// The duration of the command execution.
|
||||
pub duration: Duration,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
@@ -596,6 +616,11 @@ pub struct PatchApplyEndEvent {
|
||||
pub success: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct TurnDiffEvent {
|
||||
pub unified_diff: String,
|
||||
}
|
||||
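A minimal sketch of how a consumer might handle the new event variant (hypothetical handler, not part of the diff):

match event.msg {
    EventMsg::TurnDiff(TurnDiffEvent { unified_diff }) => {
        // The aggregated diff for the whole turn, in unified format.
        println!("{unified_diff}");
    }
    _ => {}
}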
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct GetHistoryEntryResponseEvent {
|
||||
pub offset: usize,
|
||||
|
||||
@@ -65,3 +65,7 @@
|
||||
(sysctl-name "sysctl.proc_cputype")
|
||||
(sysctl-name-prefix "hw.perflevel")
|
||||
)
|
||||
|
||||
; Added on top of Chrome profile
|
||||
; Needed for Python multiprocessing on macOS for the SemLock
|
||||
(allow ipc-posix-sem)
|
||||
|
||||
codex-rs/core/src/turn_diff_tracker.rs (new file, 887 lines)
@@ -0,0 +1,887 @@
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use sha1::digest::Output;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::protocol::FileChange;
|
||||
|
||||
const ZERO_OID: &str = "0000000000000000000000000000000000000000";
|
||||
const DEV_NULL: &str = "/dev/null";
|
||||
|
||||
struct BaselineFileInfo {
|
||||
path: PathBuf,
|
||||
content: Vec<u8>,
|
||||
mode: FileMode,
|
||||
oid: String,
|
||||
}
|
||||
|
||||
/// Tracks sets of changes to files and exposes the overall unified diff.
|
||||
/// Internally, this works as follows:
|
||||
/// 1. Maintain an in-memory baseline snapshot of files when they are first seen.
|
||||
/// For new additions, do not create a baseline so that diffs are shown as proper additions (using /dev/null).
|
||||
/// 2. Keep a stable internal filename (uuid) per external path for rename tracking.
|
||||
/// 3. To compute the aggregated unified diff, compare each baseline snapshot to the current file on disk entirely in-memory
|
||||
/// using the `similar` crate and emit unified diffs with rewritten external paths.
|
||||
#[derive(Default)]
|
||||
pub struct TurnDiffTracker {
|
||||
/// Map external path -> internal filename (uuid).
|
||||
external_to_temp_name: HashMap<PathBuf, String>,
|
||||
/// Internal filename -> baseline file info.
|
||||
baseline_file_info: HashMap<String, BaselineFileInfo>,
|
||||
/// Internal filename -> external path as of current accumulated state (after applying all changes).
|
||||
/// This is where renames are tracked.
|
||||
temp_name_to_current_path: HashMap<String, PathBuf>,
|
||||
/// Cache of known git worktree roots to avoid repeated filesystem walks.
|
||||
git_root_cache: Vec<PathBuf>,
|
||||
}
|
||||
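A sketch of the lifecycle described above, matching how codex.rs drives the tracker in this diff (the wrapping function is hypothetical):

fn sketch_apply_patch_turn(changes: &HashMap<PathBuf, FileChange>) {
    let mut tracker = TurnDiffTracker::new();
    // 1. Snapshot baselines before apply_patch mutates anything on disk.
    tracker.on_patch_begin(changes);
    // 2. ... apply_patch writes the changes to the filesystem here ...
    // 3. Recompute the aggregated unified diff for the turn.
    if let Ok(Some(unified_diff)) = tracker.get_unified_diff() {
        println!("{unified_diff}");
    }
}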
|
||||
impl TurnDiffTracker {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// Front-run apply patch calls to track the starting contents of any modified files.
|
||||
/// - Creates an in-memory baseline snapshot for files that already exist on disk when first seen.
|
||||
/// - For additions, we intentionally do not create a baseline snapshot so that diffs are proper additions.
|
||||
/// - Also updates internal mappings for move/rename events.
|
||||
pub fn on_patch_begin(&mut self, changes: &HashMap<PathBuf, FileChange>) {
|
||||
for (path, change) in changes.iter() {
|
||||
// Ensure a stable internal filename exists for this external path.
|
||||
if !self.external_to_temp_name.contains_key(path) {
|
||||
let internal = Uuid::new_v4().to_string();
|
||||
self.external_to_temp_name
|
||||
.insert(path.clone(), internal.clone());
|
||||
self.temp_name_to_current_path
|
||||
.insert(internal.clone(), path.clone());
|
||||
|
||||
// If the file exists on disk now, snapshot as baseline; else leave missing to represent /dev/null.
|
||||
let baseline_file_info = if path.exists() {
|
||||
let mode = file_mode_for_path(path);
|
||||
let mode_val = mode.unwrap_or(FileMode::Regular);
|
||||
let content = blob_bytes(path, &mode_val).unwrap_or_default();
|
||||
let oid = if mode == Some(FileMode::Symlink) {
|
||||
format!("{:x}", git_blob_sha1_hex_bytes(&content))
|
||||
} else {
|
||||
self.git_blob_oid_for_path(path)
|
||||
.unwrap_or_else(|| format!("{:x}", git_blob_sha1_hex_bytes(&content)))
|
||||
};
|
||||
Some(BaselineFileInfo {
|
||||
path: path.clone(),
|
||||
content,
|
||||
mode: mode_val,
|
||||
oid,
|
||||
})
|
||||
} else {
|
||||
Some(BaselineFileInfo {
|
||||
path: path.clone(),
|
||||
content: vec![],
|
||||
mode: FileMode::Regular,
|
||||
oid: ZERO_OID.to_string(),
|
||||
})
|
||||
};
|
||||
|
||||
if let Some(baseline_file_info) = baseline_file_info {
|
||||
self.baseline_file_info
|
||||
.insert(internal.clone(), baseline_file_info);
|
||||
}
|
||||
}
|
||||
|
||||
// Track rename/move in current mapping if provided in an Update.
|
||||
if let FileChange::Update {
|
||||
move_path: Some(dest),
|
||||
..
|
||||
} = change
|
||||
{
|
||||
let uuid_filename = match self.external_to_temp_name.get(path) {
|
||||
Some(i) => i.clone(),
|
||||
None => {
|
||||
// This should be rare, but if we haven't mapped the source, create it with no baseline.
|
||||
let i = Uuid::new_v4().to_string();
|
||||
self.baseline_file_info.insert(
|
||||
i.clone(),
|
||||
BaselineFileInfo {
|
||||
path: path.clone(),
|
||||
content: vec![],
|
||||
mode: FileMode::Regular,
|
||||
oid: ZERO_OID.to_string(),
|
||||
},
|
||||
);
|
||||
i
|
||||
}
|
||||
};
|
||||
// Update current external mapping for temp file name.
|
||||
self.temp_name_to_current_path
|
||||
.insert(uuid_filename.clone(), dest.clone());
|
||||
// Update forward file_mapping: external current -> internal name.
|
||||
self.external_to_temp_name.remove(path);
|
||||
self.external_to_temp_name
|
||||
.insert(dest.clone(), uuid_filename);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn get_path_for_internal(&self, internal: &str) -> Option<PathBuf> {
|
||||
self.temp_name_to_current_path
|
||||
.get(internal)
|
||||
.cloned()
|
||||
.or_else(|| {
|
||||
self.baseline_file_info
|
||||
.get(internal)
|
||||
.map(|info| info.path.clone())
|
||||
})
|
||||
}
|
||||
|
||||
/// Find the git worktree root for a file/directory by walking up to the first ancestor containing a `.git` entry.
|
||||
/// Uses a simple cache of known roots and avoids negative-result caching for simplicity.
|
||||
fn find_git_root_cached(&mut self, start: &Path) -> Option<PathBuf> {
|
||||
let dir = if start.is_dir() {
|
||||
start
|
||||
} else {
|
||||
start.parent()?
|
||||
};
|
||||
|
||||
// Fast path: if any cached root is an ancestor of this path, use it.
|
||||
if let Some(root) = self
|
||||
.git_root_cache
|
||||
.iter()
|
||||
.find(|r| dir.starts_with(r))
|
||||
.cloned()
|
||||
{
|
||||
return Some(root);
|
||||
}
|
||||
|
||||
// Walk up to find a `.git` marker.
|
||||
let mut cur = dir.to_path_buf();
|
||||
loop {
|
||||
let git_marker = cur.join(".git");
|
||||
if git_marker.is_dir() || git_marker.is_file() {
|
||||
if !self.git_root_cache.iter().any(|r| r == &cur) {
|
||||
self.git_root_cache.push(cur.clone());
|
||||
}
|
||||
return Some(cur);
|
||||
}
|
||||
|
||||
// On Windows, avoid walking above the drive or UNC share root.
|
||||
#[cfg(windows)]
|
||||
{
|
||||
if is_windows_drive_or_unc_root(&cur) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(parent) = cur.parent() {
|
||||
cur = parent.to_path_buf();
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a display string for `path` relative to its git root if found, else absolute.
|
||||
fn relative_to_git_root_str(&mut self, path: &Path) -> String {
|
||||
let s = if let Some(root) = self.find_git_root_cached(path) {
|
||||
if let Ok(rel) = path.strip_prefix(&root) {
|
||||
rel.display().to_string()
|
||||
} else {
|
||||
path.display().to_string()
|
||||
}
|
||||
} else {
|
||||
path.display().to_string()
|
||||
};
|
||||
s.replace('\\', "/")
|
||||
}
|
||||
|
||||
/// Ask git to compute the blob SHA-1 for the file at `path` within its repository.
|
||||
/// Returns None if no repository is found or git invocation fails.
|
||||
fn git_blob_oid_for_path(&mut self, path: &Path) -> Option<String> {
|
||||
let root = self.find_git_root_cached(path)?;
|
||||
// Compute a path relative to the repo root for better portability across platforms.
|
||||
let rel = path.strip_prefix(&root).unwrap_or(path);
|
||||
let output = Command::new("git")
|
||||
.arg("-C")
|
||||
.arg(&root)
|
||||
.arg("hash-object")
|
||||
.arg("--")
|
||||
.arg(rel)
|
||||
.output()
|
||||
.ok()?;
|
||||
if !output.status.success() {
|
||||
return None;
|
||||
}
|
||||
let s = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
if s.len() == 40 { Some(s) } else { None }
|
||||
}
|
||||
|
||||
/// Recompute the aggregated unified diff by comparing the in-memory baseline snapshots
/// (taken before each file was first touched by apply_patch during this turn) with
/// the current repo state.
|
||||
pub fn get_unified_diff(&mut self) -> Result<Option<String>> {
|
||||
let mut aggregated = String::new();
|
||||
|
||||
// Compute diffs per tracked internal file in a stable order by external path.
|
||||
let mut baseline_file_names: Vec<String> =
|
||||
self.baseline_file_info.keys().cloned().collect();
|
||||
// Sort lexicographically by full repo-relative path to match git behavior.
|
||||
baseline_file_names.sort_by_key(|internal| {
|
||||
self.get_path_for_internal(internal)
|
||||
.map(|p| self.relative_to_git_root_str(&p))
|
||||
.unwrap_or_default()
|
||||
});
|
||||
|
||||
for internal in baseline_file_names {
|
||||
aggregated.push_str(self.get_file_diff(&internal).as_str());
|
||||
if !aggregated.ends_with('\n') {
|
||||
aggregated.push('\n');
|
||||
}
|
||||
}
|
||||
|
||||
if aggregated.trim().is_empty() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(aggregated))
|
||||
}
|
||||
}
|
||||
|
||||
fn get_file_diff(&mut self, internal_file_name: &str) -> String {
|
||||
let mut aggregated = String::new();
|
||||
|
||||
// Snapshot lightweight fields only.
|
||||
let (baseline_external_path, baseline_mode, left_oid) = {
|
||||
if let Some(info) = self.baseline_file_info.get(internal_file_name) {
|
||||
(info.path.clone(), info.mode, info.oid.clone())
|
||||
} else {
|
||||
(PathBuf::new(), FileMode::Regular, ZERO_OID.to_string())
|
||||
}
|
||||
};
|
||||
let current_external_path = match self.get_path_for_internal(internal_file_name) {
|
||||
Some(p) => p,
|
||||
None => return aggregated,
|
||||
};
|
||||
|
||||
let current_mode = file_mode_for_path(¤t_external_path).unwrap_or(FileMode::Regular);
|
||||
let right_bytes = blob_bytes(¤t_external_path, ¤t_mode);
|
||||
|
||||
// Compute displays with &mut self before borrowing any baseline content.
|
||||
let left_display = self.relative_to_git_root_str(&baseline_external_path);
|
||||
let right_display = self.relative_to_git_root_str(¤t_external_path);
|
||||
|
||||
// Compute right oid before borrowing baseline content.
|
||||
let right_oid = if let Some(b) = right_bytes.as_ref() {
|
||||
if current_mode == FileMode::Symlink {
|
||||
format!("{:x}", git_blob_sha1_hex_bytes(b))
|
||||
} else {
|
||||
self.git_blob_oid_for_path(¤t_external_path)
|
||||
.unwrap_or_else(|| format!("{:x}", git_blob_sha1_hex_bytes(b)))
|
||||
}
|
||||
} else {
|
||||
ZERO_OID.to_string()
|
||||
};
|
||||
|
||||
// Borrow baseline content only after all &mut self uses are done.
|
||||
let left_present = left_oid.as_str() != ZERO_OID;
|
||||
let left_bytes: Option<&[u8]> = if left_present {
|
||||
self.baseline_file_info
|
||||
.get(internal_file_name)
|
||||
.map(|i| i.content.as_slice())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Fast path: identical bytes or both missing.
|
||||
if left_bytes == right_bytes.as_deref() {
|
||||
return aggregated;
|
||||
}
|
||||
|
||||
aggregated.push_str(&format!("diff --git a/{left_display} b/{right_display}\n"));
|
||||
|
||||
let is_add = !left_present && right_bytes.is_some();
|
||||
let is_delete = left_present && right_bytes.is_none();
|
||||
|
||||
if is_add {
|
||||
aggregated.push_str(&format!("new file mode {current_mode}\n"));
|
||||
} else if is_delete {
|
||||
aggregated.push_str(&format!("deleted file mode {baseline_mode}\n"));
|
||||
} else if baseline_mode != current_mode {
|
||||
aggregated.push_str(&format!("old mode {baseline_mode}\n"));
|
||||
aggregated.push_str(&format!("new mode {current_mode}\n"));
|
||||
}
|
||||
|
||||
let left_text = left_bytes.and_then(|b| std::str::from_utf8(b).ok());
|
||||
let right_text = right_bytes
|
||||
.as_deref()
|
||||
.and_then(|b| std::str::from_utf8(b).ok());
|
||||
|
||||
let can_text_diff = matches!(
|
||||
(left_text, right_text, is_add, is_delete),
|
||||
(Some(_), Some(_), _, _) | (_, Some(_), true, _) | (Some(_), _, _, true)
|
||||
);
|
||||
|
||||
if can_text_diff {
|
||||
let l = left_text.unwrap_or("");
|
||||
let r = right_text.unwrap_or("");
|
||||
|
||||
aggregated.push_str(&format!("index {left_oid}..{right_oid}\n"));
|
||||
|
||||
let old_header = if left_present {
|
||||
format!("a/{left_display}")
|
||||
} else {
|
||||
DEV_NULL.to_string()
|
||||
};
|
||||
let new_header = if right_bytes.is_some() {
|
||||
format!("b/{right_display}")
|
||||
} else {
|
||||
DEV_NULL.to_string()
|
||||
};
|
||||
|
||||
let diff = similar::TextDiff::from_lines(l, r);
|
||||
let unified = diff
|
||||
.unified_diff()
|
||||
.context_radius(3)
|
||||
.header(&old_header, &new_header)
|
||||
.to_string();
|
||||
|
||||
aggregated.push_str(&unified);
|
||||
} else {
|
||||
aggregated.push_str(&format!("index {left_oid}..{right_oid}\n"));
|
||||
let old_header = if left_present {
|
||||
format!("a/{left_display}")
|
||||
} else {
|
||||
DEV_NULL.to_string()
|
||||
};
|
||||
let new_header = if right_bytes.is_some() {
|
||||
format!("b/{right_display}")
|
||||
} else {
|
||||
DEV_NULL.to_string()
|
||||
};
|
||||
aggregated.push_str(&format!("--- {old_header}\n"));
|
||||
aggregated.push_str(&format!("+++ {new_header}\n"));
|
||||
aggregated.push_str("Binary files differ\n");
|
||||
}
|
||||
aggregated
|
||||
}
|
||||
}
|
||||
|
||||
/// Compute the Git SHA-1 blob object ID for the given content (bytes).
fn git_blob_sha1_hex_bytes(data: &[u8]) -> Output<sha1::Sha1> {
    // Git blob hash is sha1 of: "blob <len>\0<data>"
    let header = format!("blob {}\0", data.len());
    use sha1::Digest;
    let mut hasher = sha1::Sha1::new();
    hasher.update(header.as_bytes());
    hasher.update(data);
    hasher.finalize()
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum FileMode {
    Regular,
    #[cfg(unix)]
    Executable,
    Symlink,
}

impl FileMode {
    fn as_str(&self) -> &'static str {
        match self {
            FileMode::Regular => "100644",
            #[cfg(unix)]
            FileMode::Executable => "100755",
            FileMode::Symlink => "120000",
        }
    }
}

impl std::fmt::Display for FileMode {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

#[cfg(unix)]
fn file_mode_for_path(path: &Path) -> Option<FileMode> {
    use std::os::unix::fs::PermissionsExt;
    let meta = fs::symlink_metadata(path).ok()?;
    let ft = meta.file_type();
    if ft.is_symlink() {
        return Some(FileMode::Symlink);
    }
    let mode = meta.permissions().mode();
    let is_exec = (mode & 0o111) != 0;
    Some(if is_exec {
        FileMode::Executable
    } else {
        FileMode::Regular
    })
}

#[cfg(not(unix))]
fn file_mode_for_path(_path: &Path) -> Option<FileMode> {
    // Default to non-executable on non-unix.
    Some(FileMode::Regular)
}

fn blob_bytes(path: &Path, mode: &FileMode) -> Option<Vec<u8>> {
    if path.exists() {
        let contents = if *mode == FileMode::Symlink {
            symlink_blob_bytes(path)
                .ok_or_else(|| anyhow!("failed to read symlink target for {}", path.display()))
        } else {
            fs::read(path)
                .with_context(|| format!("failed to read current file for diff {}", path.display()))
        };
        contents.ok()
    } else {
        None
    }
}

#[cfg(unix)]
fn symlink_blob_bytes(path: &Path) -> Option<Vec<u8>> {
    use std::os::unix::ffi::OsStrExt;
    let target = std::fs::read_link(path).ok()?;
    Some(target.as_os_str().as_bytes().to_vec())
}

#[cfg(not(unix))]
fn symlink_blob_bytes(_path: &Path) -> Option<Vec<u8>> {
    None
}

#[cfg(windows)]
fn is_windows_drive_or_unc_root(p: &std::path::Path) -> bool {
    use std::path::Component;
    let mut comps = p.components();
    matches!(
        (comps.next(), comps.next(), comps.next()),
        (Some(Component::Prefix(_)), Some(Component::RootDir), None)
    )
}
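
// Illustrative sanity check (an added example, assuming the surrounding helpers; not part of
// the original change): the `sha1("blob <len>\0<data>")` construction above reproduces the
// well-known git object ids, so it can be spot-checked against `git hash-object`.
#[test]
fn git_blob_sha1_matches_well_known_oids() {
    // The empty blob and a file containing "foo\n" are the classic reference values.
    assert_eq!(
        format!("{:x}", git_blob_sha1_hex_bytes(b"")),
        "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"
    );
    assert_eq!(
        format!("{:x}", git_blob_sha1_hex_bytes(b"foo\n")),
        "257cc5642cb1a054f08cc83f2d943e56fd3ebe99"
    );
}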
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#![allow(clippy::unwrap_used)]
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::tempdir;
|
||||
|
||||
/// Compute the Git SHA-1 blob object ID for the given content (string).
|
||||
/// This delegates to the bytes version to avoid UTF-8 lossy conversions here.
|
||||
fn git_blob_sha1_hex(data: &str) -> String {
|
||||
format!("{:x}", git_blob_sha1_hex_bytes(data.as_bytes()))
|
||||
}
|
||||
|
||||
fn normalize_diff_for_test(input: &str, root: &Path) -> String {
|
||||
let root_str = root.display().to_string().replace('\\', "/");
|
||||
let replaced = input.replace(&root_str, "<TMP>");
|
||||
// Split into blocks on lines starting with "diff --git ", sort blocks for determinism, and rejoin
|
||||
let mut blocks: Vec<String> = Vec::new();
|
||||
let mut current = String::new();
|
||||
for line in replaced.lines() {
|
||||
if line.starts_with("diff --git ") && !current.is_empty() {
|
||||
blocks.push(current);
|
||||
current = String::new();
|
||||
}
|
||||
if !current.is_empty() {
|
||||
current.push('\n');
|
||||
}
|
||||
current.push_str(line);
|
||||
}
|
||||
if !current.is_empty() {
|
||||
blocks.push(current);
|
||||
}
|
||||
blocks.sort();
|
||||
let mut out = blocks.join("\n");
|
||||
if !out.ends_with('\n') {
|
||||
out.push('\n');
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn accumulates_add_and_update() {
|
||||
let mut acc = TurnDiffTracker::new();
|
||||
|
||||
let dir = tempdir().unwrap();
|
||||
let file = dir.path().join("a.txt");
|
||||
|
||||
// First patch: add file (baseline should be /dev/null).
|
||||
let add_changes = HashMap::from([(
|
||||
file.clone(),
|
||||
FileChange::Add {
|
||||
content: "foo\n".to_string(),
|
||||
},
|
||||
)]);
|
||||
acc.on_patch_begin(&add_changes);
|
||||
|
||||
// Simulate apply: create the file on disk.
|
||||
fs::write(&file, "foo\n").unwrap();
|
||||
let first = acc.get_unified_diff().unwrap().unwrap();
|
||||
let first = normalize_diff_for_test(&first, dir.path());
|
||||
let expected_first = {
|
||||
let mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
|
||||
let right_oid = git_blob_sha1_hex("foo\n");
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/a.txt b/<TMP>/a.txt
|
||||
new file mode {mode}
|
||||
index {ZERO_OID}..{right_oid}
|
||||
--- {DEV_NULL}
|
||||
+++ b/<TMP>/a.txt
|
||||
@@ -0,0 +1 @@
|
||||
+foo
|
||||
"#,
|
||||
)
|
||||
};
|
||||
assert_eq!(first, expected_first);
|
||||
|
||||
// Second patch: update the file on disk.
|
||||
let update_changes = HashMap::from([(
|
||||
file.clone(),
|
||||
FileChange::Update {
|
||||
unified_diff: "".to_owned(),
|
||||
move_path: None,
|
||||
},
|
||||
)]);
|
||||
acc.on_patch_begin(&update_changes);
|
||||
|
||||
// Simulate apply: append a new line.
|
||||
fs::write(&file, "foo\nbar\n").unwrap();
|
||||
let combined = acc.get_unified_diff().unwrap().unwrap();
|
||||
let combined = normalize_diff_for_test(&combined, dir.path());
|
||||
let expected_combined = {
|
||||
let mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
|
||||
let right_oid = git_blob_sha1_hex("foo\nbar\n");
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/a.txt b/<TMP>/a.txt
|
||||
new file mode {mode}
|
||||
index {ZERO_OID}..{right_oid}
|
||||
--- {DEV_NULL}
|
||||
+++ b/<TMP>/a.txt
|
||||
@@ -0,0 +1,2 @@
|
||||
+foo
|
||||
+bar
|
||||
"#,
|
||||
)
|
||||
};
|
||||
assert_eq!(combined, expected_combined);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn accumulates_delete() {
|
||||
let dir = tempdir().unwrap();
|
||||
let file = dir.path().join("b.txt");
|
||||
fs::write(&file, "x\n").unwrap();
|
||||
|
||||
let mut acc = TurnDiffTracker::new();
|
||||
let del_changes = HashMap::from([(file.clone(), FileChange::Delete)]);
|
||||
acc.on_patch_begin(&del_changes);
|
||||
|
||||
// Simulate apply: delete the file from disk.
|
||||
let baseline_mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
|
||||
fs::remove_file(&file).unwrap();
|
||||
let diff = acc.get_unified_diff().unwrap().unwrap();
|
||||
let diff = normalize_diff_for_test(&diff, dir.path());
|
||||
let expected = {
|
||||
let left_oid = git_blob_sha1_hex("x\n");
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/b.txt b/<TMP>/b.txt
|
||||
deleted file mode {baseline_mode}
|
||||
index {left_oid}..{ZERO_OID}
|
||||
--- a/<TMP>/b.txt
|
||||
+++ {DEV_NULL}
|
||||
@@ -1 +0,0 @@
|
||||
-x
|
||||
"#,
|
||||
)
|
||||
};
|
||||
assert_eq!(diff, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn accumulates_move_and_update() {
|
||||
let dir = tempdir().unwrap();
|
||||
let src = dir.path().join("src.txt");
|
||||
let dest = dir.path().join("dst.txt");
|
||||
fs::write(&src, "line\n").unwrap();
|
||||
|
||||
let mut acc = TurnDiffTracker::new();
|
||||
let mv_changes = HashMap::from([(
|
||||
src.clone(),
|
||||
FileChange::Update {
|
||||
unified_diff: "".to_owned(),
|
||||
move_path: Some(dest.clone()),
|
||||
},
|
||||
)]);
|
||||
acc.on_patch_begin(&mv_changes);
|
||||
|
||||
// Simulate apply: move and update content.
|
||||
fs::rename(&src, &dest).unwrap();
|
||||
fs::write(&dest, "line2\n").unwrap();
|
||||
|
||||
let out = acc.get_unified_diff().unwrap().unwrap();
|
||||
let out = normalize_diff_for_test(&out, dir.path());
|
||||
let expected = {
|
||||
let left_oid = git_blob_sha1_hex("line\n");
|
||||
let right_oid = git_blob_sha1_hex("line2\n");
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/src.txt b/<TMP>/dst.txt
|
||||
index {left_oid}..{right_oid}
|
||||
--- a/<TMP>/src.txt
|
||||
+++ b/<TMP>/dst.txt
|
||||
@@ -1 +1 @@
|
||||
-line
|
||||
+line2
|
||||
"#
|
||||
)
|
||||
};
|
||||
assert_eq!(out, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn move_without_change_yields_no_diff() {
|
||||
let dir = tempdir().unwrap();
|
||||
let src = dir.path().join("moved.txt");
|
||||
let dest = dir.path().join("renamed.txt");
|
||||
fs::write(&src, "same\n").unwrap();
|
||||
|
||||
let mut acc = TurnDiffTracker::new();
|
||||
let mv_changes = HashMap::from([(
|
||||
src.clone(),
|
||||
FileChange::Update {
|
||||
unified_diff: "".to_owned(),
|
||||
move_path: Some(dest.clone()),
|
||||
},
|
||||
)]);
|
||||
acc.on_patch_begin(&mv_changes);
|
||||
|
||||
// Simulate apply: move only, no content change.
|
||||
fs::rename(&src, &dest).unwrap();
|
||||
|
||||
let diff = acc.get_unified_diff().unwrap();
|
||||
assert_eq!(diff, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn move_declared_but_file_only_appears_at_dest_is_add() {
|
||||
let dir = tempdir().unwrap();
|
||||
let src = dir.path().join("src.txt");
|
||||
let dest = dir.path().join("dest.txt");
|
||||
let mut acc = TurnDiffTracker::new();
|
||||
let mv = HashMap::from([(
|
||||
src.clone(),
|
||||
FileChange::Update {
|
||||
unified_diff: "".into(),
|
||||
move_path: Some(dest.clone()),
|
||||
},
|
||||
)]);
|
||||
acc.on_patch_begin(&mv);
|
||||
// No file existed initially; create only dest
|
||||
fs::write(&dest, "hello\n").unwrap();
|
||||
let diff = acc.get_unified_diff().unwrap().unwrap();
|
||||
let diff = normalize_diff_for_test(&diff, dir.path());
|
||||
let expected = {
|
||||
let mode = file_mode_for_path(&dest).unwrap_or(FileMode::Regular);
|
||||
let right_oid = git_blob_sha1_hex("hello\n");
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/src.txt b/<TMP>/dest.txt
|
||||
new file mode {mode}
|
||||
index {ZERO_OID}..{right_oid}
|
||||
--- {DEV_NULL}
|
||||
+++ b/<TMP>/dest.txt
|
||||
@@ -0,0 +1 @@
|
||||
+hello
|
||||
"#,
|
||||
)
|
||||
};
|
||||
assert_eq!(diff, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn update_persists_across_new_baseline_for_new_file() {
|
||||
let dir = tempdir().unwrap();
|
||||
let a = dir.path().join("a.txt");
|
||||
let b = dir.path().join("b.txt");
|
||||
fs::write(&a, "foo\n").unwrap();
|
||||
fs::write(&b, "z\n").unwrap();
|
||||
|
||||
let mut acc = TurnDiffTracker::new();
|
||||
|
||||
// First: update existing a.txt (baseline snapshot is created for a).
|
||||
let update_a = HashMap::from([(
|
||||
a.clone(),
|
||||
FileChange::Update {
|
||||
unified_diff: "".to_owned(),
|
||||
move_path: None,
|
||||
},
|
||||
)]);
|
||||
acc.on_patch_begin(&update_a);
|
||||
// Simulate apply: modify a.txt on disk.
|
||||
fs::write(&a, "foo\nbar\n").unwrap();
|
||||
let first = acc.get_unified_diff().unwrap().unwrap();
|
||||
let first = normalize_diff_for_test(&first, dir.path());
|
||||
let expected_first = {
|
||||
let left_oid = git_blob_sha1_hex("foo\n");
|
||||
let right_oid = git_blob_sha1_hex("foo\nbar\n");
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/a.txt b/<TMP>/a.txt
|
||||
index {left_oid}..{right_oid}
|
||||
--- a/<TMP>/a.txt
|
||||
+++ b/<TMP>/a.txt
|
||||
@@ -1 +1,2 @@
|
||||
foo
|
||||
+bar
|
||||
"#
|
||||
)
|
||||
};
|
||||
assert_eq!(first, expected_first);
|
||||
|
||||
// Next: introduce a brand-new path b.txt into baseline snapshots via a delete change.
|
||||
let del_b = HashMap::from([(b.clone(), FileChange::Delete)]);
|
||||
acc.on_patch_begin(&del_b);
|
||||
// Simulate apply: delete b.txt.
|
||||
let baseline_mode = file_mode_for_path(&b).unwrap_or(FileMode::Regular);
|
||||
fs::remove_file(&b).unwrap();
|
||||
|
||||
let combined = acc.get_unified_diff().unwrap().unwrap();
|
||||
let combined = normalize_diff_for_test(&combined, dir.path());
|
||||
let expected = {
|
||||
let left_oid_a = git_blob_sha1_hex("foo\n");
|
||||
let right_oid_a = git_blob_sha1_hex("foo\nbar\n");
|
||||
let left_oid_b = git_blob_sha1_hex("z\n");
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/a.txt b/<TMP>/a.txt
|
||||
index {left_oid_a}..{right_oid_a}
|
||||
--- a/<TMP>/a.txt
|
||||
+++ b/<TMP>/a.txt
|
||||
@@ -1 +1,2 @@
|
||||
foo
|
||||
+bar
|
||||
diff --git a/<TMP>/b.txt b/<TMP>/b.txt
|
||||
deleted file mode {baseline_mode}
|
||||
index {left_oid_b}..{ZERO_OID}
|
||||
--- a/<TMP>/b.txt
|
||||
+++ {DEV_NULL}
|
||||
@@ -1 +0,0 @@
|
||||
-z
|
||||
"#,
|
||||
)
|
||||
};
|
||||
assert_eq!(combined, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn binary_files_differ_update() {
|
||||
let dir = tempdir().unwrap();
|
||||
let file = dir.path().join("bin.dat");
|
||||
|
||||
// Initial non-UTF8 bytes
|
||||
let left_bytes: Vec<u8> = vec![0xff, 0xfe, 0xfd, 0x00];
|
||||
// Updated non-UTF8 bytes
|
||||
let right_bytes: Vec<u8> = vec![0x01, 0x02, 0x03, 0x00];
|
||||
|
||||
fs::write(&file, &left_bytes).unwrap();
|
||||
|
||||
let mut acc = TurnDiffTracker::new();
|
||||
let update_changes = HashMap::from([(
|
||||
file.clone(),
|
||||
FileChange::Update {
|
||||
unified_diff: "".to_owned(),
|
||||
move_path: None,
|
||||
},
|
||||
)]);
|
||||
acc.on_patch_begin(&update_changes);
|
||||
|
||||
// Apply update on disk
|
||||
fs::write(&file, &right_bytes).unwrap();
|
||||
|
||||
let diff = acc.get_unified_diff().unwrap().unwrap();
|
||||
let diff = normalize_diff_for_test(&diff, dir.path());
|
||||
let expected = {
|
||||
let left_oid = format!("{:x}", git_blob_sha1_hex_bytes(&left_bytes));
|
||||
let right_oid = format!("{:x}", git_blob_sha1_hex_bytes(&right_bytes));
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/bin.dat b/<TMP>/bin.dat
|
||||
index {left_oid}..{right_oid}
|
||||
--- a/<TMP>/bin.dat
|
||||
+++ b/<TMP>/bin.dat
|
||||
Binary files differ
|
||||
"#
|
||||
)
|
||||
};
|
||||
assert_eq!(diff, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filenames_with_spaces_add_and_update() {
|
||||
let mut acc = TurnDiffTracker::new();
|
||||
|
||||
let dir = tempdir().unwrap();
|
||||
let file = dir.path().join("name with spaces.txt");
|
||||
|
||||
// First patch: add file (baseline should be /dev/null).
|
||||
let add_changes = HashMap::from([(
|
||||
file.clone(),
|
||||
FileChange::Add {
|
||||
content: "foo\n".to_string(),
|
||||
},
|
||||
)]);
|
||||
acc.on_patch_begin(&add_changes);
|
||||
|
||||
// Simulate apply: create the file on disk.
|
||||
fs::write(&file, "foo\n").unwrap();
|
||||
let first = acc.get_unified_diff().unwrap().unwrap();
|
||||
let first = normalize_diff_for_test(&first, dir.path());
|
||||
let expected_first = {
|
||||
let mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
|
||||
let right_oid = git_blob_sha1_hex("foo\n");
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/name with spaces.txt b/<TMP>/name with spaces.txt
|
||||
new file mode {mode}
|
||||
index {ZERO_OID}..{right_oid}
|
||||
--- {DEV_NULL}
|
||||
+++ b/<TMP>/name with spaces.txt
|
||||
@@ -0,0 +1 @@
|
||||
+foo
|
||||
"#,
|
||||
)
|
||||
};
|
||||
assert_eq!(first, expected_first);
|
||||
|
||||
// Second patch: update the file on disk.
|
||||
let update_changes = HashMap::from([(
|
||||
file.clone(),
|
||||
FileChange::Update {
|
||||
unified_diff: "".to_owned(),
|
||||
move_path: None,
|
||||
},
|
||||
)]);
|
||||
acc.on_patch_begin(&update_changes);
|
||||
|
||||
// Simulate apply: append a new line with a space.
|
||||
fs::write(&file, "foo\nbar baz\n").unwrap();
|
||||
let combined = acc.get_unified_diff().unwrap().unwrap();
|
||||
let combined = normalize_diff_for_test(&combined, dir.path());
|
||||
let expected_combined = {
|
||||
let mode = file_mode_for_path(&file).unwrap_or(FileMode::Regular);
|
||||
let right_oid = git_blob_sha1_hex("foo\nbar baz\n");
|
||||
format!(
|
||||
r#"diff --git a/<TMP>/name with spaces.txt b/<TMP>/name with spaces.txt
|
||||
new file mode {mode}
|
||||
index {ZERO_OID}..{right_oid}
|
||||
--- {DEV_NULL}
|
||||
+++ b/<TMP>/name with spaces.txt
|
||||
@@ -0,0 +1,2 @@
|
||||
+foo
|
||||
+bar baz
|
||||
"#,
|
||||
)
|
||||
};
|
||||
assert_eq!(combined, expected_combined);
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ use chrono::Utc;
|
||||
use codex_core::Codex;
|
||||
use codex_core::CodexSpawnOk;
|
||||
use codex_core::ModelProviderInfo;
|
||||
use codex_core::WireApi;
|
||||
use codex_core::built_in_model_providers;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::InputItem;
|
||||
@@ -21,8 +22,10 @@ use tempfile::TempDir;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::header_regex;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
use wiremock::matchers::query_param;
|
||||
|
||||
/// Build minimal SSE stream with completed marker using the JSON fixture.
|
||||
fn sse_completed(id: &str) -> String {
|
||||
@@ -373,9 +376,90 @@ async fn includes_user_instructions_message_in_request() {
|
||||
request_body["input"][0]["content"][0]["text"]
|
||||
.as_str()
|
||||
.unwrap()
|
||||
.starts_with("be nice")
|
||||
.starts_with("<user_instructions>\n\nbe nice")
|
||||
);
|
||||
assert!(
|
||||
request_body["input"][0]["content"][0]["text"]
|
||||
.as_str()
|
||||
.unwrap()
|
||||
.ends_with("</user_instructions>")
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn azure_overrides_assign_properties_used_for_responses_url() {
|
||||
#![allow(clippy::unwrap_used)]
|
||||
|
||||
let existing_env_var_with_random_value = if cfg!(windows) { "USERNAME" } else { "USER" };
|
||||
|
||||
// Mock server
|
||||
let server = MockServer::start().await;
|
||||
|
||||
// First request – must NOT include `previous_response_id`.
|
||||
let first = ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "text/event-stream")
|
||||
.set_body_raw(sse_completed("resp1"), "text/event-stream");
|
||||
|
||||
// Expect POST to /openai/responses with api-version query param
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/openai/responses"))
|
||||
.and(query_param("api-version", "2025-04-01-preview"))
|
||||
.and(header_regex("Custom-Header", "Value"))
|
||||
.and(header_regex(
|
||||
"Authorization",
|
||||
format!(
|
||||
"Bearer {}",
|
||||
std::env::var(existing_env_var_with_random_value).unwrap()
|
||||
)
|
||||
.as_str(),
|
||||
))
|
||||
.respond_with(first)
|
||||
.expect(1)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let provider = ModelProviderInfo {
|
||||
name: "custom".to_string(),
|
||||
base_url: Some(format!("{}/openai", server.uri())),
|
||||
// Reuse the existing environment variable to avoid using unsafe code
|
||||
env_key: Some(existing_env_var_with_random_value.to_string()),
|
||||
query_params: Some(std::collections::HashMap::from([(
|
||||
"api-version".to_string(),
|
||||
"2025-04-01-preview".to_string(),
|
||||
)])),
|
||||
env_key_instructions: None,
|
||||
wire_api: WireApi::Responses,
|
||||
http_headers: Some(std::collections::HashMap::from([(
|
||||
"Custom-Header".to_string(),
|
||||
"Value".to_string(),
|
||||
)])),
|
||||
env_http_headers: None,
|
||||
request_max_retries: None,
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_auth: false,
|
||||
};
|
||||
|
||||
// Init session
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home);
|
||||
config.model_provider = provider;
|
||||
|
||||
let ctrl_c = std::sync::Arc::new(tokio::sync::Notify::new());
|
||||
let CodexSpawnOk { codex, .. } = Codex::spawn(config, None, ctrl_c.clone()).await.unwrap();
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: "hello".into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
}
|
||||
|
||||
fn auth_from_token(id_token: String) -> CodexAuth {
|
||||
CodexAuth::new(
|
||||
None,
|
||||
|
||||
codex-rs/core/tests/exec.rs (new file, 69 lines)
@@ -0,0 +1,69 @@
#![cfg(target_os = "macos")]
#![expect(clippy::expect_used)]

use std::collections::HashMap;
use std::sync::Arc;

use codex_core::exec::ExecParams;
use codex_core::exec::SandboxType;
use codex_core::exec::process_exec_tool_call;
use codex_core::protocol::SandboxPolicy;
use codex_core::spawn::CODEX_SANDBOX_ENV_VAR;
use tempfile::TempDir;
use tokio::sync::Notify;

use codex_core::get_platform_sandbox;

async fn run_test_cmd(tmp: TempDir, cmd: Vec<&str>, should_be_ok: bool) {
    if std::env::var(CODEX_SANDBOX_ENV_VAR) == Ok("seatbelt".to_string()) {
        eprintln!("{CODEX_SANDBOX_ENV_VAR} is set to 'seatbelt', skipping test.");
        return;
    }

    let sandbox_type = get_platform_sandbox().expect("should be able to get sandbox type");
    assert_eq!(sandbox_type, SandboxType::MacosSeatbelt);

    let params = ExecParams {
        command: cmd.iter().map(|s| s.to_string()).collect(),
        cwd: tmp.path().to_path_buf(),
        timeout_ms: Some(1000),
        env: HashMap::new(),
    };

    let ctrl_c = Arc::new(Notify::new());
    let policy = SandboxPolicy::new_read_only_policy();

    let result = process_exec_tool_call(params, sandbox_type, ctrl_c, &policy, &None, None).await;

    assert!(result.is_ok() == should_be_ok);
}

/// Command succeeds with exit code 0 normally
#[tokio::test]
async fn exit_code_0_succeeds() {
    let tmp = TempDir::new().expect("should be able to create temp dir");
    let cmd = vec!["echo", "hello"];

    run_test_cmd(tmp, cmd, true).await
}

/// Command not found returns exit code 127, this is not considered a sandbox error
#[tokio::test]
async fn exit_command_not_found_is_ok() {
    let tmp = TempDir::new().expect("should be able to create temp dir");
    let cmd = vec!["/bin/bash", "-c", "nonexistent_command_12345"];
    run_test_cmd(tmp, cmd, true).await
}

/// Writing a file fails and should be considered a sandbox error
#[tokio::test]
async fn write_file_fails_as_sandbox_error() {
    let tmp = TempDir::new().expect("should be able to create temp dir");
    let path = tmp.path().join("test.txt");
    let cmd = vec![
        "/usr/bin/touch",
        path.to_str().expect("should be able to get path"),
    ];

    run_test_cmd(tmp, cmd, false).await;
}
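
// Usage note (an assumption, not stated in the diff): these seatbelt tests only compile on
// macOS and would typically be run with `cargo test -p codex-core --test exec`; they skip
// themselves when CODEX_SANDBOX_ENV_VAR is already set to "seatbelt", i.e. when the test
// runner itself is executing inside the sandbox.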
||||
@@ -177,8 +177,7 @@ async fn live_shell_function_call() {
|
||||
match ev.msg {
|
||||
EventMsg::ExecCommandBegin(codex_core::protocol::ExecCommandBeginEvent {
|
||||
command,
|
||||
call_id: _,
|
||||
cwd: _,
|
||||
..
|
||||
}) => {
|
||||
assert_eq!(command, vec!["echo", MARKER]);
|
||||
saw_begin = true;
|
||||
@@ -186,8 +185,7 @@ async fn live_shell_function_call() {
|
||||
EventMsg::ExecCommandEnd(codex_core::protocol::ExecCommandEndEvent {
|
||||
stdout,
|
||||
exit_code,
|
||||
call_id: _,
|
||||
stderr: _,
|
||||
..
|
||||
}) => {
|
||||
assert_eq!(exit_code, 0, "echo returned non‑zero exit code");
|
||||
assert!(stdout.contains(MARKER));
|
||||
|
||||
@@ -3,7 +3,6 @@ use std::path::Path;
|
||||
use codex_common::summarize_sandbox_policy;
|
||||
use codex_core::WireApi;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::model_supports_reasoning_summaries;
|
||||
use codex_core::protocol::Event;
|
||||
|
||||
pub(crate) enum CodexStatus {
|
||||
@@ -29,7 +28,7 @@ pub(crate) fn create_config_summary_entries(config: &Config) -> Vec<(&'static st
|
||||
("sandbox", summarize_sandbox_policy(&config.sandbox_policy)),
|
||||
];
|
||||
if config.model_provider.wire_api == WireApi::Responses
|
||||
&& model_supports_reasoning_summaries(config)
|
||||
&& config.model_family.supports_reasoning_summaries
|
||||
{
|
||||
entries.push((
|
||||
"reasoning effort",
|
||||
@@ -44,20 +43,14 @@ pub(crate) fn create_config_summary_entries(config: &Config) -> Vec<(&'static st
|
||||
entries
|
||||
}
|
||||
|
||||
pub(crate) fn handle_last_message(
|
||||
last_agent_message: Option<&str>,
|
||||
last_message_path: Option<&Path>,
|
||||
) {
|
||||
match (last_message_path, last_agent_message) {
|
||||
(Some(path), Some(msg)) => write_last_message_file(msg, Some(path)),
|
||||
(Some(path), None) => {
|
||||
write_last_message_file("", Some(path));
|
||||
eprintln!(
|
||||
"Warning: no last agent message; wrote empty content to {}",
|
||||
path.display()
|
||||
);
|
||||
}
|
||||
(None, _) => eprintln!("Warning: no file to write last message to."),
|
||||
pub(crate) fn handle_last_message(last_agent_message: Option<&str>, output_file: &Path) {
|
||||
let message = last_agent_message.unwrap_or_default();
|
||||
write_last_message_file(message, Some(output_file));
|
||||
if last_agent_message.is_none() {
|
||||
eprintln!(
|
||||
"Warning: no last agent message; wrote empty content to {}",
|
||||
output_file.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -5,6 +5,8 @@ use codex_core::plan_tool::UpdatePlanArgs;
|
||||
use codex_core::protocol::AgentMessageDeltaEvent;
|
||||
use codex_core::protocol::AgentMessageEvent;
|
||||
use codex_core::protocol::AgentReasoningDeltaEvent;
|
||||
use codex_core::protocol::AgentReasoningRawContentDeltaEvent;
|
||||
use codex_core::protocol::AgentReasoningRawContentEvent;
|
||||
use codex_core::protocol::BackgroundEventEvent;
|
||||
use codex_core::protocol::ErrorEvent;
|
||||
use codex_core::protocol::Event;
|
||||
@@ -20,6 +22,7 @@ use codex_core::protocol::PatchApplyEndEvent;
|
||||
use codex_core::protocol::SessionConfiguredEvent;
|
||||
use codex_core::protocol::TaskCompleteEvent;
|
||||
use codex_core::protocol::TokenUsage;
|
||||
use codex_core::protocol::TurnDiffEvent;
|
||||
use owo_colors::OwoColorize;
|
||||
use owo_colors::Style;
|
||||
use shlex::try_join;
|
||||
@@ -54,8 +57,10 @@ pub(crate) struct EventProcessorWithHumanOutput {
|
||||
|
||||
/// Whether to include `AgentReasoning` events in the output.
|
||||
show_agent_reasoning: bool,
|
||||
show_raw_agent_reasoning: bool,
|
||||
answer_started: bool,
|
||||
reasoning_started: bool,
|
||||
raw_reasoning_started: bool,
|
||||
last_message_path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
@@ -80,8 +85,10 @@ impl EventProcessorWithHumanOutput {
|
||||
green: Style::new().green(),
|
||||
cyan: Style::new().cyan(),
|
||||
show_agent_reasoning: !config.hide_agent_reasoning,
|
||||
show_raw_agent_reasoning: config.show_raw_agent_reasoning,
|
||||
answer_started: false,
|
||||
reasoning_started: false,
|
||||
raw_reasoning_started: false,
|
||||
last_message_path,
|
||||
}
|
||||
} else {
|
||||
@@ -96,8 +103,10 @@ impl EventProcessorWithHumanOutput {
|
||||
green: Style::new(),
|
||||
cyan: Style::new(),
|
||||
show_agent_reasoning: !config.hide_agent_reasoning,
|
||||
show_raw_agent_reasoning: config.show_raw_agent_reasoning,
|
||||
answer_started: false,
|
||||
reasoning_started: false,
|
||||
raw_reasoning_started: false,
|
||||
last_message_path,
|
||||
}
|
||||
}
|
||||
@@ -106,7 +115,6 @@ impl EventProcessorWithHumanOutput {
|
||||
|
||||
struct ExecCommandBegin {
|
||||
command: Vec<String>,
|
||||
start_time: Instant,
|
||||
}
|
||||
|
||||
struct PatchApplyBegin {
|
||||
@@ -170,10 +178,9 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
// Ignore.
|
||||
}
|
||||
EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
|
||||
handle_last_message(
|
||||
last_agent_message.as_deref(),
|
||||
self.last_message_path.as_deref(),
|
||||
);
|
||||
if let Some(output_file) = self.last_message_path.as_deref() {
|
||||
handle_last_message(last_agent_message.as_deref(), output_file);
|
||||
}
|
||||
return CodexStatus::InitiateShutdown;
|
||||
}
|
||||
EventMsg::TokenCount(TokenUsage { total_tokens, .. }) => {
|
||||
@@ -204,6 +211,32 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
#[allow(clippy::expect_used)]
|
||||
std::io::stdout().flush().expect("could not flush stdout");
|
||||
}
|
||||
EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent { text }) => {
|
||||
if !self.show_raw_agent_reasoning {
|
||||
return CodexStatus::Running;
|
||||
}
|
||||
if !self.raw_reasoning_started {
|
||||
print!("{text}");
|
||||
#[allow(clippy::expect_used)]
|
||||
std::io::stdout().flush().expect("could not flush stdout");
|
||||
} else {
|
||||
println!();
|
||||
self.raw_reasoning_started = false;
|
||||
}
|
||||
}
|
||||
EventMsg::AgentReasoningRawContentDelta(AgentReasoningRawContentDeltaEvent {
|
||||
delta,
|
||||
}) => {
|
||||
if !self.show_raw_agent_reasoning {
|
||||
return CodexStatus::Running;
|
||||
}
|
||||
if !self.raw_reasoning_started {
|
||||
self.raw_reasoning_started = true;
|
||||
}
|
||||
print!("{delta}");
|
||||
#[allow(clippy::expect_used)]
|
||||
std::io::stdout().flush().expect("could not flush stdout");
|
||||
}
|
||||
EventMsg::AgentMessage(AgentMessageEvent { message }) => {
|
||||
// if answer_started is false, this means we haven't received any
|
||||
// delta. Thus, we need to print the message as a new answer.
|
||||
@@ -228,7 +261,6 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
call_id.clone(),
|
||||
ExecCommandBegin {
|
||||
command: command.clone(),
|
||||
start_time: Instant::now(),
|
||||
},
|
||||
);
|
||||
ts_println!(
|
||||
@@ -244,16 +276,14 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
call_id,
|
||||
stdout,
|
||||
stderr,
|
||||
duration,
|
||||
exit_code,
|
||||
}) => {
|
||||
let exec_command = self.call_id_to_command.remove(&call_id);
|
||||
let (duration, call) = if let Some(ExecCommandBegin {
|
||||
command,
|
||||
start_time,
|
||||
}) = exec_command
|
||||
let (duration, call) = if let Some(ExecCommandBegin { command, .. }) = exec_command
|
||||
{
|
||||
(
|
||||
format!(" in {}", format_elapsed(start_time)),
|
||||
format!(" in {}", format_duration(duration)),
|
||||
format!("{}", escape_command(&command).style(self.bold)),
|
||||
)
|
||||
} else {
|
||||
@@ -403,6 +433,7 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
stdout,
|
||||
stderr,
|
||||
success,
|
||||
..
|
||||
}) => {
|
||||
let patch_begin = self.call_id_to_patch.remove(&call_id);
|
||||
|
||||
@@ -432,6 +463,10 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
println!("{}", line.style(self.dimmed));
|
||||
}
|
||||
}
|
||||
EventMsg::TurnDiff(TurnDiffEvent { unified_diff }) => {
|
||||
ts_println!(self, "{}", "turn diff:".style(self.magenta));
|
||||
println!("{unified_diff}");
|
||||
}
|
||||
EventMsg::ExecApprovalRequest(_) => {
|
||||
// Should we exit?
|
||||
}
|
||||
|
||||
@@ -46,10 +46,9 @@ impl EventProcessor for EventProcessorWithJsonOutput {
|
||||
CodexStatus::Running
|
||||
}
|
||||
EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }) => {
|
||||
handle_last_message(
|
||||
last_agent_message.as_deref(),
|
||||
self.last_message_path.as_deref(),
|
||||
);
|
||||
if let Some(output_file) = self.last_message_path.as_deref() {
|
||||
handle_last_message(last_agent_message.as_deref(), output_file);
|
||||
}
|
||||
CodexStatus::InitiateShutdown
|
||||
}
|
||||
EventMsg::ShutdownComplete => CodexStatus::Shutdown,
|
||||
|
||||
@@ -26,7 +26,7 @@ multimap = "0.10.0"
|
||||
path-absolutize = "3.1.1"
|
||||
regex-lite = "0.1"
|
||||
serde = { version = "1.0.194", features = ["derive"] }
|
||||
serde_json = "1.0.110"
|
||||
serde_json = "1.0.142"
|
||||
serde_with = { version = "3", features = ["macros"] }
|
||||
|
||||
[dev-dependencies]
|
||||
|
||||
@@ -17,5 +17,5 @@ clap = { version = "4", features = ["derive"] }
|
||||
ignore = "0.4.23"
|
||||
nucleo-matcher = "0.3.1"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1.0.110"
|
||||
serde_json = "1.0.142"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
|
||||
@@ -22,7 +22,7 @@ const SOURCE_FOR_PYTHON_SERVER: &str = include_str!("./login_with_chatgpt.py");
|
||||
const CLIENT_ID: &str = "app_EMoamEEZ73f0CkXaXp7hrann";
|
||||
pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Copy)]
|
||||
pub enum AuthMode {
|
||||
ApiKey,
|
||||
ChatGPT,
|
||||
|
||||
@@ -458,6 +458,7 @@ class _ApiKeyHTTPServer(http.server.HTTPServer):
|
||||
"code_challenge": self.pkce.code_challenge,
|
||||
"code_challenge_method": "S256",
|
||||
"id_token_add_organizations": "true",
|
||||
"codex_cli_simplified_flow": "true",
|
||||
"state": self.state,
|
||||
}
|
||||
return f"{self.issuer}/oauth/authorize?" + urllib.parse.urlencode(params)
|
||||
|
||||
@@ -31,6 +31,7 @@ tokio = { version = "1", features = [
|
||||
"rt-multi-thread",
|
||||
"signal",
|
||||
] }
|
||||
tokio-util = { version = "0.7" }
|
||||
toml = "0.9"
|
||||
tracing = { version = "0.1.41", features = ["log"] }
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
|
||||
|
||||
@@ -15,7 +15,6 @@ use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::ExecApprovalRequestEvent;
|
||||
use codex_core::protocol::InputItem;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::Submission;
|
||||
use codex_core::protocol::TaskCompleteEvent;
|
||||
use mcp_types::CallToolResult;
|
||||
use mcp_types::ContentBlock;
|
||||
@@ -79,27 +78,18 @@ pub async fn run_codex_tool_session(
|
||||
)
|
||||
.await;
|
||||
|
||||
// Use the original MCP request ID as the `sub_id` for the Codex submission so that
|
||||
// any events emitted for this tool-call can be correlated with the
|
||||
// originating `tools/call` request.
|
||||
let sub_id = match &id {
|
||||
RequestId::String(s) => s.clone(),
|
||||
RequestId::Integer(n) => n.to_string(),
|
||||
};
|
||||
running_requests_id_to_codex_uuid
|
||||
.lock()
|
||||
.await
|
||||
.insert(id.clone(), session_id);
|
||||
let submission = Submission {
|
||||
id: sub_id.clone(),
|
||||
op: Op::UserInput {
|
||||
if let Err(e) = codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: initial_prompt.clone(),
|
||||
}],
|
||||
},
|
||||
};
|
||||
|
||||
if let Err(e) = codex.submit_with_id(submission).await {
|
||||
})
|
||||
.await
|
||||
{
|
||||
tracing::error!("Failed to submit initial prompt: {e}");
|
||||
// unregister the id so we don't keep it in the map
|
||||
running_requests_id_to_codex_uuid.lock().await.remove(&id);
|
||||
@@ -151,10 +141,7 @@ async fn run_codex_tool_session_inner(
|
||||
request_id: RequestId,
|
||||
running_requests_id_to_codex_uuid: Arc<Mutex<HashMap<RequestId, Uuid>>>,
|
||||
) {
|
||||
let request_id_str = match &request_id {
|
||||
RequestId::String(s) => s.clone(),
|
||||
RequestId::Integer(n) => n.to_string(),
|
||||
};
|
||||
let request_id_str = crate::request_id::request_id_to_string(&request_id);
|
||||
|
||||
// Stream events until the task needs to pause for user interaction or
|
||||
// completes.
|
||||
@@ -252,7 +239,9 @@ async fn run_codex_tool_session_inner(
|
||||
EventMsg::AgentMessage(AgentMessageEvent { .. }) => {
|
||||
// TODO: think how we want to support this in the MCP
|
||||
}
|
||||
EventMsg::TaskStarted
|
||||
EventMsg::AgentReasoningRawContent(_)
|
||||
| EventMsg::AgentReasoningRawContentDelta(_)
|
||||
| EventMsg::TaskStarted
|
||||
| EventMsg::TokenCount(_)
|
||||
| EventMsg::AgentReasoning(_)
|
||||
| EventMsg::McpToolCallBegin(_)
|
||||
@@ -263,6 +252,7 @@ async fn run_codex_tool_session_inner(
|
||||
| EventMsg::BackgroundEvent(_)
|
||||
| EventMsg::PatchApplyBegin(_)
|
||||
| EventMsg::PatchApplyEnd(_)
|
||||
| EventMsg::TurnDiff(_)
|
||||
| EventMsg::GetHistoryEntryResponse(_)
|
||||
| EventMsg::PlanUpdate(_)
|
||||
| EventMsg::ShutdownComplete => {
|
||||
|
||||
@@ -1,121 +1,369 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::exec_approval::handle_exec_approval_request;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use crate::outgoing_message::OutgoingNotificationMeta;
|
||||
use crate::patch_approval::handle_patch_approval_request;
|
||||
use codex_core::Codex;
|
||||
use codex_core::error::Result as CodexResult;
|
||||
use codex_core::protocol::AgentMessageEvent;
|
||||
use codex_core::protocol::ApplyPatchApprovalRequestEvent;
|
||||
use codex_core::protocol::Event;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::ExecApprovalRequestEvent;
|
||||
use codex_core::protocol::FileChange;
|
||||
use codex_core::protocol::InputItem;
|
||||
use codex_core::protocol::Op;
|
||||
use mcp_types::RequestId;
|
||||
use tokio::sync::Mutex;
|
||||
// no streaming watch channel; streaming is toggled via set_streaming on the struct
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::error;
|
||||
use uuid::Uuid;
|
||||
|
||||
pub async fn run_conversation_loop(
|
||||
use crate::exec_approval::handle_exec_approval_request;
|
||||
use crate::mcp_protocol::CodexEventNotificationParams;
|
||||
use crate::mcp_protocol::ConversationId;
|
||||
use crate::mcp_protocol::InitialStateNotificationParams;
|
||||
use crate::mcp_protocol::InitialStatePayload;
|
||||
use crate::mcp_protocol::NotificationMeta;
|
||||
use crate::mcp_protocol::ServerNotification;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use crate::patch_approval::handle_patch_approval_request;
|
||||
use crate::request_id::request_id_to_string;
|
||||
|
||||
/// Conversation struct that owns the Codex session and all per-conversation state.
|
||||
pub(crate) struct Conversation {
|
||||
codex: Arc<Codex>,
|
||||
session_id: Uuid,
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
request_id: RequestId,
|
||||
) {
|
||||
let request_id_str = match &request_id {
|
||||
RequestId::String(s) => s.clone(),
|
||||
RequestId::Integer(n) => n.to_string(),
|
||||
};
|
||||
state: Mutex<ConversationState>,
|
||||
cancel: CancellationToken,
|
||||
}
|
||||
|
||||
// Stream events until the task needs to pause for user interaction or
|
||||
// completes.
|
||||
loop {
|
||||
match codex.next_event().await {
|
||||
Ok(event) => {
|
||||
outgoing
|
||||
.send_event_as_notification(
|
||||
&event,
|
||||
Some(OutgoingNotificationMeta::new(Some(request_id.clone()))),
|
||||
)
|
||||
struct ConversationState {
|
||||
streaming_enabled: bool,
|
||||
buffered_events: Vec<CodexEventNotificationParams>,
|
||||
pending_elicitations: Vec<PendingElicitation>,
|
||||
}
|
||||
|
||||
impl Conversation {
|
||||
pub(crate) fn new(
|
||||
codex: Arc<Codex>,
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
request_id: RequestId,
|
||||
session_id: Uuid,
|
||||
) -> Arc<Self> {
|
||||
let conv = Arc::new(Self {
|
||||
codex,
|
||||
session_id,
|
||||
outgoing,
|
||||
request_id,
|
||||
state: Mutex::new(ConversationState {
|
||||
streaming_enabled: false,
|
||||
buffered_events: Vec::new(),
|
||||
pending_elicitations: Vec::new(),
|
||||
}),
|
||||
cancel: CancellationToken::new(),
|
||||
});
|
||||
// Detach a background loop tied to this Conversation
|
||||
spawn_conversation_loop(conv.clone());
|
||||
conv
|
||||
}
|
||||
|
||||
pub(crate) async fn set_streaming(&self, enabled: bool) {
|
||||
if enabled {
|
||||
let (events_snapshot, pending_snapshot) = {
|
||||
let mut st = self.state.lock().await;
|
||||
st.streaming_enabled = true;
|
||||
(
|
||||
st.buffered_events.clone(),
|
||||
std::mem::take(&mut st.pending_elicitations),
|
||||
)
|
||||
};
|
||||
self.emit_initial_state_with(events_snapshot).await;
|
||||
self.drain_pending_elicitations_from(pending_snapshot).await;
|
||||
} else {
|
||||
let mut st = self.state.lock().await;
|
||||
st.streaming_enabled = false;
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn codex(&self) -> Arc<Codex> {
|
||||
self.codex.clone()
|
||||
}
|
||||
|
||||
pub(crate) async fn try_submit_user_input(
|
||||
&self,
|
||||
request_id: RequestId,
|
||||
items: Vec<InputItem>,
|
||||
) -> CodexResult<()> {
|
||||
let _ = request_id; // request_id is not used to enforce uniqueness; Codex generates ids.
|
||||
self.codex.submit(Op::UserInput { items }).await.map(|_| ())
|
||||
}
|
||||
|
||||
async fn handle_event(&self, event: Event) {
|
||||
{
|
||||
let mut st = self.state.lock().await;
|
||||
st.buffered_events.push(CodexEventNotificationParams {
|
||||
meta: None,
|
||||
msg: event.msg.clone(),
|
||||
});
|
||||
}
|
||||
self.stream_event_if_enabled(&event.msg).await;
|
||||
|
||||
match event.msg {
|
||||
EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent {
|
||||
command,
|
||||
cwd,
|
||||
call_id,
|
||||
reason: _,
|
||||
}) => {
|
||||
self.process_exec_request(command, cwd, call_id, event.id.clone())
|
||||
.await;
|
||||
}
|
||||
EventMsg::Error(err) => {
|
||||
error!("Codex runtime error: {}", err.message);
|
||||
}
|
||||
EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
}) => {
|
||||
self.start_patch_approval(PatchRequest {
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
event_id: event.id.clone(),
|
||||
})
|
||||
.await;
|
||||
}
|
||||
EventMsg::TaskComplete(_) => {}
|
||||
EventMsg::TaskStarted => {}
|
||||
EventMsg::SessionConfigured(ev) => {
|
||||
error!("unexpected SessionConfigured event: {:?}", ev);
|
||||
}
|
||||
EventMsg::AgentMessageDelta(_) => {}
|
||||
EventMsg::AgentReasoningDelta(_) => {}
|
||||
EventMsg::AgentMessage(AgentMessageEvent { .. }) => {}
|
||||
EventMsg::TokenCount(_)
|
||||
| EventMsg::AgentReasoning(_)
|
||||
| EventMsg::AgentReasoningRawContent(_)
|
||||
| EventMsg::AgentReasoningRawContentDelta(_)
|
||||
| EventMsg::McpToolCallBegin(_)
|
||||
| EventMsg::McpToolCallEnd(_)
|
||||
| EventMsg::ExecCommandBegin(_)
|
||||
| EventMsg::ExecCommandEnd(_)
|
||||
| EventMsg::BackgroundEvent(_)
|
||||
| EventMsg::ExecCommandOutputDelta(_)
|
||||
| EventMsg::PatchApplyBegin(_)
|
||||
| EventMsg::PatchApplyEnd(_)
|
||||
| EventMsg::GetHistoryEntryResponse(_)
|
||||
| EventMsg::PlanUpdate(_)
|
||||
| EventMsg::TurnDiff(_)
|
||||
| EventMsg::ShutdownComplete => {}
|
||||
}
|
||||
}
|
||||
|
||||
match event.msg {
|
||||
EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent {
|
||||
async fn emit_initial_state_with(&self, events: Vec<CodexEventNotificationParams>) {
|
||||
let params = InitialStateNotificationParams {
|
||||
meta: Some(NotificationMeta {
|
||||
conversation_id: Some(ConversationId(self.session_id)),
|
||||
request_id: None,
|
||||
}),
|
||||
initial_state: InitialStatePayload { events },
|
||||
};
|
||||
self.outgoing
|
||||
.send_server_notification(ServerNotification::InitialState(params))
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn drain_pending_elicitations_from(&self, items: Vec<PendingElicitation>) {
|
||||
for item in items {
|
||||
match item {
|
||||
PendingElicitation::ExecRequest(ExecRequest {
|
||||
command,
|
||||
cwd,
|
||||
event_id,
|
||||
call_id,
|
||||
}) => {
|
||||
handle_exec_approval_request(
|
||||
command,
|
||||
cwd,
|
||||
self.outgoing.clone(),
|
||||
self.codex.clone(),
|
||||
self.request_id.clone(),
|
||||
request_id_to_string(&self.request_id),
|
||||
event_id,
|
||||
call_id,
|
||||
reason: _,
|
||||
}) => {
|
||||
handle_exec_approval_request(
|
||||
command,
|
||||
cwd,
|
||||
outgoing.clone(),
|
||||
codex.clone(),
|
||||
request_id.clone(),
|
||||
request_id_str.clone(),
|
||||
event.id.clone(),
|
||||
call_id,
|
||||
)
|
||||
.await;
|
||||
continue;
|
||||
}
|
||||
EventMsg::Error(_) => {
|
||||
error!("Codex runtime error");
|
||||
}
|
||||
EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
|
||||
)
|
||||
.await;
|
||||
}
|
||||
PendingElicitation::PatchRequest(PatchRequest {
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
event_id,
|
||||
}) => {
|
||||
handle_patch_approval_request(
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
}) => {
|
||||
handle_patch_approval_request(
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
outgoing.clone(),
|
||||
codex.clone(),
|
||||
request_id.clone(),
|
||||
request_id_str.clone(),
|
||||
event.id.clone(),
|
||||
)
|
||||
.await;
|
||||
continue;
|
||||
}
|
||||
EventMsg::TaskComplete(_) => {}
|
||||
EventMsg::SessionConfigured(_) => {
|
||||
tracing::error!("unexpected SessionConfigured event");
|
||||
}
|
||||
EventMsg::AgentMessageDelta(_) => {
|
||||
// TODO: think how we want to support this in the MCP
|
||||
}
|
||||
EventMsg::AgentReasoningDelta(_) => {
|
||||
// TODO: think how we want to support this in the MCP
|
||||
}
|
||||
EventMsg::AgentMessage(AgentMessageEvent { .. }) => {
|
||||
// TODO: think how we want to support this in the MCP
|
||||
}
|
||||
EventMsg::TaskStarted
|
||||
| EventMsg::TokenCount(_)
|
||||
| EventMsg::AgentReasoning(_)
|
||||
| EventMsg::McpToolCallBegin(_)
|
||||
| EventMsg::McpToolCallEnd(_)
|
||||
| EventMsg::ExecCommandBegin(_)
|
||||
| EventMsg::ExecCommandEnd(_)
|
||||
| EventMsg::BackgroundEvent(_)
|
||||
| EventMsg::ExecCommandOutputDelta(_)
|
||||
| EventMsg::PatchApplyBegin(_)
|
||||
| EventMsg::PatchApplyEnd(_)
|
||||
| EventMsg::GetHistoryEntryResponse(_)
|
||||
| EventMsg::PlanUpdate(_)
|
||||
| EventMsg::ShutdownComplete => {
|
||||
// For now, we do not do anything extra for these
|
||||
// events. Note that
|
||||
// send(codex_event_to_notification(&event)) above has
|
||||
// already dispatched these events as notifications,
|
||||
// though we may want to do give different treatment to
|
||||
// individual events in the future.
|
||||
}
|
||||
self.outgoing.clone(),
|
||||
self.codex.clone(),
|
||||
self.request_id.clone(),
|
||||
request_id_to_string(&self.request_id),
|
||||
event_id,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Codex runtime error: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
async fn process_exec_request(
|
||||
&self,
|
||||
command: Vec<String>,
|
||||
cwd: PathBuf,
|
||||
call_id: String,
|
||||
event_id: String,
|
||||
) {
|
||||
let should_stream = {
|
||||
let st = self.state.lock().await;
|
||||
st.streaming_enabled
|
||||
};
|
||||
if should_stream {
|
||||
handle_exec_approval_request(
|
||||
command,
|
||||
cwd,
|
||||
self.outgoing.clone(),
|
||||
self.codex.clone(),
|
||||
self.request_id.clone(),
|
||||
request_id_to_string(&self.request_id),
|
||||
event_id,
|
||||
call_id,
|
||||
)
|
||||
.await;
|
||||
} else {
|
||||
let mut st = self.state.lock().await;
|
||||
st.pending_elicitations
|
||||
.push(PendingElicitation::ExecRequest(ExecRequest {
|
||||
command,
|
||||
cwd,
|
||||
event_id,
|
||||
call_id,
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
async fn start_patch_approval(&self, req: PatchRequest) {
|
||||
let PatchRequest {
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
event_id,
|
||||
} = req;
|
||||
let should_stream = {
|
||||
let st = self.state.lock().await;
|
||||
st.streaming_enabled
|
||||
};
|
||||
if should_stream {
|
||||
handle_patch_approval_request(
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
self.outgoing.clone(),
|
||||
self.codex.clone(),
|
||||
self.request_id.clone(),
|
||||
request_id_to_string(&self.request_id),
|
||||
event_id,
|
||||
)
|
||||
.await;
|
||||
} else {
|
||||
let mut st = self.state.lock().await;
|
||||
st.pending_elicitations
|
||||
.push(PendingElicitation::PatchRequest(PatchRequest {
|
||||
call_id,
|
||||
reason,
|
||||
grant_root,
|
||||
changes,
|
||||
event_id,
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
async fn stream_event_if_enabled(&self, msg: &EventMsg) {
|
||||
if !{ self.state.lock().await.streaming_enabled } {
|
||||
return;
|
||||
}
|
||||
let method = msg.to_string();
|
||||
let params = CodexEventNotificationParams {
|
||||
meta: None,
|
||||
msg: msg.clone(),
|
||||
};
|
||||
match serde_json::to_value(¶ms) {
|
||||
Ok(params_val) => {
|
||||
self.outgoing
|
||||
.send_custom_notification(&method, params_val)
|
||||
.await;
|
||||
}
|
||||
Err(err) => {
|
||||
error!("Failed to serialize event params: {err:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum PendingElicitation {
|
||||
ExecRequest(ExecRequest),
|
||||
PatchRequest(PatchRequest),
|
||||
}
|
||||
|
||||
struct PatchRequest {
|
||||
call_id: String,
|
||||
reason: Option<String>,
|
||||
grant_root: Option<PathBuf>,
|
||||
changes: HashMap<PathBuf, FileChange>,
|
||||
event_id: String,
|
||||
}
|
||||
|
||||
struct ExecRequest {
|
||||
command: Vec<String>,
|
||||
cwd: PathBuf,
|
||||
event_id: String,
|
||||
call_id: String,
|
||||
}
|
||||
|
||||
impl Drop for Conversation {
|
||||
fn drop(&mut self) {
|
||||
self.cancel.cancel();
|
||||
}
|
||||
}
|
||||
|
||||
fn spawn_conversation_loop(this: Arc<Conversation>) {
|
||||
tokio::spawn(async move {
|
||||
let codex = this.codex.clone();
|
||||
let cancel = this.cancel.clone();
|
||||
loop {
|
||||
tokio::select! {
|
||||
_ = cancel.cancelled() => {
|
||||
break;
|
||||
}
|
||||
res = codex.next_event() => {
|
||||
match res {
|
||||
Ok(event) => this.handle_event(event).await,
|
||||
Err(e) => {
|
||||
error!("Codex next_event error (session {}): {e}", this.session_id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
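
// Hypothetical wiring sketch (assumed, not shown in this diff): a tool handler creates the
// Conversation when `conversationCreate` is handled, stores it in `conversation_map`, and a
// later `conversationStream` call enables streaming, which flushes buffered events as an
// `initialState` notification and replays any pending approval elicitations:
//
//     let conversation = Conversation::new(codex, outgoing.clone(), request_id.clone(), session_id);
//     conversation_map.lock().await.insert(session_id, conversation.clone());
//     // ... later, when conversationStream is requested ...
//     conversation.set_streaming(true).await;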
|
||||
|
||||
@@ -18,7 +18,7 @@ use crate::codex_tool_runner::INVALID_PARAMS_ERROR_CODE;
|
||||
|
||||
/// Conforms to [`mcp_types::ElicitRequestParams`] so that it can be used as the
|
||||
/// `params` field of an [`ElicitRequest`].
|
||||
#[derive(Debug, Serialize)]
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
pub struct ExecApprovalElicitRequestParams {
|
||||
// These fields are required so that `params`
|
||||
// conforms to ElicitRequestParams.
|
||||
|
||||
@@ -24,6 +24,7 @@ pub mod mcp_protocol;
|
||||
pub(crate) mod message_processor;
|
||||
mod outgoing_message;
|
||||
mod patch_approval;
|
||||
mod request_id;
|
||||
pub(crate) mod tool_handlers;
|
||||
|
||||
use crate::message_processor::MessageProcessor;
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
@@ -13,10 +12,11 @@ use crate::mcp_protocol::ToolCallResponseResult;
|
||||
use crate::outgoing_message::OutgoingMessageSender;
|
||||
use crate::tool_handlers::create_conversation::handle_create_conversation;
|
||||
use crate::tool_handlers::send_message::handle_send_message;
|
||||
use crate::tool_handlers::stream_conversation;
|
||||
use crate::tool_handlers::stream_conversation::handle_stream_conversation;
|
||||
|
||||
use codex_core::Codex;
|
||||
use codex_core::config::Config as CodexConfig;
|
||||
use codex_core::protocol::Submission;
|
||||
use mcp_types::CallToolRequest;
|
||||
use mcp_types::CallToolRequestParams;
|
||||
use mcp_types::CallToolResult;
|
||||
@@ -43,8 +43,10 @@ pub(crate) struct MessageProcessor {
|
||||
initialized: bool,
|
||||
codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
session_map: Arc<Mutex<HashMap<Uuid, Arc<Codex>>>>,
|
||||
conversation_map: Arc<Mutex<HashMap<Uuid, Arc<crate::conversation_loop::Conversation>>>>,
|
||||
running_requests_id_to_codex_uuid: Arc<Mutex<HashMap<RequestId, Uuid>>>,
|
||||
running_session_ids: Arc<Mutex<HashSet<Uuid>>>,
|
||||
/// Track request IDs to the original ToolCallRequestParams for cancellation handling
|
||||
tool_request_map: Arc<Mutex<HashMap<RequestId, ToolCallRequestParams>>>,
|
||||
}
|
||||
|
||||
impl MessageProcessor {
|
||||
@@ -59,23 +61,22 @@ impl MessageProcessor {
|
||||
initialized: false,
|
||||
codex_linux_sandbox_exe,
|
||||
session_map: Arc::new(Mutex::new(HashMap::new())),
|
||||
conversation_map: Arc::new(Mutex::new(HashMap::new())),
|
||||
running_requests_id_to_codex_uuid: Arc::new(Mutex::new(HashMap::new())),
|
||||
running_session_ids: Arc::new(Mutex::new(HashSet::new())),
|
||||
tool_request_map: Arc::new(Mutex::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn session_map(&self) -> Arc<Mutex<HashMap<Uuid, Arc<Codex>>>> {
|
||||
self.session_map.clone()
|
||||
pub(crate) fn conversation_map(
|
||||
&self,
|
||||
) -> Arc<Mutex<HashMap<Uuid, Arc<crate::conversation_loop::Conversation>>>> {
|
||||
self.conversation_map.clone()
|
||||
}
|
||||
|
||||
pub(crate) fn outgoing(&self) -> Arc<OutgoingMessageSender> {
|
||||
self.outgoing.clone()
|
||||
}
|
||||
|
||||
pub(crate) fn running_session_ids(&self) -> Arc<Mutex<HashSet<Uuid>>> {
|
||||
self.running_session_ids.clone()
|
||||
}
|
||||
|
||||
pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) {
|
||||
// Hold on to the ID so we can respond.
|
||||
let request_id = request.id.clone();
|
||||
@@ -353,6 +354,11 @@ impl MessageProcessor {
|
||||
}
|
||||
}
|
||||
async fn handle_new_tool_calls(&self, request_id: RequestId, params: ToolCallRequestParams) {
|
||||
// Track the request to allow graceful cancellation routing later.
|
||||
{
|
||||
let mut tool_request_map = self.tool_request_map.lock().await;
|
||||
tool_request_map.insert(request_id.clone(), params.clone());
|
||||
}
|
||||
match params {
|
||||
ToolCallRequestParams::ConversationCreate(args) => {
|
||||
handle_create_conversation(self, request_id, args).await;
|
||||
@@ -360,6 +366,9 @@ impl MessageProcessor {
|
||||
ToolCallRequestParams::ConversationSendMessage(args) => {
|
||||
handle_send_message(self, request_id, args).await;
|
||||
}
|
||||
ToolCallRequestParams::ConversationStream(args) => {
|
||||
handle_stream_conversation(self, request_id, args).await;
|
||||
}
|
||||
_ => {
|
||||
let result = CallToolResult {
|
||||
content: vec![ContentBlock::TextContent(TextContent {
|
||||
@@ -584,23 +593,72 @@ impl MessageProcessor {
|
||||
// ---------------------------------------------------------------------
|
||||
// Notification handlers
|
||||
// ---------------------------------------------------------------------
|
||||
|
||||
async fn handle_cancelled_notification(
|
||||
&self,
|
||||
params: <mcp_types::CancelledNotification as mcp_types::ModelContextProtocolNotification>::Params,
|
||||
) {
|
||||
let request_id = params.request_id;
|
||||
// Create a stable string form early for logging and submission id.
|
||||
let request_id_string = match &request_id {
|
||||
RequestId::String(s) => s.clone(),
|
||||
RequestId::Integer(i) => i.to_string(),
|
||||
};
|
||||
|
||||
// Obtain the session_id while holding the first lock, then release.
|
||||
if let Some(orig) = {
|
||||
let mut tool_request_map = self.tool_request_map.lock().await;
|
||||
tool_request_map.remove(&request_id)
|
||||
} {
|
||||
self.handle_mcp_protocol_cancelled_notification(request_id, orig)
|
||||
.await;
|
||||
} else {
|
||||
self.handle_legacy_cancelled_notification(request_id).await;
|
||||
}
|
||||
}
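    // Hedged illustration (not part of the diff): the cancellation arrives as a
    // standard MCP notification, e.g.
    //   { "jsonrpc": "2.0", "method": "notifications/cancelled",
    //     "params": { "requestId": 3 } }
    // and the requestId is matched against tool_request_map to decide between
    // the new MCP-protocol cancel path and the legacy interrupt path.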
|
||||
|
||||
async fn handle_mcp_protocol_cancelled_notification(
|
||||
&self,
|
||||
request_id: RequestId,
|
||||
orig: ToolCallRequestParams,
|
||||
) {
|
||||
match orig {
|
||||
ToolCallRequestParams::ConversationStream(args) => {
|
||||
stream_conversation::handle_cancel(self, &args).await;
|
||||
}
|
||||
ToolCallRequestParams::ConversationSendMessage(args) => {
|
||||
// Cancel in-flight user input for this conversation by interrupting the session.
|
||||
|
||||
let session_id = args.conversation_id.0;
|
||||
let codex_arc = {
|
||||
let sessions_guard = self.conversation_map.lock().await;
|
||||
match sessions_guard.get(&session_id) {
|
||||
Some(conv) => conv.codex().clone(),
|
||||
None => {
|
||||
tracing::warn!(
|
||||
"Cancel send_message: session not found for session_id: {session_id}"
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Err(e) = codex_arc.submit(codex_core::protocol::Op::Interrupt).await {
|
||||
tracing::error!("Failed to submit interrupt for send_message cancel: {e}");
|
||||
}
|
||||
}
|
||||
ToolCallRequestParams::ConversationCreate(_)
|
||||
| ToolCallRequestParams::ConversationsList(_) => {
|
||||
// Likely fast/non-streaming; nothing to cancel currently.
|
||||
tracing::debug!(
|
||||
"Cancel conversationsList received for request_id: {:?} (no-op)",
|
||||
request_id
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_legacy_cancelled_notification(&self, request_id: RequestId) {
|
||||
use crate::request_id::request_id_to_string;
|
||||
let request_id_string = request_id_to_string(&request_id);
|
||||
|
||||
let session_id = {
|
||||
let map_guard = self.running_requests_id_to_codex_uuid.lock().await;
|
||||
match map_guard.get(&request_id) {
|
||||
Some(id) => *id, // Uuid is Copy
|
||||
Some(id) => *id,
|
||||
None => {
|
||||
tracing::warn!("Session not found for request_id: {}", request_id_string);
|
||||
return;
|
||||
@@ -609,7 +667,6 @@ impl MessageProcessor {
|
||||
};
|
||||
tracing::info!("session_id: {session_id}");
|
||||
|
||||
// Obtain the Codex Arc while holding the session_map lock, then release.
|
||||
let codex_arc = {
|
||||
let sessions_guard = self.session_map.lock().await;
|
||||
match sessions_guard.get(&session_id) {
|
||||
@@ -621,18 +678,11 @@ impl MessageProcessor {
|
||||
}
|
||||
};
|
||||
|
||||
// Submit interrupt to Codex.
|
||||
let err = codex_arc
|
||||
.submit_with_id(Submission {
|
||||
id: request_id_string,
|
||||
op: codex_core::protocol::Op::Interrupt,
|
||||
})
|
||||
.await;
|
||||
if let Err(e) = err {
|
||||
if let Err(e) = codex_arc.submit(codex_core::protocol::Op::Interrupt).await {
|
||||
tracing::error!("Failed to submit interrupt to Codex: {e}");
|
||||
return;
|
||||
}
|
||||
// unregister the id so we don't keep it in the map
|
||||
|
||||
self.running_requests_id_to_codex_uuid
|
||||
.lock()
|
||||
.await
|
||||
|
||||
@@ -109,7 +109,7 @@ impl OutgoingMessageSender {
|
||||
|
||||
// should be backwards compatible.
|
||||
// it will replace send_event_as_notification eventually.
|
||||
async fn send_event_as_notification_new_schema(
|
||||
pub(crate) async fn send_event_as_notification_new_schema(
|
||||
&self,
|
||||
event: &Event,
|
||||
params: Option<serde_json::Value>,
|
||||
@@ -124,6 +124,37 @@ impl OutgoingMessageSender {
|
||||
let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error });
|
||||
let _ = self.sender.send(outgoing_message).await;
|
||||
}
|
||||
|
||||
    /// Send a custom notification with an explicit method name and params object.
    pub(crate) async fn send_custom_notification(&self, method: &str, params: serde_json::Value) {
        let outgoing_message = OutgoingMessage::Notification(OutgoingNotification {
            method: method.to_string(),
            params: Some(params),
        });
        let _ = self.sender.send(outgoing_message).await;
    }

    /// Send a typed server notification by serializing it into a method/params pair.
    pub(crate) async fn send_server_notification(
        &self,
        notification: crate::mcp_protocol::ServerNotification,
    ) {
        match serde_json::to_value(notification) {
            Ok(serde_json::Value::Object(mut map)) => {
                let method = map
                    .remove("method")
                    .and_then(|v| v.as_str().map(|s| s.to_string()));
                let params = map.remove("params").unwrap_or(serde_json::Value::Null);
                if let Some(method) = method {
                    self.send_custom_notification(&method, params).await;
                } else {
                    warn!("ServerNotification missing method after serialization");
                }
            }
            Ok(_) => warn!("ServerNotification did not serialize to an object"),
            Err(err) => warn!("Failed to serialize ServerNotification: {err:?}"),
        }
    }
}

/// Outgoing message from the server to the client.
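A minimal sketch of the serialization shape `send_server_notification` relies on, assuming `ServerNotification` is (or resembles) an adjacently tagged serde enum; the variant name and payload below are illustrative, not the real schema.

// Hedged sketch (illustrative enum, not the actual ServerNotification type):
// an adjacently tagged serde enum serializes to {"method": ..., "params": ...},
// which is exactly the object send_server_notification pulls apart.
use serde::Serialize;

#[derive(Serialize)]
#[serde(tag = "method", content = "params", rename_all = "snake_case")]
enum ServerNotificationSketch {
    AgentMessage { message: String },
}

fn main() {
    let value = serde_json::to_value(ServerNotificationSketch::AgentMessage {
        message: "Done".to_string(),
    })
    .expect("serialize");
    assert_eq!(
        value,
        serde_json::json!({ "method": "agent_message", "params": { "message": "Done" } })
    );
}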
|
||||
|
||||
codex-rs/mcp-server/src/request_id.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
use mcp_types::RequestId;

/// Utility to convert an MCP `RequestId` into a `String`.
pub(crate) fn request_id_to_string(id: &RequestId) -> String {
    match id {
        RequestId::String(s) => s.clone(),
        RequestId::Integer(i) => i.to_string(),
    }
}
@@ -1,18 +1,14 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use codex_core::Codex;
|
||||
use codex_core::codex_wrapper::init_codex;
|
||||
use codex_core::config::Config as CodexConfig;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::SessionConfiguredEvent;
|
||||
use mcp_types::RequestId;
|
||||
use tokio::sync::Mutex;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::conversation_loop::run_conversation_loop;
|
||||
use crate::conversation_loop::Conversation;
|
||||
use crate::json_to_toml::json_to_toml;
|
||||
use crate::mcp_protocol::ConversationCreateArgs;
|
||||
use crate::mcp_protocol::ConversationCreateResult;
|
||||
@@ -121,24 +117,17 @@ pub(crate) async fn handle_create_conversation(
|
||||
let session_id = codex_conversation.session_id;
|
||||
let codex_arc = Arc::new(codex_conversation.codex);
|
||||
|
||||
// Store session for future calls
|
||||
insert_session(
|
||||
session_id,
|
||||
codex_arc.clone(),
|
||||
message_processor.session_map(),
|
||||
)
|
||||
.await;
|
||||
// Run the conversation loop in the background so this request can return immediately.
|
||||
// Construct conversation and start its loop, store it, then reply with id and model
|
||||
let outgoing = message_processor.outgoing();
|
||||
let spawn_id = id.clone();
|
||||
tokio::spawn(async move {
|
||||
run_conversation_loop(codex_arc.clone(), outgoing, spawn_id).await;
|
||||
});
|
||||
|
||||
// Reply with the new conversation id and effective model
|
||||
let conversation = Conversation::new(codex_arc.clone(), outgoing, id.clone(), session_id);
|
||||
let conv_map = message_processor.conversation_map();
|
||||
{
|
||||
let mut guard = conv_map.lock().await;
|
||||
guard.insert(session_id, conversation);
|
||||
}
|
||||
message_processor
|
||||
.send_response_with_optional_error(
|
||||
id,
|
||||
id.clone(),
|
||||
Some(ToolCallResponseResult::ConversationCreate(
|
||||
ConversationCreateResult::Ok {
|
||||
conversation_id: ConversationId(session_id),
|
||||
@@ -149,12 +138,3 @@ pub(crate) async fn handle_create_conversation(
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn insert_session(
|
||||
session_id: Uuid,
|
||||
codex: Arc<Codex>,
|
||||
session_map: Arc<Mutex<HashMap<Uuid, Arc<Codex>>>>,
|
||||
) {
|
||||
let mut guard = session_map.lock().await;
|
||||
guard.insert(session_id, codex);
|
||||
}
|
||||
|
||||
@@ -1,2 +1,3 @@
pub(crate) mod create_conversation;
pub(crate) mod send_message;
pub(crate) mod stream_conversation;

@@ -1,13 +1,11 @@
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
|
||||
use codex_core::Codex;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::Submission;
|
||||
use mcp_types::RequestId;
|
||||
use tokio::sync::Mutex;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::conversation_loop::Conversation;
|
||||
use crate::mcp_protocol::ConversationSendMessageArgs;
|
||||
use crate::mcp_protocol::ConversationSendMessageResult;
|
||||
use crate::mcp_protocol::ToolCallResponseResult;
|
||||
@@ -41,7 +39,8 @@ pub(crate) async fn handle_send_message(
|
||||
}
|
||||
|
||||
let session_id = conversation_id.0;
|
||||
let Some(codex) = get_session(session_id, message_processor.session_map()).await else {
|
||||
let Some(conversation) = get_session(session_id, message_processor.conversation_map()).await
|
||||
else {
|
||||
message_processor
|
||||
.send_response_with_optional_error(
|
||||
id,
|
||||
@@ -56,46 +55,15 @@ pub(crate) async fn handle_send_message(
|
||||
return;
|
||||
};
|
||||
|
||||
let running = {
|
||||
let running_sessions = message_processor.running_session_ids();
|
||||
let mut running_sessions = running_sessions.lock().await;
|
||||
!running_sessions.insert(session_id)
|
||||
};
|
||||
let res = conversation.try_submit_user_input(id.clone(), items).await;
|
||||
|
||||
if running {
|
||||
if let Err(e) = res {
|
||||
message_processor
|
||||
.send_response_with_optional_error(
|
||||
id,
|
||||
Some(ToolCallResponseResult::ConversationSendMessage(
|
||||
ConversationSendMessageResult::Error {
|
||||
message: "Session is already running".to_string(),
|
||||
},
|
||||
)),
|
||||
Some(true),
|
||||
)
|
||||
.await;
|
||||
return;
|
||||
}
|
||||
|
||||
let request_id_string = match &id {
|
||||
RequestId::String(s) => s.clone(),
|
||||
RequestId::Integer(i) => i.to_string(),
|
||||
};
|
||||
|
||||
let submit_res = codex
|
||||
.submit_with_id(Submission {
|
||||
id: request_id_string,
|
||||
op: Op::UserInput { items },
|
||||
})
|
||||
.await;
|
||||
|
||||
if let Err(e) = submit_res {
|
||||
message_processor
|
||||
.send_response_with_optional_error(
|
||||
id,
|
||||
Some(ToolCallResponseResult::ConversationSendMessage(
|
||||
ConversationSendMessageResult::Error {
|
||||
message: format!("Failed to submit user input: {e}"),
|
||||
message: e.to_string(),
|
||||
},
|
||||
)),
|
||||
Some(true),
|
||||
@@ -117,8 +85,8 @@ pub(crate) async fn handle_send_message(
|
||||
|
||||
pub(crate) async fn get_session(
|
||||
session_id: Uuid,
|
||||
session_map: Arc<Mutex<HashMap<Uuid, Arc<Codex>>>>,
|
||||
) -> Option<Arc<Codex>> {
|
||||
let guard = session_map.lock().await;
|
||||
conversation_map: Arc<Mutex<HashMap<Uuid, Arc<Conversation>>>>,
|
||||
) -> Option<Arc<Conversation>> {
|
||||
let guard = conversation_map.lock().await;
|
||||
guard.get(&session_id).cloned()
|
||||
}
|
||||
|
||||
codex-rs/mcp-server/src/tool_handlers/stream_conversation.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
use mcp_types::RequestId;
|
||||
|
||||
use crate::mcp_protocol::ConversationStreamArgs;
|
||||
use crate::mcp_protocol::ConversationStreamResult;
|
||||
use crate::mcp_protocol::ToolCallResponseResult;
|
||||
use crate::message_processor::MessageProcessor;
|
||||
use crate::tool_handlers::send_message::get_session;
|
||||
|
||||
/// Handles the ConversationStream tool call: verifies the session and
|
||||
/// enables streaming for the session, replying with an OK result.
|
||||
pub(crate) async fn handle_stream_conversation(
|
||||
message_processor: &MessageProcessor,
|
||||
id: RequestId,
|
||||
arguments: ConversationStreamArgs,
|
||||
) {
|
||||
let ConversationStreamArgs { conversation_id } = arguments;
|
||||
|
||||
let session_id = conversation_id.0;
|
||||
|
||||
// Ensure the session exists
|
||||
let conv = get_session(session_id, message_processor.conversation_map()).await;
|
||||
|
||||
if conv.is_none() {
|
||||
// Return an error with no result payload per MCP error pattern
|
||||
message_processor
|
||||
.send_response_with_optional_error(id, None, Some(true))
|
||||
.await;
|
||||
return;
|
||||
}
|
||||
|
||||
message_processor
|
||||
.send_response_with_optional_error(
|
||||
id,
|
||||
Some(ToolCallResponseResult::ConversationStream(
|
||||
ConversationStreamResult {},
|
||||
)),
|
||||
Some(false),
|
||||
)
|
||||
.await;
|
||||
|
||||
if let Some(conv) = conv {
|
||||
tokio::spawn(async move {
|
||||
conv.set_streaming(true).await;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Handles cancellation for ConversationStream by disabling streaming for the session.
|
||||
pub(crate) async fn handle_cancel(
|
||||
message_processor: &MessageProcessor,
|
||||
args: &ConversationStreamArgs,
|
||||
) {
|
||||
let session_id = args.conversation_id.0;
|
||||
if let Some(conv) = get_session(session_id, message_processor.conversation_map()).await {
|
||||
conv.set_streaming(false).await;
|
||||
}
|
||||
}
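Once streaming is enabled, the client first receives an `initial_state` notification and then individual event notifications such as `agent_message`. A hedged sketch of the initial payload shape, mirroring the expectations in the stream tests below (the conversation id is illustrative):

// Hedged sketch (not part of the diff): approximate shape of the
// `initial_state` notification params sent after ConversationStream succeeds.
fn main() {
    let initial_state = serde_json::json!({
        "_meta": { "conversationId": "00000000-0000-0000-0000-000000000000" },
        "initial_state": { "events": [] }
    });
    println!("{initial_state}");
}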
|
||||
@@ -89,14 +89,18 @@ async fn shell_command_approval_triggers_elicitation() -> anyhow::Result<()> {
|
||||
// This is the first request from the server, so the id should be 0 given
|
||||
// how things are currently implemented.
|
||||
let elicitation_request_id = RequestId::Integer(0);
|
||||
let params = serde_json::from_value::<ExecApprovalElicitRequestParams>(
|
||||
elicitation_request
|
||||
.params
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow::anyhow!("elicitation_request.params must be set"))?,
|
||||
)?;
|
||||
let expected_elicitation_request = create_expected_elicitation_request(
|
||||
elicitation_request_id.clone(),
|
||||
shell_command.clone(),
|
||||
workdir_for_shell_function_call.path(),
|
||||
codex_request_id.to_string(),
|
||||
// Internal Codex id: empirically it is 1, but this is
|
||||
// admittedly an internal detail that could change.
|
||||
"1".to_string(),
|
||||
params.codex_event_id.clone(),
|
||||
)?;
|
||||
assert_eq!(expected_elicitation_request, elicitation_request);
|
||||
|
||||
|
||||
codex-rs/mcp-server/tests/common/config.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
use std::path::Path;

/// Write a minimal Codex config.toml pointing at the provided mock server URI.
/// Used by tests that don't exercise approval/sandbox variations.
pub fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "danger-full-access"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}
@@ -1,7 +1,9 @@
mod config;
mod mcp_process;
mod mock_model_server;
mod responses;

pub use config::create_config_toml;
pub use mcp_process::McpProcess;
pub use mock_model_server::create_mock_chat_completions_server;
pub use responses::create_apply_patch_sse_response;

@@ -2,6 +2,7 @@ use std::path::Path;
|
||||
use std::process::Stdio;
|
||||
use std::sync::atomic::AtomicI64;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::time::Duration;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::io::BufReader;
|
||||
@@ -17,6 +18,7 @@ use codex_mcp_server::CodexToolCallReplyParam;
|
||||
use codex_mcp_server::mcp_protocol::ConversationCreateArgs;
|
||||
use codex_mcp_server::mcp_protocol::ConversationId;
|
||||
use codex_mcp_server::mcp_protocol::ConversationSendMessageArgs;
|
||||
use codex_mcp_server::mcp_protocol::ConversationStreamArgs;
|
||||
use codex_mcp_server::mcp_protocol::ToolCallRequestParams;
|
||||
|
||||
use mcp_types::CallToolRequestParams;
|
||||
@@ -201,6 +203,20 @@ impl McpProcess {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn send_conversation_stream_tool_call(
|
||||
&mut self,
|
||||
session_id: &str,
|
||||
) -> anyhow::Result<i64> {
|
||||
let params = ToolCallRequestParams::ConversationStream(ConversationStreamArgs {
|
||||
conversation_id: ConversationId(Uuid::parse_str(session_id)?),
|
||||
});
|
||||
self.send_request(
|
||||
mcp_types::CallToolRequest::METHOD,
|
||||
Some(serde_json::to_value(params)?),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn send_conversation_create_tool_call(
|
||||
&mut self,
|
||||
prompt: &str,
|
||||
@@ -236,6 +252,83 @@ impl McpProcess {
|
||||
.await
|
||||
}
|
||||
|
||||
/// Create a conversation and return its conversation_id as a string.
|
||||
pub async fn create_conversation_and_get_id(
|
||||
&mut self,
|
||||
prompt: &str,
|
||||
model: &str,
|
||||
cwd: &str,
|
||||
) -> anyhow::Result<String> {
|
||||
let req_id = self
|
||||
.send_conversation_create_tool_call(prompt, model, cwd)
|
||||
.await?;
|
||||
let resp = self
|
||||
.read_stream_until_response_message(RequestId::Integer(req_id))
|
||||
.await?;
|
||||
let conv_id = resp.result["structuredContent"]["conversation_id"]
|
||||
.as_str()
|
||||
.ok_or_else(|| anyhow::format_err!("missing conversation_id"))?
|
||||
.to_string();
|
||||
Ok(conv_id)
|
||||
}
|
||||
|
||||
/// Connect stream for a conversation and wait for the initial_state notification.
|
||||
/// Returns (requestId, params) where params are the initial_state notification params.
|
||||
pub async fn connect_stream_and_expect_initial_state(
|
||||
&mut self,
|
||||
session_id: &str,
|
||||
) -> anyhow::Result<(i64, serde_json::Value)> {
|
||||
let req_id = self.send_conversation_stream_tool_call(session_id).await?;
|
||||
// Wait for stream() tool-call response first
|
||||
let _ = self
|
||||
.read_stream_until_response_message(RequestId::Integer(req_id))
|
||||
.await?;
|
||||
// Then the initial_state notification
|
||||
let note = self
|
||||
.read_stream_until_notification_method("notifications/initial_state")
|
||||
.await?;
|
||||
let params = note
|
||||
.params
|
||||
.ok_or_else(|| anyhow::format_err!("initial_state must have params"))?;
|
||||
Ok((req_id, params))
|
||||
}
|
||||
|
||||
/// Wait for an agent_message with a bounded timeout. Returns Some(params) if received, None on timeout.
|
||||
pub async fn maybe_wait_for_agent_message(
|
||||
&mut self,
|
||||
dur: Duration,
|
||||
) -> anyhow::Result<Option<serde_json::Value>> {
|
||||
match tokio::time::timeout(dur, self.wait_for_agent_message()).await {
|
||||
Ok(Ok(v)) => Ok(Some(v)),
|
||||
Ok(Err(e)) => Err(e),
|
||||
Err(_elapsed) => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
/// Send a user message to a conversation and wait for the OK tool-call response.
|
||||
pub async fn send_user_message_and_wait_ok(
|
||||
&mut self,
|
||||
message: &str,
|
||||
session_id: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
let req_id = self
|
||||
.send_user_message_tool_call(message, session_id)
|
||||
.await?;
|
||||
let _ = self
|
||||
.read_stream_until_response_message(RequestId::Integer(req_id))
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Wait until an agent_message notification arrives; returns its params.
|
||||
pub async fn wait_for_agent_message(&mut self) -> anyhow::Result<serde_json::Value> {
|
||||
let note = self
|
||||
.read_stream_until_notification_method("agent_message")
|
||||
.await?;
|
||||
note.params
|
||||
.ok_or_else(|| anyhow::format_err!("agent_message missing params"))
|
||||
}
|
||||
|
||||
async fn send_request(
|
||||
&mut self,
|
||||
method: &str,
|
||||
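Taken together, the helpers added above compose into a short end-to-end flow. The sketch below mirrors how the new stream tests use them; the model name and cwd are placeholder assumptions, not values required by the server.

// Hedged sketch (not part of the diff): create a conversation, attach the
// stream, then drive one user turn and read the resulting agent_message.
use mcp_test_support::McpProcess;
use mcp_test_support::create_config_toml;
use mcp_test_support::create_final_assistant_message_sse_response;
use mcp_test_support::create_mock_chat_completions_server;
use std::time::Duration;
use tempfile::TempDir;
use tokio::time::timeout;

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn stream_flow_sketch() -> anyhow::Result<()> {
    let responses =
        vec![create_final_assistant_message_sse_response("Done").expect("build mock response")];
    let server = create_mock_chat_completions_server(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(Duration::from_secs(3), mcp.initialize()).await??;

    let conv_id = mcp
        .create_conversation_and_get_id("", "mock-model", "/repo")
        .await?;
    let (_stream_req, _initial_state) = mcp
        .connect_stream_and_expect_initial_state(&conv_id)
        .await?;
    mcp.send_user_message_and_wait_ok("Hello", &conv_id).await?;
    let _agent_msg = mcp.wait_for_agent_message().await?;
    Ok(())
}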
@@ -329,53 +422,51 @@ impl McpProcess {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_notification_method(
|
||||
&mut self,
|
||||
method: &str,
|
||||
) -> anyhow::Result<JSONRPCNotification> {
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Notification(n) => {
|
||||
if n.method == method {
|
||||
return Ok(n);
|
||||
}
|
||||
}
|
||||
JSONRPCMessage::Request(_) => {
|
||||
// ignore
|
||||
}
|
||||
JSONRPCMessage::Error(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
|
||||
}
|
||||
JSONRPCMessage::Response(_) => {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn read_stream_until_configured_response_message(
|
||||
&mut self,
|
||||
) -> anyhow::Result<String> {
|
||||
let mut sid_old: Option<String> = None;
|
||||
let mut sid_new: Option<String> = None;
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
eprint!("message: {message:?}");
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
if let Some(params) = notification.params {
|
||||
// Back-compat schema: method == "codex/event" and msg.type == "session_configured"
|
||||
if notification.method == "codex/event" {
|
||||
if let Some(msg) = params.get("msg") {
|
||||
if msg.get("type").and_then(|v| v.as_str())
|
||||
== Some("session_configured")
|
||||
{
|
||||
if let Some(session_id) =
|
||||
msg.get("session_id").and_then(|v| v.as_str())
|
||||
{
|
||||
sid_old = Some(session_id.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// New schema: method is the Display of EventMsg::SessionConfigured => "SessionConfigured"
|
||||
if notification.method == "session_configured" {
|
||||
if notification.method == "session_configured" {
|
||||
if let Some(params) = notification.params {
|
||||
if let Some(msg) = params.get("msg") {
|
||||
if let Some(session_id) =
|
||||
msg.get("session_id").and_then(|v| v.as_str())
|
||||
{
|
||||
sid_new = Some(session_id.to_string());
|
||||
return Ok(session_id.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if sid_old.is_some() && sid_new.is_some() {
|
||||
// Both seen, they must match
|
||||
assert_eq!(
|
||||
sid_old.as_ref().unwrap(),
|
||||
sid_new.as_ref().unwrap(),
|
||||
"session_id mismatch between old and new schema"
|
||||
);
|
||||
return Ok(sid_old.unwrap());
|
||||
}
|
||||
}
|
||||
JSONRPCMessage::Request(_) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
#![allow(clippy::expect_used, clippy::unwrap_used)]
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_config_toml;
|
||||
use mcp_test_support::create_final_assistant_message_sse_response;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
@@ -103,26 +102,4 @@ async fn test_conversation_create_and_send_message_ok() {
|
||||
drop(server);
|
||||
}
|
||||
|
||||
// Helper to create a config.toml pointing at the mock model server.
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "danger-full-access"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
// create_config_toml is provided by tests/common
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
#![cfg(unix)]
|
||||
// Support code lives in the `mcp_test_support` crate under tests/common.
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_mcp_server::CodexToolCallParam;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::ModelContextProtocolNotification;
|
||||
use mcp_types::RequestId;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_config_toml;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_test_support::create_shell_sse_response;
|
||||
|
||||
@@ -66,7 +66,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
|
||||
|
||||
// Create Codex configuration
|
||||
let codex_home = TempDir::new()?;
|
||||
create_config_toml(codex_home.path(), server.uri())?;
|
||||
create_config_toml(codex_home.path(), &server.uri())?;
|
||||
let mut mcp_process = McpProcess::new(codex_home.path()).await?;
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp_process.initialize()).await??;
|
||||
|
||||
@@ -95,7 +95,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
|
||||
// Send interrupt notification
|
||||
mcp_process
|
||||
.send_notification(
|
||||
"notifications/cancelled",
|
||||
mcp_types::CancelledNotification::METHOD,
|
||||
Some(json!({ "requestId": codex_request_id })),
|
||||
)
|
||||
.await?;
|
||||
@@ -126,7 +126,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
|
||||
// Send interrupt notification
|
||||
mcp_process
|
||||
.send_notification(
|
||||
"notifications/cancelled",
|
||||
mcp_types::CancelledNotification::METHOD,
|
||||
Some(json!({ "requestId": codex_reply_request_id })),
|
||||
)
|
||||
.await?;
|
||||
@@ -148,30 +148,3 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn create_config_toml(codex_home: &Path, server_uri: String) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "danger-full-access"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
#![allow(clippy::expect_used)]
|
||||
|
||||
use std::path::Path;
|
||||
use std::thread::sleep;
|
||||
use std::time::Duration;
|
||||
|
||||
use codex_mcp_server::CodexToolCallParam;
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_config_toml;
|
||||
use mcp_test_support::create_final_assistant_message_sse_response;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_types::JSONRPC_VERSION;
|
||||
@@ -20,11 +19,9 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_send_message_success() {
|
||||
// Spin up a mock completions server that immediately ends the Codex turn.
|
||||
// Two Codex turns hit the mock model (session start + send-user-message). Provide two SSE responses.
|
||||
// Spin up a mock completions server that ends the Codex turn for the send-user-message call.
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
|
||||
create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
|
||||
@@ -41,29 +38,11 @@ async fn test_send_message_success() {
|
||||
.expect("init timed out")
|
||||
.expect("init failed");
|
||||
|
||||
// Kick off a Codex session so we have a valid session_id.
|
||||
let codex_request_id = mcp_process
|
||||
.send_codex_tool_call(CodexToolCallParam {
|
||||
prompt: "Start a session".to_string(),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.expect("send codex tool call");
|
||||
|
||||
// Wait for the session_configured event to get the session_id.
|
||||
// Create a conversation using the tool and get its conversation_id
|
||||
let session_id = mcp_process
|
||||
.read_stream_until_configured_response_message()
|
||||
.create_conversation_and_get_id("", "mock-model", "/repo")
|
||||
.await
|
||||
.expect("read session_configured");
|
||||
|
||||
// The original codex call will finish quickly given our mock; consume its response.
|
||||
timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp_process.read_stream_until_response_message(RequestId::Integer(codex_request_id)),
|
||||
)
|
||||
.await
|
||||
.expect("codex response timeout")
|
||||
.expect("codex response error");
|
||||
.expect("create conversation");
|
||||
|
||||
// Now exercise the send-user-message tool.
|
||||
let send_msg_request_id = mcp_process
|
||||
@@ -135,29 +114,4 @@ async fn test_send_message_session_not_found() {
|
||||
assert_eq!(result["isError"], json!(true));
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
fn create_config_toml(codex_home: &Path, server_uri: &str) -> std::io::Result<()> {
|
||||
let config_toml = codex_home.join("config.toml");
|
||||
std::fs::write(
|
||||
config_toml,
|
||||
format!(
|
||||
r#"
|
||||
model = "mock-model"
|
||||
approval_policy = "never"
|
||||
sandbox_mode = "danger-full-access"
|
||||
|
||||
model_provider = "mock_provider"
|
||||
|
||||
[model_providers.mock_provider]
|
||||
name = "Mock provider for test"
|
||||
base_url = "{server_uri}/v1"
|
||||
wire_api = "chat"
|
||||
request_max_retries = 0
|
||||
stream_max_retries = 0
|
||||
"#
|
||||
),
|
||||
)
|
||||
}
|
||||
// Helpers are provided by tests/common
|
||||
|
||||
codex-rs/mcp-server/tests/stream_conversation.rs (new file, 251 lines)
@@ -0,0 +1,251 @@
|
||||
#![allow(clippy::expect_used, clippy::unwrap_used)]
|
||||
|
||||
use mcp_test_support::McpProcess;
|
||||
use mcp_test_support::create_config_toml;
|
||||
use mcp_test_support::create_final_assistant_message_sse_response;
|
||||
use mcp_test_support::create_mock_chat_completions_server;
|
||||
use mcp_types::JSONRPCNotification;
|
||||
use mcp_types::ModelContextProtocolNotification;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
|
||||
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(3);
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_connect_then_send_receives_initial_state_and_notifications() {
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
create_config_toml(codex_home.path(), &server.uri()).expect("write config.toml");
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
|
||||
// Create conversation
|
||||
let conv_id = mcp
|
||||
.create_conversation_and_get_id("", "o3", "/repo")
|
||||
.await
|
||||
.expect("create conversation");
|
||||
|
||||
// Connect the stream
|
||||
let (_stream_req, params) = mcp
|
||||
.connect_stream_and_expect_initial_state(&conv_id)
|
||||
.await
|
||||
.expect("initial_state params");
|
||||
let expected_params = json!({
|
||||
"_meta": {
|
||||
"conversationId": conv_id.as_str(),
|
||||
},
|
||||
"initial_state": {
|
||||
"events": []
|
||||
}
|
||||
});
|
||||
assert_eq!(params, expected_params);
|
||||
|
||||
// Send a message and expect a subsequent notification (non-initial_state)
|
||||
mcp.send_user_message_and_wait_ok("Hello there", &conv_id)
|
||||
.await
|
||||
.expect("send message ok");
|
||||
|
||||
// Read until we see an event notification (new schema example: agent_message)
|
||||
let params = mcp.wait_for_agent_message().await.expect("agent message");
|
||||
let expected_params = json!({
|
||||
"msg": {
|
||||
"type": "agent_message",
|
||||
"message": "Done"
|
||||
}
|
||||
});
|
||||
assert_eq!(params, expected_params);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_send_then_connect_receives_initial_state_with_message() {
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done").expect("build mock assistant message"),
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
create_config_toml(codex_home.path(), &server.uri()).expect("write config.toml");
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
|
||||
// Create conversation
|
||||
let conv_id = mcp
|
||||
.create_conversation_and_get_id("", "o3", "/repo")
|
||||
.await
|
||||
.expect("create conversation");
|
||||
|
||||
// Send a message BEFORE connecting stream
|
||||
mcp.send_user_message_and_wait_ok("Hello world", &conv_id)
|
||||
.await
|
||||
.expect("send message ok");
|
||||
|
||||
// Now connect stream and expect InitialState with the prior message included
|
||||
let (_stream_req, params) = mcp
|
||||
.connect_stream_and_expect_initial_state(&conv_id)
|
||||
.await
|
||||
.expect("initial_state params");
|
||||
let events = params["initial_state"]["events"]
|
||||
.as_array()
|
||||
.expect("events array");
|
||||
if !events.iter().any(|ev| {
|
||||
ev.get("msg")
|
||||
.and_then(|m| m.get("type"))
|
||||
.and_then(|t| t.as_str())
|
||||
== Some("agent_message")
|
||||
&& ev
|
||||
.get("msg")
|
||||
.and_then(|m| m.get("message"))
|
||||
.and_then(|t| t.as_str())
|
||||
== Some("Done")
|
||||
}) {
|
||||
// Fallback to live notification if not present in initial state
|
||||
let note: JSONRPCNotification = timeout(
|
||||
DEFAULT_READ_TIMEOUT,
|
||||
mcp.read_stream_until_notification_method("agent_message"),
|
||||
)
|
||||
.await
|
||||
.expect("event note timeout")
|
||||
.expect("event note err");
|
||||
let params = note.params.expect("params");
|
||||
let expected_params = json!({
|
||||
"msg": {
|
||||
"type": "agent_message",
|
||||
"message": "Done"
|
||||
}
|
||||
});
|
||||
assert_eq!(params, expected_params);
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_cancel_stream_then_reconnect_catches_up_initial_state() {
|
||||
// Two user turns (M1 and M2) each hit the mock model, so provide two responses.
|
||||
let responses = vec![
|
||||
create_final_assistant_message_sse_response("Done 1")
|
||||
.expect("build mock assistant message"),
|
||||
create_final_assistant_message_sse_response("Done 2")
|
||||
.expect("build mock assistant message"),
|
||||
];
|
||||
let server = create_mock_chat_completions_server(responses).await;
|
||||
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
create_config_toml(codex_home.path(), &server.uri()).expect("write config.toml");
|
||||
|
||||
let mut mcp = McpProcess::new(codex_home.path())
|
||||
.await
|
||||
.expect("spawn mcp process");
|
||||
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
|
||||
.await
|
||||
.expect("init timeout")
|
||||
.expect("init failed");
|
||||
|
||||
// Create and connect stream A
|
||||
let conv_id = mcp
|
||||
.create_conversation_and_get_id("", "o3", "/repo")
|
||||
.await
|
||||
.expect("create");
|
||||
let (stream_a_id, _params) = mcp
|
||||
.connect_stream_and_expect_initial_state(&conv_id)
|
||||
.await
|
||||
.expect("stream A initial_state");
|
||||
|
||||
// Send M1 and ensure we get live agent_message
|
||||
mcp.send_user_message_and_wait_ok("Hello M1", &conv_id)
|
||||
.await
|
||||
.expect("send M1");
|
||||
let _params = mcp.wait_for_agent_message().await.expect("agent M1");
|
||||
|
||||
// Ensure the first task has fully completed before cancelling the stream
|
||||
// so that the session is no longer marked as running.
|
||||
let _ = mcp
|
||||
.read_stream_until_notification_method("task_complete")
|
||||
.await
|
||||
.expect("task complete");
|
||||
|
||||
// Cancel stream A
|
||||
mcp.send_notification(
|
||||
mcp_types::CancelledNotification::METHOD,
|
||||
Some(json!({ "requestId": stream_a_id })),
|
||||
)
|
||||
.await
|
||||
.expect("send cancelled");
|
||||
|
||||
// Send M2 while stream is cancelled; we should NOT get agent_message live
|
||||
mcp.send_user_message_and_wait_ok("Hello M2", &conv_id)
|
||||
.await
|
||||
.expect("send M2");
|
||||
let maybe = mcp
|
||||
.maybe_wait_for_agent_message(std::time::Duration::from_millis(300))
|
||||
.await
|
||||
.expect("maybe wait");
|
||||
assert!(
|
||||
maybe.is_none(),
|
||||
"should not get live agent_message after cancel"
|
||||
);
|
||||
|
||||
// Connect stream B and expect initial_state that includes the response
|
||||
let (_stream_req, params) = mcp
|
||||
.connect_stream_and_expect_initial_state(&conv_id)
|
||||
.await
|
||||
.expect("stream B initial_state");
|
||||
let events = params["initial_state"]["events"]
|
||||
.as_array()
|
||||
.expect("events array");
|
||||
let expected = vec![
|
||||
json!({
|
||||
"msg": {
|
||||
"type": "task_started",
|
||||
},
|
||||
}),
|
||||
json!({
|
||||
"msg": {
|
||||
"message": "Done 1",
|
||||
"type": "agent_message",
|
||||
},
|
||||
}),
|
||||
json!({
|
||||
"msg": {
|
||||
"last_agent_message": "Done 1",
|
||||
"type": "task_complete",
|
||||
},
|
||||
}),
|
||||
json!({
|
||||
"msg": {
|
||||
"type": "task_started",
|
||||
},
|
||||
}),
|
||||
json!({
|
||||
"msg": {
|
||||
"message": "Done 2",
|
||||
"type": "agent_message",
|
||||
},
|
||||
}),
|
||||
json!({
|
||||
"msg": {
|
||||
"last_agent_message": "Done 2",
|
||||
"type": "task_complete",
|
||||
},
|
||||
}),
|
||||
];
|
||||
assert_eq!(*events, expected);
|
||||
}
|
||||
|
||||
//
|
||||
@@ -11,6 +11,10 @@ path = "src/main.rs"
|
||||
name = "codex_tui"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[features]
|
||||
# Enable vt100-based tests (emulator) when running with `--features vt100-tests`.
|
||||
vt100-tests = []
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -48,6 +52,8 @@ serde_json = { version = "1", features = ["preserve_order"] }
|
||||
shlex = "1.3.0"
|
||||
strum = "0.27.2"
|
||||
strum_macros = "0.27.2"
|
||||
supports-color = "3.0.2"
|
||||
textwrap = "0.16.2"
|
||||
tokio = { version = "1", features = [
|
||||
"io-std",
|
||||
"macros",
|
||||
@@ -60,7 +66,6 @@ tracing-appender = "0.2.3"
|
||||
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
|
||||
tui-input = "0.14.0"
|
||||
tui-markdown = "0.3.3"
|
||||
tui-textarea = "0.7.0"
|
||||
unicode-segmentation = "1.12.0"
|
||||
unicode-width = "0.1"
|
||||
uuid = "1"
|
||||
@@ -70,3 +75,6 @@ uuid = "1"
|
||||
[dev-dependencies]
|
||||
insta = "1.43.1"
|
||||
pretty_assertions = "1"
|
||||
rand = "0.8"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
vt100 = "0.16.2"
|
||||
|
||||
@@ -438,14 +438,15 @@ impl App<'_> {
|
||||
);
|
||||
self.pending_history_lines.clear();
|
||||
}
|
||||
match &mut self.app_state {
|
||||
terminal.draw(|frame| match &mut self.app_state {
|
||||
AppState::Chat { widget } => {
|
||||
terminal.draw(|frame| frame.render_widget_ref(&**widget, frame.area()))?;
|
||||
if let Some((x, y)) = widget.cursor_pos(frame.area()) {
|
||||
frame.set_cursor_position((x, y));
|
||||
}
|
||||
frame.render_widget_ref(&**widget, frame.area())
|
||||
}
|
||||
AppState::GitWarning { screen } => {
|
||||
terminal.draw(|frame| frame.render_widget_ref(&*screen, frame.area()))?;
|
||||
}
|
||||
}
|
||||
AppState::GitWarning { screen } => frame.render_widget_ref(&*screen, frame.area()),
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
use codex_core::protocol::TokenUsage;
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
use ratatui::buffer::Buffer;
|
||||
use ratatui::layout::Constraint;
|
||||
use ratatui::layout::Layout;
|
||||
use ratatui::layout::Margin;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::style::Color;
|
||||
use ratatui::style::Style;
|
||||
@@ -8,13 +13,11 @@ use ratatui::style::Styled;
|
||||
use ratatui::style::Stylize;
|
||||
use ratatui::text::Line;
|
||||
use ratatui::text::Span;
|
||||
use ratatui::widgets::Block;
|
||||
use ratatui::widgets::BorderType;
|
||||
use ratatui::widgets::Borders;
|
||||
use ratatui::widgets::Widget;
|
||||
use ratatui::widgets::StatefulWidgetRef;
|
||||
use ratatui::widgets::WidgetRef;
|
||||
use tui_textarea::Input;
|
||||
use tui_textarea::Key;
|
||||
use tui_textarea::TextArea;
|
||||
|
||||
use super::chat_composer_history::ChatComposerHistory;
|
||||
use super::command_popup::CommandPopup;
|
||||
@@ -22,7 +25,10 @@ use super::file_search_popup::FileSearchPopup;
|
||||
|
||||
use crate::app_event::AppEvent;
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
use crate::bottom_pane::textarea::TextArea;
|
||||
use crate::bottom_pane::textarea::TextAreaState;
|
||||
use codex_file_search::FileMatch;
|
||||
use std::cell::RefCell;
|
||||
|
||||
const BASE_PLACEHOLDER_TEXT: &str = "...";
|
||||
/// If the pasted content exceeds this number of characters, replace it with a
|
||||
@@ -35,8 +41,14 @@ pub enum InputResult {
|
||||
None,
|
||||
}
|
||||
|
||||
pub(crate) struct ChatComposer<'a> {
|
||||
textarea: TextArea<'a>,
|
||||
struct TokenUsageInfo {
|
||||
token_usage: TokenUsage,
|
||||
model_context_window: Option<u64>,
|
||||
}
|
||||
|
||||
pub(crate) struct ChatComposer {
|
||||
textarea: TextArea,
|
||||
textarea_state: RefCell<TextAreaState>,
|
||||
active_popup: ActivePopup,
|
||||
app_event_tx: AppEventSender,
|
||||
history: ChatComposerHistory,
|
||||
@@ -45,6 +57,8 @@ pub(crate) struct ChatComposer<'a> {
|
||||
dismissed_file_popup_token: Option<String>,
|
||||
current_file_query: Option<String>,
|
||||
pending_pastes: Vec<(String, String)>,
|
||||
token_usage_info: Option<TokenUsageInfo>,
|
||||
has_focus: bool,
|
||||
}
|
||||
|
||||
/// Popup state – at most one can be visible at any time.
|
||||
@@ -54,20 +68,17 @@ enum ActivePopup {
|
||||
File(FileSearchPopup),
|
||||
}
|
||||
|
||||
impl ChatComposer<'_> {
|
||||
impl ChatComposer {
|
||||
pub fn new(
|
||||
has_input_focus: bool,
|
||||
app_event_tx: AppEventSender,
|
||||
enhanced_keys_supported: bool,
|
||||
) -> Self {
|
||||
let mut textarea = TextArea::default();
|
||||
textarea.set_placeholder_text(BASE_PLACEHOLDER_TEXT);
|
||||
textarea.set_cursor_line_style(ratatui::style::Style::default());
|
||||
|
||||
let use_shift_enter_hint = enhanced_keys_supported;
|
||||
|
||||
let mut this = Self {
|
||||
textarea,
|
||||
Self {
|
||||
textarea: TextArea::new(),
|
||||
textarea_state: RefCell::new(TextAreaState::default()),
|
||||
active_popup: ActivePopup::None,
|
||||
app_event_tx,
|
||||
history: ChatComposerHistory::new(),
|
||||
@@ -76,13 +87,13 @@ impl ChatComposer<'_> {
|
||||
dismissed_file_popup_token: None,
|
||||
current_file_query: None,
|
||||
pending_pastes: Vec::new(),
|
||||
};
|
||||
this.update_border(has_input_focus);
|
||||
this
|
||||
token_usage_info: None,
|
||||
has_focus: has_input_focus,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn desired_height(&self) -> u16 {
|
||||
self.textarea.lines().len().max(1) as u16
|
||||
pub fn desired_height(&self, width: u16) -> u16 {
|
||||
self.textarea.desired_height(width - 1)
|
||||
+ match &self.active_popup {
|
||||
ActivePopup::None => 1u16,
|
||||
ActivePopup::Command(c) => c.calculate_required_height(),
|
||||
@@ -90,6 +101,21 @@ impl ChatComposer<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> {
|
||||
let popup_height = match &self.active_popup {
|
||||
ActivePopup::Command(popup) => popup.calculate_required_height(),
|
||||
ActivePopup::File(popup) => popup.calculate_required_height(),
|
||||
ActivePopup::None => 1,
|
||||
};
|
||||
let [textarea_rect, _] =
|
||||
Layout::vertical([Constraint::Min(0), Constraint::Max(popup_height)]).areas(area);
|
||||
let mut textarea_rect = textarea_rect;
|
||||
textarea_rect.width = textarea_rect.width.saturating_sub(1);
|
||||
textarea_rect.x += 1;
|
||||
let state = self.textarea_state.borrow();
|
||||
self.textarea.cursor_pos_with_state(textarea_rect, &state)
|
||||
}
|
||||
|
||||
/// Returns true if the composer currently contains no user input.
|
||||
pub(crate) fn is_empty(&self) -> bool {
|
||||
self.textarea.is_empty()
|
||||
@@ -103,28 +129,10 @@ impl ChatComposer<'_> {
|
||||
token_usage: TokenUsage,
|
||||
model_context_window: Option<u64>,
|
||||
) {
|
||||
let placeholder = match (token_usage.total_tokens, model_context_window) {
|
||||
(total_tokens, Some(context_window)) => {
|
||||
let percent_remaining: u8 = if context_window > 0 {
|
||||
// Calculate the percentage of context left.
|
||||
let percent = 100.0 - (total_tokens as f32 / context_window as f32 * 100.0);
|
||||
percent.clamp(0.0, 100.0) as u8
|
||||
} else {
|
||||
// If we don't have a context window, we cannot compute the
|
||||
// percentage.
|
||||
100
|
||||
};
|
||||
// When https://github.com/openai/codex/issues/1257 is resolved,
|
||||
// check if `percent_remaining < 25`, and if so, recommend
|
||||
// /compact.
|
||||
format!("{BASE_PLACEHOLDER_TEXT} — {percent_remaining}% context left")
|
||||
}
|
||||
(total_tokens, None) => {
|
||||
format!("{BASE_PLACEHOLDER_TEXT} — {total_tokens} tokens used")
|
||||
}
|
||||
};
|
||||
|
||||
self.textarea.set_placeholder_text(placeholder);
|
||||
self.token_usage_info = Some(TokenUsageInfo {
|
||||
token_usage,
|
||||
model_context_window,
|
||||
});
|
||||
}
|
||||
|
||||
/// Record the history metadata advertised by `SessionConfiguredEvent` so
|
||||
@@ -142,8 +150,12 @@ impl ChatComposer<'_> {
|
||||
offset: usize,
|
||||
entry: Option<String>,
|
||||
) -> bool {
|
||||
self.history
|
||||
.on_entry_response(log_id, offset, entry, &mut self.textarea)
|
||||
let Some(text) = self.history.on_entry_response(log_id, offset, entry) else {
|
||||
return false;
|
||||
};
|
||||
self.textarea.set_text(&text);
|
||||
self.textarea.set_cursor(0);
|
||||
true
|
||||
}
|
||||
|
||||
pub fn handle_paste(&mut self, pasted: String) -> bool {
|
||||
@@ -179,7 +191,7 @@ impl ChatComposer<'_> {
|
||||
|
||||
pub fn set_ctrl_c_quit_hint(&mut self, show: bool, has_focus: bool) {
|
||||
self.ctrl_c_quit_hint = show;
|
||||
self.update_border(has_focus);
|
||||
self.set_has_focus(has_focus);
|
||||
}
|
||||
|
||||
/// Handle a key event coming from the main UI.
|
||||
@@ -207,49 +219,47 @@ impl ChatComposer<'_> {
|
||||
unreachable!();
|
||||
};
|
||||
|
||||
match key_event.into() {
|
||||
Input { key: Key::Up, .. } => {
|
||||
match key_event {
|
||||
KeyEvent {
|
||||
code: KeyCode::Up, ..
|
||||
} => {
|
||||
popup.move_up();
|
||||
(InputResult::None, true)
|
||||
}
|
||||
Input { key: Key::Down, .. } => {
|
||||
KeyEvent {
|
||||
code: KeyCode::Down,
|
||||
..
|
||||
} => {
|
||||
popup.move_down();
|
||||
(InputResult::None, true)
|
||||
}
|
||||
Input { key: Key::Tab, .. } => {
|
||||
KeyEvent {
|
||||
code: KeyCode::Tab, ..
|
||||
} => {
|
||||
if let Some(cmd) = popup.selected_command() {
|
||||
let first_line = self
|
||||
.textarea
|
||||
.lines()
|
||||
.first()
|
||||
.map(|s| s.as_str())
|
||||
.unwrap_or("");
|
||||
let first_line = self.textarea.text().lines().next().unwrap_or("");
|
||||
|
||||
let starts_with_cmd = first_line
|
||||
.trim_start()
|
||||
.starts_with(&format!("/{}", cmd.command()));
|
||||
|
||||
if !starts_with_cmd {
|
||||
self.textarea.select_all();
|
||||
self.textarea.cut();
|
||||
let _ = self.textarea.insert_str(format!("/{} ", cmd.command()));
|
||||
self.textarea.set_text(&format!("/{} ", cmd.command()));
|
||||
}
|
||||
}
|
||||
(InputResult::None, true)
|
||||
}
|
||||
Input {
|
||||
key: Key::Enter,
|
||||
shift: false,
|
||||
alt: false,
|
||||
ctrl: false,
|
||||
KeyEvent {
|
||||
code: KeyCode::Enter,
|
||||
modifiers: KeyModifiers::NONE,
|
||||
..
|
||||
} => {
|
||||
if let Some(cmd) = popup.selected_command() {
|
||||
// Send command to the app layer.
|
||||
self.app_event_tx.send(AppEvent::DispatchCommand(*cmd));
|
||||
|
||||
// Clear textarea so no residual text remains.
|
||||
self.textarea.select_all();
|
||||
self.textarea.cut();
|
||||
self.textarea.set_text("");
|
||||
|
||||
// Hide popup since the command has been dispatched.
|
||||
self.active_popup = ActivePopup::None;
|
||||
@@ -268,16 +278,23 @@ impl ChatComposer<'_> {
|
||||
unreachable!();
|
||||
};
|
||||
|
||||
match key_event.into() {
|
||||
Input { key: Key::Up, .. } => {
|
||||
match key_event {
|
||||
KeyEvent {
|
||||
code: KeyCode::Up, ..
|
||||
} => {
|
||||
popup.move_up();
|
||||
(InputResult::None, true)
|
||||
}
|
||||
Input { key: Key::Down, .. } => {
|
||||
KeyEvent {
|
||||
code: KeyCode::Down,
|
||||
..
|
||||
} => {
|
||||
popup.move_down();
|
||||
(InputResult::None, true)
|
||||
}
|
||||
Input { key: Key::Esc, .. } => {
|
||||
KeyEvent {
|
||||
code: KeyCode::Esc, ..
|
||||
} => {
|
||||
// Hide popup without modifying text, remember token to avoid immediate reopen.
|
||||
if let Some(tok) = Self::current_at_token(&self.textarea) {
|
||||
self.dismissed_file_popup_token = Some(tok.to_string());
|
||||
@@ -285,12 +302,13 @@ impl ChatComposer<'_> {
|
||||
self.active_popup = ActivePopup::None;
|
||||
(InputResult::None, true)
|
||||
}
|
||||
Input { key: Key::Tab, .. }
|
||||
| Input {
|
||||
key: Key::Enter,
|
||||
ctrl: false,
|
||||
alt: false,
|
||||
shift: false,
|
||||
KeyEvent {
|
||||
code: KeyCode::Tab, ..
|
||||
}
|
||||
| KeyEvent {
|
||||
code: KeyCode::Enter,
|
||||
modifiers: KeyModifiers::NONE,
|
||||
..
|
||||
} => {
|
||||
if let Some(sel) = popup.selected_match() {
|
||||
let sel_path = sel.to_string();
|
||||
@@ -315,46 +333,89 @@ impl ChatComposer<'_> {
|
||||
/// - A token is delimited by ASCII whitespace (space, tab, newline).
|
||||
/// - If the token under the cursor starts with `@` and contains at least
|
||||
/// one additional character, that token (without `@`) is returned.
|
||||
fn current_at_token(textarea: &tui_textarea::TextArea) -> Option<String> {
|
||||
let (row, col) = textarea.cursor();
|
||||
fn current_at_token(textarea: &TextArea) -> Option<String> {
|
||||
let cursor_offset = textarea.cursor();
|
||||
let text = textarea.text();
|
||||
|
||||
// Guard against out-of-bounds rows.
|
||||
let line = textarea.lines().get(row)?.as_str();
|
||||
// Adjust the provided byte offset to the nearest valid char boundary at or before it.
|
||||
let mut safe_cursor = cursor_offset.min(text.len());
|
||||
// If we're not on a char boundary, move back to the start of the current char.
|
||||
if safe_cursor < text.len() && !text.is_char_boundary(safe_cursor) {
|
||||
// Find the last valid boundary <= cursor_offset.
|
||||
safe_cursor = text
|
||||
.char_indices()
|
||||
.map(|(i, _)| i)
|
||||
.take_while(|&i| i <= cursor_offset)
|
||||
.last()
|
||||
.unwrap_or(0);
|
||||
}
|
||||
|
||||
// Calculate byte offset for cursor position
|
||||
let cursor_byte_offset = line.chars().take(col).map(|c| c.len_utf8()).sum::<usize>();
|
||||
// Split the line around the (now safe) cursor position.
|
||||
let before_cursor = &text[..safe_cursor];
|
||||
let after_cursor = &text[safe_cursor..];
|
||||
|
||||
// Split the line at the cursor position so we can search for word
|
||||
// boundaries on both sides.
|
||||
let before_cursor = &line[..cursor_byte_offset];
|
||||
let after_cursor = &line[cursor_byte_offset..];
|
||||
// Detect whether we're on whitespace at the cursor boundary.
|
||||
let at_whitespace = if safe_cursor < text.len() {
|
||||
text[safe_cursor..]
|
||||
.chars()
|
||||
.next()
|
||||
.map(|c| c.is_whitespace())
|
||||
.unwrap_or(false)
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
// Find start index (first character **after** the previous multi-byte whitespace).
|
||||
let start_idx = before_cursor
|
||||
// Left candidate: token containing the cursor position.
|
||||
let start_left = before_cursor
|
||||
.char_indices()
|
||||
.rfind(|(_, c)| c.is_whitespace())
|
||||
.map(|(idx, c)| idx + c.len_utf8())
|
||||
.unwrap_or(0);
|
||||
|
||||
// Find end index (first multi-byte whitespace **after** the cursor position).
|
||||
let end_rel_idx = after_cursor
|
||||
let end_left_rel = after_cursor
|
||||
.char_indices()
|
||||
.find(|(_, c)| c.is_whitespace())
|
||||
.map(|(idx, _)| idx)
|
||||
.unwrap_or(after_cursor.len());
|
||||
let end_idx = cursor_byte_offset + end_rel_idx;
|
||||
|
||||
if start_idx >= end_idx {
|
||||
return None;
|
||||
}
|
||||
|
||||
let token = &line[start_idx..end_idx];
|
||||
|
||||
if token.starts_with('@') && token.len() > 1 {
|
||||
Some(token[1..].to_string())
|
||||
let end_left = safe_cursor + end_left_rel;
|
||||
let token_left = if start_left < end_left {
|
||||
Some(&text[start_left..end_left])
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Right candidate: token immediately after any whitespace from the cursor.
|
||||
let ws_len_right: usize = after_cursor
|
||||
.chars()
|
||||
.take_while(|c| c.is_whitespace())
|
||||
.map(|c| c.len_utf8())
|
||||
.sum();
|
||||
let start_right = safe_cursor + ws_len_right;
|
||||
let end_right_rel = text[start_right..]
|
||||
.char_indices()
|
||||
.find(|(_, c)| c.is_whitespace())
|
||||
.map(|(idx, _)| idx)
|
||||
.unwrap_or(text.len() - start_right);
|
||||
let end_right = start_right + end_right_rel;
|
||||
let token_right = if start_right < end_right {
|
||||
Some(&text[start_right..end_right])
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let left_at = token_left
|
||||
.filter(|t| t.starts_with('@') && t.len() > 1)
|
||||
.map(|t| t[1..].to_string());
|
||||
let right_at = token_right
|
||||
.filter(|t| t.starts_with('@') && t.len() > 1)
|
||||
.map(|t| t[1..].to_string());
|
||||
|
||||
if at_whitespace {
|
||||
return right_at.or(left_at);
|
||||
}
|
||||
if after_cursor.starts_with('@') {
|
||||
return right_at.or(left_at);
|
||||
}
|
||||
left_at.or(right_at)
|
||||
}
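    // Hedged illustration (not part of the diff) of the new byte-offset rules:
    // with the text "see @src/main.rs now" and the cursor anywhere inside
    // "@src/main.rs", current_at_token returns Some("src/main.rs"); with the
    // cursor on the space just before the "@", the right-hand candidate wins
    // and the same token is returned.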
|
||||
|
||||
/// Replace the active `@token` (the one under the cursor) with `path`.
|
||||
@@ -363,94 +424,73 @@ impl ChatComposer<'_> {
|
||||
/// where the cursor is within the token and regardless of how many
|
||||
/// `@tokens` exist in the line.
|
||||
fn insert_selected_path(&mut self, path: &str) {
|
||||
let (row, col) = self.textarea.cursor();
|
||||
let cursor_offset = self.textarea.cursor();
|
||||
let text = self.textarea.text();
|
||||
|
||||
// Materialize the textarea lines so we can mutate them easily.
|
||||
let mut lines: Vec<String> = self.textarea.lines().to_vec();
|
||||
let before_cursor = &text[..cursor_offset];
|
||||
let after_cursor = &text[cursor_offset..];
|
||||
|
||||
if let Some(line) = lines.get_mut(row) {
|
||||
// Calculate byte offset for cursor position
|
||||
let cursor_byte_offset = line.chars().take(col).map(|c| c.len_utf8()).sum::<usize>();
|
||||
// Determine token boundaries.
|
||||
let start_idx = before_cursor
|
||||
.char_indices()
|
||||
.rfind(|(_, c)| c.is_whitespace())
|
||||
.map(|(idx, c)| idx + c.len_utf8())
|
||||
.unwrap_or(0);
|
||||
|
||||
let before_cursor = &line[..cursor_byte_offset];
|
||||
let after_cursor = &line[cursor_byte_offset..];
|
||||
let end_rel_idx = after_cursor
|
||||
.char_indices()
|
||||
.find(|(_, c)| c.is_whitespace())
|
||||
.map(|(idx, _)| idx)
|
||||
.unwrap_or(after_cursor.len());
|
||||
let end_idx = cursor_offset + end_rel_idx;
|
||||
|
||||
// Determine token boundaries.
|
||||
let start_idx = before_cursor
|
||||
.char_indices()
|
||||
.rfind(|(_, c)| c.is_whitespace())
|
||||
.map(|(idx, c)| idx + c.len_utf8())
|
||||
.unwrap_or(0);
|
||||
// Replace the slice `[start_idx, end_idx)` with the chosen path and a trailing space.
|
||||
let mut new_text =
|
||||
String::with_capacity(text.len() - (end_idx - start_idx) + path.len() + 1);
|
||||
new_text.push_str(&text[..start_idx]);
|
||||
new_text.push_str(path);
|
||||
new_text.push(' ');
|
||||
new_text.push_str(&text[end_idx..]);
|
||||
|
||||
let end_rel_idx = after_cursor
|
||||
.char_indices()
|
||||
.find(|(_, c)| c.is_whitespace())
|
||||
.map(|(idx, _)| idx)
|
||||
.unwrap_or(after_cursor.len());
|
||||
let end_idx = cursor_byte_offset + end_rel_idx;
|
||||
|
||||
// Replace the slice `[start_idx, end_idx)` with the chosen path and a trailing space.
|
||||
let mut new_line =
|
||||
String::with_capacity(line.len() - (end_idx - start_idx) + path.len() + 1);
|
||||
new_line.push_str(&line[..start_idx]);
|
||||
new_line.push_str(path);
|
||||
new_line.push(' ');
|
||||
new_line.push_str(&line[end_idx..]);
|
||||
|
||||
*line = new_line;
|
||||
|
||||
// Re-populate the textarea.
|
||||
let new_text = lines.join("\n");
|
||||
self.textarea.select_all();
|
||||
self.textarea.cut();
|
||||
let _ = self.textarea.insert_str(new_text);
|
||||
|
||||
// Note: tui-textarea currently exposes only relative cursor
|
||||
// movements. Leaving the cursor position unchanged is acceptable
|
||||
// as subsequent typing will move the cursor naturally.
|
||||
}
|
||||
self.textarea.set_text(&new_text);
|
||||
}
|
||||
|
||||
/// Handle key event when no popup is visible.
|
||||
fn handle_key_event_without_popup(&mut self, key_event: KeyEvent) -> (InputResult, bool) {
|
||||
let input: Input = key_event.into();
|
||||
match input {
|
||||
match key_event {
|
||||
// -------------------------------------------------------------
|
||||
// History navigation (Up / Down) – only when the composer is not
|
||||
// empty or when the cursor is at the correct position, to avoid
|
||||
// interfering with normal cursor movement.
|
||||
// -------------------------------------------------------------
|
||||
Input { key: Key::Up, .. } => {
|
||||
if self.history.should_handle_navigation(&self.textarea) {
|
||||
let consumed = self
|
||||
.history
|
||||
.navigate_up(&mut self.textarea, &self.app_event_tx);
|
||||
if consumed {
|
||||
return (InputResult::None, true);
|
||||
}
|
||||
}
|
||||
self.handle_input_basic(input)
|
||||
}
|
||||
Input { key: Key::Down, .. } => {
|
||||
if self.history.should_handle_navigation(&self.textarea) {
|
||||
let consumed = self
|
||||
.history
|
||||
.navigate_down(&mut self.textarea, &self.app_event_tx);
|
||||
if consumed {
|
||||
return (InputResult::None, true);
|
||||
}
|
||||
}
|
||||
self.handle_input_basic(input)
|
||||
}
|
||||
Input {
|
||||
key: Key::Enter,
|
||||
shift: false,
|
||||
alt: false,
|
||||
ctrl: false,
|
||||
KeyEvent {
|
||||
code: KeyCode::Up | KeyCode::Down,
|
||||
..
|
||||
} => {
|
||||
let mut text = self.textarea.lines().join("\n");
|
||||
self.textarea.select_all();
|
||||
self.textarea.cut();
|
||||
if self
|
||||
.history
|
||||
.should_handle_navigation(self.textarea.text(), self.textarea.cursor())
|
||||
{
|
||||
let replace_text = match key_event.code {
|
||||
KeyCode::Up => self.history.navigate_up(&self.app_event_tx),
|
||||
KeyCode::Down => self.history.navigate_down(&self.app_event_tx),
|
||||
_ => unreachable!(),
|
||||
};
|
||||
if let Some(text) = replace_text {
|
||||
self.textarea.set_text(&text);
|
||||
self.textarea.set_cursor(0);
|
||||
return (InputResult::None, true);
|
||||
}
|
||||
}
|
||||
self.handle_input_basic(key_event)
|
||||
}
|
||||
KeyEvent {
|
||||
code: KeyCode::Enter,
|
||||
modifiers: KeyModifiers::NONE,
|
||||
..
|
||||
} => {
|
||||
let mut text = self.textarea.text().to_string();
|
||||
self.textarea.set_text("");
|
||||
|
||||
// Replace all pending pastes in the text
|
||||
for (placeholder, actual) in &self.pending_pastes {
|
||||
@@ -467,41 +507,15 @@ impl ChatComposer<'_> {
|
||||
(InputResult::Submitted(text), true)
|
||||
}
|
||||
}
|
||||
Input {
|
||||
key: Key::Enter, ..
|
||||
}
|
||||
| Input {
|
||||
key: Key::Char('j'),
|
||||
ctrl: true,
|
||||
alt: false,
|
||||
shift: false,
|
||||
} => {
|
||||
self.textarea.insert_newline();
|
||||
(InputResult::None, true)
|
||||
}
|
||||
Input {
|
||||
key: Key::Char('d'),
|
||||
ctrl: true,
|
||||
alt: false,
|
||||
shift: false,
|
||||
} => {
|
||||
self.textarea.input(Input {
|
||||
key: Key::Delete,
|
||||
ctrl: false,
|
||||
alt: false,
|
||||
shift: false,
|
||||
});
|
||||
(InputResult::None, true)
|
||||
}
|
||||
input => self.handle_input_basic(input),
|
||||
}
|
||||
}
|
||||
|
||||
/// Handle generic Input events that modify the textarea content.
|
||||
fn handle_input_basic(&mut self, input: Input) -> (InputResult, bool) {
|
||||
fn handle_input_basic(&mut self, input: KeyEvent) -> (InputResult, bool) {
|
||||
// Special handling for backspace on placeholders
|
||||
if let Input {
|
||||
key: Key::Backspace,
|
||||
if let KeyEvent {
|
||||
code: KeyCode::Backspace,
|
||||
..
|
||||
} = input
|
||||
{
|
||||
@@ -510,20 +524,9 @@ impl ChatComposer<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
if let Input {
|
||||
key: Key::Char('u'),
|
||||
ctrl: true,
|
||||
alt: false,
|
||||
..
|
||||
} = input
|
||||
{
|
||||
self.textarea.delete_line_by_head();
|
||||
return (InputResult::None, true);
|
||||
}
|
||||
|
||||
// Normal input handling
|
||||
self.textarea.input(input);
|
||||
let text_after = self.textarea.lines().join("\n");
|
||||
let text_after = self.textarea.text();
|
||||
|
||||
// Check if any placeholders were removed and remove their corresponding pending pastes
|
||||
self.pending_pastes
|
||||
@@ -535,21 +538,16 @@ impl ChatComposer<'_> {
|
||||
/// Attempts to remove a placeholder if the cursor is at the end of one.
|
||||
/// Returns true if a placeholder was removed.
|
||||
fn try_remove_placeholder_at_cursor(&mut self) -> bool {
|
||||
let (row, col) = self.textarea.cursor();
|
||||
let line = self
|
||||
.textarea
|
||||
.lines()
|
||||
.get(row)
|
||||
.map(|s| s.as_str())
|
||||
.unwrap_or("");
|
||||
let p = self.textarea.cursor();
|
||||
let text = self.textarea.text();
|
||||
|
||||
// Find any placeholder that ends at the cursor position
|
||||
let placeholder_to_remove = self.pending_pastes.iter().find_map(|(ph, _)| {
|
||||
if col < ph.len() {
|
||||
if p < ph.len() {
|
||||
return None;
|
||||
}
|
||||
let potential_ph_start = col - ph.len();
|
||||
if line[potential_ph_start..col] == *ph {
|
||||
let potential_ph_start = p - ph.len();
|
||||
if text[potential_ph_start..p] == *ph {
|
||||
Some(ph.clone())
|
||||
} else {
|
||||
None
|
||||
@@ -557,17 +555,7 @@ impl ChatComposer<'_> {
|
||||
});
|
||||
|
||||
if let Some(placeholder) = placeholder_to_remove {
|
||||
// Remove the entire placeholder from the text
|
||||
let placeholder_len = placeholder.len();
|
||||
for _ in 0..placeholder_len {
|
||||
self.textarea.input(Input {
|
||||
key: Key::Backspace,
|
||||
ctrl: false,
|
||||
alt: false,
|
||||
shift: false,
|
||||
});
|
||||
}
|
||||
// Remove from pending pastes
|
||||
self.textarea.replace_range(p - placeholder.len()..p, "");
|
||||
self.pending_pastes.retain(|(ph, _)| ph != &placeholder);
|
||||
true
|
||||
} else {
|
||||
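A small sketch of the check the hunk above performs before deleting a paste placeholder, with hypothetical names (not the real composer API): a placeholder is removed as a unit only when the text that ends exactly at the byte cursor equals a known placeholder.

fn placeholder_ending_at<'a>(
    text: &str,
    cursor: usize,
    placeholders: &'a [String],
) -> Option<&'a str> {
    placeholders.iter().find_map(|ph| {
        // A placeholder can only end at the cursor if the cursor is at least
        // that many bytes into the text.
        let start = cursor.checked_sub(ph.len())?;
        let is_match = text.is_char_boundary(start)
            && text.is_char_boundary(cursor)
            && &text[start..cursor] == ph.as_str();
        is_match.then(|| ph.as_str())
    })
}

fn main() {
    let placeholders = vec!["[Pasted Content 1234 chars]".to_string()];
    let text = "see [Pasted Content 1234 chars]";
    assert_eq!(
        placeholder_ending_at(text, text.len(), &placeholders),
        Some(placeholders[0].as_str())
    );
    assert_eq!(placeholder_ending_at(text, 3, &placeholders), None);
}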
@@ -579,16 +567,7 @@ impl ChatComposer<'_> {
|
||||
/// textarea. This must be called after every modification that can change
|
||||
/// the text so the popup is shown/updated/hidden as appropriate.
|
||||
fn sync_command_popup(&mut self) {
|
||||
// Inspect only the first line to decide whether to show the popup. In
|
||||
// the common case (no leading slash) we avoid copying the entire
|
||||
// textarea contents.
|
||||
let first_line = self
|
||||
.textarea
|
||||
.lines()
|
||||
.first()
|
||||
.map(|s| s.as_str())
|
||||
.unwrap_or("");
|
||||
|
||||
let first_line = self.textarea.text().lines().next().unwrap_or("");
|
||||
let input_starts_with_slash = first_line.starts_with('/');
|
||||
match &mut self.active_popup {
|
||||
ActivePopup::Command(popup) => {
|
||||
@@ -644,74 +623,29 @@ impl ChatComposer<'_> {
|
||||
self.dismissed_file_popup_token = None;
|
||||
}
|
||||
|
||||
fn update_border(&mut self, has_focus: bool) {
|
||||
let border_style = if has_focus {
|
||||
Style::default().fg(Color::Cyan)
|
||||
} else {
|
||||
Style::default().dim()
|
||||
};
|
||||
|
||||
self.textarea.set_block(
|
||||
ratatui::widgets::Block::default()
|
||||
.borders(Borders::LEFT)
|
||||
.border_type(BorderType::QuadrantOutside)
|
||||
.border_style(border_style),
|
||||
);
|
||||
fn set_has_focus(&mut self, has_focus: bool) {
|
||||
self.has_focus = has_focus;
|
||||
}
|
||||
}
|
||||
|
||||
impl WidgetRef for &ChatComposer<'_> {
|
||||
impl WidgetRef for &ChatComposer {
|
||||
fn render_ref(&self, area: Rect, buf: &mut Buffer) {
|
||||
let popup_height = match &self.active_popup {
|
||||
ActivePopup::Command(popup) => popup.calculate_required_height(),
|
||||
ActivePopup::File(popup) => popup.calculate_required_height(),
|
||||
ActivePopup::None => 1,
|
||||
};
|
||||
let [textarea_rect, popup_rect] =
|
||||
Layout::vertical([Constraint::Min(0), Constraint::Max(popup_height)]).areas(area);
|
||||
match &self.active_popup {
|
||||
ActivePopup::Command(popup) => {
|
||||
let popup_height = popup.calculate_required_height();
|
||||
|
||||
// Split the provided rect so that the popup is rendered at the
|
||||
// **bottom** and the textarea occupies the remaining space above.
|
||||
let popup_height = popup_height.min(area.height);
|
||||
let textarea_rect = Rect {
|
||||
x: area.x,
|
||||
y: area.y,
|
||||
width: area.width,
|
||||
height: area.height.saturating_sub(popup_height),
|
||||
};
|
||||
let popup_rect = Rect {
|
||||
x: area.x,
|
||||
y: area.y + textarea_rect.height,
|
||||
width: area.width,
|
||||
height: popup_height,
|
||||
};
|
||||
|
||||
popup.render(popup_rect, buf);
|
||||
self.textarea.render(textarea_rect, buf);
|
||||
popup.render_ref(popup_rect, buf);
|
||||
}
|
||||
ActivePopup::File(popup) => {
|
||||
let popup_height = popup.calculate_required_height();
|
||||
|
||||
let popup_height = popup_height.min(area.height);
|
||||
let textarea_rect = Rect {
|
||||
x: area.x,
|
||||
y: area.y,
|
||||
width: area.width,
|
||||
height: area.height.saturating_sub(popup_height),
|
||||
};
|
||||
let popup_rect = Rect {
|
||||
x: area.x,
|
||||
y: area.y + textarea_rect.height,
|
||||
width: area.width,
|
||||
height: popup_height,
|
||||
};
|
||||
|
||||
popup.render(popup_rect, buf);
|
||||
self.textarea.render(textarea_rect, buf);
|
||||
popup.render_ref(popup_rect, buf);
|
||||
}
|
||||
ActivePopup::None => {
|
||||
let mut textarea_rect = area;
|
||||
textarea_rect.height = textarea_rect.height.saturating_sub(1);
|
||||
self.textarea.render(textarea_rect, buf);
|
||||
let mut bottom_line_rect = area;
|
||||
bottom_line_rect.y += textarea_rect.height;
|
||||
bottom_line_rect.height = 1;
|
||||
let bottom_line_rect = popup_rect;
|
||||
let key_hint_style = Style::default().fg(Color::Cyan);
|
||||
let hint = if self.ctrl_c_quit_hint {
|
||||
vec![
|
||||
@@ -740,6 +674,56 @@ impl WidgetRef for &ChatComposer<'_> {
|
||||
.render_ref(bottom_line_rect, buf);
|
||||
}
|
||||
}
|
||||
Block::default()
|
||||
.border_style(Style::default().dim())
|
||||
.borders(Borders::LEFT)
|
||||
.border_type(BorderType::QuadrantOutside)
|
||||
.border_style(Style::default().fg(if self.has_focus {
|
||||
Color::Cyan
|
||||
} else {
|
||||
Color::Gray
|
||||
}))
|
||||
.render_ref(
|
||||
Rect::new(textarea_rect.x, textarea_rect.y, 1, textarea_rect.height),
|
||||
buf,
|
||||
);
|
||||
let mut textarea_rect = textarea_rect;
|
||||
textarea_rect.width = textarea_rect.width.saturating_sub(1);
|
||||
textarea_rect.x += 1;
|
||||
let mut state = self.textarea_state.borrow_mut();
|
||||
StatefulWidgetRef::render_ref(&(&self.textarea), textarea_rect, buf, &mut state);
|
||||
if self.textarea.text().is_empty() {
|
||||
let placeholder = if let Some(token_usage_info) = &self.token_usage_info {
|
||||
let token_usage = &token_usage_info.token_usage;
|
||||
let model_context_window = token_usage_info.model_context_window;
|
||||
match (token_usage.total_tokens, model_context_window) {
|
||||
(total_tokens, Some(context_window)) => {
|
||||
let percent_remaining: u8 = if context_window > 0 {
|
||||
// Calculate the percentage of context left.
|
||||
let percent =
|
||||
100.0 - (total_tokens as f32 / context_window as f32 * 100.0);
|
||||
percent.clamp(0.0, 100.0) as u8
|
||||
} else {
|
||||
// If we don't have a context window, we cannot compute the
|
||||
// percentage.
|
||||
100
|
||||
};
|
||||
// When https://github.com/openai/codex/issues/1257 is resolved,
|
||||
// check if `percent_remaining < 25`, and if so, recommend
|
||||
// /compact.
|
||||
format!("{BASE_PLACEHOLDER_TEXT} — {percent_remaining}% context left")
|
||||
}
|
||||
(total_tokens, None) => {
|
||||
format!("{BASE_PLACEHOLDER_TEXT} — {total_tokens} tokens used")
|
||||
}
|
||||
}
|
||||
} else {
|
||||
BASE_PLACEHOLDER_TEXT.to_string()
|
||||
};
|
||||
Line::from(placeholder)
|
||||
.style(Style::default().dim())
|
||||
.render_ref(textarea_rect.inner(Margin::new(1, 0)), buf);
|
||||
}
|
||||
}
|
||||
}
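The placeholder text above derives a "percent context left" figure from the token usage. A standalone sketch of that arithmetic, folding the no-window case into a single function for brevity (the real code formats a "tokens used" message instead when the window is unknown):

fn percent_context_left(total_tokens: u64, context_window: Option<u64>) -> u8 {
    match context_window {
        Some(window) if window > 0 => {
            let percent = 100.0 - (total_tokens as f32 / window as f32 * 100.0);
            percent.clamp(0.0, 100.0) as u8
        }
        // Without a known context window the percentage cannot be computed.
        _ => 100,
    }
}

fn main() {
    assert_eq!(percent_context_left(32_000, Some(128_000)), 75);
    assert_eq!(percent_context_left(200_000, Some(128_000)), 0); // clamped
    assert_eq!(percent_context_left(500, None), 100);
}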
|
||||
|
||||
@@ -749,7 +733,7 @@ mod tests {
|
||||
use crate::bottom_pane::ChatComposer;
|
||||
use crate::bottom_pane::InputResult;
|
||||
use crate::bottom_pane::chat_composer::LARGE_PASTE_CHAR_THRESHOLD;
|
||||
use tui_textarea::TextArea;
|
||||
use crate::bottom_pane::textarea::TextArea;
|
||||
|
||||
#[test]
|
||||
fn test_current_at_token_basic_cases() {
|
||||
@@ -792,9 +776,9 @@ mod tests {
|
||||
];
|
||||
|
||||
for (input, cursor_pos, expected, description) in test_cases {
|
||||
let mut textarea = TextArea::default();
|
||||
let mut textarea = TextArea::new();
|
||||
textarea.insert_str(input);
|
||||
textarea.move_cursor(tui_textarea::CursorMove::Jump(0, cursor_pos));
|
||||
textarea.set_cursor(cursor_pos);
|
||||
|
||||
let result = ChatComposer::current_at_token(&textarea);
|
||||
assert_eq!(
|
||||
@@ -826,9 +810,9 @@ mod tests {
|
||||
];
|
||||
|
||||
for (input, cursor_pos, expected, description) in test_cases {
|
||||
let mut textarea = TextArea::default();
|
||||
let mut textarea = TextArea::new();
|
||||
textarea.insert_str(input);
|
||||
textarea.move_cursor(tui_textarea::CursorMove::Jump(0, cursor_pos));
|
||||
textarea.set_cursor(cursor_pos);
|
||||
|
||||
let result = ChatComposer::current_at_token(&textarea);
|
||||
assert_eq!(
|
||||
@@ -863,13 +847,13 @@ mod tests {
|
||||
// Full-width space boundaries
|
||||
(
|
||||
"test @İstanbul",
|
||||
6,
|
||||
8,
|
||||
Some("İstanbul".to_string()),
|
||||
"@ token after full-width space",
|
||||
),
|
||||
(
|
||||
"@ЙЦУ @诶",
|
||||
6,
|
||||
10,
|
||||
Some("诶".to_string()),
|
||||
"Full-width space between Unicode tokens",
|
||||
),
|
||||
@@ -883,9 +867,9 @@ mod tests {
|
||||
];
|
||||
|
||||
for (input, cursor_pos, expected, description) in test_cases {
|
||||
let mut textarea = TextArea::default();
|
||||
let mut textarea = TextArea::new();
|
||||
textarea.insert_str(input);
|
||||
textarea.move_cursor(tui_textarea::CursorMove::Jump(0, cursor_pos));
|
||||
textarea.set_cursor(cursor_pos);
|
||||
|
||||
let result = ChatComposer::current_at_token(&textarea);
|
||||
assert_eq!(
|
||||
@@ -907,7 +891,7 @@ mod tests {
|
||||
|
||||
let needs_redraw = composer.handle_paste("hello".to_string());
|
||||
assert!(needs_redraw);
|
||||
assert_eq!(composer.textarea.lines(), ["hello"]);
|
||||
assert_eq!(composer.textarea.text(), "hello");
|
||||
assert!(composer.pending_pastes.is_empty());
|
||||
|
||||
let (result, _) =
|
||||
@@ -932,7 +916,7 @@ mod tests {
|
||||
let needs_redraw = composer.handle_paste(large.clone());
|
||||
assert!(needs_redraw);
|
||||
let placeholder = format!("[Pasted Content {} chars]", large.chars().count());
|
||||
assert_eq!(composer.textarea.lines(), [placeholder.as_str()]);
|
||||
assert_eq!(composer.textarea.text(), placeholder);
|
||||
assert_eq!(composer.pending_pastes.len(), 1);
|
||||
assert_eq!(composer.pending_pastes[0].0, placeholder);
|
||||
assert_eq!(composer.pending_pastes[0].1, large);
|
||||
@@ -1008,7 +992,7 @@ mod tests {
|
||||
composer.handle_paste("b".repeat(LARGE_PASTE_CHAR_THRESHOLD + 4));
|
||||
composer.handle_paste("c".repeat(LARGE_PASTE_CHAR_THRESHOLD + 6));
|
||||
// Move cursor to end and press backspace
|
||||
composer.textarea.move_cursor(tui_textarea::CursorMove::End);
|
||||
composer.textarea.set_cursor(composer.textarea.text().len());
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
|
||||
}
|
||||
|
||||
@@ -1123,7 +1107,7 @@ mod tests {
|
||||
current_pos += content.len();
|
||||
}
|
||||
(
|
||||
composer.textarea.lines().join("\n"),
|
||||
composer.textarea.text().to_string(),
|
||||
composer.pending_pastes.len(),
|
||||
current_pos,
|
||||
)
|
||||
@@ -1134,25 +1118,18 @@ mod tests {
|
||||
let mut deletion_states = vec![];
|
||||
|
||||
// First deletion
|
||||
composer
|
||||
.textarea
|
||||
.move_cursor(tui_textarea::CursorMove::Jump(0, states[0].2 as u16));
|
||||
composer.textarea.set_cursor(states[0].2);
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
|
||||
deletion_states.push((
|
||||
composer.textarea.lines().join("\n"),
|
||||
composer.textarea.text().to_string(),
|
||||
composer.pending_pastes.len(),
|
||||
));
|
||||
|
||||
// Second deletion
|
||||
composer
|
||||
.textarea
|
||||
.move_cursor(tui_textarea::CursorMove::Jump(
|
||||
0,
|
||||
composer.textarea.lines().join("\n").len() as u16,
|
||||
));
|
||||
composer.textarea.set_cursor(composer.textarea.text().len());
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
|
||||
deletion_states.push((
|
||||
composer.textarea.lines().join("\n"),
|
||||
composer.textarea.text().to_string(),
|
||||
composer.pending_pastes.len(),
|
||||
));
|
||||
|
||||
@@ -1191,17 +1168,13 @@ mod tests {
|
||||
composer.handle_paste(paste.clone());
|
||||
composer
|
||||
.textarea
|
||||
.move_cursor(tui_textarea::CursorMove::Jump(
|
||||
0,
|
||||
(placeholder.len() - pos_from_end) as u16,
|
||||
));
|
||||
.set_cursor((placeholder.len() - pos_from_end) as usize);
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
|
||||
let result = (
|
||||
composer.textarea.lines().join("\n").contains(&placeholder),
|
||||
composer.textarea.text().contains(&placeholder),
|
||||
composer.pending_pastes.len(),
|
||||
);
|
||||
composer.textarea.select_all();
|
||||
composer.textarea.cut();
|
||||
composer.textarea.set_text("");
|
||||
result
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use tui_textarea::CursorMove;
|
||||
use tui_textarea::TextArea;
|
||||
|
||||
use crate::app_event::AppEvent;
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
use codex_core::protocol::Op;
|
||||
@@ -67,59 +64,52 @@ impl ChatComposerHistory {
|
||||
|
||||
/// Should Up/Down key presses be interpreted as history navigation given
|
||||
/// the current content and cursor position of `textarea`?
|
||||
pub fn should_handle_navigation(&self, textarea: &TextArea) -> bool {
|
||||
pub fn should_handle_navigation(&self, text: &str, cursor: usize) -> bool {
|
||||
if self.history_entry_count == 0 && self.local_history.is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
if textarea.is_empty() {
|
||||
if text.is_empty() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Textarea is not empty – only navigate when cursor is at start and
|
||||
// text matches last recalled history entry so regular editing is not
|
||||
// hijacked.
|
||||
let (row, col) = textarea.cursor();
|
||||
if row != 0 || col != 0 {
|
||||
if cursor != 0 {
|
||||
return false;
|
||||
}
|
||||
|
||||
let lines = textarea.lines();
|
||||
matches!(&self.last_history_text, Some(prev) if prev == &lines.join("\n"))
|
||||
matches!(&self.last_history_text, Some(prev) if prev == text)
|
||||
}
|
||||
|
||||
/// Handle <Up>. Returns true when the key was consumed and the caller
|
||||
/// should request a redraw.
|
||||
pub fn navigate_up(&mut self, textarea: &mut TextArea, app_event_tx: &AppEventSender) -> bool {
|
||||
pub fn navigate_up(&mut self, app_event_tx: &AppEventSender) -> Option<String> {
|
||||
let total_entries = self.history_entry_count + self.local_history.len();
|
||||
if total_entries == 0 {
|
||||
return false;
|
||||
return None;
|
||||
}
|
||||
|
||||
let next_idx = match self.history_cursor {
|
||||
None => (total_entries as isize) - 1,
|
||||
Some(0) => return true, // already at oldest
|
||||
Some(0) => return None, // already at oldest
|
||||
Some(idx) => idx - 1,
|
||||
};
|
||||
|
||||
self.history_cursor = Some(next_idx);
|
||||
self.populate_history_at_index(next_idx as usize, textarea, app_event_tx);
|
||||
true
|
||||
self.populate_history_at_index(next_idx as usize, app_event_tx)
|
||||
}
|
||||
|
||||
/// Handle <Down>.
|
||||
pub fn navigate_down(
|
||||
&mut self,
|
||||
textarea: &mut TextArea,
|
||||
app_event_tx: &AppEventSender,
|
||||
) -> bool {
|
||||
pub fn navigate_down(&mut self, app_event_tx: &AppEventSender) -> Option<String> {
|
||||
let total_entries = self.history_entry_count + self.local_history.len();
|
||||
if total_entries == 0 {
|
||||
return false;
|
||||
return None;
|
||||
}
|
||||
|
||||
let next_idx_opt = match self.history_cursor {
|
||||
None => return false, // not browsing
|
||||
None => return None, // not browsing
|
||||
Some(idx) if (idx as usize) + 1 >= total_entries => None,
|
||||
Some(idx) => Some(idx + 1),
|
||||
};
|
||||
@@ -127,16 +117,15 @@ impl ChatComposerHistory {
|
||||
match next_idx_opt {
|
||||
Some(idx) => {
|
||||
self.history_cursor = Some(idx);
|
||||
self.populate_history_at_index(idx as usize, textarea, app_event_tx);
|
||||
self.populate_history_at_index(idx as usize, app_event_tx)
|
||||
}
|
||||
None => {
|
||||
// Past newest – clear and exit browsing mode.
|
||||
self.history_cursor = None;
|
||||
self.last_history_text = None;
|
||||
self.replace_textarea_content(textarea, "");
|
||||
Some(String::new())
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
/// Integrate a GetHistoryEntryResponse event.
|
||||
@@ -145,19 +134,18 @@ impl ChatComposerHistory {
|
||||
log_id: u64,
|
||||
offset: usize,
|
||||
entry: Option<String>,
|
||||
textarea: &mut TextArea,
|
||||
) -> bool {
|
||||
) -> Option<String> {
|
||||
if self.history_log_id != Some(log_id) {
|
||||
return false;
|
||||
return None;
|
||||
}
|
||||
let Some(text) = entry else { return false };
|
||||
let text = entry?;
|
||||
self.fetched_history.insert(offset, text.clone());
|
||||
|
||||
if self.history_cursor == Some(offset as isize) {
|
||||
self.replace_textarea_content(textarea, &text);
|
||||
return true;
|
||||
self.last_history_text = Some(text.clone());
|
||||
return Some(text);
|
||||
}
|
||||
false
|
||||
None
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
@@ -167,21 +155,20 @@ impl ChatComposerHistory {
|
||||
fn populate_history_at_index(
|
||||
&mut self,
|
||||
global_idx: usize,
|
||||
textarea: &mut TextArea,
|
||||
app_event_tx: &AppEventSender,
|
||||
) {
|
||||
) -> Option<String> {
|
||||
if global_idx >= self.history_entry_count {
|
||||
// Local entry.
|
||||
if let Some(text) = self
|
||||
.local_history
|
||||
.get(global_idx - self.history_entry_count)
|
||||
{
|
||||
let t = text.clone();
|
||||
self.replace_textarea_content(textarea, &t);
|
||||
self.last_history_text = Some(text.clone());
|
||||
return Some(text.clone());
|
||||
}
|
||||
} else if let Some(text) = self.fetched_history.get(&global_idx) {
|
||||
let t = text.clone();
|
||||
self.replace_textarea_content(textarea, &t);
|
||||
self.last_history_text = Some(text.clone());
|
||||
return Some(text.clone());
|
||||
} else if let Some(log_id) = self.history_log_id {
|
||||
let op = Op::GetHistoryEntryRequest {
|
||||
offset: global_idx,
|
||||
@@ -189,14 +176,7 @@ impl ChatComposerHistory {
|
||||
};
|
||||
app_event_tx.send(AppEvent::CodexOp(op));
|
||||
}
|
||||
}
|
||||
|
||||
fn replace_textarea_content(&mut self, textarea: &mut TextArea, text: &str) {
|
||||
textarea.select_all();
|
||||
textarea.cut();
|
||||
let _ = textarea.insert_str(text);
|
||||
textarea.move_cursor(CursorMove::Jump(0, 0));
|
||||
self.last_history_text = Some(text.to_string());
|
||||
None
|
||||
}
|
||||
}
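The refactor above changes history navigation from mutating the textarea to returning Option<String> that the composer applies. A simplified, local-only sketch of that contract (it omits the async fetch via AppEvent, which returns None until the entry arrives):

struct LocalHistory {
    entries: Vec<String>,  // oldest first
    cursor: Option<usize>, // None = not browsing
}

impl LocalHistory {
    fn navigate_up(&mut self) -> Option<String> {
        if self.entries.is_empty() {
            return None;
        }
        let next = match self.cursor {
            None => self.entries.len() - 1,
            Some(0) => return None, // already at oldest
            Some(idx) => idx - 1,
        };
        self.cursor = Some(next);
        self.entries.get(next).cloned()
    }

    fn navigate_down(&mut self) -> Option<String> {
        let idx = self.cursor?; // not browsing
        if idx + 1 >= self.entries.len() {
            // Past newest: leave browsing mode and clear the composer.
            self.cursor = None;
            return Some(String::new());
        }
        self.cursor = Some(idx + 1);
        self.entries.get(idx + 1).cloned()
    }
}

fn main() {
    let mut h = LocalHistory {
        entries: vec!["first".into(), "second".into()],
        cursor: None,
    };
    assert_eq!(h.navigate_up(), Some("second".into()));
    assert_eq!(h.navigate_up(), Some("first".into()));
    assert_eq!(h.navigate_up(), None); // already at oldest
    assert_eq!(h.navigate_down(), Some("second".into()));
    assert_eq!(h.navigate_down(), Some(String::new())); // exits browsing
}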
|
||||
|
||||
@@ -217,11 +197,9 @@ mod tests {
|
||||
// Pretend there are 3 persistent entries.
|
||||
history.set_metadata(1, 3);
|
||||
|
||||
let mut textarea = TextArea::default();
|
||||
|
||||
// First Up should request offset 2 (latest) and await async data.
|
||||
assert!(history.should_handle_navigation(&textarea));
|
||||
assert!(history.navigate_up(&mut textarea, &tx));
|
||||
assert!(history.should_handle_navigation("", 0));
|
||||
assert!(history.navigate_up(&tx).is_none()); // don't replace the text yet
|
||||
|
||||
// Verify that an AppEvent::CodexOp with the correct GetHistoryEntryRequest was sent.
|
||||
let event = rx.try_recv().expect("expected AppEvent to be sent");
|
||||
@@ -235,14 +213,15 @@ mod tests {
|
||||
},
|
||||
history_request1
|
||||
);
|
||||
assert_eq!(textarea.lines().join("\n"), ""); // still empty
|
||||
|
||||
// Inject the async response.
|
||||
assert!(history.on_entry_response(1, 2, Some("latest".into()), &mut textarea));
|
||||
assert_eq!(textarea.lines().join("\n"), "latest");
|
||||
assert_eq!(
|
||||
Some("latest".into()),
|
||||
history.on_entry_response(1, 2, Some("latest".into()))
|
||||
);
|
||||
|
||||
// Next Up should move to offset 1.
|
||||
assert!(history.navigate_up(&mut textarea, &tx));
|
||||
assert!(history.navigate_up(&tx).is_none()); // don't replace the text yet
|
||||
|
||||
// Verify second CodexOp event for offset 1.
|
||||
let event2 = rx.try_recv().expect("expected second event");
|
||||
@@ -257,7 +236,9 @@ mod tests {
|
||||
history_request_2
|
||||
);
|
||||
|
||||
history.on_entry_response(1, 1, Some("older".into()), &mut textarea);
|
||||
assert_eq!(textarea.lines().join("\n"), "older");
|
||||
assert_eq!(
|
||||
Some("older".into()),
|
||||
history.on_entry_response(1, 1, Some("older".into()))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
45
codex-rs/tui/src/bottom_pane/live_ring_widget.rs
Normal file

@@ -0,0 +1,45 @@
|
||||
use ratatui::buffer::Buffer;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::text::Line;
|
||||
use ratatui::widgets::Paragraph;
|
||||
use ratatui::widgets::WidgetRef;
|
||||
|
||||
/// Minimal rendering-only widget for the transient ring rows.
|
||||
pub(crate) struct LiveRingWidget {
|
||||
max_rows: u16,
|
||||
rows: Vec<Line<'static>>, // newest at the end
|
||||
}
|
||||
|
||||
impl LiveRingWidget {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
max_rows: 3,
|
||||
rows: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_max_rows(&mut self, n: u16) {
|
||||
self.max_rows = n.max(1);
|
||||
}
|
||||
|
||||
pub fn set_rows(&mut self, rows: Vec<Line<'static>>) {
|
||||
self.rows = rows;
|
||||
}
|
||||
|
||||
pub fn desired_height(&self, _width: u16) -> u16 {
|
||||
let len = self.rows.len() as u16;
|
||||
len.min(self.max_rows)
|
||||
}
|
||||
}
|
||||
|
||||
impl WidgetRef for LiveRingWidget {
|
||||
fn render_ref(&self, area: Rect, buf: &mut Buffer) {
|
||||
if area.height == 0 {
|
||||
return;
|
||||
}
|
||||
let visible = self.rows.len().saturating_sub(self.max_rows as usize);
|
||||
let slice = &self.rows[visible..];
|
||||
let para = Paragraph::new(slice.to_vec());
|
||||
para.render_ref(area, buf);
|
||||
}
|
||||
}
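The render path of the new LiveRingWidget keeps only the newest rows when more rows are supplied than `max_rows` allows. A standalone sketch of that tail-window slice:

fn visible_rows<T>(rows: &[T], max_rows: usize) -> &[T] {
    // Skip everything except the last `max_rows` entries.
    let skip = rows.len().saturating_sub(max_rows);
    &rows[skip..]
}

fn main() {
    let rows = ["one", "two", "three", "four"];
    assert_eq!(visible_rows(&rows, 3), ["two", "three", "four"]);
    assert_eq!(visible_rows(&rows, 10), rows);
}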
|
||||
@@ -4,12 +4,12 @@ use crate::app_event::AppEvent;
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
use crate::user_approval_widget::ApprovalRequest;
|
||||
use bottom_pane_view::BottomPaneView;
|
||||
use bottom_pane_view::ConditionalUpdate;
|
||||
use codex_core::protocol::TokenUsage;
|
||||
use codex_file_search::FileMatch;
|
||||
use crossterm::event::KeyEvent;
|
||||
use ratatui::buffer::Buffer;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::text::Line;
|
||||
use ratatui::widgets::WidgetRef;
|
||||
|
||||
mod approval_modal_view;
|
||||
@@ -18,7 +18,9 @@ mod chat_composer;
|
||||
mod chat_composer_history;
|
||||
mod command_popup;
|
||||
mod file_search_popup;
|
||||
mod live_ring_widget;
|
||||
mod status_indicator_view;
|
||||
mod textarea;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub(crate) enum CancellationEvent {
|
||||
@@ -29,6 +31,7 @@ pub(crate) enum CancellationEvent {
|
||||
pub(crate) use chat_composer::ChatComposer;
|
||||
pub(crate) use chat_composer::InputResult;
|
||||
|
||||
use crate::status_indicator_widget::StatusIndicatorWidget;
|
||||
use approval_modal_view::ApprovalModalView;
|
||||
use status_indicator_view::StatusIndicatorView;
|
||||
|
||||
@@ -36,7 +39,7 @@ use status_indicator_view::StatusIndicatorView;
|
||||
pub(crate) struct BottomPane<'a> {
|
||||
/// Composer is retained even when a BottomPaneView is displayed so the
|
||||
/// input state is retained when the view is closed.
|
||||
composer: ChatComposer<'a>,
|
||||
composer: ChatComposer,
|
||||
|
||||
/// If present, this is displayed instead of the `composer`.
|
||||
active_view: Option<Box<dyn BottomPaneView<'a> + 'a>>,
|
||||
@@ -45,6 +48,19 @@ pub(crate) struct BottomPane<'a> {
|
||||
has_input_focus: bool,
|
||||
is_task_running: bool,
|
||||
ctrl_c_quit_hint: bool,
|
||||
|
||||
/// Optional live, multi‑line status/"live cell" rendered directly above
|
||||
/// the composer while a task is running. Unlike `active_view`, this does
|
||||
/// not replace the composer; it augments it.
|
||||
live_status: Option<StatusIndicatorWidget>,
|
||||
|
||||
/// Optional transient ring shown above the composer. This is a rendering-only
|
||||
/// container used during development before we wire it to ChatWidget events.
|
||||
live_ring: Option<live_ring_widget::LiveRingWidget>,
|
||||
|
||||
/// True if the active view is the StatusIndicatorView that replaces the
|
||||
/// composer during a running task.
|
||||
status_view_active: bool,
|
||||
}
|
||||
|
||||
pub(crate) struct BottomPaneParams {
|
||||
@@ -54,6 +70,7 @@ pub(crate) struct BottomPaneParams {
|
||||
}
|
||||
|
||||
impl BottomPane<'_> {
|
||||
const BOTTOM_PAD_LINES: u16 = 2;
|
||||
pub fn new(params: BottomPaneParams) -> Self {
|
||||
let enhanced_keys_supported = params.enhanced_keys_supported;
|
||||
Self {
|
||||
@@ -67,14 +84,52 @@ impl BottomPane<'_> {
|
||||
has_input_focus: params.has_input_focus,
|
||||
is_task_running: false,
|
||||
ctrl_c_quit_hint: false,
|
||||
live_status: None,
|
||||
live_ring: None,
|
||||
status_view_active: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn desired_height(&self, width: u16) -> u16 {
|
||||
self.active_view
|
||||
let overlay_status_h = self
|
||||
.live_status
|
||||
.as_ref()
|
||||
.map(|v| v.desired_height(width))
|
||||
.unwrap_or(self.composer.desired_height())
|
||||
.map(|s| s.desired_height(width))
|
||||
.unwrap_or(0);
|
||||
let ring_h = self
|
||||
.live_ring
|
||||
.as_ref()
|
||||
.map(|r| r.desired_height(width))
|
||||
.unwrap_or(0);
|
||||
|
||||
let view_height = if let Some(view) = self.active_view.as_ref() {
|
||||
// Add a single blank spacer line between live ring and status view when active.
|
||||
let spacer = if self.live_ring.is_some() && self.status_view_active {
|
||||
1
|
||||
} else {
|
||||
0
|
||||
};
|
||||
spacer + view.desired_height(width)
|
||||
} else {
|
||||
self.composer.desired_height(width)
|
||||
};
|
||||
|
||||
overlay_status_h
|
||||
.saturating_add(ring_h)
|
||||
.saturating_add(view_height)
|
||||
.saturating_add(Self::BOTTOM_PAD_LINES)
|
||||
}
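A hedged sketch of the height budget composed above, with illustrative parameter names and a simplified spacer condition (the real code inserts the spacer only when both the live ring and the status view are active): overlay status + live ring + either the active view or the composer, plus the fixed bottom padding.

const BOTTOM_PAD_LINES: u16 = 2;

fn pane_height(status_h: u16, ring_h: u16, view_h: Option<u16>, composer_h: u16) -> u16 {
    let body = match view_h {
        // One blank spacer line between the live ring and an active view.
        Some(v) => if ring_h > 0 { 1 + v } else { v },
        None => composer_h,
    };
    status_h
        .saturating_add(ring_h)
        .saturating_add(body)
        .saturating_add(BOTTOM_PAD_LINES)
}

fn main() {
    // Two live-ring rows above a one-line status view: 2 + 1 + 1 + 2 = 6.
    assert_eq!(pane_height(0, 2, Some(1), 0), 6);
    // No task running: just the composer plus padding.
    assert_eq!(pane_height(0, 0, None, 3), 5);
}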
|
||||
|
||||
pub fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> {
|
||||
// Hide the cursor whenever an overlay view is active (e.g. the
|
||||
// status indicator shown while a task is running, or approval modal).
|
||||
// In these states the textarea is not interactable, so we should not
|
||||
// show its caret.
|
||||
if self.active_view.is_some() {
|
||||
None
|
||||
} else {
|
||||
self.composer.cursor_pos(area)
|
||||
}
|
||||
}
|
||||
|
||||
/// Forward a key event to the active view or the composer.
|
||||
@@ -84,9 +139,10 @@ impl BottomPane<'_> {
|
||||
if !view.is_complete() {
|
||||
self.active_view = Some(view);
|
||||
} else if self.is_task_running {
|
||||
self.active_view = Some(Box::new(StatusIndicatorView::new(
|
||||
self.app_event_tx.clone(),
|
||||
)));
|
||||
let mut v = StatusIndicatorView::new(self.app_event_tx.clone());
|
||||
v.update_text("waiting for model".to_string());
|
||||
self.active_view = Some(Box::new(v));
|
||||
self.status_view_active = true;
|
||||
}
|
||||
self.request_redraw();
|
||||
InputResult::None
|
||||
@@ -113,9 +169,11 @@ impl BottomPane<'_> {
|
||||
if !view.is_complete() {
|
||||
self.active_view = Some(view);
|
||||
} else if self.is_task_running {
|
||||
self.active_view = Some(Box::new(StatusIndicatorView::new(
|
||||
self.app_event_tx.clone(),
|
||||
)));
|
||||
// Modal aborted but task still running – restore status indicator.
|
||||
let mut v = StatusIndicatorView::new(self.app_event_tx.clone());
|
||||
v.update_text("waiting for model".to_string());
|
||||
self.active_view = Some(Box::new(v));
|
||||
self.status_view_active = true;
|
||||
}
|
||||
self.show_ctrl_c_quit_hint();
|
||||
}
|
||||
@@ -135,19 +193,42 @@ impl BottomPane<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Update the status indicator text (only when the `StatusIndicatorView` is
|
||||
/// active).
|
||||
/// Update the status indicator text. Prefer replacing the composer with
|
||||
/// the StatusIndicatorView so the input pane shows a single-line status
|
||||
/// like: `▌ Working waiting for model`.
|
||||
pub(crate) fn update_status_text(&mut self, text: String) {
|
||||
if let Some(view) = &mut self.active_view {
|
||||
match view.update_status_text(text) {
|
||||
ConditionalUpdate::NeedsRedraw => {
|
||||
self.request_redraw();
|
||||
}
|
||||
ConditionalUpdate::NoRedraw => {
|
||||
// No redraw needed.
|
||||
}
|
||||
let mut handled_by_view = false;
|
||||
if let Some(view) = self.active_view.as_mut() {
|
||||
if matches!(
|
||||
view.update_status_text(text.clone()),
|
||||
bottom_pane_view::ConditionalUpdate::NeedsRedraw
|
||||
) {
|
||||
handled_by_view = true;
|
||||
}
|
||||
} else {
|
||||
let mut v = StatusIndicatorView::new(self.app_event_tx.clone());
|
||||
v.update_text(text.clone());
|
||||
self.active_view = Some(Box::new(v));
|
||||
self.status_view_active = true;
|
||||
handled_by_view = true;
|
||||
}
|
||||
|
||||
// Fallback: if the current active view did not consume status updates
|
||||
// and no modal view is active, present an overlay above the composer.
|
||||
// If a modal is active, do NOT render the overlay to avoid drawing
|
||||
// over the dialog.
|
||||
if !handled_by_view && self.active_view.is_none() {
|
||||
if self.live_status.is_none() {
|
||||
self.live_status = Some(StatusIndicatorWidget::new(self.app_event_tx.clone()));
|
||||
}
|
||||
if let Some(status) = &mut self.live_status {
|
||||
status.update_text(text);
|
||||
}
|
||||
} else if !handled_by_view {
|
||||
// Ensure any previous overlay is cleared when a modal becomes active.
|
||||
self.live_status = None;
|
||||
}
|
||||
self.request_redraw();
|
||||
}
|
||||
|
||||
pub(crate) fn show_ctrl_c_quit_hint(&mut self) {
|
||||
@@ -173,27 +254,23 @@ impl BottomPane<'_> {
|
||||
pub fn set_task_running(&mut self, running: bool) {
|
||||
self.is_task_running = running;
|
||||
|
||||
match (running, self.active_view.is_some()) {
|
||||
(true, false) => {
|
||||
// Show status indicator overlay.
|
||||
if running {
|
||||
if self.active_view.is_none() {
|
||||
self.active_view = Some(Box::new(StatusIndicatorView::new(
|
||||
self.app_event_tx.clone(),
|
||||
)));
|
||||
self.request_redraw();
|
||||
self.status_view_active = true;
|
||||
}
|
||||
(false, true) => {
|
||||
if let Some(mut view) = self.active_view.take() {
|
||||
if view.should_hide_when_task_is_done() {
|
||||
// Leave self.active_view as None.
|
||||
self.request_redraw();
|
||||
} else {
|
||||
// Preserve the view.
|
||||
self.active_view = Some(view);
|
||||
}
|
||||
self.request_redraw();
|
||||
} else {
|
||||
self.live_status = None;
|
||||
// Drop the status view when a task completes, but keep other
|
||||
// modal views (e.g. approval dialogs).
|
||||
if let Some(mut view) = self.active_view.take() {
|
||||
if !view.should_hide_when_task_is_done() {
|
||||
self.active_view = Some(view);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// No change.
|
||||
self.status_view_active = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -235,6 +312,9 @@ impl BottomPane<'_> {
|
||||
// Otherwise create a new approval modal overlay.
|
||||
let modal = ApprovalModalView::new(request, self.app_event_tx.clone());
|
||||
self.active_view = Some(Box::new(modal));
|
||||
// Hide any overlay status while a modal is visible.
|
||||
self.live_status = None;
|
||||
self.status_view_active = false;
|
||||
self.request_redraw()
|
||||
}
|
||||
|
||||
@@ -268,15 +348,82 @@ impl BottomPane<'_> {
|
||||
self.composer.on_file_search_result(query, matches);
|
||||
self.request_redraw();
|
||||
}
|
||||
|
||||
/// Set the rows and cap for the transient live ring overlay.
|
||||
pub(crate) fn set_live_ring_rows(&mut self, max_rows: u16, rows: Vec<Line<'static>>) {
|
||||
let mut w = live_ring_widget::LiveRingWidget::new();
|
||||
w.set_max_rows(max_rows);
|
||||
w.set_rows(rows);
|
||||
self.live_ring = Some(w);
|
||||
}
|
||||
|
||||
pub(crate) fn clear_live_ring(&mut self) {
|
||||
self.live_ring = None;
|
||||
}
|
||||
|
||||
// Removed restart_live_status_with_text – no longer used by the current streaming UI.
|
||||
}
|
||||
|
||||
impl WidgetRef for &BottomPane<'_> {
|
||||
fn render_ref(&self, area: Rect, buf: &mut Buffer) {
|
||||
// Show BottomPaneView if present.
|
||||
if let Some(ov) = &self.active_view {
|
||||
ov.render(area, buf);
|
||||
} else {
|
||||
(&self.composer).render_ref(area, buf);
|
||||
let mut y_offset = 0u16;
|
||||
if let Some(ring) = &self.live_ring {
|
||||
let live_h = ring.desired_height(area.width).min(area.height);
|
||||
if live_h > 0 {
|
||||
let live_rect = Rect {
|
||||
x: area.x,
|
||||
y: area.y,
|
||||
width: area.width,
|
||||
height: live_h,
|
||||
};
|
||||
ring.render_ref(live_rect, buf);
|
||||
y_offset = live_h;
|
||||
}
|
||||
}
|
||||
// Spacer between live ring and status view when active
|
||||
if self.live_ring.is_some() && self.status_view_active && y_offset < area.height {
|
||||
// Leave one empty line
|
||||
y_offset = y_offset.saturating_add(1);
|
||||
}
|
||||
if let Some(status) = &self.live_status {
|
||||
let live_h = status
|
||||
.desired_height(area.width)
|
||||
.min(area.height.saturating_sub(y_offset));
|
||||
if live_h > 0 {
|
||||
let live_rect = Rect {
|
||||
x: area.x,
|
||||
y: area.y + y_offset,
|
||||
width: area.width,
|
||||
height: live_h,
|
||||
};
|
||||
status.render_ref(live_rect, buf);
|
||||
y_offset = y_offset.saturating_add(live_h);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(view) = &self.active_view {
|
||||
if y_offset < area.height {
|
||||
// Reserve bottom padding lines; keep at least 1 line for the view.
|
||||
let avail = area.height - y_offset;
|
||||
let pad = BottomPane::BOTTOM_PAD_LINES.min(avail.saturating_sub(1));
|
||||
let view_rect = Rect {
|
||||
x: area.x,
|
||||
y: area.y + y_offset,
|
||||
width: area.width,
|
||||
height: avail - pad,
|
||||
};
|
||||
view.render(view_rect, buf);
|
||||
}
|
||||
} else if y_offset < area.height {
|
||||
let composer_rect = Rect {
|
||||
x: area.x,
|
||||
y: area.y + y_offset,
|
||||
width: area.width,
|
||||
// Reserve bottom padding
|
||||
height: (area.height - y_offset)
|
||||
- BottomPane::BOTTOM_PAD_LINES.min((area.height - y_offset).saturating_sub(1)),
|
||||
};
|
||||
(&self.composer).render_ref(composer_rect, buf);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -285,6 +432,9 @@ impl WidgetRef for &BottomPane<'_> {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::app_event::AppEvent;
|
||||
use ratatui::buffer::Buffer;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::text::Line;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::mpsc::channel;
|
||||
|
||||
@@ -311,4 +461,316 @@ mod tests {
|
||||
assert!(pane.ctrl_c_quit_hint_visible());
|
||||
assert_eq!(CancellationEvent::Ignored, pane.on_ctrl_c());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn live_ring_renders_above_composer() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut pane = BottomPane::new(BottomPaneParams {
|
||||
app_event_tx: tx,
|
||||
has_input_focus: true,
|
||||
enhanced_keys_supported: false,
|
||||
});
|
||||
|
||||
// Provide 4 rows with max_rows=3; only the last 3 should be visible.
|
||||
pane.set_live_ring_rows(
|
||||
3,
|
||||
vec![
|
||||
Line::from("one".to_string()),
|
||||
Line::from("two".to_string()),
|
||||
Line::from("three".to_string()),
|
||||
Line::from("four".to_string()),
|
||||
],
|
||||
);
|
||||
|
||||
let area = Rect::new(0, 0, 10, 5);
|
||||
let mut buf = Buffer::empty(area);
|
||||
(&pane).render_ref(area, &mut buf);
|
||||
|
||||
// Extract the first 3 rows and assert they contain the last three lines.
|
||||
let mut lines: Vec<String> = Vec::new();
|
||||
for y in 0..3 {
|
||||
let mut s = String::new();
|
||||
for x in 0..area.width {
|
||||
s.push(buf[(x, y)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
lines.push(s.trim_end().to_string());
|
||||
}
|
||||
assert_eq!(lines, vec!["two", "three", "four"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn status_indicator_visible_with_live_ring() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut pane = BottomPane::new(BottomPaneParams {
|
||||
app_event_tx: tx,
|
||||
has_input_focus: true,
|
||||
enhanced_keys_supported: false,
|
||||
});
|
||||
|
||||
// Simulate task running which replaces composer with the status indicator.
|
||||
pane.set_task_running(true);
|
||||
pane.update_status_text("waiting for model".to_string());
|
||||
|
||||
// Provide 2 rows in the live ring (e.g., streaming CoT) and ensure the
|
||||
// status indicator remains visible below them.
|
||||
pane.set_live_ring_rows(
|
||||
2,
|
||||
vec![
|
||||
Line::from("cot1".to_string()),
|
||||
Line::from("cot2".to_string()),
|
||||
],
|
||||
);
|
||||
|
||||
// Allow some frames so the dot animation is present.
|
||||
std::thread::sleep(std::time::Duration::from_millis(120));
|
||||
|
||||
// Height should include both ring rows, 1 spacer, and the 1-line status.
|
||||
let area = Rect::new(0, 0, 30, 4);
|
||||
let mut buf = Buffer::empty(area);
|
||||
(&pane).render_ref(area, &mut buf);
|
||||
|
||||
// Top two rows are the live ring.
|
||||
let mut r0 = String::new();
|
||||
let mut r1 = String::new();
|
||||
for x in 0..area.width {
|
||||
r0.push(buf[(x, 0)].symbol().chars().next().unwrap_or(' '));
|
||||
r1.push(buf[(x, 1)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert!(r0.contains("cot1"), "expected first live row: {r0:?}");
|
||||
assert!(r1.contains("cot2"), "expected second live row: {r1:?}");
|
||||
|
||||
// Row 2 is the spacer (blank)
|
||||
let mut r2 = String::new();
|
||||
for x in 0..area.width {
|
||||
r2.push(buf[(x, 2)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert!(r2.trim().is_empty(), "expected blank spacer line: {r2:?}");
|
||||
|
||||
// Bottom row is the status line; it should contain the left bar and "Working".
|
||||
let mut r3 = String::new();
|
||||
for x in 0..area.width {
|
||||
r3.push(buf[(x, 3)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert_eq!(buf[(0, 3)].symbol().chars().next().unwrap_or(' '), '▌');
|
||||
assert!(
|
||||
r3.contains("Working"),
|
||||
"expected Working header in status line: {r3:?}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn overlay_not_shown_above_approval_modal() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut pane = BottomPane::new(BottomPaneParams {
|
||||
app_event_tx: tx,
|
||||
has_input_focus: true,
|
||||
enhanced_keys_supported: false,
|
||||
});
|
||||
|
||||
// Create an approval modal (active view).
|
||||
pane.push_approval_request(exec_request());
|
||||
// Attempt to update status; this should NOT create an overlay while modal is visible.
|
||||
pane.update_status_text("running command".to_string());
|
||||
|
||||
// Render and verify the top row does not include the Working header overlay.
|
||||
let area = Rect::new(0, 0, 60, 6);
|
||||
let mut buf = Buffer::empty(area);
|
||||
(&pane).render_ref(area, &mut buf);
|
||||
|
||||
let mut r0 = String::new();
|
||||
for x in 0..area.width {
|
||||
r0.push(buf[(x, 0)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert!(
|
||||
!r0.contains("Working"),
|
||||
"overlay Working header should not render above modal"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn composer_not_shown_after_denied_if_task_running() {
|
||||
let (tx_raw, rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut pane = BottomPane::new(BottomPaneParams {
|
||||
app_event_tx: tx.clone(),
|
||||
has_input_focus: true,
|
||||
enhanced_keys_supported: false,
|
||||
});
|
||||
|
||||
// Start a running task so the status indicator replaces the composer.
|
||||
pane.set_task_running(true);
|
||||
pane.update_status_text("waiting for model".to_string());
|
||||
|
||||
// Push an approval modal (e.g., command approval) which should hide the status view.
|
||||
pane.push_approval_request(exec_request());
|
||||
|
||||
// Simulate pressing 'n' (deny) on the modal.
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
pane.handle_key_event(KeyEvent::new(KeyCode::Char('n'), KeyModifiers::NONE));
|
||||
|
||||
// After denial, since the task is still running, the status indicator
|
||||
// should be restored as the active view; the composer should NOT be visible.
|
||||
assert!(
|
||||
pane.status_view_active,
|
||||
"status view should be active after denial"
|
||||
);
|
||||
assert!(pane.active_view.is_some(), "active view should be present");
|
||||
|
||||
// Render and ensure the top row includes the Working header instead of the composer.
|
||||
// Give the animation thread a moment to tick.
|
||||
std::thread::sleep(std::time::Duration::from_millis(120));
|
||||
let area = Rect::new(0, 0, 40, 3);
|
||||
let mut buf = Buffer::empty(area);
|
||||
(&pane).render_ref(area, &mut buf);
|
||||
let mut row0 = String::new();
|
||||
for x in 0..area.width {
|
||||
row0.push(buf[(x, 0)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert!(
|
||||
row0.contains("Working"),
|
||||
"expected Working header after denial: {row0:?}"
|
||||
);
|
||||
|
||||
// Drain the channel to avoid unused warnings.
|
||||
drop(rx);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn status_indicator_visible_during_command_execution() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut pane = BottomPane::new(BottomPaneParams {
|
||||
app_event_tx: tx,
|
||||
has_input_focus: true,
|
||||
enhanced_keys_supported: false,
|
||||
});
|
||||
|
||||
// Begin a task: show initial status.
|
||||
pane.set_task_running(true);
|
||||
pane.update_status_text("waiting for model".to_string());
|
||||
|
||||
// As a long-running command begins (post-approval), ensure the status
|
||||
// indicator is visible while we wait for the command to run.
|
||||
pane.update_status_text("running command".to_string());
|
||||
|
||||
// Allow some frames so the animation thread ticks.
|
||||
std::thread::sleep(std::time::Duration::from_millis(120));
|
||||
|
||||
// Render and confirm the line contains the "Working" header.
|
||||
let area = Rect::new(0, 0, 40, 3);
|
||||
let mut buf = Buffer::empty(area);
|
||||
(&pane).render_ref(area, &mut buf);
|
||||
|
||||
let mut row0 = String::new();
|
||||
for x in 0..area.width {
|
||||
row0.push(buf[(x, 0)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert!(
|
||||
row0.contains("Working"),
|
||||
"expected Working header: {row0:?}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bottom_padding_present_for_status_view() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut pane = BottomPane::new(BottomPaneParams {
|
||||
app_event_tx: tx,
|
||||
has_input_focus: true,
|
||||
enhanced_keys_supported: false,
|
||||
});
|
||||
|
||||
// Activate spinner (status view replaces composer) with no live ring.
|
||||
pane.set_task_running(true);
|
||||
pane.update_status_text("waiting for model".to_string());
|
||||
|
||||
// Use height == desired_height; expect 1 status row at top and 2 bottom padding rows.
|
||||
let height = pane.desired_height(30);
|
||||
assert!(
|
||||
height >= 3,
|
||||
"expected at least 3 rows with bottom padding; got {height}"
|
||||
);
|
||||
let area = Rect::new(0, 0, 30, height);
|
||||
let mut buf = Buffer::empty(area);
|
||||
(&pane).render_ref(area, &mut buf);
|
||||
|
||||
// Top row contains the status header
|
||||
let mut top = String::new();
|
||||
for x in 0..area.width {
|
||||
top.push(buf[(x, 0)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert_eq!(buf[(0, 0)].symbol().chars().next().unwrap_or(' '), '▌');
|
||||
assert!(
|
||||
top.contains("Working"),
|
||||
"expected Working header on top row: {top:?}"
|
||||
);
|
||||
|
||||
// Bottom two rows are blank padding
|
||||
let mut r_last = String::new();
|
||||
let mut r_last2 = String::new();
|
||||
for x in 0..area.width {
|
||||
r_last.push(buf[(x, height - 1)].symbol().chars().next().unwrap_or(' '));
|
||||
r_last2.push(buf[(x, height - 2)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert!(
|
||||
r_last.trim().is_empty(),
|
||||
"expected last row blank: {r_last:?}"
|
||||
);
|
||||
assert!(
|
||||
r_last2.trim().is_empty(),
|
||||
"expected second-to-last row blank: {r_last2:?}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bottom_padding_shrinks_when_tiny() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut pane = BottomPane::new(BottomPaneParams {
|
||||
app_event_tx: tx,
|
||||
has_input_focus: true,
|
||||
enhanced_keys_supported: false,
|
||||
});
|
||||
|
||||
pane.set_task_running(true);
|
||||
pane.update_status_text("waiting for model".to_string());
|
||||
|
||||
// Height=2 → pad shrinks to 1; bottom row is blank, top row has spinner.
|
||||
let area2 = Rect::new(0, 0, 20, 2);
|
||||
let mut buf2 = Buffer::empty(area2);
|
||||
(&pane).render_ref(area2, &mut buf2);
|
||||
let mut row0 = String::new();
|
||||
let mut row1 = String::new();
|
||||
for x in 0..area2.width {
|
||||
row0.push(buf2[(x, 0)].symbol().chars().next().unwrap_or(' '));
|
||||
row1.push(buf2[(x, 1)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert!(
|
||||
row0.contains("Working"),
|
||||
"expected Working header on row 0: {row0:?}"
|
||||
);
|
||||
assert!(
|
||||
row1.trim().is_empty(),
|
||||
"expected bottom padding on row 1: {row1:?}"
|
||||
);
|
||||
|
||||
// Height=1 → no padding; single row is the spinner.
|
||||
let area1 = Rect::new(0, 0, 20, 1);
|
||||
let mut buf1 = Buffer::empty(area1);
|
||||
(&pane).render_ref(area1, &mut buf1);
|
||||
let mut only = String::new();
|
||||
for x in 0..area1.width {
|
||||
only.push(buf1[(x, 0)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert!(
|
||||
only.contains("Working"),
|
||||
"expected Working header with no padding: {only:?}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
1305
codex-rs/tui/src/bottom_pane/textarea.rs
Normal file
File diff suppressed because it is too large
@@ -1,7 +1,6 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use codex_core::codex_wrapper::CodexConversation;
|
||||
use codex_core::codex_wrapper::init_codex;
|
||||
@@ -10,6 +9,8 @@ use codex_core::protocol::AgentMessageDeltaEvent;
|
||||
use codex_core::protocol::AgentMessageEvent;
|
||||
use codex_core::protocol::AgentReasoningDeltaEvent;
|
||||
use codex_core::protocol::AgentReasoningEvent;
|
||||
use codex_core::protocol::AgentReasoningRawContentDeltaEvent;
|
||||
use codex_core::protocol::AgentReasoningRawContentEvent;
|
||||
use codex_core::protocol::ApplyPatchApprovalRequestEvent;
|
||||
use codex_core::protocol::ErrorEvent;
|
||||
use codex_core::protocol::Event;
|
||||
@@ -43,8 +44,10 @@ use crate::exec_command::strip_bash_lc_and_escape;
|
||||
use crate::history_cell::CommandOutput;
|
||||
use crate::history_cell::HistoryCell;
|
||||
use crate::history_cell::PatchEventType;
|
||||
use crate::live_wrap::RowBuilder;
|
||||
use crate::user_approval_widget::ApprovalRequest;
|
||||
use codex_file_search::FileMatch;
|
||||
use ratatui::style::Stylize;
|
||||
|
||||
struct RunningCommand {
|
||||
command: Vec<String>,
|
||||
@@ -60,11 +63,16 @@ pub(crate) struct ChatWidget<'a> {
|
||||
initial_user_message: Option<UserMessage>,
|
||||
token_usage: TokenUsage,
|
||||
reasoning_buffer: String,
|
||||
content_buffer: String,
|
||||
// Buffer for streaming assistant answer text; we do not surface partial
|
||||
// We wait for the final AgentMessage event and then emit the full text
|
||||
// at once into scrollback so the history contains a single message.
|
||||
answer_buffer: String,
|
||||
running_commands: HashMap<String, RunningCommand>,
|
||||
live_builder: RowBuilder,
|
||||
current_stream: Option<StreamKind>,
|
||||
stream_header_emitted: bool,
|
||||
live_max_rows: u16,
|
||||
}
|
||||
|
||||
struct UserMessage {
|
||||
@@ -72,6 +80,12 @@ struct UserMessage {
|
||||
image_paths: Vec<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
enum StreamKind {
|
||||
Answer,
|
||||
Reasoning,
|
||||
}
|
||||
|
||||
impl From<String> for UserMessage {
|
||||
fn from(text: String) -> Self {
|
||||
Self {
|
||||
@@ -90,6 +104,24 @@ fn create_initial_user_message(text: String, image_paths: Vec<PathBuf>) -> Optio
|
||||
}
|
||||
|
||||
impl ChatWidget<'_> {
|
||||
fn emit_stream_header(&mut self, kind: StreamKind) {
|
||||
use ratatui::text::Line as RLine;
|
||||
if self.stream_header_emitted {
|
||||
return;
|
||||
}
|
||||
let header = match kind {
|
||||
StreamKind::Reasoning => RLine::from("thinking".magenta().italic()),
|
||||
StreamKind::Answer => RLine::from("codex".magenta().bold()),
|
||||
};
|
||||
self.app_event_tx
|
||||
.send(AppEvent::InsertHistory(vec![header]));
|
||||
self.stream_header_emitted = true;
|
||||
}
|
||||
fn finalize_active_stream(&mut self) {
|
||||
if let Some(kind) = self.current_stream {
|
||||
self.finalize_stream(kind);
|
||||
}
|
||||
}
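A minimal sketch of the once-per-stream header behaviour shown in emit_stream_header above: the "thinking"/"codex" header line is pushed at most once while the flag is unset, and the flag is assumed to be cleared when the stream is finalized (finalize_stream itself is defined elsewhere in this diff). Types here are simplified stand-ins for the real AppEvent plumbing.

#[derive(Clone, Copy)]
enum StreamKind {
    Answer,
    Reasoning,
}

struct Headers {
    emitted: bool,
    history: Vec<&'static str>,
}

impl Headers {
    fn emit(&mut self, kind: StreamKind) {
        if self.emitted {
            return;
        }
        self.history.push(match kind {
            StreamKind::Reasoning => "thinking",
            StreamKind::Answer => "codex",
        });
        self.emitted = true;
    }
}

fn main() {
    let mut h = Headers { emitted: false, history: Vec::new() };
    h.emit(StreamKind::Reasoning);
    h.emit(StreamKind::Reasoning); // no duplicate header
    assert_eq!(h.history, vec!["thinking"]);
}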
|
||||
pub(crate) fn new(
|
||||
config: Config,
|
||||
app_event_tx: AppEventSender,
|
||||
@@ -150,8 +182,13 @@ impl ChatWidget<'_> {
|
||||
),
|
||||
token_usage: TokenUsage::default(),
|
||||
reasoning_buffer: String::new(),
|
||||
content_buffer: String::new(),
|
||||
answer_buffer: String::new(),
|
||||
running_commands: HashMap::new(),
|
||||
live_builder: RowBuilder::new(80),
|
||||
current_stream: None,
|
||||
stream_header_emitted: false,
|
||||
live_max_rows: 3,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -235,58 +272,59 @@ impl ChatWidget<'_> {
|
||||
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentMessage(AgentMessageEvent { message }) => {
|
||||
// Final assistant answer. Prefer the fully provided message
|
||||
// from the event; if it is empty fall back to any accumulated
|
||||
// delta buffer (some providers may only stream deltas and send
|
||||
// an empty final message).
|
||||
let full = if message.is_empty() {
|
||||
std::mem::take(&mut self.answer_buffer)
|
||||
} else {
|
||||
self.answer_buffer.clear();
|
||||
message
|
||||
};
|
||||
if !full.is_empty() {
|
||||
self.add_to_history(HistoryCell::new_agent_message(&self.config, full));
|
||||
}
|
||||
EventMsg::AgentMessage(AgentMessageEvent { message: _ }) => {
|
||||
// Final assistant answer: commit all remaining rows and close with
|
||||
// a blank line. Use the final text if provided, otherwise rely on
|
||||
// streamed deltas already in the builder.
|
||||
self.finalize_stream(StreamKind::Answer);
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentMessageDelta(AgentMessageDeltaEvent { delta }) => {
|
||||
// Buffer only – do not emit partial lines. This avoids cases
|
||||
// where long responses appear truncated if the terminal
|
||||
// wrapped early. The full message is emitted on
|
||||
// AgentMessage.
|
||||
self.begin_stream(StreamKind::Answer);
|
||||
self.answer_buffer.push_str(&delta);
|
||||
self.stream_push_and_maybe_commit(&delta);
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentReasoningDelta(AgentReasoningDeltaEvent { delta }) => {
|
||||
// Buffer only – disable incremental reasoning streaming so we
|
||||
// avoid truncated intermediate lines. Full text emitted on
|
||||
// AgentReasoning.
|
||||
// Stream CoT into the live pane; keep input visible and commit
|
||||
// overflow rows incrementally to scrollback.
|
||||
self.begin_stream(StreamKind::Reasoning);
|
||||
self.reasoning_buffer.push_str(&delta);
|
||||
self.stream_push_and_maybe_commit(&delta);
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentReasoning(AgentReasoningEvent { text }) => {
|
||||
// Emit full reasoning text once. Some providers might send
|
||||
// final event with empty text if only deltas were used.
|
||||
let full = if text.is_empty() {
|
||||
std::mem::take(&mut self.reasoning_buffer)
|
||||
} else {
|
||||
self.reasoning_buffer.clear();
|
||||
text
|
||||
};
|
||||
if !full.is_empty() {
|
||||
self.add_to_history(HistoryCell::new_agent_reasoning(&self.config, full));
|
||||
}
|
||||
EventMsg::AgentReasoning(AgentReasoningEvent { text: _ }) => {
|
||||
// Final reasoning: commit remaining rows and close with a blank.
|
||||
self.finalize_stream(StreamKind::Reasoning);
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentReasoningRawContentDelta(AgentReasoningRawContentDeltaEvent {
|
||||
delta,
|
||||
}) => {
|
||||
// Treat raw reasoning content the same as summarized reasoning for UI flow.
|
||||
self.begin_stream(StreamKind::Reasoning);
|
||||
self.reasoning_buffer.push_str(&delta);
|
||||
self.stream_push_and_maybe_commit(&delta);
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent { text: _ }) => {
|
||||
// Finalize the raw reasoning stream just like the summarized reasoning event.
|
||||
self.finalize_stream(StreamKind::Reasoning);
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::TaskStarted => {
|
||||
self.bottom_pane.clear_ctrl_c_quit_hint();
|
||||
self.bottom_pane.set_task_running(true);
|
||||
// Replace composer with single-line spinner while waiting.
|
||||
self.bottom_pane
|
||||
.update_status_text("waiting for model".to_string());
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::TaskComplete(TaskCompleteEvent {
|
||||
last_agent_message: _,
|
||||
}) => {
|
||||
self.bottom_pane.set_task_running(false);
|
||||
self.bottom_pane.clear_live_ring();
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::TokenCount(token_usage) => {
|
||||
@@ -297,10 +335,18 @@ impl ChatWidget<'_> {
|
||||
EventMsg::Error(ErrorEvent { message }) => {
|
||||
self.add_to_history(HistoryCell::new_error_event(message.clone()));
|
||||
self.bottom_pane.set_task_running(false);
|
||||
self.bottom_pane.clear_live_ring();
|
||||
self.live_builder = RowBuilder::new(self.live_builder.width());
|
||||
self.current_stream = None;
|
||||
self.stream_header_emitted = false;
|
||||
self.answer_buffer.clear();
|
||||
self.reasoning_buffer.clear();
|
||||
self.content_buffer.clear();
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::PlanUpdate(update) => {
|
||||
// Commit plan updates directly to history (no status-line preview).
|
||||
self.add_to_history(HistoryCell::new_plan_update(update));
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent {
|
||||
call_id: _,
|
||||
@@ -308,8 +354,8 @@ impl ChatWidget<'_> {
|
||||
cwd,
|
||||
reason,
|
||||
}) => {
|
||||
// Print the command to the history so it is visible in the
|
||||
// transcript *before* the modal asks for approval.
|
||||
self.finalize_active_stream();
|
||||
// Log a background summary immediately so the history is chronological.
|
||||
let cmdline = strip_bash_lc_and_escape(&command);
|
||||
let text = format!(
|
||||
"command requires approval:\n$ {cmdline}{reason}",
|
||||
@@ -335,6 +381,7 @@ impl ChatWidget<'_> {
|
||||
reason,
|
||||
grant_root,
|
||||
}) => {
|
||||
self.finalize_active_stream();
|
||||
// ------------------------------------------------------------------
|
||||
// Before we even prompt the user for approval we surface the patch
|
||||
// summary in the main conversation so that the dialog appears in a
|
||||
@@ -345,7 +392,6 @@ impl ChatWidget<'_> {
|
||||
// approval dialog) and avoids surprising the user with a modal
|
||||
// prompt before they have seen *what* is being requested.
|
||||
// ------------------------------------------------------------------
|
||||
|
||||
self.add_to_history(HistoryCell::new_patch_event(
|
||||
PatchEventType::ApprovalRequest,
|
||||
changes,
|
||||
@@ -365,6 +411,10 @@ impl ChatWidget<'_> {
|
||||
command,
|
||||
cwd,
|
||||
}) => {
|
||||
self.finalize_active_stream();
|
||||
// Ensure the status indicator is visible while the command runs.
|
||||
self.bottom_pane
|
||||
.update_status_text("running command".to_string());
|
||||
self.running_commands.insert(
|
||||
call_id,
|
||||
RunningCommand {
|
||||
@@ -380,8 +430,6 @@ impl ChatWidget<'_> {
|
||||
auto_approved,
|
||||
changes,
|
||||
}) => {
|
||||
// Even when a patch is auto‑approved we still display the
|
||||
// summary so the user can follow along.
|
||||
self.add_to_history(HistoryCell::new_patch_event(
|
||||
PatchEventType::ApplyBegin { auto_approved },
|
||||
changes,
|
||||
@@ -390,9 +438,11 @@ impl ChatWidget<'_> {
|
||||
EventMsg::ExecCommandEnd(ExecCommandEndEvent {
|
||||
call_id,
|
||||
exit_code,
|
||||
duration,
|
||||
stdout,
|
||||
stderr,
|
||||
}) => {
|
||||
// Compute summary before moving stdout into the history cell.
|
||||
let cmd = self.running_commands.remove(&call_id);
|
||||
self.add_to_history(HistoryCell::new_completed_exec_command(
|
||||
cmd.map(|cmd| cmd.command).unwrap_or_else(|| vec![call_id]),
|
||||
@@ -400,7 +450,7 @@ impl ChatWidget<'_> {
|
||||
exit_code,
|
||||
stdout,
|
||||
stderr,
|
||||
duration: Duration::from_secs(0),
|
||||
duration,
|
||||
},
|
||||
));
|
||||
}
|
||||
@@ -408,6 +458,7 @@ impl ChatWidget<'_> {
|
||||
call_id: _,
|
||||
invocation,
|
||||
}) => {
|
||||
self.finalize_active_stream();
|
||||
self.add_to_history(HistoryCell::new_active_mcp_tool_call(invocation));
|
||||
}
|
||||
EventMsg::McpToolCallEnd(McpToolCallEndEvent {
|
||||
@@ -442,15 +493,18 @@ impl ChatWidget<'_> {
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
}
|
||||
event => {
|
||||
self.add_to_history(HistoryCell::new_background_event(format!("{event:?}")));
|
||||
let text = format!("{event:?}");
|
||||
self.add_to_history(HistoryCell::new_background_event(text.clone()));
|
||||
self.update_latest_log(text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Update the live log preview while a task is running.
|
||||
pub(crate) fn update_latest_log(&mut self, line: String) {
|
||||
// Forward only if we are currently showing the status indicator.
|
||||
self.bottom_pane.update_status_text(line);
|
||||
if self.bottom_pane.is_task_running() {
|
||||
self.bottom_pane.update_status_text(line);
|
||||
}
|
||||
}
|
||||
|
||||
fn request_redraw(&mut self) {
|
||||
@@ -477,8 +531,15 @@ impl ChatWidget<'_> {
|
||||
if self.bottom_pane.is_task_running() {
|
||||
self.bottom_pane.clear_ctrl_c_quit_hint();
|
||||
self.submit_op(Op::Interrupt);
|
||||
self.bottom_pane.set_task_running(false);
|
||||
self.bottom_pane.clear_live_ring();
|
||||
self.live_builder = RowBuilder::new(self.live_builder.width());
|
||||
self.current_stream = None;
|
||||
self.stream_header_emitted = false;
|
||||
self.answer_buffer.clear();
|
||||
self.reasoning_buffer.clear();
|
||||
self.content_buffer.clear();
|
||||
self.request_redraw();
|
||||
CancellationEvent::Ignored
|
||||
} else if self.bottom_pane.ctrl_c_quit_hint_visible() {
|
||||
self.submit_op(Op::Shutdown);
|
||||
@@ -509,6 +570,108 @@ impl ChatWidget<'_> {
|
||||
self.bottom_pane
|
||||
.set_token_usage(self.token_usage.clone(), self.config.model_context_window);
|
||||
}
|
||||
|
||||
pub fn cursor_pos(&self, area: Rect) -> Option<(u16, u16)> {
|
||||
self.bottom_pane.cursor_pos(area)
|
||||
}
|
||||
}
|
||||
|
||||
impl ChatWidget<'_> {
|
||||
fn begin_stream(&mut self, kind: StreamKind) {
|
||||
if let Some(current) = self.current_stream {
|
||||
if current != kind {
|
||||
self.finalize_stream(current);
|
||||
}
|
||||
}
|
||||
|
||||
if self.current_stream != Some(kind) {
|
||||
self.current_stream = Some(kind);
|
||||
self.stream_header_emitted = false;
|
||||
// Clear any previous live content; we're starting a new stream.
|
||||
self.live_builder = RowBuilder::new(self.live_builder.width());
|
||||
// Ensure the waiting status is visible (composer replaced).
|
||||
self.bottom_pane
|
||||
.update_status_text("waiting for model".to_string());
|
||||
self.emit_stream_header(kind);
|
||||
}
|
||||
}
|
||||
|
||||
fn stream_push_and_maybe_commit(&mut self, delta: &str) {
|
||||
self.live_builder.push_fragment(delta);
|
||||
|
||||
// Commit overflow rows (small batches) while keeping the last N rows visible.
|
||||
let drained = self
|
||||
.live_builder
|
||||
.drain_commit_ready(self.live_max_rows as usize);
|
||||
if !drained.is_empty() {
|
||||
let mut lines: Vec<ratatui::text::Line<'static>> = Vec::new();
|
||||
if !self.stream_header_emitted {
|
||||
match self.current_stream {
|
||||
Some(StreamKind::Reasoning) => {
|
||||
lines.push(ratatui::text::Line::from("thinking".magenta().italic()));
|
||||
}
|
||||
Some(StreamKind::Answer) => {
|
||||
lines.push(ratatui::text::Line::from("codex".magenta().bold()));
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
self.stream_header_emitted = true;
|
||||
}
|
||||
for r in drained {
|
||||
lines.push(ratatui::text::Line::from(r.text));
|
||||
}
|
||||
self.app_event_tx.send(AppEvent::InsertHistory(lines));
|
||||
}
|
||||
|
||||
// Update the live ring overlay lines (text-only, newest at bottom).
|
||||
let rows = self
|
||||
.live_builder
|
||||
.display_rows()
|
||||
.into_iter()
|
||||
.map(|r| ratatui::text::Line::from(r.text))
|
||||
.collect::<Vec<_>>();
|
||||
self.bottom_pane
|
||||
.set_live_ring_rows(self.live_max_rows, rows);
|
||||
}
|
||||
|
||||
fn finalize_stream(&mut self, kind: StreamKind) {
|
||||
if self.current_stream != Some(kind) {
|
||||
// Nothing to do; either already finalized or not the active stream.
|
||||
return;
|
||||
}
|
||||
// Flush any partial line as a full row, then drain all remaining rows.
|
||||
self.live_builder.end_line();
|
||||
let remaining = self.live_builder.drain_rows();
|
||||
// TODO: Re-add markdown rendering for assistant answers and reasoning.
|
||||
// When finalizing, pass the accumulated text through `markdown::append_markdown`
|
||||
// to build styled `Line<'static>` entries instead of raw plain text lines.
|
||||
if !remaining.is_empty() || !self.stream_header_emitted {
|
||||
let mut lines: Vec<ratatui::text::Line<'static>> = Vec::new();
|
||||
if !self.stream_header_emitted {
|
||||
match kind {
|
||||
StreamKind::Reasoning => {
|
||||
lines.push(ratatui::text::Line::from("thinking".magenta().italic()));
|
||||
}
|
||||
StreamKind::Answer => {
|
||||
lines.push(ratatui::text::Line::from("codex".magenta().bold()));
|
||||
}
|
||||
}
|
||||
self.stream_header_emitted = true;
|
||||
}
|
||||
for r in remaining {
|
||||
lines.push(ratatui::text::Line::from(r.text));
|
||||
}
|
||||
// Close the block with a blank line for readability.
|
||||
lines.push(ratatui::text::Line::from(""));
|
||||
self.app_event_tx.send(AppEvent::InsertHistory(lines));
|
||||
}
|
||||
|
||||
// Clear the live overlay and reset state for the next stream.
|
||||
self.live_builder = RowBuilder::new(self.live_builder.width());
|
||||
self.bottom_pane.clear_live_ring();
|
||||
self.current_stream = None;
|
||||
self.stream_header_emitted = false;
|
||||
}
|
||||
}
|
||||
|
||||
impl WidgetRef for &ChatWidget<'_> {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use crate::exec_command::strip_bash_lc_and_escape;
|
||||
use crate::markdown::append_markdown;
|
||||
use crate::text_block::TextBlock;
|
||||
use crate::text_formatting::format_and_truncate_tool_result;
|
||||
use base64::Engine;
|
||||
@@ -8,7 +7,6 @@ use codex_common::elapsed::format_duration;
|
||||
use codex_common::summarize_sandbox_policy;
|
||||
use codex_core::WireApi;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::model_supports_reasoning_summaries;
|
||||
use codex_core::plan_tool::PlanItemArg;
|
||||
use codex_core::plan_tool::StepStatus;
|
||||
use codex_core::plan_tool::UpdatePlanArgs;
|
||||
@@ -68,12 +66,7 @@ pub(crate) enum HistoryCell {
|
||||
/// Message from the user.
|
||||
UserPrompt { view: TextBlock },
|
||||
|
||||
/// Message from the agent.
|
||||
AgentMessage { view: TextBlock },
|
||||
|
||||
/// Reasoning event from the agent.
|
||||
AgentReasoning { view: TextBlock },
|
||||
|
||||
// AgentMessage and AgentReasoning variants were unused and have been removed.
|
||||
/// An exec tool call that has not finished yet.
|
||||
ActiveExecCommand { view: TextBlock },
|
||||
|
||||
@@ -128,8 +121,6 @@ impl HistoryCell {
|
||||
match self {
|
||||
HistoryCell::WelcomeMessage { view }
|
||||
| HistoryCell::UserPrompt { view }
|
||||
| HistoryCell::AgentMessage { view }
|
||||
| HistoryCell::AgentReasoning { view }
|
||||
| HistoryCell::BackgroundEvent { view }
|
||||
| HistoryCell::GitDiffOutput { view }
|
||||
| HistoryCell::ErrorEvent { view }
|
||||
@@ -185,7 +176,7 @@ impl HistoryCell {
|
||||
("sandbox", summarize_sandbox_policy(&config.sandbox_policy)),
|
||||
];
|
||||
if config.model_provider.wire_api == WireApi::Responses
|
||||
&& model_supports_reasoning_summaries(config)
|
||||
&& config.model_family.supports_reasoning_summaries
|
||||
{
|
||||
entries.push((
|
||||
"reasoning effort",
|
||||
@@ -231,28 +222,6 @@ impl HistoryCell {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn new_agent_message(config: &Config, message: String) -> Self {
|
||||
let mut lines: Vec<Line<'static>> = Vec::new();
|
||||
lines.push(Line::from("codex".magenta().bold()));
|
||||
append_markdown(&message, &mut lines, config);
|
||||
lines.push(Line::from(""));
|
||||
|
||||
HistoryCell::AgentMessage {
|
||||
view: TextBlock::new(lines),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn new_agent_reasoning(config: &Config, text: String) -> Self {
|
||||
let mut lines: Vec<Line<'static>> = Vec::new();
|
||||
lines.push(Line::from("thinking".magenta().italic()));
|
||||
append_markdown(&text, &mut lines, config);
|
||||
lines.push(Line::from(""));
|
||||
|
||||
HistoryCell::AgentReasoning {
|
||||
view: TextBlock::new(lines),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn new_active_exec_command(command: Vec<String>) -> Self {
|
||||
let command_escaped = strip_bash_lc_and_escape(&command);
|
||||
|
||||
|
||||
@@ -14,7 +14,6 @@ use crossterm::style::SetBackgroundColor;
|
||||
use crossterm::style::SetColors;
|
||||
use crossterm::style::SetForegroundColor;
|
||||
use ratatui::layout::Size;
|
||||
use ratatui::prelude::Backend;
|
||||
use ratatui::style::Color;
|
||||
use ratatui::style::Modifier;
|
||||
use ratatui::text::Line;
|
||||
@@ -22,6 +21,20 @@ use ratatui::text::Span;
|
||||
|
||||
/// Insert `lines` above the viewport.
|
||||
pub(crate) fn insert_history_lines(terminal: &mut tui::Tui, lines: Vec<Line>) {
|
||||
let mut out = std::io::stdout();
|
||||
insert_history_lines_to_writer(terminal, &mut out, lines);
|
||||
}
|
||||
|
||||
/// Like `insert_history_lines`, but writes ANSI to the provided writer. This
|
||||
/// is intended for testing where a capture buffer is used instead of stdout.
|
||||
pub fn insert_history_lines_to_writer<B, W>(
|
||||
terminal: &mut crate::custom_terminal::Terminal<B>,
|
||||
writer: &mut W,
|
||||
lines: Vec<Line>,
|
||||
) where
|
||||
B: ratatui::backend::Backend,
|
||||
W: Write,
|
||||
{
|
||||
let screen_size = terminal.backend().size().unwrap_or(Size::new(0, 0));
|
||||
let cursor_pos = terminal.get_cursor_position().ok();
|
||||
|
||||
@@ -32,10 +45,22 @@ pub(crate) fn insert_history_lines(terminal: &mut tui::Tui, lines: Vec<Line>) {
|
||||
// If the viewport is not at the bottom of the screen, scroll it down to make room.
|
||||
// Don't scroll it past the bottom of the screen.
|
||||
let scroll_amount = wrapped_lines.min(screen_size.height - area.bottom());
|
||||
terminal
|
||||
.backend_mut()
|
||||
.scroll_region_down(area.top()..screen_size.height, scroll_amount)
|
||||
.ok();
|
||||
|
||||
// Emit ANSI to scroll the lower region (from the top of the viewport to the bottom
|
||||
// of the screen) downward by `scroll_amount` lines. We do this by:
|
||||
// 1) Limiting the scroll region to [area.top()+1 .. screen_height] (1-based bounds)
|
||||
// 2) Placing the cursor at the top margin of that region
|
||||
// 3) Emitting Reverse Index (RI, ESC M) `scroll_amount` times
|
||||
// 4) Resetting the scroll region back to full screen
|
||||
let top_1based = area.top() + 1; // Convert 0-based row to 1-based for DECSTBM
|
||||
queue!(writer, SetScrollRegion(top_1based..screen_size.height)).ok();
|
||||
queue!(writer, MoveTo(0, area.top())).ok();
|
||||
for _ in 0..scroll_amount {
|
||||
// Reverse Index (RI): ESC M
|
||||
queue!(writer, Print("\x1bM")).ok();
|
||||
}
|
||||
queue!(writer, ResetScrollRegion).ok();
|
||||
|
||||
let cursor_top = area.top().saturating_sub(1);
|
||||
area.y += scroll_amount;
|
||||
terminal.set_viewport_area(area);
|
||||
@@ -59,23 +84,23 @@ pub(crate) fn insert_history_lines(terminal: &mut tui::Tui, lines: Vec<Line>) {
|
||||
// ││ ││
|
||||
// │╰────────────────────────────╯│
|
||||
// └──────────────────────────────┘
|
||||
queue!(std::io::stdout(), SetScrollRegion(1..area.top())).ok();
|
||||
queue!(writer, SetScrollRegion(1..area.top())).ok();
|
||||
|
||||
// NB: we are using MoveTo instead of set_cursor_position here to avoid messing with the
|
||||
// terminal's last_known_cursor_position, which hopefully will still be accurate after we
|
||||
// fetch/restore the cursor position. insert_history_lines should be cursor-position-neutral :)
|
||||
queue!(std::io::stdout(), MoveTo(0, cursor_top)).ok();
|
||||
queue!(writer, MoveTo(0, cursor_top)).ok();
|
||||
|
||||
for line in lines {
|
||||
queue!(std::io::stdout(), Print("\r\n")).ok();
|
||||
write_spans(&mut std::io::stdout(), line.iter()).ok();
|
||||
queue!(writer, Print("\r\n")).ok();
|
||||
write_spans(writer, line.iter()).ok();
|
||||
}
|
||||
|
||||
queue!(std::io::stdout(), ResetScrollRegion).ok();
|
||||
queue!(writer, ResetScrollRegion).ok();
|
||||
|
||||
// Restore the cursor position to where it was before we started.
|
||||
if let Some(cursor_pos) = cursor_pos {
|
||||
queue!(std::io::stdout(), MoveTo(cursor_pos.x, cursor_pos.y)).ok();
|
||||
queue!(writer, MoveTo(cursor_pos.x, cursor_pos.y)).ok();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,19 +113,25 @@ fn wrapped_line_count(lines: &[Line], width: u16) -> u16 {
|
||||
}
|
||||
|
||||
fn line_height(line: &Line, width: u16) -> u16 {
|
||||
use unicode_width::UnicodeWidthStr;
|
||||
// get the total display width of the line, accounting for double-width chars
|
||||
let total_width = line
|
||||
// Use the same visible-width slicing semantics as the live row builder so
|
||||
// our pre-scroll estimation matches how rows will actually wrap.
|
||||
let w = width.max(1) as usize;
|
||||
let mut rows = 0u16;
|
||||
let mut remaining = line
|
||||
.spans
|
||||
.iter()
|
||||
.map(|span| span.content.width())
|
||||
.sum::<usize>();
|
||||
// divide by width to get the number of lines, rounding up
|
||||
if width == 0 {
|
||||
1
|
||||
} else {
|
||||
(total_width as u16).div_ceil(width).max(1)
|
||||
.map(|s| s.content.as_ref())
|
||||
.collect::<Vec<_>>()
|
||||
.join("");
|
||||
while !remaining.is_empty() {
|
||||
let (_prefix, suffix, taken) = crate::live_wrap::take_prefix_by_width(&remaining, w);
|
||||
rows = rows.saturating_add(1);
|
||||
if taken >= remaining.len() {
|
||||
break;
|
||||
}
|
||||
remaining = suffix.to_string();
|
||||
}
|
||||
rows.max(1)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
@@ -283,4 +314,12 @@ mod tests {
|
||||
String::from_utf8(expected).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn line_height_counts_double_width_emoji() {
|
||||
let line = Line::from("😀😀😀"); // each emoji ~ width 2
|
||||
assert_eq!(line_height(&line, 4), 2);
|
||||
assert_eq!(line_height(&line, 2), 3);
|
||||
assert_eq!(line_height(&line, 6), 1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,13 +25,14 @@ mod bottom_pane;
|
||||
mod chatwidget;
|
||||
mod citation_regex;
|
||||
mod cli;
|
||||
mod custom_terminal;
|
||||
pub mod custom_terminal;
|
||||
mod exec_command;
|
||||
mod file_search;
|
||||
mod get_git_diff;
|
||||
mod git_warning_screen;
|
||||
mod history_cell;
|
||||
mod insert_history;
|
||||
pub mod insert_history;
|
||||
pub mod live_wrap;
|
||||
mod log_layer;
|
||||
mod markdown;
|
||||
mod slash_command;
|
||||
|
||||
codex-rs/tui/src/live_wrap.rs (new file, +290 lines)
@@ -0,0 +1,290 @@
|
||||
use unicode_width::UnicodeWidthChar;
|
||||
use unicode_width::UnicodeWidthStr;
|
||||
|
||||
/// A single visual row produced by RowBuilder.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Row {
|
||||
pub text: String,
|
||||
/// True if this row ends with an explicit line break (as opposed to a hard wrap).
|
||||
pub explicit_break: bool,
|
||||
}
|
||||
|
||||
impl Row {
|
||||
pub fn width(&self) -> usize {
|
||||
self.text.width()
|
||||
}
|
||||
}
|
||||
|
||||
/// Incrementally wraps input text into visual rows of at most `width` cells.
|
||||
///
|
||||
/// Step 1: plain-text only. ANSI-carry and styled spans will be added later.
|
||||
pub struct RowBuilder {
|
||||
target_width: usize,
|
||||
/// Buffer for the current logical line (until a '\n' is seen).
|
||||
current_line: String,
|
||||
/// Output rows built so far for the current logical line and previous ones.
|
||||
rows: Vec<Row>,
|
||||
}
|
||||
|
||||
impl RowBuilder {
|
||||
pub fn new(target_width: usize) -> Self {
|
||||
Self {
|
||||
target_width: target_width.max(1),
|
||||
current_line: String::new(),
|
||||
rows: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn width(&self) -> usize {
|
||||
self.target_width
|
||||
}
|
||||
|
||||
pub fn set_width(&mut self, width: usize) {
|
||||
self.target_width = width.max(1);
|
||||
// Rewrap everything we have (simple approach for Step 1).
|
||||
let mut all = String::new();
|
||||
for row in self.rows.drain(..) {
|
||||
all.push_str(&row.text);
|
||||
if row.explicit_break {
|
||||
all.push('\n');
|
||||
}
|
||||
}
|
||||
all.push_str(&self.current_line);
|
||||
self.current_line.clear();
|
||||
self.push_fragment(&all);
|
||||
}
|
||||
|
||||
/// Push an input fragment. May contain newlines.
|
||||
pub fn push_fragment(&mut self, fragment: &str) {
|
||||
if fragment.is_empty() {
|
||||
return;
|
||||
}
|
||||
let mut start = 0usize;
|
||||
for (i, ch) in fragment.char_indices() {
|
||||
if ch == '\n' {
|
||||
// Flush anything pending before the newline.
|
||||
if start < i {
|
||||
self.current_line.push_str(&fragment[start..i]);
|
||||
}
|
||||
self.flush_current_line(true);
|
||||
start = i + ch.len_utf8();
|
||||
}
|
||||
}
|
||||
if start < fragment.len() {
|
||||
self.current_line.push_str(&fragment[start..]);
|
||||
self.wrap_current_line();
|
||||
}
|
||||
}
|
||||
|
||||
/// Mark the end of the current logical line (equivalent to pushing a '\n').
|
||||
pub fn end_line(&mut self) {
|
||||
self.flush_current_line(true);
|
||||
}
|
||||
|
||||
/// Drain and return all produced rows.
|
||||
pub fn drain_rows(&mut self) -> Vec<Row> {
|
||||
std::mem::take(&mut self.rows)
|
||||
}
|
||||
|
||||
/// Return a snapshot of produced rows (non-draining).
|
||||
pub fn rows(&self) -> &[Row] {
|
||||
&self.rows
|
||||
}
|
||||
|
||||
/// Rows suitable for display, including the current partial line if any.
|
||||
pub fn display_rows(&self) -> Vec<Row> {
|
||||
let mut out = self.rows.clone();
|
||||
if !self.current_line.is_empty() {
|
||||
out.push(Row {
|
||||
text: self.current_line.clone(),
|
||||
explicit_break: false,
|
||||
});
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
/// Drain the oldest rows that exceed `max_keep` display rows (including the
|
||||
/// current partial line, if any). Returns the drained rows in order.
|
||||
pub fn drain_commit_ready(&mut self, max_keep: usize) -> Vec<Row> {
|
||||
let display_count = self.rows.len() + if self.current_line.is_empty() { 0 } else { 1 };
|
||||
if display_count <= max_keep {
|
||||
return Vec::new();
|
||||
}
|
||||
let to_commit = display_count - max_keep;
|
||||
let commit_count = to_commit.min(self.rows.len());
|
||||
let mut drained = Vec::with_capacity(commit_count);
|
||||
for _ in 0..commit_count {
|
||||
drained.push(self.rows.remove(0));
|
||||
}
|
||||
drained
|
||||
}
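// Editorial sketch (not part of this change): expected behavior of
// `drain_commit_ready`. With five explicit rows buffered and `max_keep = 3`,
// the two oldest rows are committed and the newest three stay in the live view.
//
//     let mut rb = RowBuilder::new(20);
//     rb.push_fragment("one\ntwo\nthree\nfour\nfive\n");
//     let committed = rb.drain_commit_ready(3);
//     assert_eq!(committed.len(), 2);          // "one", "two"
//     assert_eq!(rb.display_rows().len(), 3);  // "three", "four", "five"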
|
||||
|
||||
fn flush_current_line(&mut self, explicit_break: bool) {
|
||||
// Wrap any remaining content in the current line and then finalize with explicit_break.
|
||||
self.wrap_current_line();
|
||||
// If the current line ended exactly on a width boundary and is non-empty, represent
|
||||
// the explicit break as an empty explicit row so that fragmentation invariance holds.
|
||||
if explicit_break {
|
||||
if self.current_line.is_empty() {
|
||||
// We ended on a boundary previously; add an empty explicit row.
|
||||
self.rows.push(Row {
|
||||
text: String::new(),
|
||||
explicit_break: true,
|
||||
});
|
||||
} else {
|
||||
// There is leftover content that did not wrap yet; push it now with the explicit flag.
|
||||
let mut s = String::new();
|
||||
std::mem::swap(&mut s, &mut self.current_line);
|
||||
self.rows.push(Row {
|
||||
text: s,
|
||||
explicit_break: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
// Reset current line buffer for next logical line.
|
||||
self.current_line.clear();
|
||||
}
|
||||
|
||||
fn wrap_current_line(&mut self) {
|
||||
// While the current_line exceeds width, cut a prefix.
|
||||
loop {
|
||||
if self.current_line.is_empty() {
|
||||
break;
|
||||
}
|
||||
let (prefix, suffix, taken) =
|
||||
take_prefix_by_width(&self.current_line, self.target_width);
|
||||
if taken == 0 {
|
||||
// Avoid infinite loop on pathological inputs; take one scalar and continue.
|
||||
if let Some((i, ch)) = self.current_line.char_indices().next() {
|
||||
let len = i + ch.len_utf8();
|
||||
let p = self.current_line[..len].to_string();
|
||||
self.rows.push(Row {
|
||||
text: p,
|
||||
explicit_break: false,
|
||||
});
|
||||
self.current_line = self.current_line[len..].to_string();
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
if suffix.is_empty() {
|
||||
// Fits entirely; keep in buffer (do not push yet) so we can append more later.
|
||||
break;
|
||||
} else {
|
||||
// Emit wrapped prefix as a non-explicit row and continue with the remainder.
|
||||
self.rows.push(Row {
|
||||
text: prefix,
|
||||
explicit_break: false,
|
||||
});
|
||||
self.current_line = suffix.to_string();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Take a prefix of `text` whose visible width is at most `max_cols`.
|
||||
/// Returns (prefix, suffix, prefix_width).
|
||||
pub fn take_prefix_by_width(text: &str, max_cols: usize) -> (String, &str, usize) {
|
||||
if max_cols == 0 || text.is_empty() {
|
||||
return (String::new(), text, 0);
|
||||
}
|
||||
let mut cols = 0usize;
|
||||
let mut end_idx = 0usize;
|
||||
for (i, ch) in text.char_indices() {
|
||||
let ch_width = UnicodeWidthChar::width(ch).unwrap_or(0);
|
||||
if cols.saturating_add(ch_width) > max_cols {
|
||||
break;
|
||||
}
|
||||
cols += ch_width;
|
||||
end_idx = i + ch.len_utf8();
|
||||
if cols == max_cols {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let prefix = text[..end_idx].to_string();
|
||||
let suffix = &text[end_idx..];
|
||||
(prefix, suffix, cols)
|
||||
}
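// Editorial sketch (not part of this change): `take_prefix_by_width` never splits
// a wide character, so the returned prefix can be narrower than `max_cols`.
//
//     assert_eq!(take_prefix_by_width("你好", 3), ("你".to_string(), "好", 2));
//     assert_eq!(take_prefix_by_width("abc", 2), ("ab".to_string(), "c", 2));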
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn rows_do_not_exceed_width_ascii() {
|
||||
let mut rb = RowBuilder::new(10);
|
||||
rb.push_fragment("hello whirl this is a test");
|
||||
let rows = rb.rows().to_vec();
|
||||
assert_eq!(
|
||||
rows,
|
||||
vec![
|
||||
Row {
|
||||
text: "hello whir".to_string(),
|
||||
explicit_break: false
|
||||
},
|
||||
Row {
|
||||
text: "l this is ".to_string(),
|
||||
explicit_break: false
|
||||
}
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rows_do_not_exceed_width_emoji_cjk() {
|
||||
// 😀 is width 2; 你/好 are width 2.
|
||||
let mut rb = RowBuilder::new(6);
|
||||
rb.push_fragment("😀😀 你好");
|
||||
let rows = rb.rows().to_vec();
|
||||
// At width 6, the first row fits exactly two emojis and a space (2 + 2 + 1 = 5);
// the single remaining column cannot hold the first CJK char (width 2 would overflow),
// so only the two emojis and the space are emitted as a row; the rest stays buffered.
|
||||
assert_eq!(
|
||||
rows,
|
||||
vec![Row {
|
||||
text: "😀😀 ".to_string(),
|
||||
explicit_break: false
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fragmentation_invariance_long_token() {
|
||||
let s = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; // 26 chars
|
||||
let mut rb_all = RowBuilder::new(7);
|
||||
rb_all.push_fragment(s);
|
||||
let all_rows = rb_all.rows().to_vec();
|
||||
|
||||
let mut rb_chunks = RowBuilder::new(7);
|
||||
for i in (0..s.len()).step_by(3) {
|
||||
let end = (i + 3).min(s.len());
|
||||
rb_chunks.push_fragment(&s[i..end]);
|
||||
}
|
||||
let chunk_rows = rb_chunks.rows().to_vec();
|
||||
|
||||
assert_eq!(all_rows, chunk_rows);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn newline_splits_rows() {
|
||||
let mut rb = RowBuilder::new(10);
|
||||
rb.push_fragment("hello\nworld");
|
||||
let rows = rb.display_rows();
|
||||
assert!(rows.iter().any(|r| r.explicit_break));
|
||||
assert_eq!(rows[0].text, "hello");
|
||||
// Second row should begin with 'world'
|
||||
assert!(rows.iter().any(|r| r.text.starts_with("world")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rewrap_on_width_change() {
|
||||
let mut rb = RowBuilder::new(10);
|
||||
rb.push_fragment("abcdefghijK");
|
||||
assert!(!rb.rows().is_empty());
|
||||
rb.set_width(5);
|
||||
for r in rb.rows() {
|
||||
assert!(r.width() <= 5);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::citation_regex::CITATION_REGEX;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config_types::UriBasedFileOpener;
|
||||
use ratatui::text::Line;
|
||||
@@ -5,8 +6,7 @@ use ratatui::text::Span;
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::citation_regex::CITATION_REGEX;
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn append_markdown(
|
||||
markdown_source: &str,
|
||||
lines: &mut Vec<Line<'static>>,
|
||||
@@ -15,6 +15,7 @@ pub(crate) fn append_markdown(
|
||||
append_markdown_with_opener_and_cwd(markdown_source, lines, config.file_opener, &config.cwd);
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn append_markdown_with_opener_and_cwd(
|
||||
markdown_source: &str,
|
||||
lines: &mut Vec<Line<'static>>,
|
||||
@@ -60,6 +61,7 @@ fn append_markdown_with_opener_and_cwd(
|
||||
/// ```text
|
||||
/// <scheme>://file<ABS_PATH>:<LINE>
|
||||
/// ```
|
||||
#[allow(dead_code)]
|
||||
fn rewrite_file_citations<'a>(
|
||||
src: &'a str,
|
||||
file_opener: UriBasedFileOpener,
|
||||
|
||||
@@ -9,24 +9,22 @@ use std::thread;
|
||||
use std::time::Duration;
|
||||
|
||||
use ratatui::buffer::Buffer;
|
||||
use ratatui::layout::Alignment;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::style::Color;
|
||||
use ratatui::style::Modifier;
|
||||
use ratatui::style::Style;
|
||||
use ratatui::style::Stylize;
|
||||
use ratatui::text::Line;
|
||||
use ratatui::text::Span;
|
||||
use ratatui::widgets::Block;
|
||||
use ratatui::widgets::BorderType;
|
||||
use ratatui::widgets::Borders;
|
||||
use ratatui::widgets::Padding;
|
||||
use ratatui::widgets::Paragraph;
|
||||
use ratatui::widgets::WidgetRef;
|
||||
use unicode_width::UnicodeWidthStr;
|
||||
|
||||
use crate::app_event::AppEvent;
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
|
||||
// We render the live text using markdown so it visually matches the history
|
||||
// cells. Before rendering we strip any ANSI escape sequences to avoid writing
|
||||
// raw control bytes into the back buffer.
|
||||
use codex_ansi_escape::ansi_escape_line;
|
||||
|
||||
pub(crate) struct StatusIndicatorWidget {
|
||||
@@ -34,6 +32,14 @@ pub(crate) struct StatusIndicatorWidget {
|
||||
/// time).
|
||||
text: String,
|
||||
|
||||
/// Animation state: reveal target `text` progressively like a typewriter.
|
||||
/// We compute the currently visible prefix length based on the current
|
||||
/// frame index and a constant typing speed. The `base_frame` and
|
||||
/// `reveal_len_at_base` form the anchor from which we advance.
|
||||
last_target_len: usize,
|
||||
base_frame: usize,
|
||||
reveal_len_at_base: usize,
|
||||
|
||||
frame_idx: Arc<AtomicUsize>,
|
||||
running: Arc<AtomicBool>,
|
||||
// Keep one sender alive to prevent the channel from closing while the
|
||||
@@ -57,7 +63,7 @@ impl StatusIndicatorWidget {
|
||||
thread::spawn(move || {
|
||||
let mut counter = 0usize;
|
||||
while running_clone.load(Ordering::Relaxed) {
|
||||
std::thread::sleep(Duration::from_millis(200));
|
||||
std::thread::sleep(Duration::from_millis(100));
|
||||
counter = counter.wrapping_add(1);
|
||||
frame_idx_clone.store(counter, Ordering::Relaxed);
|
||||
app_event_tx_clone.send(AppEvent::RequestRedraw);
|
||||
@@ -66,9 +72,13 @@ impl StatusIndicatorWidget {
|
||||
}
|
||||
|
||||
Self {
|
||||
text: String::from("waiting for logs…"),
|
||||
text: String::from("waiting for model"),
|
||||
last_target_len: 0,
|
||||
base_frame: 0,
|
||||
reveal_len_at_base: 0,
|
||||
frame_idx,
|
||||
running,
|
||||
|
||||
_app_event_tx: app_event_tx,
|
||||
}
|
||||
}
|
||||
@@ -79,7 +89,67 @@ impl StatusIndicatorWidget {
|
||||
|
||||
/// Update the line that is displayed in the widget.
|
||||
pub(crate) fn update_text(&mut self, text: String) {
|
||||
self.text = text.replace(['\n', '\r'], " ");
|
||||
// If the text hasn't changed, don't reset the baseline; let the
|
||||
// animation continue advancing naturally.
|
||||
if text == self.text {
|
||||
return;
|
||||
}
|
||||
// Update the target text, preserving newlines so wrapping matches history cells.
|
||||
// Strip ANSI escapes for the character count so the typewriter animation speed is stable.
|
||||
let stripped = {
|
||||
let line = ansi_escape_line(&text);
|
||||
line.spans
|
||||
.iter()
|
||||
.map(|s| s.content.as_ref())
|
||||
.collect::<Vec<_>>()
|
||||
.join("")
|
||||
};
|
||||
let new_len = stripped.chars().count();
|
||||
|
||||
// Compute how many characters are currently revealed so we can carry
|
||||
// this forward as the new baseline when target text changes.
|
||||
let current_frame = self.frame_idx.load(std::sync::atomic::Ordering::Relaxed);
|
||||
let shown_now = self.current_shown_len(current_frame);
|
||||
|
||||
self.text = text;
|
||||
self.last_target_len = new_len;
|
||||
self.base_frame = current_frame;
|
||||
self.reveal_len_at_base = shown_now.min(new_len);
|
||||
}
|
||||
|
||||
/// Reset the animation and start revealing `text` from the beginning.
|
||||
#[cfg(test)]
|
||||
pub(crate) fn restart_with_text(&mut self, text: String) {
|
||||
let sanitized = text.replace(['\n', '\r'], " ");
|
||||
let stripped = {
|
||||
let line = ansi_escape_line(&sanitized);
|
||||
line.spans
|
||||
.iter()
|
||||
.map(|s| s.content.as_ref())
|
||||
.collect::<Vec<_>>()
|
||||
.join("")
|
||||
};
|
||||
|
||||
let new_len = stripped.chars().count();
|
||||
let current_frame = self.frame_idx.load(std::sync::atomic::Ordering::Relaxed);
|
||||
|
||||
self.text = sanitized;
|
||||
self.last_target_len = new_len;
|
||||
self.base_frame = current_frame;
|
||||
// Start from zero revealed characters for a fresh typewriter cycle.
|
||||
self.reveal_len_at_base = 0;
|
||||
}
|
||||
|
||||
/// Calculate how many characters should currently be visible given the
|
||||
/// animation baseline and frame counter.
|
||||
fn current_shown_len(&self, current_frame: usize) -> usize {
|
||||
// Increase typewriter speed (~5x): reveal more characters per frame.
|
||||
const TYPING_CHARS_PER_FRAME: usize = 7;
|
||||
let frames = current_frame.saturating_sub(self.base_frame);
|
||||
let advanced = self
|
||||
.reveal_len_at_base
|
||||
.saturating_add(frames.saturating_mul(TYPING_CHARS_PER_FRAME));
|
||||
advanced.min(self.last_target_len)
|
||||
}
|
||||
}
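// Editorial note (not part of this change), worked example of the reveal math:
// with TYPING_CHARS_PER_FRAME = 7, reveal_len_at_base = 5 and base_frame = 10,
// frame 13 shows min(5 + (13 - 10) * 7, last_target_len) = min(26, last_target_len)
// characters of the status text.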
|
||||
|
||||
@@ -92,100 +162,140 @@ impl Drop for StatusIndicatorWidget {
|
||||
|
||||
impl WidgetRef for StatusIndicatorWidget {
|
||||
fn render_ref(&self, area: Rect, buf: &mut Buffer) {
|
||||
let widget_style = Style::default();
|
||||
let block = Block::default()
|
||||
.padding(Padding::new(1, 0, 0, 0))
|
||||
.borders(Borders::LEFT)
|
||||
.border_type(BorderType::QuadrantOutside)
|
||||
.border_style(widget_style.dim());
|
||||
// Animated 3‑dot pattern inside brackets. The *active* dot is bold
|
||||
// white, the others are dim.
|
||||
const DOT_COUNT: usize = 3;
|
||||
// Ensure minimal height
|
||||
if area.height == 0 || area.width == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
// Build animated gradient header for the word "Working".
|
||||
let idx = self.frame_idx.load(std::sync::atomic::Ordering::Relaxed);
|
||||
let phase = idx % (DOT_COUNT * 2 - 2);
|
||||
let active = if phase < DOT_COUNT {
|
||||
phase
|
||||
} else {
|
||||
(DOT_COUNT * 2 - 2) - phase
|
||||
};
|
||||
let header_text = "Working";
|
||||
let header_chars: Vec<char> = header_text.chars().collect();
|
||||
let padding = 4usize; // virtual padding around the word for smoother loop
|
||||
let period = header_chars.len() + padding * 2;
|
||||
let pos = idx % period;
|
||||
let has_true_color = supports_color::on_cached(supports_color::Stream::Stdout)
|
||||
.map(|level| level.has_16m)
|
||||
.unwrap_or(false);
|
||||
let band_half_width = 2.0; // width of the bright band in characters
|
||||
|
||||
let mut header_spans: Vec<Span<'static>> = Vec::new();
|
||||
for (i, ch) in header_chars.iter().enumerate() {
|
||||
let i_pos = i as isize + padding as isize;
|
||||
let pos = pos as isize;
|
||||
let dist = (i_pos - pos).abs() as f32;
|
||||
|
||||
header_spans.push(Span::styled(
|
||||
"Working ",
|
||||
Style::default()
|
||||
.fg(Color::White)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
));
|
||||
let t = if dist <= band_half_width {
|
||||
let x = std::f32::consts::PI * (dist / band_half_width);
|
||||
0.5 * (1.0 + x.cos())
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
|
||||
header_spans.push(Span::styled(
|
||||
"[",
|
||||
Style::default()
|
||||
.fg(Color::White)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
));
|
||||
|
||||
for i in 0..DOT_COUNT {
|
||||
let style = if i == active {
|
||||
let brightness = 0.4 + 0.6 * t;
|
||||
let level = (brightness * 255.0).clamp(0.0, 255.0) as u8;
|
||||
let style = if has_true_color {
|
||||
Style::default()
|
||||
.fg(Color::White)
|
||||
.fg(Color::Rgb(level, level, level))
|
||||
.add_modifier(Modifier::BOLD)
|
||||
} else {
|
||||
Style::default().dim()
|
||||
// Bold makes dark gray and gray look the same, so don't use it when true color is not supported.
|
||||
Style::default().fg(color_for_level(level))
|
||||
};
|
||||
header_spans.push(Span::styled(".", style));
|
||||
|
||||
header_spans.push(Span::styled(ch.to_string(), style));
|
||||
}
|
||||
|
||||
header_spans.push(Span::styled(
|
||||
"] ",
|
||||
// Plain rendering: no borders or padding so the live cell is visually indistinguishable from terminal scrollback.
|
||||
let inner_width = area.width as usize;
|
||||
|
||||
// Compose a single status line like: "▌ Working [•] waiting for model"
|
||||
let mut spans: Vec<Span<'static>> = Vec::new();
|
||||
spans.push(Span::styled("▌ ", Style::default().fg(Color::Cyan)));
|
||||
// Gradient header
|
||||
spans.extend(header_spans);
|
||||
// Space after header
|
||||
spans.push(Span::styled(
|
||||
" ",
|
||||
Style::default()
|
||||
.fg(Color::White)
|
||||
.add_modifier(Modifier::BOLD),
|
||||
));
|
||||
|
||||
// Ensure we do not overflow width.
|
||||
let inner_width = block.inner(area).width as usize;
|
||||
|
||||
// Sanitize and colour‑strip the potentially colourful log text. This
|
||||
// ensures that **no** raw ANSI escape sequences leak into the
|
||||
// back‑buffer which would otherwise cause cursor jumps or stray
|
||||
// artefacts when the terminal is resized.
|
||||
let line = ansi_escape_line(&self.text);
|
||||
let mut sanitized_tail: String = line
|
||||
.spans
|
||||
.iter()
|
||||
.map(|s| s.content.as_ref())
|
||||
.collect::<Vec<_>>()
|
||||
.join("");
|
||||
|
||||
// Truncate *after* stripping escape codes so width calculation is
|
||||
// accurate. See UTF‑8 boundary comments above.
|
||||
let header_len: usize = header_spans.iter().map(|s| s.content.len()).sum();
|
||||
|
||||
if header_len + sanitized_tail.len() > inner_width {
|
||||
let available_bytes = inner_width.saturating_sub(header_len);
|
||||
|
||||
if sanitized_tail.is_char_boundary(available_bytes) {
|
||||
sanitized_tail.truncate(available_bytes);
|
||||
// Truncate spans to fit the width.
|
||||
let mut acc: Vec<Span<'static>> = Vec::new();
|
||||
let mut used = 0usize;
|
||||
for s in spans {
|
||||
let w = s.content.width();
|
||||
if used + w <= inner_width {
|
||||
acc.push(s);
|
||||
used += w;
|
||||
} else {
|
||||
let mut idx = available_bytes;
|
||||
while idx < sanitized_tail.len() && !sanitized_tail.is_char_boundary(idx) {
|
||||
idx += 1;
|
||||
}
|
||||
sanitized_tail.truncate(idx);
|
||||
break;
|
||||
}
|
||||
}
|
||||
let lines = vec![Line::from(acc)];
|
||||
|
||||
let mut spans = header_spans;
|
||||
// No-op once full text is revealed; the app no longer reacts to a completion event.
|
||||
|
||||
// Re‑apply the DIM modifier so the tail appears visually subdued
|
||||
// irrespective of the colour information preserved by
|
||||
// `ansi_escape_line`.
|
||||
spans.push(Span::styled(sanitized_tail, Style::default().dim()));
|
||||
|
||||
let paragraph = Paragraph::new(Line::from(spans))
|
||||
.block(block)
|
||||
.alignment(Alignment::Left);
|
||||
let paragraph = Paragraph::new(lines);
|
||||
paragraph.render_ref(area, buf);
|
||||
}
|
||||
}
|
||||
|
||||
fn color_for_level(level: u8) -> Color {
|
||||
if level < 128 {
|
||||
Color::DarkGray
|
||||
} else if level < 192 {
|
||||
Color::Gray
|
||||
} else {
|
||||
Color::White
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::app_event::AppEvent;
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
use std::sync::mpsc::channel;
|
||||
|
||||
#[test]
|
||||
fn renders_without_left_border_or_padding() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut w = StatusIndicatorWidget::new(tx);
|
||||
w.restart_with_text("Hello".to_string());
|
||||
|
||||
let area = ratatui::layout::Rect::new(0, 0, 30, 1);
|
||||
// Allow a short delay so the typewriter reveals the first character.
|
||||
std::thread::sleep(std::time::Duration::from_millis(120));
|
||||
let mut buf = ratatui::buffer::Buffer::empty(area);
|
||||
w.render_ref(area, &mut buf);
|
||||
|
||||
// Leftmost column has the left bar
|
||||
let ch0 = buf[(0, 0)].symbol().chars().next().unwrap_or(' ');
|
||||
assert_eq!(ch0, '▌', "expected left bar at col 0: {ch0:?}");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn working_header_is_present_on_last_line() {
|
||||
let (tx_raw, _rx) = channel::<AppEvent>();
|
||||
let tx = AppEventSender::new(tx_raw);
|
||||
let mut w = StatusIndicatorWidget::new(tx);
|
||||
w.restart_with_text("Hi".to_string());
|
||||
// Ensure some frames elapse so we get a stable state.
|
||||
std::thread::sleep(std::time::Duration::from_millis(120));
|
||||
|
||||
let area = ratatui::layout::Rect::new(0, 0, 30, 1);
|
||||
let mut buf = ratatui::buffer::Buffer::empty(area);
|
||||
w.render_ref(area, &mut buf);
|
||||
|
||||
// Single line; it should contain the animated "Working" header.
|
||||
let mut row = String::new();
|
||||
for x in 0..area.width {
|
||||
row.push(buf[(x, 0)].symbol().chars().next().unwrap_or(' '));
|
||||
}
|
||||
assert!(row.contains("Working"), "expected Working header: {row:?}");
|
||||
}
|
||||
}
|
||||
|
||||
codex-rs/tui/tests/vt100_history.rs (new file, +214 lines)
@@ -0,0 +1,214 @@
|
||||
#![cfg(feature = "vt100-tests")]
|
||||
#![expect(clippy::expect_used)]
|
||||
|
||||
use ratatui::backend::TestBackend;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::style::Color;
|
||||
use ratatui::style::Style;
|
||||
use ratatui::text::Line;
|
||||
use ratatui::text::Span;
|
||||
|
||||
// Small helper macro to assert a collection contains an item with a clearer
|
||||
// failure message.
|
||||
macro_rules! assert_contains {
|
||||
($collection:expr, $item:expr $(,)?) => {
|
||||
assert!(
|
||||
$collection.contains(&$item),
|
||||
"Expected {:?} to contain {:?}",
|
||||
$collection,
|
||||
$item
|
||||
);
|
||||
};
|
||||
($collection:expr, $item:expr, $($arg:tt)+) => {
|
||||
assert!($collection.contains(&$item), $($arg)+);
|
||||
};
|
||||
}
|
||||
|
||||
struct TestScenario {
|
||||
width: u16,
|
||||
height: u16,
|
||||
term: codex_tui::custom_terminal::Terminal<TestBackend>,
|
||||
}
|
||||
|
||||
impl TestScenario {
|
||||
fn new(width: u16, height: u16, viewport: Rect) -> Self {
|
||||
let backend = TestBackend::new(width, height);
|
||||
let mut term = codex_tui::custom_terminal::Terminal::with_options(backend)
|
||||
.expect("failed to construct terminal");
|
||||
term.set_viewport_area(viewport);
|
||||
Self {
|
||||
width,
|
||||
height,
|
||||
term,
|
||||
}
|
||||
}
|
||||
|
||||
fn run_insert(&mut self, lines: Vec<Line<'static>>) -> Vec<u8> {
|
||||
let mut buf: Vec<u8> = Vec::new();
|
||||
codex_tui::insert_history::insert_history_lines_to_writer(&mut self.term, &mut buf, lines);
|
||||
buf
|
||||
}
|
||||
|
||||
fn screen_rows_from_bytes(&self, bytes: &[u8]) -> Vec<String> {
|
||||
let mut parser = vt100::Parser::new(self.height, self.width, 0);
|
||||
parser.process(bytes);
|
||||
let screen = parser.screen();
|
||||
|
||||
let mut rows: Vec<String> = Vec::with_capacity(self.height as usize);
|
||||
for row in 0..self.height {
|
||||
let mut s = String::with_capacity(self.width as usize);
|
||||
for col in 0..self.width {
|
||||
if let Some(cell) = screen.cell(row, col) {
|
||||
if let Some(ch) = cell.contents().chars().next() {
|
||||
s.push(ch);
|
||||
} else {
|
||||
s.push(' ');
|
||||
}
|
||||
} else {
|
||||
s.push(' ');
|
||||
}
|
||||
}
|
||||
rows.push(s.trim_end().to_string());
|
||||
}
|
||||
rows
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hist_001_basic_insertion_no_wrap() {
|
||||
// Screen of 20x6; viewport is the last row (height=1 at y=5)
|
||||
let area = Rect::new(0, 5, 20, 1);
|
||||
let mut scenario = TestScenario::new(20, 6, area);
|
||||
|
||||
let lines = vec![Line::from("first"), Line::from("second")];
|
||||
let buf = scenario.run_insert(lines);
|
||||
let rows = scenario.screen_rows_from_bytes(&buf);
|
||||
assert_contains!(rows, String::from("first"));
|
||||
assert_contains!(rows, String::from("second"));
|
||||
let first_idx = rows
|
||||
.iter()
|
||||
.position(|r| r == "first")
|
||||
.expect("expected 'first' row to be present");
|
||||
let second_idx = rows
|
||||
.iter()
|
||||
.position(|r| r == "second")
|
||||
.expect("expected 'second' row to be present");
|
||||
assert_eq!(second_idx, first_idx + 1, "rows should be adjacent");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hist_002_long_token_wraps() {
|
||||
let area = Rect::new(0, 5, 20, 1);
|
||||
let mut scenario = TestScenario::new(20, 6, area);
|
||||
|
||||
let long = "A".repeat(45); // > 2 lines at width 20
|
||||
let lines = vec![Line::from(long.clone())];
|
||||
let buf = scenario.run_insert(lines);
|
||||
let mut parser = vt100::Parser::new(6, 20, 0);
|
||||
parser.process(&buf);
|
||||
let screen = parser.screen();
|
||||
|
||||
// Count total A's on the screen
|
||||
let mut count_a = 0usize;
|
||||
for row in 0..6 {
|
||||
for col in 0..20 {
|
||||
if let Some(cell) = screen.cell(row, col) {
|
||||
if let Some(ch) = cell.contents().chars().next() {
|
||||
if ch == 'A' {
|
||||
count_a += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
count_a,
|
||||
long.len(),
|
||||
"wrapped content did not preserve all characters"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hist_003_emoji_and_cjk() {
|
||||
let area = Rect::new(0, 5, 20, 1);
|
||||
let mut scenario = TestScenario::new(20, 6, area);
|
||||
|
||||
let text = String::from("😀😀😀😀😀 你好世界");
|
||||
let lines = vec![Line::from(text.clone())];
|
||||
let buf = scenario.run_insert(lines);
|
||||
let rows = scenario.screen_rows_from_bytes(&buf);
|
||||
let reconstructed: String = rows.join("").chars().filter(|c| *c != ' ').collect();
|
||||
for ch in text.chars().filter(|c| !c.is_whitespace()) {
|
||||
assert!(
|
||||
reconstructed.contains(ch),
|
||||
"missing character {ch:?} in reconstructed screen"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hist_004_mixed_ansi_spans() {
|
||||
let area = Rect::new(0, 5, 20, 1);
|
||||
let mut scenario = TestScenario::new(20, 6, area);
|
||||
|
||||
let line = Line::from(vec![
|
||||
Span::styled("red", Style::default().fg(Color::Red)),
|
||||
Span::raw("+plain"),
|
||||
]);
|
||||
let buf = scenario.run_insert(vec![line]);
|
||||
let rows = scenario.screen_rows_from_bytes(&buf);
|
||||
assert_contains!(rows, String::from("red+plain"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hist_006_cursor_restoration() {
|
||||
let area = Rect::new(0, 5, 20, 1);
|
||||
let mut scenario = TestScenario::new(20, 6, area);
|
||||
|
||||
let lines = vec![Line::from("x")];
|
||||
let buf = scenario.run_insert(lines);
|
||||
let s = String::from_utf8_lossy(&buf);
|
||||
// CUP to 1;1 (ANSI: ESC[1;1H)
|
||||
assert!(
|
||||
s.contains("\u{1b}[1;1H"),
|
||||
"expected final CUP to 1;1 in output, got: {s:?}"
|
||||
);
|
||||
// Reset scroll region
|
||||
assert!(
|
||||
s.contains("\u{1b}[r"),
|
||||
"expected reset scroll region in output, got: {s:?}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hist_005_pre_scroll_region_down() {
|
||||
// Viewport not at bottom: y=3 (0-based), height=1
|
||||
let area = Rect::new(0, 3, 20, 1);
|
||||
let mut scenario = TestScenario::new(20, 6, area);
|
||||
|
||||
let lines = vec![Line::from("first"), Line::from("second")];
|
||||
let buf = scenario.run_insert(lines);
|
||||
let s = String::from_utf8_lossy(&buf);
|
||||
// Expect we limited scroll region to [top+1 .. screen_height] => [4 .. 6] (1-based)
|
||||
assert!(
|
||||
s.contains("\u{1b}[4;6r"),
|
||||
"expected pre-scroll SetScrollRegion 4..6, got: {s:?}"
|
||||
);
|
||||
// Expect we moved cursor to top of that region: row 3 (0-based) => CUP 4;1H
|
||||
assert!(
|
||||
s.contains("\u{1b}[4;1H"),
|
||||
"expected cursor at top of pre-scroll region, got: {s:?}"
|
||||
);
|
||||
// Expect at least two Reverse Index commands (ESC M) for two inserted lines
|
||||
let ri_count = s.matches("\u{1b}M").count();
|
||||
assert!(
|
||||
ri_count >= 1,
|
||||
"expected at least one RI (ESC M), got: {s:?}"
|
||||
);
|
||||
// After pre-scroll, we set insertion scroll region to [1 .. new_top] => [1 .. 5]
|
||||
assert!(
|
||||
s.contains("\u{1b}[1;5r"),
|
||||
"expected insertion SetScrollRegion 1..5, got: {s:?}"
|
||||
);
|
||||
}
|
||||
codex-rs/tui/tests/vt100_live_commit.rs (new file, +101 lines)
@@ -0,0 +1,101 @@
|
||||
#![cfg(feature = "vt100-tests")]
|
||||
|
||||
use ratatui::backend::TestBackend;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::text::Line;
|
||||
|
||||
#[test]
|
||||
fn live_001_commit_on_overflow() {
|
||||
let backend = TestBackend::new(20, 6);
|
||||
let mut term = match codex_tui::custom_terminal::Terminal::with_options(backend) {
|
||||
Ok(t) => t,
|
||||
Err(e) => panic!("failed to construct terminal: {e}"),
|
||||
};
|
||||
let area = Rect::new(0, 5, 20, 1);
|
||||
term.set_viewport_area(area);
|
||||
|
||||
// Build 5 explicit rows at width 20.
|
||||
let mut rb = codex_tui::live_wrap::RowBuilder::new(20);
|
||||
rb.push_fragment("one\n");
|
||||
rb.push_fragment("two\n");
|
||||
rb.push_fragment("three\n");
|
||||
rb.push_fragment("four\n");
|
||||
rb.push_fragment("five\n");
|
||||
|
||||
// Keep the last 3 in the live ring; commit the first 2.
|
||||
let commit_rows = rb.drain_commit_ready(3);
|
||||
let lines: Vec<Line<'static>> = commit_rows
|
||||
.into_iter()
|
||||
.map(|r| Line::from(r.text))
|
||||
.collect();
|
||||
|
||||
let mut buf: Vec<u8> = Vec::new();
|
||||
codex_tui::insert_history::insert_history_lines_to_writer(&mut term, &mut buf, lines);
|
||||
|
||||
let mut parser = vt100::Parser::new(6, 20, 0);
|
||||
parser.process(&buf);
|
||||
let screen = parser.screen();
|
||||
|
||||
// The words "one" and "two" should appear above the viewport.
|
||||
let mut joined = String::new();
|
||||
for row in 0..6 {
|
||||
for col in 0..20 {
|
||||
if let Some(cell) = screen.cell(row, col) {
|
||||
if let Some(ch) = cell.contents().chars().next() {
|
||||
joined.push(ch);
|
||||
} else {
|
||||
joined.push(' ');
|
||||
}
|
||||
}
|
||||
}
|
||||
joined.push('\n');
|
||||
}
|
||||
assert!(
|
||||
joined.contains("one"),
|
||||
"expected committed 'one' to be visible\n{joined}"
|
||||
);
|
||||
assert!(
|
||||
joined.contains("two"),
|
||||
"expected committed 'two' to be visible\n{joined}"
|
||||
);
|
||||
// The last three (three,four,five) remain in the live ring, not committed here.
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn live_002_pre_scroll_and_commit() {
|
||||
let backend = TestBackend::new(20, 6);
|
||||
let mut term = match codex_tui::custom_terminal::Terminal::with_options(backend) {
|
||||
Ok(t) => t,
|
||||
Err(e) => panic!("failed to construct terminal: {e}"),
|
||||
};
|
||||
// Viewport not at bottom: y=3
|
||||
let area = Rect::new(0, 3, 20, 1);
|
||||
term.set_viewport_area(area);
|
||||
|
||||
let mut rb = codex_tui::live_wrap::RowBuilder::new(20);
|
||||
rb.push_fragment("alpha\n");
|
||||
rb.push_fragment("beta\n");
|
||||
rb.push_fragment("gamma\n");
|
||||
rb.push_fragment("delta\n");
|
||||
|
||||
// Keep 3, commit 1.
|
||||
let commit_rows = rb.drain_commit_ready(3);
|
||||
let lines: Vec<Line<'static>> = commit_rows
|
||||
.into_iter()
|
||||
.map(|r| Line::from(r.text))
|
||||
.collect();
|
||||
|
||||
let mut buf: Vec<u8> = Vec::new();
|
||||
codex_tui::insert_history::insert_history_lines_to_writer(&mut term, &mut buf, lines);
|
||||
let s = String::from_utf8_lossy(&buf);
|
||||
|
||||
// Expect a SetScrollRegion to [area.top()+1 .. screen_height] and a cursor move to top of that region.
|
||||
assert!(
|
||||
s.contains("\u{1b}[4;6r"),
|
||||
"expected pre-scroll region 4..6, got: {s:?}"
|
||||
);
|
||||
assert!(
|
||||
s.contains("\u{1b}[4;1H"),
|
||||
"expected cursor CUP 4;1H, got: {s:?}"
|
||||
);
|
||||
}
|
||||