Mirror of https://github.com/openai/codex.git (synced 2026-02-03 15:33:41 +00:00)

Compare commits: delete_his...codex/impl (28 commits)

| SHA1 |
|---|
| d465d71955 |
| 643ab1f582 |
| d3dbc10479 |
| 0bc7ee9193 |
| 2bd3314886 |
| 5b820c5ce7 |
| 3d1cfe31a2 |
| f14b5adabf |
| d6e934f7cd |
| 0b83f2965c |
| d4dc3b11bc |
| 9c0b413fd1 |
| 3777e18243 |
| bcbe02ff1d |
| 0f8ac92390 |
| 51257e2fd0 |
| 0ece374c58 |
| c46bb67d77 |
| 94f5cad895 |
| 72504f1d9c |
| fa6d507c51 |
| a52a2fe7a9 |
| f532554924 |
| f9609cc9bf |
| 781798b4ed |
| 5bafe0dc59 |
| bfeb8c92a5 |
| 9e58076cf5 |
.github/actions/codex/bun.lock (vendored): 16 lines changed
@@ -8,8 +8,8 @@
"@actions/github": "^6.0.1",
},
"devDependencies": {
"@types/bun": "^1.2.11",
"@types/node": "^22.15.21",
"@types/bun": "^1.2.18",
"@types/node": "^24.0.13",
"prettier": "^3.6.2",
"typescript": "^5.8.3",
},

@@ -48,13 +48,17 @@

"@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],

"@types/bun": ["@types/bun@1.2.13", "", { "dependencies": { "bun-types": "1.2.13" } }, "sha512-u6vXep/i9VBxoJl3GjZsl/BFIsvML8DfVDO0RYLEwtSZSp981kEO1V5NwRcO1CPJ7AmvpbnDCiMKo3JvbDEjAg=="],
"@types/bun": ["@types/bun@1.2.18", "", { "dependencies": { "bun-types": "1.2.18" } }, "sha512-Xf6RaWVheyemaThV0kUfaAUvCNokFr+bH8Jxp+tTZfx7dAPA8z9ePnP9S9+Vspzuxxx9JRAXhnyccRj3GyCMdQ=="],

"@types/node": ["@types/node@22.15.21", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-EV/37Td6c+MgKAbkcLG6vqZ2zEYHD7bvSrzqqs2RIhbA6w3x+Dqz8MZM3sP6kGTeLrdoOgKZe+Xja7tUB2DNkQ=="],
"@types/node": ["@types/node@24.0.13", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-Qm9OYVOFHFYg3wJoTSrz80hoec5Lia/dPp84do3X7dZvLikQvM1YpmvTBEdIr/e+U8HTkFjLHLnl78K/qjf+jQ=="],

"@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],

"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],

"bun-types": ["bun-types@1.2.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-rRjA1T6n7wto4gxhAO/ErZEtOXyEZEmnIHQfl0Dt1QQSB4QV0iP6BZ9/YB5fZaHFQ2dwHFrmPaRQ9GGMX01k9Q=="],
"bun-types": ["bun-types@1.2.18", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-04+Eha5NP7Z0A9YgDAzMk5PHR16ZuLVa83b26kH5+cp1qZW4F6FmAURngE7INf4tKOvCE69vYvDEwoNl1tGiWw=="],

"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],

"deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="],

@@ -68,7 +72,7 @@

"undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="],

"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
"undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="],

"universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="],
.github/actions/codex/package.json (vendored): 4 lines changed
@@ -13,8 +13,8 @@
"@actions/github": "^6.0.1"
},
"devDependencies": {
"@types/bun": "^1.2.11",
"@types/node": "^22.15.21",
"@types/bun": "^1.2.18",
"@types/node": "^24.0.13",
"prettier": "^3.6.2",
"typescript": "^5.8.3"
}
@@ -15,7 +15,6 @@
 * current platform / architecture, an error is thrown.
 */

import { spawnSync } from "child_process";
import fs from "fs";
import path from "path";
import { fileURLToPath, pathToFileURL } from "url";
@@ -35,7 +34,7 @@ const wantsNative = fs.existsSync(path.join(__dirname, "use-native")) ||
: false);

// Try native binary if requested.
if (wantsNative) {
if (wantsNative && process.platform !== 'win32') {
const { platform, arch } = process;

let targetTriple = null;
@@ -74,22 +73,76 @@ if (wantsNative) {
}

const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`);
const result = spawnSync(binaryPath, process.argv.slice(2), {

// Use an asynchronous spawn instead of spawnSync so that Node is able to
// respond to signals (e.g. Ctrl-C / SIGINT) while the native binary is
// executing. This allows us to forward those signals to the child process
// and guarantees that when either the child terminates or the parent
// receives a fatal signal, both processes exit in a predictable manner.
const { spawn } = await import("child_process");

const child = spawn(binaryPath, process.argv.slice(2), {
stdio: "inherit",
});

const exitCode = typeof result.status === "number" ? result.status : 1;
process.exit(exitCode);
}
child.on("error", (err) => {
// Typically triggered when the binary is missing or not executable.
// Re-throwing here will terminate the parent with a non-zero exit code
// while still printing a helpful stack trace.
// eslint-disable-next-line no-console
console.error(err);
process.exit(1);
});

// Fallback: execute the original JavaScript CLI.
// Forward common termination signals to the child so that it shuts down
// gracefully. In the handler we temporarily disable the default behavior of
// exiting immediately; once the child has been signaled we simply wait for
// its exit event which will in turn terminate the parent (see below).
const forwardSignal = (signal) => {
if (child.killed) {
return;
}
try {
child.kill(signal);
} catch {
/* ignore */
}
};

// Resolve the path to the compiled CLI bundle
const cliPath = path.resolve(__dirname, "../dist/cli.js");
const cliUrl = pathToFileURL(cliPath).href;
["SIGINT", "SIGTERM", "SIGHUP"].forEach((sig) => {
process.on(sig, () => forwardSignal(sig));
});

// Load and execute the CLI
(async () => {
// When the child exits, mirror its termination reason in the parent so that
// shell scripts and other tooling observe the correct exit status.
// Wrap the lifetime of the child process in a Promise so that we can await
// its termination in a structured way. The Promise resolves with an object
// describing how the child exited: either via exit code or due to a signal.
const childResult = await new Promise((resolve) => {
child.on("exit", (code, signal) => {
if (signal) {
resolve({ type: "signal", signal });
} else {
resolve({ type: "code", exitCode: code ?? 1 });
}
});
});

if (childResult.type === "signal") {
// Re-emit the same signal so that the parent terminates with the expected
// semantics (this also sets the correct exit code of 128 + n).
process.kill(process.pid, childResult.signal);
} else {
process.exit(childResult.exitCode);
}
} else {
// Fallback: execute the original JavaScript CLI.

// Resolve the path to the compiled CLI bundle
const cliPath = path.resolve(__dirname, "../dist/cli.js");
const cliUrl = pathToFileURL(cliPath).href;

// Load and execute the CLI
try {
await import(cliUrl);
} catch (err) {
@@ -97,4 +150,4 @@ const cliUrl = pathToFileURL(cliPath).href;
console.error(err);
process.exit(1);
}
})();
}
codex-cli/scripts/README.md (new file): 9 lines
@@ -0,0 +1,9 @@
# npm releases

To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, run the following:

```bash
./codex-cli/scripts/stage_rust_release.py --release-version 0.6.0
```
@@ -4,10 +4,7 @@
# -----------------------------------------------------------------------------
# Stages an npm release for @openai/codex.
#
# The script used to accept a single optional positional argument that indicated
# the temporary directory in which to stage the package. We now support a
# flag-based interface so that we can extend the command with further options
# without breaking the call-site contract.
# Usage:
#
#   --tmp <dir> : Use <dir> instead of a freshly created temp directory.
#   --native : Bundle the pre-built Rust CLI binaries for Linux alongside
@@ -141,7 +138,8 @@ popd >/dev/null
echo "Staged version $VERSION for release in $TMPDIR"

if [[ "$INCLUDE_NATIVE" -eq 1 ]]; then
echo "Test Rust:"
echo "Verify the CLI:"
echo " node ${TMPDIR}/bin/codex.js --version"
echo " node ${TMPDIR}/bin/codex.js --help"
else
echo "Test Node:"
codex-rs/Cargo.lock (generated): 101 lines changed
@@ -250,6 +250,28 @@ dependencies = [
|
||||
"pin-project-lite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-stream"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
|
||||
dependencies = [
|
||||
"async-stream-impl",
|
||||
"futures-core",
|
||||
"pin-project-lite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-stream-impl"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.104",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-trait"
|
||||
version = "0.1.88"
|
||||
@@ -575,12 +597,30 @@ dependencies = [
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-cli"
|
||||
name = "codex-chatgpt"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"codex-common",
|
||||
"codex-core",
|
||||
"codex-login",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-cli"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assert_cmd",
|
||||
"clap",
|
||||
"clap_complete",
|
||||
"codex-chatgpt",
|
||||
"codex-common",
|
||||
"codex-core",
|
||||
"codex-exec",
|
||||
@@ -588,10 +628,14 @@ dependencies = [
|
||||
"codex-login",
|
||||
"codex-mcp-server",
|
||||
"codex-tui",
|
||||
"indoc",
|
||||
"predicates",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"wiremock",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -637,6 +681,7 @@ dependencies = [
|
||||
"thiserror 2.0.12",
|
||||
"time",
|
||||
"tokio",
|
||||
"tokio-test",
|
||||
"tokio-util",
|
||||
"toml 0.9.1",
|
||||
"tracing",
|
||||
@@ -770,6 +815,7 @@ dependencies = [
|
||||
"color-eyre",
|
||||
"crossterm",
|
||||
"image",
|
||||
"insta",
|
||||
"lazy_static",
|
||||
"mcp-types",
|
||||
"path-clean",
|
||||
@@ -854,6 +900,18 @@ dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "console"
|
||||
version = "0.15.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8"
|
||||
dependencies = [
|
||||
"encode_unicode",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "convert_case"
|
||||
version = "0.6.0"
|
||||
@@ -1213,6 +1271,12 @@ dependencies = [
|
||||
"log",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "encode_unicode"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
|
||||
|
||||
[[package]]
|
||||
name = "encoding_rs"
|
||||
version = "0.8.35"
|
||||
@@ -2093,6 +2157,17 @@ version = "2.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
|
||||
|
||||
[[package]]
|
||||
name = "insta"
|
||||
version = "1.43.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "154934ea70c58054b556dd430b99a98c2a7ff5309ac9891597e339b5c28f4371"
|
||||
dependencies = [
|
||||
"console",
|
||||
"once_cell",
|
||||
"similar",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "instability"
|
||||
version = "0.3.7"
|
||||
@@ -4469,6 +4544,30 @@ dependencies = [
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-stream"
|
||||
version = "0.1.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"pin-project-lite",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-test"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
|
||||
dependencies = [
|
||||
"async-stream",
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-util"
|
||||
version = "0.7.15"
|
||||
|
||||
codex-rs/chatgpt/Cargo.toml (new file): 21 lines
@@ -0,0 +1,21 @@
[package]
name = "codex-chatgpt"
version = { workspace = true }
edition = "2024"

[lints]
workspace = true

[dependencies]
anyhow = "1"
clap = { version = "4", features = ["derive"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
codex-common = { path = "../common", features = ["cli"] }
codex-core = { path = "../core" }
codex-login = { path = "../login" }
reqwest = { version = "0.12", features = ["json", "stream"] }
tokio = { version = "1", features = ["full"] }

[dev-dependencies]
tempfile = "3"
codex-rs/chatgpt/README.md (new file): 5 lines
@@ -0,0 +1,5 @@
# ChatGPT

This crate pertains to first-party ChatGPT APIs and products, such as the Codex agent.

This crate is primarily built and maintained by OpenAI employees. Please reach out to a maintainer before making an external contribution.
codex-rs/chatgpt/src/apply_command.rs (new file): 89 lines
@@ -0,0 +1,89 @@
use clap::Parser;
use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;

use crate::chatgpt_token::init_chatgpt_token_from_auth;
use crate::get_task::GetTaskResponse;
use crate::get_task::OutputItem;
use crate::get_task::PrOutputItem;
use crate::get_task::get_task;

/// Applies the latest diff from a Codex agent task.
#[derive(Debug, Parser)]
pub struct ApplyCommand {
    pub task_id: String,

    #[clap(flatten)]
    pub config_overrides: CliConfigOverrides,
}
pub async fn run_apply_command(apply_cli: ApplyCommand) -> anyhow::Result<()> {
    let config = Config::load_with_cli_overrides(
        apply_cli
            .config_overrides
            .parse_overrides()
            .map_err(anyhow::Error::msg)?,
        ConfigOverrides::default(),
    )?;

    init_chatgpt_token_from_auth(&config.codex_home).await?;

    let task_response = get_task(&config, apply_cli.task_id).await?;
    apply_diff_from_task(task_response).await
}

pub async fn apply_diff_from_task(task_response: GetTaskResponse) -> anyhow::Result<()> {
    let diff_turn = match task_response.current_diff_task_turn {
        Some(turn) => turn,
        None => anyhow::bail!("No diff turn found"),
    };
    let output_diff = diff_turn.output_items.iter().find_map(|item| match item {
        OutputItem::Pr(PrOutputItem { output_diff }) => Some(output_diff),
        _ => None,
    });
    match output_diff {
        Some(output_diff) => apply_diff(&output_diff.diff).await,
        None => anyhow::bail!("No PR output item found"),
    }
}

async fn apply_diff(diff: &str) -> anyhow::Result<()> {
    let toplevel_output = tokio::process::Command::new("git")
        .args(vec!["rev-parse", "--show-toplevel"])
        .output()
        .await?;

    if !toplevel_output.status.success() {
        anyhow::bail!("apply must be run from a git repository.");
    }

    let repo_root = String::from_utf8(toplevel_output.stdout)?
        .trim()
        .to_string();

    let mut git_apply_cmd = tokio::process::Command::new("git")
        .args(vec!["apply", "--3way"])
        .current_dir(&repo_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;

    if let Some(mut stdin) = git_apply_cmd.stdin.take() {
        tokio::io::AsyncWriteExt::write_all(&mut stdin, diff.as_bytes()).await?;
        drop(stdin);
    }

    let output = git_apply_cmd.wait_with_output().await?;

    if !output.status.success() {
        anyhow::bail!(
            "Git apply failed with status {}: {}",
            output.status,
            String::from_utf8_lossy(&output.stderr)
        );
    }

    println!("Successfully applied diff");
    Ok(())
}
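For orientation, a minimal, hedged sketch of driving the public apply path from a locally saved task payload, mirroring what the crate's e2e test does. The fixture filename, the `main` wrapper, and the caller-side dependencies (`tokio`, `serde_json`, `anyhow`) are assumptions for illustration, not part of this diff.

```rust
// Illustrative only: load a JSON payload shaped like a /wham/tasks/{task_id}
// response and hand it to apply_diff_from_task, which shells out to
// `git apply --3way`. Must be run from inside a git repository.
use codex_chatgpt::apply_command::apply_diff_from_task;
use codex_chatgpt::get_task::GetTaskResponse;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Hypothetical local copy of a task response; not a path in the repo.
    let raw = std::fs::read_to_string("task_response.json")?;
    let task: GetTaskResponse = serde_json::from_str(&raw)?;
    apply_diff_from_task(task).await
}
```

The real CLI reaches the same code through `run_apply_command`, which first loads the `Config` and the ChatGPT token.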
codex-rs/chatgpt/src/chatgpt_client.rs (new file): 45 lines
@@ -0,0 +1,45 @@
use codex_core::config::Config;

use crate::chatgpt_token::get_chatgpt_token_data;
use crate::chatgpt_token::init_chatgpt_token_from_auth;

use anyhow::Context;
use serde::de::DeserializeOwned;

/// Make a GET request to the ChatGPT backend API.
pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
    config: &Config,
    path: String,
) -> anyhow::Result<T> {
    let chatgpt_base_url = &config.chatgpt_base_url;
    init_chatgpt_token_from_auth(&config.codex_home).await?;

    // Make direct HTTP request to ChatGPT backend API with the token
    let client = reqwest::Client::new();
    let url = format!("{chatgpt_base_url}{path}");

    let token =
        get_chatgpt_token_data().ok_or_else(|| anyhow::anyhow!("ChatGPT token not available"))?;

    let response = client
        .get(&url)
        .bearer_auth(&token.access_token)
        .header("chatgpt-account-id", &token.account_id)
        .header("Content-Type", "application/json")
        .header("User-Agent", "codex-cli")
        .send()
        .await
        .context("Failed to send request")?;

    if response.status().is_success() {
        let result: T = response
            .json()
            .await
            .context("Failed to parse JSON response")?;
        Ok(result)
    } else {
        let status = response.status();
        let body = response.text().await.unwrap_or_default();
        anyhow::bail!("Request failed with status {}: {}", status, body)
    }
}
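Since `chatgpt_get_request` is `pub(crate)`, here is a hedged sketch of how another module inside this crate could call it. The `UserInfo` type and the `/wham/users/me` path are invented for the example and are not part of the real backend API.

```rust
// Illustrative crate-internal helper; the endpoint and response shape are
// made up purely to show the generic DeserializeOwned usage.
use serde::Deserialize;

use crate::chatgpt_client::chatgpt_get_request;

#[derive(Debug, Deserialize)]
struct UserInfo {
    id: String,
}

async fn fetch_user_info(config: &codex_core::config::Config) -> anyhow::Result<UserInfo> {
    // The base URL comes from config.chatgpt_base_url; the bearer token and
    // chatgpt-account-id header are attached by chatgpt_get_request itself.
    chatgpt_get_request(config, "/wham/users/me".to_string()).await
}
```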
codex-rs/chatgpt/src/chatgpt_token.rs (new file): 24 lines
@@ -0,0 +1,24 @@
use std::path::Path;
use std::sync::LazyLock;
use std::sync::RwLock;

use codex_login::TokenData;

static CHATGPT_TOKEN: LazyLock<RwLock<Option<TokenData>>> = LazyLock::new(|| RwLock::new(None));

pub fn get_chatgpt_token_data() -> Option<TokenData> {
    CHATGPT_TOKEN.read().ok()?.clone()
}

pub fn set_chatgpt_token_data(value: TokenData) {
    if let Ok(mut guard) = CHATGPT_TOKEN.write() {
        *guard = Some(value);
    }
}

/// Initialize the ChatGPT token from auth.json file
pub async fn init_chatgpt_token_from_auth(codex_home: &Path) -> std::io::Result<()> {
    let auth_json = codex_login::try_read_auth_json(codex_home).await?;
    set_chatgpt_token_data(auth_json.tokens.clone());
    Ok(())
}
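A short, hedged sketch of the intended call pattern inside the crate: initialize the process-wide cache once from `auth.json`, then read the cached token wherever a request needs credentials. The helper function below is illustrative; only the two `chatgpt_token` functions it calls come from the diff above.

```rust
// Illustrative: init once, then read the cached token on demand.
use std::path::Path;

use crate::chatgpt_token::get_chatgpt_token_data;
use crate::chatgpt_token::init_chatgpt_token_from_auth;

async fn ensure_token(codex_home: &Path) -> std::io::Result<bool> {
    // Reads auth.json under codex_home and stores the tokens in the RwLock.
    init_chatgpt_token_from_auth(codex_home).await?;
    // Subsequent reads are cheap clones of the cached TokenData.
    Ok(get_chatgpt_token_data().is_some())
}
```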
codex-rs/chatgpt/src/get_task.rs (new file): 40 lines
@@ -0,0 +1,40 @@
use codex_core::config::Config;
use serde::Deserialize;

use crate::chatgpt_client::chatgpt_get_request;

#[derive(Debug, Deserialize)]
pub struct GetTaskResponse {
    pub current_diff_task_turn: Option<AssistantTurn>,
}

// Only relevant fields for our extraction
#[derive(Debug, Deserialize)]
pub struct AssistantTurn {
    pub output_items: Vec<OutputItem>,
}

#[derive(Debug, Deserialize)]
#[serde(tag = "type")]
pub enum OutputItem {
    #[serde(rename = "pr")]
    Pr(PrOutputItem),

    #[serde(other)]
    Other,
}

#[derive(Debug, Deserialize)]
pub struct PrOutputItem {
    pub output_diff: OutputDiff,
}

#[derive(Debug, Deserialize)]
pub struct OutputDiff {
    pub diff: String,
}

pub(crate) async fn get_task(config: &Config, task_id: String) -> anyhow::Result<GetTaskResponse> {
    let path = format!("/wham/tasks/{task_id}");
    chatgpt_get_request(config, path).await
}
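To make the `#[serde(tag = "type")]` plus `#[serde(other)]` behavior concrete: a `"pr"` item deserializes into `OutputItem::Pr`, and any unrecognized item type falls back to `OutputItem::Other` instead of failing the whole response. A small hedged example with a made-up, trimmed payload (it assumes it sits next to these types, e.g. as a unit test in this module):

```rust
// Illustrative test-style snippet; the JSON literal is invented and much
// smaller than a real task response.
fn demo_tagged_enum() -> serde_json::Result<()> {
    let json = r#"{
        "current_diff_task_turn": {
            "output_items": [
                { "type": "pr", "output_diff": { "diff": "diff --git a/x b/x" } },
                { "type": "message" }
            ]
        }
    }"#;
    let resp: GetTaskResponse = serde_json::from_str(json)?;
    let turn = resp
        .current_diff_task_turn
        .expect("turn is present in this sample");
    // The "pr" item carries the diff; the unknown "message" item is tolerated.
    assert!(matches!(turn.output_items[0], OutputItem::Pr(_)));
    assert!(matches!(turn.output_items[1], OutputItem::Other));
    Ok(())
}
```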
codex-rs/chatgpt/src/lib.rs (new file): 4 lines
@@ -0,0 +1,4 @@
pub mod apply_command;
mod chatgpt_client;
mod chatgpt_token;
pub mod get_task;
codex-rs/chatgpt/tests/apply_command_e2e.rs (new file): 191 lines
@@ -0,0 +1,191 @@
|
||||
#![expect(clippy::expect_used)]
|
||||
|
||||
use codex_chatgpt::apply_command::apply_diff_from_task;
|
||||
use codex_chatgpt::get_task::GetTaskResponse;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use tokio::process::Command;
|
||||
|
||||
/// Creates a temporary git repository with initial commit
|
||||
async fn create_temp_git_repo() -> anyhow::Result<TempDir> {
|
||||
let temp_dir = TempDir::new()?;
|
||||
let repo_path = temp_dir.path();
|
||||
|
||||
let output = Command::new("git")
|
||||
.args(["init"])
|
||||
.current_dir(repo_path)
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
if !output.status.success() {
|
||||
anyhow::bail!(
|
||||
"Failed to initialize git repo: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
}
|
||||
|
||||
Command::new("git")
|
||||
.args(["config", "user.email", "test@example.com"])
|
||||
.current_dir(repo_path)
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
Command::new("git")
|
||||
.args(["config", "user.name", "Test User"])
|
||||
.current_dir(repo_path)
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
std::fs::write(repo_path.join("README.md"), "# Test Repo\n")?;
|
||||
|
||||
Command::new("git")
|
||||
.args(["add", "README.md"])
|
||||
.current_dir(repo_path)
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
let output = Command::new("git")
|
||||
.args(["commit", "-m", "Initial commit"])
|
||||
.current_dir(repo_path)
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
if !output.status.success() {
|
||||
anyhow::bail!(
|
||||
"Failed to create initial commit: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
}
|
||||
|
||||
Ok(temp_dir)
|
||||
}
|
||||
|
||||
async fn mock_get_task_with_fixture() -> anyhow::Result<GetTaskResponse> {
|
||||
let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/task_turn_fixture.json");
|
||||
let fixture_content = std::fs::read_to_string(fixture_path)?;
|
||||
let response: GetTaskResponse = serde_json::from_str(&fixture_content)?;
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_apply_command_creates_fibonacci_file() {
|
||||
let temp_repo = create_temp_git_repo()
|
||||
.await
|
||||
.expect("Failed to create temp git repo");
|
||||
let repo_path = temp_repo.path();
|
||||
|
||||
let task_response = mock_get_task_with_fixture()
|
||||
.await
|
||||
.expect("Failed to load fixture");
|
||||
|
||||
let original_dir = std::env::current_dir().expect("Failed to get current dir");
|
||||
std::env::set_current_dir(repo_path).expect("Failed to change directory");
|
||||
struct DirGuard(std::path::PathBuf);
|
||||
impl Drop for DirGuard {
|
||||
fn drop(&mut self) {
|
||||
let _ = std::env::set_current_dir(&self.0);
|
||||
}
|
||||
}
|
||||
let _guard = DirGuard(original_dir);
|
||||
|
||||
apply_diff_from_task(task_response)
|
||||
.await
|
||||
.expect("Failed to apply diff from task");
|
||||
|
||||
// Assert that fibonacci.js was created in scripts/ directory
|
||||
let fibonacci_path = repo_path.join("scripts/fibonacci.js");
|
||||
assert!(fibonacci_path.exists(), "fibonacci.js was not created");
|
||||
|
||||
// Verify the file contents match expected
|
||||
let contents = std::fs::read_to_string(&fibonacci_path).expect("Failed to read fibonacci.js");
|
||||
assert!(
|
||||
contents.contains("function fibonacci(n)"),
|
||||
"fibonacci.js doesn't contain expected function"
|
||||
);
|
||||
assert!(
|
||||
contents.contains("#!/usr/bin/env node"),
|
||||
"fibonacci.js doesn't have shebang"
|
||||
);
|
||||
assert!(
|
||||
contents.contains("module.exports = fibonacci;"),
|
||||
"fibonacci.js doesn't export function"
|
||||
);
|
||||
|
||||
// Verify file has correct number of lines (31 as specified in fixture)
|
||||
let line_count = contents.lines().count();
|
||||
assert_eq!(
|
||||
line_count, 31,
|
||||
"fibonacci.js should have 31 lines, got {line_count}",
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_apply_command_with_merge_conflicts() {
|
||||
let temp_repo = create_temp_git_repo()
|
||||
.await
|
||||
.expect("Failed to create temp git repo");
|
||||
let repo_path = temp_repo.path();
|
||||
|
||||
// Create conflicting fibonacci.js file first
|
||||
let scripts_dir = repo_path.join("scripts");
|
||||
std::fs::create_dir_all(&scripts_dir).expect("Failed to create scripts directory");
|
||||
|
||||
let conflicting_content = r#"#!/usr/bin/env node
|
||||
|
||||
// This is a different fibonacci implementation
|
||||
function fib(num) {
|
||||
if (num <= 1) return num;
|
||||
return fib(num - 1) + fib(num - 2);
|
||||
}
|
||||
|
||||
console.log("Running fibonacci...");
|
||||
console.log(fib(10));
|
||||
"#;
|
||||
|
||||
let fibonacci_path = scripts_dir.join("fibonacci.js");
|
||||
std::fs::write(&fibonacci_path, conflicting_content).expect("Failed to write conflicting file");
|
||||
|
||||
Command::new("git")
|
||||
.args(["add", "scripts/fibonacci.js"])
|
||||
.current_dir(repo_path)
|
||||
.output()
|
||||
.await
|
||||
.expect("Failed to add fibonacci.js");
|
||||
|
||||
Command::new("git")
|
||||
.args(["commit", "-m", "Add conflicting fibonacci implementation"])
|
||||
.current_dir(repo_path)
|
||||
.output()
|
||||
.await
|
||||
.expect("Failed to commit conflicting file");
|
||||
|
||||
let original_dir = std::env::current_dir().expect("Failed to get current dir");
|
||||
std::env::set_current_dir(repo_path).expect("Failed to change directory");
|
||||
struct DirGuard(std::path::PathBuf);
|
||||
impl Drop for DirGuard {
|
||||
fn drop(&mut self) {
|
||||
let _ = std::env::set_current_dir(&self.0);
|
||||
}
|
||||
}
|
||||
let _guard = DirGuard(original_dir);
|
||||
|
||||
let task_response = mock_get_task_with_fixture()
|
||||
.await
|
||||
.expect("Failed to load fixture");
|
||||
|
||||
let apply_result = apply_diff_from_task(task_response).await;
|
||||
|
||||
assert!(
|
||||
apply_result.is_err(),
|
||||
"Expected apply to fail due to merge conflicts"
|
||||
);
|
||||
|
||||
let contents = std::fs::read_to_string(&fibonacci_path).expect("Failed to read fibonacci.js");
|
||||
|
||||
assert!(
|
||||
contents.contains("<<<<<<< HEAD")
|
||||
|| contents.contains("=======")
|
||||
|| contents.contains(">>>>>>> "),
|
||||
"fibonacci.js should contain merge conflict markers, got: {contents}",
|
||||
);
|
||||
}
|
||||
codex-rs/chatgpt/tests/task_turn_fixture.json (new file): 65 lines
@@ -0,0 +1,65 @@
|
||||
{
|
||||
"current_diff_task_turn": {
|
||||
"output_items": [
|
||||
{
|
||||
"type": "pr",
|
||||
"pr_title": "Add fibonacci script",
|
||||
"pr_message": "## Summary\n- add a basic Fibonacci script under `scripts/`\n\n## Testing\n- `node scripts/fibonacci.js 10`\n- `npm run lint` *(fails: next not found)*",
|
||||
"output_diff": {
|
||||
"type": "output_diff",
|
||||
"repo_id": "/workspace/rddit-vercel",
|
||||
"base_commit_sha": "1a2e9baf2ce2fdd0c126b47b1bcfd512de2a9f7b",
|
||||
"diff": "diff --git a/scripts/fibonacci.js b/scripts/fibonacci.js\nnew file mode 100644\nindex 0000000000000000000000000000000000000000..6c9fdfdbf8669b7968936411050525b995d0a9a6\n--- /dev/null\n+++ b/scripts/fibonacci.js\n@@ -0,0 +1,31 @@\n+#!/usr/bin/env node\n+\n+function fibonacci(n) {\n+ if (n < 0) {\n+ throw new Error(\"n must be non-negative\");\n+ }\n+ let a = 0;\n+ let b = 1;\n+ for (let i = 0; i < n; i++) {\n+ const next = a + b;\n+ a = b;\n+ b = next;\n+ }\n+ return a;\n+}\n+\n+function printUsage() {\n+ console.log(\"Usage: node scripts/fibonacci.js <n>\");\n+}\n+\n+if (require.main === module) {\n+ const arg = process.argv[2];\n+ if (arg === undefined || isNaN(Number(arg))) {\n+ printUsage();\n+ process.exit(1);\n+ }\n+ const n = Number(arg);\n+ console.log(fibonacci(n));\n+}\n+\n+module.exports = fibonacci;\n",
|
||||
"external_storage_diff": {
|
||||
"file_id": "file_00000000114c61f786900f8c2130ace7",
|
||||
"ttl": null
|
||||
},
|
||||
"files_modified": 1,
|
||||
"lines_added": 31,
|
||||
"lines_removed": 0,
|
||||
"commit_message": "Add fibonacci script"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "message",
|
||||
"role": "assistant",
|
||||
"content": [
|
||||
{
|
||||
"content_type": "text",
|
||||
"text": "**Summary**\n\n- Created a command-line Fibonacci script that validates input and prints the result when executed with Node"
|
||||
},
|
||||
{
|
||||
"content_type": "repo_file_citation",
|
||||
"path": "scripts/fibonacci.js",
|
||||
"line_range_start": 1,
|
||||
"line_range_end": 31
|
||||
},
|
||||
{
|
||||
"content_type": "text",
|
||||
"text": "\n\n**Testing**\n\n- ❌ `npm run lint` (failed to run `next lint`)"
|
||||
},
|
||||
{
|
||||
"content_type": "terminal_chunk_citation",
|
||||
"terminal_chunk_id": "7dd543",
|
||||
"line_range_start": 1,
|
||||
"line_range_end": 5
|
||||
},
|
||||
{
|
||||
"content_type": "text",
|
||||
"text": "\n- ✅ `node scripts/fibonacci.js 10` produced “55”"
|
||||
},
|
||||
{
|
||||
"content_type": "terminal_chunk_citation",
|
||||
"terminal_chunk_id": "6ee559",
|
||||
"line_range_start": 1,
|
||||
"line_range_end": 3
|
||||
},
|
||||
{
|
||||
"content_type": "text",
|
||||
"text": "\n\nCodex couldn't run certain commands due to environment limitations. Consider configuring a setup script or internet access in your Codex environment to install dependencies."
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -18,6 +18,7 @@ workspace = true
anyhow = "1"
clap = { version = "4", features = ["derive"] }
clap_complete = "4"
codex-chatgpt = { path = "../chatgpt" }
codex-core = { path = "../core" }
codex-common = { path = "../common", features = ["cli"] }
codex-exec = { path = "../exec" }
@@ -35,3 +36,11 @@ tokio = { version = "1", features = [
] }
tracing = "0.1.41"
tracing-subscriber = "0.3.19"

[dev-dependencies]
assert_cmd = "2"
predicates = "3"
tempfile = "3"
wiremock = "0.6"
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
indoc = "2"
@@ -2,6 +2,8 @@ use clap::CommandFactory;
use clap::Parser;
use clap_complete::Shell;
use clap_complete::generate;
use codex_chatgpt::apply_command::ApplyCommand;
use codex_chatgpt::apply_command::run_apply_command;
use codex_cli::LandlockCommand;
use codex_cli::SeatbeltCommand;
use codex_cli::login::run_login_with_chatgpt;
@@ -55,6 +57,10 @@ enum Subcommand {

/// Internal debugging commands.
Debug(DebugArgs),

/// Apply the latest diff produced by Codex agent as a `git apply` to your local working tree.
#[clap(visible_alias = "a")]
Apply(ApplyCommand),
}

#[derive(Debug, Parser)]
@@ -137,6 +143,10 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
.await?;
}
},
Some(Subcommand::Apply(mut apply_cli)) => {
prepend_config_flags(&mut apply_cli.config_overrides, cli.config_overrides);
run_apply_command(apply_cli).await?;
}
}

Ok(())
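The `visible_alias = "a"` attribute in the hunk above is what makes both `codex apply <TASK_ID>` and the short form `codex a <TASK_ID>` resolve to the new subcommand. A self-contained, hedged sketch of just that clap behavior (the `DemoCli` types are illustrative stand-ins, not the real `Cli`):

```rust
use clap::Parser;

#[derive(Debug, Parser)]
#[command(name = "codex")]
struct DemoCli {
    #[command(subcommand)]
    cmd: DemoSubcommand,
}

#[derive(Debug, clap::Subcommand)]
enum DemoSubcommand {
    /// Apply the latest diff from a Codex agent task.
    #[clap(visible_alias = "a")]
    Apply { task_id: String },
}

fn main() {
    // Both spellings parse to the same variant.
    let long = DemoCli::parse_from(["codex", "apply", "task_123"]);
    let short = DemoCli::parse_from(["codex", "a", "task_123"]);
    assert!(matches!(long.cmd, DemoSubcommand::Apply { .. }));
    assert!(matches!(short.cmd, DemoSubcommand::Apply { .. }));
}
```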
codex-rs/cli/tests/integration.rs (new file): 223 lines
@@ -0,0 +1,223 @@
|
||||
#![allow(clippy::unwrap_used)]
|
||||
|
||||
//! End-to-end integration tests for the `codex` CLI.
|
||||
//!
|
||||
//! These spin up a local [`wiremock`][] server to stand in for the MCP server
|
||||
//! and then run the real compiled `codex` binary against it. The goal is to
|
||||
//! verify the high-level request/response flow rather than the details of the
|
||||
//! individual async functions.
|
||||
//!
|
||||
//! [`wiremock`]: https://docs.rs/wiremock
|
||||
|
||||
use codex_core::exec::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use predicates::prelude::*;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
|
||||
// ----- tests -----
|
||||
|
||||
/// Sends a single simple prompt and verifies that the streamed response is
|
||||
/// surfaced to the user. This exercises the most common "ask a question, get a
|
||||
/// textual answer" flow.
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn full_conversation_turn_integration() {
|
||||
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!("Skipping test because network is disabled");
|
||||
return;
|
||||
}
|
||||
|
||||
let server = MockServer::start().await;
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/v1/responses"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "text/event-stream")
|
||||
.set_body_raw(sse_message("Hello, world."), "text/event-stream"),
|
||||
)
|
||||
.expect(1)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
// Disable retries — the mock server will fail hard if we make an unexpected
|
||||
// request, so retries only slow the test down.
|
||||
unsafe {
|
||||
std::env::set_var("OPENAI_REQUEST_MAX_RETRIES", "0");
|
||||
std::env::set_var("OPENAI_STREAM_MAX_RETRIES", "0");
|
||||
}
|
||||
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let sandbox = TempDir::new().unwrap();
|
||||
write_config(codex_home.path(), &server);
|
||||
|
||||
// Capture the agent's final message in a file so we can assert on it precisely.
|
||||
let last_message_file = sandbox.path().join("last_message.txt");
|
||||
|
||||
let mut cmd = assert_cmd::Command::cargo_bin("codex").unwrap();
|
||||
cmd.env("CODEX_HOME", codex_home.path())
|
||||
.current_dir(sandbox.path())
|
||||
.arg("exec")
|
||||
.arg("--skip-git-repo-check")
|
||||
.arg("--output-last-message")
|
||||
.arg(&last_message_file)
|
||||
.arg("Hello");
|
||||
|
||||
cmd.assert()
|
||||
.success()
|
||||
.stdout(predicate::str::contains("Hello, world."));
|
||||
|
||||
// Assert on the captured last message file (more robust than stdout formatting).
|
||||
let last = fs::read_to_string(&last_message_file).unwrap();
|
||||
let expected = "Hello, world.";
|
||||
assert_eq!(last.trim(), expected);
|
||||
}
|
||||
|
||||
/// Simulates a tool invocation (`shell`) followed by a second assistant message
|
||||
/// once the tool call completes.
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn tool_invocation_flow() {
|
||||
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!("Skipping test because network is disabled");
|
||||
return;
|
||||
}
|
||||
|
||||
let server = MockServer::start().await;
|
||||
|
||||
// The first request returns a function-call item; the second returns the
|
||||
// final assistant message. Use an atomic counter to serve them in order.
|
||||
struct SeqResponder {
|
||||
count: std::sync::atomic::AtomicUsize,
|
||||
}
|
||||
impl wiremock::Respond for SeqResponder {
|
||||
fn respond(&self, _: &wiremock::Request) -> ResponseTemplate {
|
||||
use std::sync::atomic::Ordering;
|
||||
match self.count.fetch_add(1, Ordering::SeqCst) {
|
||||
0 => ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "text/event-stream")
|
||||
.set_body_raw(sse_function_call(), "text/event-stream"),
|
||||
_ => ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "text/event-stream")
|
||||
.set_body_raw(sse_final_after_call(), "text/event-stream"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/v1/responses"))
|
||||
.respond_with(SeqResponder {
|
||||
count: std::sync::atomic::AtomicUsize::new(0),
|
||||
})
|
||||
.expect(2)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
unsafe {
|
||||
std::env::set_var("OPENAI_REQUEST_MAX_RETRIES", "0");
|
||||
std::env::set_var("OPENAI_STREAM_MAX_RETRIES", "0");
|
||||
}
|
||||
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let sandbox = TempDir::new().unwrap();
|
||||
write_config(codex_home.path(), &server);
|
||||
|
||||
// Capture final assistant message after tool invocation.
|
||||
let last_message_file = sandbox.path().join("last_message.txt");
|
||||
|
||||
let mut cmd = assert_cmd::Command::cargo_bin("codex").unwrap();
|
||||
cmd.env("CODEX_HOME", codex_home.path())
|
||||
.current_dir(sandbox.path())
|
||||
.arg("exec")
|
||||
.arg("--skip-git-repo-check")
|
||||
.arg("--output-last-message")
|
||||
.arg(&last_message_file)
|
||||
.arg("Run shell");
|
||||
|
||||
cmd.assert()
|
||||
.success()
|
||||
.stdout(predicate::str::contains("exec echo hi"))
|
||||
.stdout(predicate::str::contains("hi"));
|
||||
|
||||
// Assert that the final assistant message (second response) was 'done'.
|
||||
let last = fs::read_to_string(&last_message_file).unwrap();
|
||||
let expected = "done";
|
||||
assert_eq!(last.trim(), expected);
|
||||
}
|
||||
|
||||
/// Write a minimal `config.toml` pointing the CLI at the mock server.
|
||||
fn write_config(codex_home: &Path, server: &MockServer) {
|
||||
fs::write(
|
||||
codex_home.join("config.toml"),
|
||||
format!(
|
||||
r#"
|
||||
model_provider = "mock"
|
||||
model = "test-model"
|
||||
|
||||
[model_providers.mock]
|
||||
name = "mock"
|
||||
base_url = "{}/v1"
|
||||
env_key = "PATH"
|
||||
wire_api = "responses"
|
||||
"#,
|
||||
server.uri()
|
||||
),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
/// Small helper to generate an SSE stream with a single assistant message.
|
||||
fn sse_message(text: &str) -> String {
|
||||
const TEMPLATE: &str = r#"event: response.output_item.done
|
||||
data: {"type":"response.output_item.done","item":{"type":"message","role":"assistant","content":[{"type":"output_text","text":"TEXT_PLACEHOLDER"}]}}
|
||||
|
||||
event: response.completed
|
||||
data: {"type":"response.completed","response":{"id":"resp1","output":[]}}
|
||||
|
||||
|
||||
"#;
|
||||
|
||||
TEMPLATE.replace("TEXT_PLACEHOLDER", text)
|
||||
}
|
||||
|
||||
/// Helper to craft an SSE stream that returns a `function_call`.
|
||||
fn sse_function_call() -> String {
|
||||
let call = serde_json::json!({
|
||||
"type": "response.output_item.done",
|
||||
"item": {
|
||||
"type": "function_call",
|
||||
"name": "shell",
|
||||
"arguments": "{\"command\":[\"echo\",\"hi\"]}",
|
||||
"call_id": "call1"
|
||||
}
|
||||
});
|
||||
let completed = serde_json::json!({
|
||||
"type": "response.completed",
|
||||
"response": {"id": "resp1", "output": []}
|
||||
});
|
||||
|
||||
format!(
|
||||
"event: response.output_item.done\ndata: {call}\n\n\
|
||||
event: response.completed\ndata: {completed}\n\n\n"
|
||||
)
|
||||
}
|
||||
|
||||
/// SSE stream for the assistant's final message after the tool call returns.
|
||||
fn sse_final_after_call() -> String {
|
||||
let msg = serde_json::json!({
|
||||
"type": "response.output_item.done",
|
||||
"item": {"type": "message", "role": "assistant", "content": [{"type": "output_text", "text": "done"}]}
|
||||
});
|
||||
let completed = serde_json::json!({
|
||||
"type": "response.completed",
|
||||
"response": {"id": "resp2", "output": []}
|
||||
});
|
||||
|
||||
format!(
|
||||
"event: response.output_item.done\ndata: {msg}\n\n\
|
||||
event: response.completed\ndata: {completed}\n\n\n"
|
||||
)
|
||||
}
|
||||
@@ -64,4 +64,5 @@ maplit = "1.0.2"
predicates = "3"
pretty_assertions = "1.4.1"
tempfile = "3"
tokio-test = "0.4"
wiremock = "0.6"
@@ -134,7 +134,7 @@ pub(crate) async fn stream_chat_completions(

match res {
Ok(resp) if resp.status().is_success() => {
let (tx_event, rx_event) = mpsc::channel::<Result<ResponseEvent>>(16);
let (tx_event, rx_event) = mpsc::channel::<Result<ResponseEvent>>(1600);
let stream = resp.bytes_stream().map_err(CodexErr::Reqwest);
tokio::spawn(process_chat_sse(stream, tx_event));
return Ok(ResponseStream { rx_event });
@@ -426,6 +426,12 @@
// will never appear in a Chat Completions stream.
continue;
}
Poll::Ready(Some(Ok(ResponseEvent::OutputTextDelta(_))))
| Poll::Ready(Some(Ok(ResponseEvent::ReasoningSummaryDelta(_)))) => {
// Deltas are ignored here since aggregation waits for the
// final OutputItemDone.
continue;
}
}
}
}
|
||||
@@ -42,7 +42,6 @@ use std::sync::Arc;
|
||||
#[derive(Clone)]
|
||||
pub struct ModelClient {
|
||||
config: Arc<Config>,
|
||||
model: String,
|
||||
client: reqwest::Client,
|
||||
provider: ModelProviderInfo,
|
||||
effort: ReasoningEffortConfig,
|
||||
@@ -56,10 +55,8 @@ impl ModelClient {
|
||||
effort: ReasoningEffortConfig,
|
||||
summary: ReasoningSummaryConfig,
|
||||
) -> Self {
|
||||
let model = config.model.clone();
|
||||
Self {
|
||||
config,
|
||||
model: model.to_string(),
|
||||
client: reqwest::Client::new(),
|
||||
provider,
|
||||
effort,
|
||||
@@ -75,9 +72,13 @@ impl ModelClient {
|
||||
WireApi::Responses => self.stream_responses(prompt).await,
|
||||
WireApi::Chat => {
|
||||
// Create the raw streaming connection first.
|
||||
let response_stream =
|
||||
stream_chat_completions(prompt, &self.model, &self.client, &self.provider)
|
||||
.await?;
|
||||
let response_stream = stream_chat_completions(
|
||||
prompt,
|
||||
&self.config.model,
|
||||
&self.client,
|
||||
&self.provider,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Wrap it with the aggregation adapter so callers see *only*
|
||||
// the final assistant message per turn (matching the
|
||||
@@ -111,11 +112,11 @@ impl ModelClient {
|
||||
return stream_from_fixture(path).await;
|
||||
}
|
||||
|
||||
let full_instructions = prompt.get_full_instructions(&self.model);
|
||||
let tools_json = create_tools_json_for_responses_api(prompt, &self.model)?;
|
||||
let full_instructions = prompt.get_full_instructions(&self.config.model);
|
||||
let tools_json = create_tools_json_for_responses_api(prompt, &self.config.model)?;
|
||||
let reasoning = create_reasoning_param_for_request(&self.config, self.effort, self.summary);
|
||||
let payload = ResponsesApiRequest {
|
||||
model: &self.model,
|
||||
model: &self.config.model,
|
||||
instructions: &full_instructions,
|
||||
input: &prompt.input,
|
||||
tools: &tools_json,
|
||||
@@ -124,6 +125,7 @@ impl ModelClient {
|
||||
reasoning,
|
||||
previous_response_id: prompt.prev_id.clone(),
|
||||
store: prompt.store,
|
||||
// TODO: make this configurable
|
||||
stream: true,
|
||||
};
|
||||
|
||||
@@ -147,7 +149,7 @@ impl ModelClient {
|
||||
let res = req_builder.send().await;
|
||||
match res {
|
||||
Ok(resp) if resp.status().is_success() => {
|
||||
let (tx_event, rx_event) = mpsc::channel::<Result<ResponseEvent>>(16);
|
||||
let (tx_event, rx_event) = mpsc::channel::<Result<ResponseEvent>>(1600);
|
||||
|
||||
// spawn task to process SSE
|
||||
let stream = resp.bytes_stream().map_err(CodexErr::Reqwest);
|
||||
@@ -204,6 +206,7 @@ struct SseEvent {
|
||||
kind: String,
|
||||
response: Option<Value>,
|
||||
item: Option<Value>,
|
||||
delta: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -336,6 +339,22 @@ where
|
||||
return;
|
||||
}
|
||||
}
|
||||
"response.output_text.delta" => {
|
||||
if let Some(delta) = event.delta {
|
||||
let event = ResponseEvent::OutputTextDelta(delta);
|
||||
if tx_event.send(Ok(event)).await.is_err() {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
"response.reasoning_summary_text.delta" => {
|
||||
if let Some(delta) = event.delta {
|
||||
let event = ResponseEvent::ReasoningSummaryDelta(delta);
|
||||
if tx_event.send(Ok(event)).await.is_err() {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
"response.created" => {
|
||||
if event.response.is_some() {
|
||||
let _ = tx_event.send(Ok(ResponseEvent::Created {})).await;
|
||||
@@ -359,10 +378,8 @@ where
|
||||
| "response.function_call_arguments.delta"
|
||||
| "response.in_progress"
|
||||
| "response.output_item.added"
|
||||
| "response.output_text.delta"
|
||||
| "response.output_text.done"
|
||||
| "response.reasoning_summary_part.added"
|
||||
| "response.reasoning_summary_text.delta"
|
||||
| "response.reasoning_summary_text.done" => {
|
||||
// Currently, we ignore these events, but we handle them
|
||||
// separately to skip the logging message in the `other` case.
|
||||
@@ -374,7 +391,7 @@ where
|
||||
|
||||
/// used in tests to stream from a text SSE file
|
||||
async fn stream_from_fixture(path: impl AsRef<Path>) -> Result<ResponseStream> {
|
||||
let (tx_event, rx_event) = mpsc::channel::<Result<ResponseEvent>>(16);
|
||||
let (tx_event, rx_event) = mpsc::channel::<Result<ResponseEvent>>(1600);
|
||||
let f = std::fs::File::open(path.as_ref())?;
|
||||
let lines = std::io::BufReader::new(f).lines();
|
||||
|
||||
@@ -390,3 +407,241 @@ async fn stream_from_fixture(path: impl AsRef<Path>) -> Result<ResponseStream> {
|
||||
tokio::spawn(process_sse(stream, tx_event));
|
||||
Ok(ResponseStream { rx_event })
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#![allow(clippy::expect_used, clippy::unwrap_used)]
|
||||
|
||||
use super::*;
|
||||
use serde_json::json;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio_test::io::Builder as IoBuilder;
|
||||
use tokio_util::io::ReaderStream;
|
||||
|
||||
// ────────────────────────────
|
||||
// Helpers
|
||||
// ────────────────────────────
|
||||
|
||||
/// Runs the SSE parser on pre-chunked byte slices and returns every event
|
||||
/// (including any final `Err` from a stream-closure check).
|
||||
async fn collect_events(chunks: &[&[u8]]) -> Vec<Result<ResponseEvent>> {
|
||||
let mut builder = IoBuilder::new();
|
||||
for chunk in chunks {
|
||||
builder.read(chunk);
|
||||
}
|
||||
|
||||
let reader = builder.build();
|
||||
let stream = ReaderStream::new(reader).map_err(CodexErr::Io);
|
||||
let (tx, mut rx) = mpsc::channel::<Result<ResponseEvent>>(16);
|
||||
tokio::spawn(process_sse(stream, tx));
|
||||
|
||||
let mut events = Vec::new();
|
||||
while let Some(ev) = rx.recv().await {
|
||||
events.push(ev);
|
||||
}
|
||||
events
|
||||
}
|
||||
|
||||
/// Builds an in-memory SSE stream from JSON fixtures and returns only the
|
||||
/// successfully parsed events (panics on internal channel errors).
|
||||
async fn run_sse(events: Vec<serde_json::Value>) -> Vec<ResponseEvent> {
|
||||
let mut body = String::new();
|
||||
for e in events {
|
||||
let kind = e
|
||||
.get("type")
|
||||
.and_then(|v| v.as_str())
|
||||
.expect("fixture event missing type");
|
||||
if e.as_object().map(|o| o.len() == 1).unwrap_or(false) {
|
||||
body.push_str(&format!("event: {kind}\n\n"));
|
||||
} else {
|
||||
body.push_str(&format!("event: {kind}\ndata: {e}\n\n"));
|
||||
}
|
||||
}
|
||||
|
||||
let (tx, mut rx) = mpsc::channel::<Result<ResponseEvent>>(8);
|
||||
let stream = ReaderStream::new(std::io::Cursor::new(body)).map_err(CodexErr::Io);
|
||||
tokio::spawn(process_sse(stream, tx));
|
||||
|
||||
let mut out = Vec::new();
|
||||
while let Some(ev) = rx.recv().await {
|
||||
out.push(ev.expect("channel closed"));
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
// ────────────────────────────
|
||||
// Tests from `implement-test-for-responses-api-sse-parser`
|
||||
// ────────────────────────────
|
||||
|
||||
#[tokio::test]
|
||||
async fn parses_items_and_completed() {
|
||||
let item1 = json!({
|
||||
"type": "response.output_item.done",
|
||||
"item": {
|
||||
"type": "message",
|
||||
"role": "assistant",
|
||||
"content": [{"type": "output_text", "text": "Hello"}]
|
||||
}
|
||||
})
|
||||
.to_string();
|
||||
|
||||
let item2 = json!({
|
||||
"type": "response.output_item.done",
|
||||
"item": {
|
||||
"type": "message",
|
||||
"role": "assistant",
|
||||
"content": [{"type": "output_text", "text": "World"}]
|
||||
}
|
||||
})
|
||||
.to_string();
|
||||
|
||||
let completed = json!({
|
||||
"type": "response.completed",
|
||||
"response": { "id": "resp1" }
|
||||
})
|
||||
.to_string();
|
||||
|
||||
let sse1 = format!("event: response.output_item.done\ndata: {item1}\n\n");
|
||||
let sse2 = format!("event: response.output_item.done\ndata: {item2}\n\n");
|
||||
let sse3 = format!("event: response.completed\ndata: {completed}\n\n");
|
||||
|
||||
let events = collect_events(&[sse1.as_bytes(), sse2.as_bytes(), sse3.as_bytes()]).await;
|
||||
|
||||
assert_eq!(events.len(), 3);
|
||||
|
||||
matches!(
|
||||
&events[0],
|
||||
Ok(ResponseEvent::OutputItemDone(ResponseItem::Message { role, .. }))
|
||||
if role == "assistant"
|
||||
);
|
||||
|
||||
matches!(
|
||||
&events[1],
|
||||
Ok(ResponseEvent::OutputItemDone(ResponseItem::Message { role, .. }))
|
||||
if role == "assistant"
|
||||
);
|
||||
|
||||
match &events[2] {
|
||||
Ok(ResponseEvent::Completed {
|
||||
response_id,
|
||||
token_usage,
|
||||
}) => {
|
||||
assert_eq!(response_id, "resp1");
|
||||
assert!(token_usage.is_none());
|
||||
}
|
||||
other => panic!("unexpected third event: {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn error_when_missing_completed() {
|
||||
let item1 = json!({
|
||||
"type": "response.output_item.done",
|
||||
"item": {
|
||||
"type": "message",
|
||||
"role": "assistant",
|
||||
"content": [{"type": "output_text", "text": "Hello"}]
|
||||
}
|
||||
})
|
||||
.to_string();
|
||||
|
||||
let sse1 = format!("event: response.output_item.done\ndata: {item1}\n\n");
|
||||
|
||||
let events = collect_events(&[sse1.as_bytes()]).await;
|
||||
|
||||
assert_eq!(events.len(), 2);
|
||||
|
||||
matches!(events[0], Ok(ResponseEvent::OutputItemDone(_)));
|
||||
|
||||
match &events[1] {
|
||||
Err(CodexErr::Stream(msg)) => {
|
||||
assert_eq!(msg, "stream closed before response.completed")
|
||||
}
|
||||
other => panic!("unexpected second event: {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
// ────────────────────────────
|
||||
// Table-driven test from `main`
|
||||
// ────────────────────────────
|
||||
|
||||
/// Verifies that the adapter produces the right `ResponseEvent` for a
|
||||
/// variety of incoming `type` values.
|
||||
#[tokio::test]
|
||||
async fn table_driven_event_kinds() {
|
||||
struct TestCase {
|
||||
name: &'static str,
|
||||
event: serde_json::Value,
|
||||
expect_first: fn(&ResponseEvent) -> bool,
|
||||
expected_len: usize,
|
||||
}
|
||||
|
||||
fn is_created(ev: &ResponseEvent) -> bool {
|
||||
matches!(ev, ResponseEvent::Created)
|
||||
}
|
||||
fn is_output(ev: &ResponseEvent) -> bool {
|
||||
matches!(ev, ResponseEvent::OutputItemDone(_))
|
||||
}
|
||||
fn is_completed(ev: &ResponseEvent) -> bool {
|
||||
matches!(ev, ResponseEvent::Completed { .. })
|
||||
}
|
||||
|
||||
let completed = json!({
|
||||
"type": "response.completed",
|
||||
"response": {
|
||||
"id": "c",
|
||||
"usage": {
|
||||
"input_tokens": 0,
|
||||
"input_tokens_details": null,
|
||||
"output_tokens": 0,
|
||||
"output_tokens_details": null,
|
||||
"total_tokens": 0
|
||||
},
|
||||
"output": []
|
||||
}
|
||||
});
|
||||
|
||||
let cases = vec![
|
||||
TestCase {
|
||||
name: "created",
|
||||
event: json!({"type": "response.created", "response": {}}),
|
||||
expect_first: is_created,
|
||||
expected_len: 2,
|
||||
},
|
||||
TestCase {
|
||||
name: "output_item.done",
|
||||
event: json!({
|
||||
"type": "response.output_item.done",
|
||||
"item": {
|
||||
"type": "message",
|
||||
"role": "assistant",
|
||||
"content": [
|
||||
{"type": "output_text", "text": "hi"}
|
||||
]
|
||||
}
|
||||
}),
|
||||
expect_first: is_output,
|
||||
expected_len: 2,
|
||||
},
|
||||
TestCase {
|
||||
name: "unknown",
|
||||
event: json!({"type": "response.new_tool_event"}),
|
||||
expect_first: is_completed,
|
||||
expected_len: 1,
|
||||
},
|
||||
];
|
||||
|
||||
for case in cases {
|
||||
let mut evs = vec![case.event];
|
||||
evs.push(completed.clone());
|
||||
|
||||
let out = run_sse(evs).await;
|
||||
assert_eq!(out.len(), case.expected_len, "case {}", case.name);
|
||||
assert!(
|
||||
(case.expect_first)(&out[0]),
|
||||
"first event mismatch in case {}",
|
||||
case.name
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,6 +57,8 @@ pub enum ResponseEvent {
response_id: String,
token_usage: Option<TokenUsage>,
},
OutputTextDelta(String),
ReasoningSummaryDelta(String),
}

#[derive(Debug, Serialize)]
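A hedged sketch of what a consumer can do with the two new variants: fold `OutputTextDelta` chunks into a running message (reasoning deltas are forwarded to the UI separately in the core loop) and stop at `Completed`. The `Vec` input is only a stand-in for the real event stream.

```rust
// Illustrative only; the real code receives these events from a ResponseStream.
fn collect_agent_text(events: Vec<ResponseEvent>) -> String {
    let mut text = String::new();
    for event in events {
        match event {
            ResponseEvent::OutputTextDelta(delta) => text.push_str(&delta),
            ResponseEvent::ReasoningSummaryDelta(_) => {
                // Surfaced to the UI as AgentReasoningDelta events elsewhere.
            }
            ResponseEvent::Completed { .. } => break,
            _ => {}
        }
    }
    text
}
```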
@@ -61,7 +61,9 @@ use crate::models::ResponseInputItem;
use crate::models::ResponseItem;
use crate::models::ShellToolCallParams;
use crate::project_doc::get_user_instructions;
use crate::protocol::AgentMessageDeltaEvent;
use crate::protocol::AgentMessageEvent;
use crate::protocol::AgentReasoningDeltaEvent;
use crate::protocol::AgentReasoningEvent;
use crate::protocol::ApplyPatchApprovalRequestEvent;
use crate::protocol::AskForApproval;
@@ -103,7 +105,7 @@ impl Codex {
/// submitted to start the session.
pub async fn spawn(config: Config, ctrl_c: Arc<Notify>) -> CodexResult<(Codex, String)> {
let (tx_sub, rx_sub) = async_channel::bounded(64);
let (tx_event, rx_event) = async_channel::bounded(64);
let (tx_event, rx_event) = async_channel::bounded(1600);

let instructions = get_user_instructions(&config).await;
let configure_session = Op::ConfigureSession {
@@ -1121,15 +1123,8 @@ async fn try_run_turn(

let mut stream = sess.client.clone().stream(&prompt).await?;

// Buffer all the incoming messages from the stream first, then execute them.
// If we execute a function call in the middle of handling the stream, it can time out.
let mut input = Vec::new();
while let Some(event) = stream.next().await {
input.push(event?);
}

let mut output = Vec::new();
for event in input {
while let Some(Ok(event)) = stream.next().await {
match event {
ResponseEvent::Created => {
let mut state = sess.state.lock().unwrap();
@@ -1172,6 +1167,20 @@
state.previous_response_id = Some(response_id);
break;
}
ResponseEvent::OutputTextDelta(delta) => {
let event = Event {
id: sub_id.to_string(),
msg: EventMsg::AgentMessageDelta(AgentMessageDeltaEvent { delta }),
};
sess.tx_event.send(event).await.ok();
}
ResponseEvent::ReasoningSummaryDelta(delta) => {
let event = Event {
id: sub_id.to_string(),
msg: EventMsg::AgentReasoningDelta(AgentReasoningDeltaEvent { delta }),
};
sess.tx_event.send(event).await.ok();
}
}
}
Ok(output)
@@ -134,6 +134,9 @@ pub struct Config {
|
||||
/// When set to `true`, overrides the default heuristic and forces
|
||||
/// `model_supports_reasoning_summaries()` to return `true`.
|
||||
pub model_supports_reasoning_summaries: bool,
|
||||
|
||||
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
|
||||
pub chatgpt_base_url: String,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
@@ -315,6 +318,9 @@ pub struct ConfigToml {
|
||||
|
||||
/// Override to force-enable reasoning summaries for the configured model.
|
||||
pub model_supports_reasoning_summaries: Option<bool>,
|
||||
|
||||
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
|
||||
pub chatgpt_base_url: Option<String>,
|
||||
}
|
||||
|
||||
impl ConfigToml {
|
||||
@@ -483,6 +489,11 @@ impl Config {
|
||||
model_supports_reasoning_summaries: cfg
|
||||
.model_supports_reasoning_summaries
|
||||
.unwrap_or(false),
|
||||
|
||||
chatgpt_base_url: config_profile
|
||||
.chatgpt_base_url
|
||||
.or(cfg.chatgpt_base_url)
|
||||
.unwrap_or("https://chatgpt.com/backend-api/".to_string()),
|
||||
};
|
||||
Ok(config)
|
||||
}
|
||||
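The profile-then-global-then-default fallback for `chatgpt_base_url` shown in the hunk above can be read as a plain `Option` chain. A minimal sketch, with illustrative function and value names rather than the real `ConfigToml` fields:

// Sketch of the `chatgpt_base_url` precedence: profile override, then the
// top-level config value, then the built-in default.
fn effective_chatgpt_base_url(profile: Option<String>, toml: Option<String>) -> String {
    profile
        .or(toml)
        .unwrap_or_else(|| "https://chatgpt.com/backend-api/".to_string())
}

#[test]
fn profile_value_wins_over_global_and_default() {
    let url = effective_chatgpt_base_url(
        Some("https://profile.test/backend-api/".into()),
        Some("https://global.test/backend-api/".into()),
    );
    assert_eq!(url, "https://profile.test/backend-api/");
}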
@@ -788,6 +799,7 @@ disable_response_storage = true
|
||||
model_reasoning_effort: ReasoningEffort::High,
|
||||
model_reasoning_summary: ReasoningSummary::Detailed,
|
||||
model_supports_reasoning_summaries: false,
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
},
|
||||
o3_profile_config
|
||||
);
|
||||
@@ -833,6 +845,7 @@ disable_response_storage = true
|
||||
model_reasoning_effort: ReasoningEffort::default(),
|
||||
model_reasoning_summary: ReasoningSummary::default(),
|
||||
model_supports_reasoning_summaries: false,
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
};
|
||||
|
||||
assert_eq!(expected_gpt3_profile_config, gpt3_profile_config);
|
||||
@@ -893,6 +906,7 @@ disable_response_storage = true
|
||||
model_reasoning_effort: ReasoningEffort::default(),
|
||||
model_reasoning_summary: ReasoningSummary::default(),
|
||||
model_supports_reasoning_summaries: false,
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
};
|
||||
|
||||
assert_eq!(expected_zdr_profile_config, zdr_profile_config);
|
||||
|
||||
@@ -16,4 +16,5 @@ pub struct ConfigProfile {
|
||||
pub disable_response_storage: Option<bool>,
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub chatgpt_base_url: Option<String>,
|
||||
}
|
||||
|
||||
@@ -79,9 +79,19 @@ impl McpConnectionManager {
|
||||
|
||||
// Launch all configured servers concurrently.
|
||||
let mut join_set = JoinSet::new();
|
||||
let mut errors = ClientStartErrors::new();
|
||||
|
||||
for (server_name, cfg) in mcp_servers {
|
||||
// TODO: Verify server name: require `^[a-zA-Z0-9_-]+$`?
|
||||
// Validate server name before spawning
|
||||
if !is_valid_mcp_server_name(&server_name) {
|
||||
let error = anyhow::anyhow!(
|
||||
"invalid server name '{}': must match pattern ^[a-zA-Z0-9_-]+$",
|
||||
server_name
|
||||
);
|
||||
errors.insert(server_name, error);
|
||||
continue;
|
||||
}
|
||||
|
||||
join_set.spawn(async move {
|
||||
let McpServerConfig { command, args, env } = cfg;
|
||||
let client_res = McpClient::new_stdio_client(command, args, env).await;
|
||||
@@ -117,7 +127,6 @@ impl McpConnectionManager {
|
||||
|
||||
let mut clients: HashMap<String, std::sync::Arc<McpClient>> =
|
||||
HashMap::with_capacity(join_set.len());
|
||||
let mut errors = ClientStartErrors::new();
|
||||
|
||||
while let Some(res) = join_set.join_next().await {
|
||||
let (server_name, client_res) = res?; // JoinError propagation
|
||||
@@ -208,3 +217,10 @@ pub async fn list_all_tools(
|
||||
|
||||
Ok(aggregated)
|
||||
}
|
||||
|
||||
fn is_valid_mcp_server_name(server_name: &str) -> bool {
|
||||
!server_name.is_empty()
|
||||
&& server_name
|
||||
.chars()
|
||||
.all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '-')
|
||||
}
|
||||
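For reference, a few names accepted and rejected by the rule above (`^[a-zA-Z0-9_-]+$`); this is an illustrative test sketch, not part of the commit:

#[test]
fn mcp_server_name_validation_examples() {
    // Accepted: ASCII letters, digits, '_' and '-'.
    assert!(is_valid_mcp_server_name("docs-server_2"));
    // Rejected: empty names, whitespace, and characters outside the allowed set.
    assert!(!is_valid_mcp_server_name(""));
    assert!(!is_valid_mcp_server_name("bad name"));
    assert!(!is_valid_mcp_server_name("dots.not.ok"));
}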
|
||||
@@ -282,9 +282,15 @@ pub enum EventMsg {
|
||||
/// Agent text output message
|
||||
AgentMessage(AgentMessageEvent),
|
||||
|
||||
/// Agent text output delta message
|
||||
AgentMessageDelta(AgentMessageDeltaEvent),
|
||||
|
||||
/// Reasoning event from agent.
|
||||
AgentReasoning(AgentReasoningEvent),
|
||||
|
||||
/// Agent reasoning delta event from agent.
|
||||
AgentReasoningDelta(AgentReasoningDeltaEvent),
|
||||
|
||||
/// Ack the client's configure message.
|
||||
SessionConfigured(SessionConfiguredEvent),
|
||||
|
||||
@@ -340,11 +346,21 @@ pub struct AgentMessageEvent {
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct AgentMessageDeltaEvent {
|
||||
pub delta: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct AgentReasoningEvent {
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct AgentReasoningDeltaEvent {
|
||||
pub delta: String,
|
||||
}
|
||||
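As a rough illustration of the new delta payloads defined above: each is a plain one-field struct, so a streamed fragment serializes to a small JSON object. The surrounding `EventMsg` tagging is configured elsewhere and is not assumed here; the test name is hypothetical.

#[test]
fn agent_message_delta_serializes_to_a_small_object() {
    let delta = AgentMessageDeltaEvent { delta: "Hel".to_string() };
    let json = serde_json::to_string(&delta).expect("serialize delta");
    assert_eq!(json, r#"{"delta":"Hel"}"#);
}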
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct McpToolCallBeginEvent {
|
||||
/// Identifier so this can be paired with the McpToolCallEnd event.
|
||||
|
||||
codex-rs/core/tests/cli_responses_fixture.sse (new file)
@@ -0,0 +1,8 @@
event: response.created
data: {"type":"response.created","response":{"id":"resp1"}}

event: response.output_item.done
data: {"type":"response.output_item.done","item":{"type":"message","role":"assistant","content":[{"type":"output_text","text":"fixture hello"}]}}

event: response.completed
data: {"type":"response.completed","response":{"id":"resp1","output":[]}}
codex-rs/core/tests/cli_stream.rs (new file, 119 lines)
@@ -0,0 +1,119 @@
|
||||
#![expect(clippy::unwrap_used)]
|
||||
|
||||
use assert_cmd::Command as AssertCommand;
|
||||
use codex_core::exec::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use tempfile::TempDir;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
|
||||
/// Tests streaming chat completions through the CLI using a mock server.
|
||||
/// This test:
|
||||
/// 1. Sets up a mock server that simulates OpenAI's chat completions API
|
||||
/// 2. Configures codex to use this mock server via a custom provider
|
||||
/// 3. Sends a simple "hello?" prompt and verifies the streamed response
|
||||
/// 4. Ensures the response is received exactly once and contains "hi"
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn chat_mode_stream_cli() {
|
||||
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!(
|
||||
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let server = MockServer::start().await;
|
||||
let sse = concat!(
|
||||
"data: {\"choices\":[{\"delta\":{\"content\":\"hi\"}}]}\n\n",
|
||||
"data: {\"choices\":[{\"delta\":{}}]}\n\n",
|
||||
"data: [DONE]\n\n"
|
||||
);
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/v1/chat/completions"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "text/event-stream")
|
||||
.set_body_raw(sse, "text/event-stream"),
|
||||
)
|
||||
.expect(1)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let home = TempDir::new().unwrap();
|
||||
let provider_override = format!(
|
||||
"model_providers.mock={{ name = \"mock\", base_url = \"{}/v1\", env_key = \"PATH\", wire_api = \"chat\" }}",
|
||||
server.uri()
|
||||
);
|
||||
let mut cmd = AssertCommand::new("cargo");
|
||||
cmd.arg("run")
|
||||
.arg("-p")
|
||||
.arg("codex-cli")
|
||||
.arg("--quiet")
|
||||
.arg("--")
|
||||
.arg("exec")
|
||||
.arg("--skip-git-repo-check")
|
||||
.arg("-c")
|
||||
.arg(&provider_override)
|
||||
.arg("-c")
|
||||
.arg("model_provider=\"mock\"")
|
||||
.arg("-C")
|
||||
.arg(env!("CARGO_MANIFEST_DIR"))
|
||||
.arg("hello?");
|
||||
cmd.env("CODEX_HOME", home.path())
|
||||
.env("OPENAI_API_KEY", "dummy")
|
||||
.env("OPENAI_BASE_URL", format!("{}/v1", server.uri()));
|
||||
|
||||
let output = cmd.output().unwrap();
|
||||
println!("Status: {}", output.status);
|
||||
println!("Stdout:\n{}", String::from_utf8_lossy(&output.stdout));
|
||||
println!("Stderr:\n{}", String::from_utf8_lossy(&output.stderr));
|
||||
assert!(output.status.success());
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
let hi_lines = stdout.lines().filter(|line| line.trim() == "hi").count();
|
||||
assert_eq!(hi_lines, 1, "Expected exactly one line with 'hi'");
|
||||
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
/// Tests streaming responses through the CLI using a local SSE fixture file.
|
||||
/// This test:
|
||||
/// 1. Uses a pre-recorded SSE response fixture instead of a live server
|
||||
/// 2. Configures codex to read from this fixture via CODEX_RS_SSE_FIXTURE env var
|
||||
/// 3. Sends a "hello?" prompt and verifies the response
|
||||
/// 4. Ensures the fixture content is correctly streamed through the CLI
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn responses_api_stream_cli() {
|
||||
if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
|
||||
println!(
|
||||
"Skipping test because it cannot execute when network is disabled in a Codex sandbox."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let fixture =
|
||||
std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/cli_responses_fixture.sse");
|
||||
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut cmd = AssertCommand::new("cargo");
|
||||
cmd.arg("run")
|
||||
.arg("-p")
|
||||
.arg("codex-cli")
|
||||
.arg("--quiet")
|
||||
.arg("--")
|
||||
.arg("exec")
|
||||
.arg("--skip-git-repo-check")
|
||||
.arg("-C")
|
||||
.arg(env!("CARGO_MANIFEST_DIR"))
|
||||
.arg("hello?");
|
||||
cmd.env("CODEX_HOME", home.path())
|
||||
.env("OPENAI_API_KEY", "dummy")
|
||||
.env("CODEX_RS_SSE_FIXTURE", fixture)
|
||||
.env("OPENAI_BASE_URL", "http://unused.local");
|
||||
|
||||
let output = cmd.output().unwrap();
|
||||
assert!(output.status.success());
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
assert!(stdout.contains("fixture hello"));
|
||||
}
|
||||
codex-rs/core/tests/fixtures/completed_template.json (new vendored file)
@@ -0,0 +1,16 @@
[
  {
    "type": "response.completed",
    "response": {
      "id": "__ID__",
      "usage": {
        "input_tokens": 0,
        "input_tokens_details": null,
        "output_tokens": 0,
        "output_tokens_details": null,
        "total_tokens": 0
      },
      "output": []
    }
  }
]
codex-rs/core/tests/fixtures/incomplete_sse.json (new vendored file)
@@ -0,0 +1,3 @@
[
  {"type": "response.output_item.done"}
]
@@ -11,6 +11,7 @@ mod test_support;
|
||||
use serde_json::Value;
|
||||
use tempfile::TempDir;
|
||||
use test_support::load_default_config_for_test;
|
||||
use test_support::load_sse_fixture_with_id;
|
||||
use tokio::time::timeout;
|
||||
use wiremock::Match;
|
||||
use wiremock::Mock;
|
||||
@@ -42,12 +43,9 @@ impl Match for HasPrevId {
|
||||
}
|
||||
}
|
||||
|
||||
/// Build minimal SSE stream with completed marker.
|
||||
/// Build minimal SSE stream with completed marker using the JSON fixture.
|
||||
fn sse_completed(id: &str) -> String {
|
||||
format!(
|
||||
"event: response.completed\n\
|
||||
data: {{\"type\":\"response.completed\",\"response\":{{\"id\":\"{id}\",\"output\":[]}}}}\n\n\n"
|
||||
)
|
||||
load_sse_fixture_with_id("tests/fixtures/completed_template.json", id)
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
|
||||
@@ -12,6 +12,8 @@ use codex_core::protocol::Op;
|
||||
mod test_support;
|
||||
use tempfile::TempDir;
|
||||
use test_support::load_default_config_for_test;
|
||||
use test_support::load_sse_fixture;
|
||||
use test_support::load_sse_fixture_with_id;
|
||||
use tokio::time::timeout;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
@@ -22,18 +24,16 @@ use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
|
||||
fn sse_incomplete() -> String {
|
||||
// Only a single line; missing the completed event.
|
||||
"event: response.output_item.done\n\n".to_string()
|
||||
load_sse_fixture("tests/fixtures/incomplete_sse.json")
|
||||
}
|
||||
|
||||
fn sse_completed(id: &str) -> String {
|
||||
format!(
|
||||
"event: response.completed\n\
|
||||
data: {{\"type\":\"response.completed\",\"response\":{{\"id\":\"{id}\",\"output\":[]}}}}\n\n\n"
|
||||
)
|
||||
load_sse_fixture_with_id("tests/fixtures/completed_template.json", id)
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
// this test is flaky (has race conditions), so we ignore it for now
|
||||
#[ignore]
|
||||
async fn retries_on_early_close() {
|
||||
#![allow(clippy::unwrap_used)]
|
||||
|
||||
|
||||
@@ -21,3 +21,58 @@ pub fn load_default_config_for_test(codex_home: &TempDir) -> Config {
|
||||
)
|
||||
.expect("defaults for test should always succeed")
|
||||
}
|
||||
|
||||
/// Builds an SSE stream body from a JSON fixture.
|
||||
///
|
||||
/// The fixture must contain an array of objects where each object represents a
|
||||
/// single SSE event with at least a `type` field matching the `event:` value.
|
||||
/// Additional fields become the JSON payload for the `data:` line. An object
|
||||
/// with only a `type` field results in an event with no `data:` section. This
|
||||
/// makes it trivial to extend the fixtures as OpenAI adds new event kinds or
|
||||
/// fields.
|
||||
#[allow(dead_code)]
|
||||
pub fn load_sse_fixture(path: impl AsRef<std::path::Path>) -> String {
|
||||
let events: Vec<serde_json::Value> =
|
||||
serde_json::from_reader(std::fs::File::open(path).expect("read fixture"))
|
||||
.expect("parse JSON fixture");
|
||||
events
|
||||
.into_iter()
|
||||
.map(|e| {
|
||||
let kind = e
|
||||
.get("type")
|
||||
.and_then(|v| v.as_str())
|
||||
.expect("fixture event missing type");
|
||||
if e.as_object().map(|o| o.len() == 1).unwrap_or(false) {
|
||||
format!("event: {kind}\n\n")
|
||||
} else {
|
||||
format!("event: {kind}\ndata: {e}\n\n")
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
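A concrete example of the fixture-to-SSE mapping described in the doc comment above; the fixture path and test name here are hypothetical, chosen only to show the two rendering cases:

#[test]
fn fixture_objects_become_sse_events() {
    // Hypothetical fixture at tests/fixtures/example.json containing:
    //   [
    //     {"type": "response.created", "response": {"id": "r1"}},
    //     {"type": "response.completed"}
    //   ]
    let body = load_sse_fixture("tests/fixtures/example.json");
    // The first object has extra fields, so it renders an `event:` plus `data:` pair.
    assert!(body.starts_with("event: response.created\ndata: "));
    // The second object has only `type`, so it renders an event with no data line.
    assert!(body.ends_with("event: response.completed\n\n"));
}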
|
||||
/// Same as [`load_sse_fixture`], but replaces the placeholder `__ID__` in the
|
||||
/// fixture template with the supplied identifier before parsing. This lets a
|
||||
/// single JSON template be reused by multiple tests that each need a unique
|
||||
/// `response_id`.
|
||||
#[allow(dead_code)]
|
||||
pub fn load_sse_fixture_with_id(path: impl AsRef<std::path::Path>, id: &str) -> String {
|
||||
let raw = std::fs::read_to_string(path).expect("read fixture template");
|
||||
let replaced = raw.replace("__ID__", id);
|
||||
let events: Vec<serde_json::Value> =
|
||||
serde_json::from_str(&replaced).expect("parse JSON fixture");
|
||||
events
|
||||
.into_iter()
|
||||
.map(|e| {
|
||||
let kind = e
|
||||
.get("type")
|
||||
.and_then(|v| v.as_str())
|
||||
.expect("fixture event missing type");
|
||||
if e.as_object().map(|o| o.len() == 1).unwrap_or(false) {
|
||||
format!("event: {kind}\n\n")
|
||||
} else {
|
||||
format!("event: {kind}\ndata: {e}\n\n")
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
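And a short usage sketch for the templated variant against the `completed_template.json` fixture added in this change; the id value and test name are arbitrary:

#[test]
fn completed_template_gets_the_requested_id() {
    let body = load_sse_fixture_with_id("tests/fixtures/completed_template.json", "resp-123");
    // `__ID__` in the template is replaced before the events are rendered.
    assert!(body.contains("\"id\":\"resp-123\""));
    assert!(body.starts_with("event: response.completed\n"));
}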
|
||||
@@ -3,7 +3,9 @@ use codex_common::summarize_sandbox_policy;
|
||||
use codex_core::WireApi;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::model_supports_reasoning_summaries;
|
||||
use codex_core::protocol::AgentMessageDeltaEvent;
|
||||
use codex_core::protocol::AgentMessageEvent;
|
||||
use codex_core::protocol::AgentReasoningDeltaEvent;
|
||||
use codex_core::protocol::BackgroundEventEvent;
|
||||
use codex_core::protocol::ErrorEvent;
|
||||
use codex_core::protocol::Event;
|
||||
@@ -21,6 +23,7 @@ use owo_colors::OwoColorize;
|
||||
use owo_colors::Style;
|
||||
use shlex::try_join;
|
||||
use std::collections::HashMap;
|
||||
use std::io::Write;
|
||||
use std::time::Instant;
|
||||
|
||||
/// This should be configurable. When used in CI, users may not want to impose
|
||||
@@ -50,10 +53,12 @@ pub(crate) struct EventProcessor {
|
||||
|
||||
/// Whether to include `AgentReasoning` events in the output.
|
||||
show_agent_reasoning: bool,
|
||||
answer_started: bool,
|
||||
reasoning_started: bool,
|
||||
}
|
||||
|
||||
impl EventProcessor {
|
||||
pub(crate) fn create_with_ansi(with_ansi: bool, show_agent_reasoning: bool) -> Self {
|
||||
pub(crate) fn create_with_ansi(with_ansi: bool, config: &Config) -> Self {
|
||||
let call_id_to_command = HashMap::new();
|
||||
let call_id_to_patch = HashMap::new();
|
||||
let call_id_to_tool_call = HashMap::new();
|
||||
@@ -70,7 +75,9 @@ impl EventProcessor {
|
||||
green: Style::new().green(),
|
||||
cyan: Style::new().cyan(),
|
||||
call_id_to_tool_call,
|
||||
show_agent_reasoning,
|
||||
show_agent_reasoning: !config.hide_agent_reasoning,
|
||||
answer_started: false,
|
||||
reasoning_started: false,
|
||||
}
|
||||
} else {
|
||||
Self {
|
||||
@@ -84,7 +91,9 @@ impl EventProcessor {
|
||||
green: Style::new(),
|
||||
cyan: Style::new(),
|
||||
call_id_to_tool_call,
|
||||
show_agent_reasoning,
|
||||
show_agent_reasoning: !config.hide_agent_reasoning,
|
||||
answer_started: false,
|
||||
reasoning_started: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -184,12 +193,45 @@ impl EventProcessor {
|
||||
EventMsg::TokenCount(TokenUsage { total_tokens, .. }) => {
|
||||
ts_println!(self, "tokens used: {total_tokens}");
|
||||
}
|
||||
EventMsg::AgentMessageDelta(AgentMessageDeltaEvent { delta }) => {
|
||||
if !self.answer_started {
|
||||
ts_println!(self, "{}\n", "codex".style(self.italic).style(self.magenta));
|
||||
self.answer_started = true;
|
||||
}
|
||||
print!("{delta}");
|
||||
#[allow(clippy::expect_used)]
|
||||
std::io::stdout().flush().expect("could not flush stdout");
|
||||
}
|
||||
EventMsg::AgentReasoningDelta(AgentReasoningDeltaEvent { delta }) => {
|
||||
if !self.show_agent_reasoning {
|
||||
return;
|
||||
}
|
||||
if !self.reasoning_started {
|
||||
ts_println!(
|
||||
self,
|
||||
"{}\n",
|
||||
"thinking".style(self.italic).style(self.magenta),
|
||||
);
|
||||
self.reasoning_started = true;
|
||||
}
|
||||
print!("{delta}");
|
||||
#[allow(clippy::expect_used)]
|
||||
std::io::stdout().flush().expect("could not flush stdout");
|
||||
}
|
||||
EventMsg::AgentMessage(AgentMessageEvent { message }) => {
|
||||
ts_println!(
|
||||
self,
|
||||
"{}\n{message}",
|
||||
"codex".style(self.bold).style(self.magenta)
|
||||
);
|
||||
// if answer_started is false, this means we haven't received any
|
||||
// delta. Thus, we need to print the message as a new answer.
|
||||
if !self.answer_started {
|
||||
ts_println!(
|
||||
self,
|
||||
"{}\n{}",
|
||||
"codex".style(self.italic).style(self.magenta),
|
||||
message,
|
||||
);
|
||||
} else {
|
||||
println!();
|
||||
self.answer_started = false;
|
||||
}
|
||||
}
|
||||
EventMsg::ExecCommandBegin(ExecCommandBeginEvent {
|
||||
call_id,
|
||||
@@ -343,7 +385,7 @@ impl EventProcessor {
|
||||
);
|
||||
|
||||
// Pretty-print the patch summary with colored diff markers so
|
||||
// it’s easy to scan in the terminal output.
|
||||
// it's easy to scan in the terminal output.
|
||||
for (path, change) in changes.iter() {
|
||||
match change {
|
||||
FileChange::Add { content } => {
|
||||
@@ -441,12 +483,17 @@ impl EventProcessor {
|
||||
}
|
||||
EventMsg::AgentReasoning(agent_reasoning_event) => {
|
||||
if self.show_agent_reasoning {
|
||||
ts_println!(
|
||||
self,
|
||||
"{}\n{}",
|
||||
"thinking".style(self.italic).style(self.magenta),
|
||||
agent_reasoning_event.text
|
||||
);
|
||||
if !self.reasoning_started {
|
||||
ts_println!(
|
||||
self,
|
||||
"{}\n{}",
|
||||
"codex".style(self.italic).style(self.magenta),
|
||||
agent_reasoning_event.text,
|
||||
);
|
||||
} else {
|
||||
println!();
|
||||
self.reasoning_started = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
EventMsg::SessionConfigured(session_configured_event) => {
|
||||
|
||||
@@ -115,8 +115,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
|
||||
};
|
||||
|
||||
let config = Config::load_with_cli_overrides(cli_kv_overrides, overrides)?;
|
||||
let mut event_processor =
|
||||
EventProcessor::create_with_ansi(stdout_with_ansi, !config.hide_agent_reasoning);
|
||||
let mut event_processor = EventProcessor::create_with_ansi(stdout_with_ansi, &config);
|
||||
// Print the effective configuration and prompt so users can see what Codex
|
||||
// is using.
|
||||
event_processor.print_config_summary(&config, &prompt);
|
||||
|
||||
@@ -59,6 +59,13 @@ pub async fn login_with_chatgpt(
|
||||
/// Attempt to read the `OPENAI_API_KEY` from the `auth.json` file in the given
|
||||
/// `CODEX_HOME` directory, refreshing it, if necessary.
|
||||
pub async fn try_read_openai_api_key(codex_home: &Path) -> std::io::Result<String> {
|
||||
let auth_dot_json = try_read_auth_json(codex_home).await?;
|
||||
Ok(auth_dot_json.openai_api_key)
|
||||
}
|
||||
|
||||
/// Attempt to read and refresh the `auth.json` file in the given `CODEX_HOME` directory.
|
||||
/// Returns the full AuthDotJson structure after refreshing if necessary.
|
||||
pub async fn try_read_auth_json(codex_home: &Path) -> std::io::Result<AuthDotJson> {
|
||||
let auth_path = codex_home.join("auth.json");
|
||||
let mut file = std::fs::File::open(&auth_path)?;
|
||||
let mut contents = String::new();
|
||||
@@ -88,9 +95,9 @@ pub async fn try_read_openai_api_key(codex_home: &Path) -> std::io::Result<Strin
|
||||
file.flush()?;
|
||||
}
|
||||
|
||||
Ok(auth_dot_json.openai_api_key)
|
||||
Ok(auth_dot_json)
|
||||
} else {
|
||||
Ok(auth_dot_json.openai_api_key)
|
||||
Ok(auth_dot_json)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -146,23 +153,24 @@ struct RefreshResponse {
|
||||
|
||||
/// Expected structure for $CODEX_HOME/auth.json.
|
||||
#[derive(Deserialize, Serialize)]
|
||||
struct AuthDotJson {
|
||||
pub struct AuthDotJson {
|
||||
#[serde(rename = "OPENAI_API_KEY")]
|
||||
openai_api_key: String,
|
||||
pub openai_api_key: String,
|
||||
|
||||
tokens: TokenData,
|
||||
pub tokens: TokenData,
|
||||
|
||||
last_refresh: DateTime<Utc>,
|
||||
pub last_refresh: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
struct TokenData {
|
||||
#[derive(Deserialize, Serialize, Clone)]
|
||||
pub struct TokenData {
|
||||
/// This is a JWT.
|
||||
id_token: String,
|
||||
pub id_token: String,
|
||||
|
||||
/// This is a JWT.
|
||||
#[allow(dead_code)]
|
||||
access_token: String,
|
||||
pub access_token: String,
|
||||
|
||||
refresh_token: String,
|
||||
pub refresh_token: String,
|
||||
|
||||
pub account_id: String,
|
||||
}
|
||||
|
||||
@@ -51,6 +51,7 @@ class TokenData:
|
||||
id_token: str
|
||||
access_token: str
|
||||
refresh_token: str
|
||||
account_id: str
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -240,20 +241,26 @@ class _ApiKeyHTTPHandler(http.server.BaseHTTPRequestHandler):
|
||||
)
|
||||
) as resp:
|
||||
payload = json.loads(resp.read().decode())
|
||||
|
||||
# Extract chatgpt_account_id from id_token
|
||||
id_token_parts = payload["id_token"].split(".")
|
||||
if len(id_token_parts) != 3:
|
||||
raise ValueError("Invalid ID token")
|
||||
id_token_claims = _decode_jwt_segment(id_token_parts[1])
|
||||
auth_claims = id_token_claims.get("https://api.openai.com/auth", {})
|
||||
chatgpt_account_id = auth_claims.get("chatgpt_account_id", "")
|
||||
|
||||
token_data = TokenData(
|
||||
id_token=payload["id_token"],
|
||||
access_token=payload["access_token"],
|
||||
refresh_token=payload["refresh_token"],
|
||||
account_id=chatgpt_account_id,
|
||||
)
|
||||
|
||||
id_token_parts = token_data.id_token.split(".")
|
||||
if len(id_token_parts) != 3:
|
||||
raise ValueError("Invalid ID token")
|
||||
access_token_parts = token_data.access_token.split(".")
|
||||
if len(access_token_parts) != 3:
|
||||
raise ValueError("Invalid access token")
|
||||
|
||||
id_token_claims = _decode_jwt_segment(id_token_parts[1])
|
||||
access_token_claims = _decode_jwt_segment(access_token_parts[1])
|
||||
|
||||
token_claims = id_token_claims.get("https://api.openai.com/auth", {})
|
||||
@@ -375,6 +382,7 @@ def _write_auth_file(*, auth: AuthBundle, codex_home: str) -> bool:
|
||||
"id_token": auth.token_data.id_token,
|
||||
"access_token": auth.token_data.access_token,
|
||||
"refresh_token": auth.token_data.refresh_token,
|
||||
"account_id": auth.token_data.account_id,
|
||||
},
|
||||
"last_refresh": auth.last_refresh,
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ use codex_core::protocol::Event;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::InputItem;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::Submission;
|
||||
use codex_core::protocol::TaskCompleteEvent;
|
||||
use mcp_types::CallToolResult;
|
||||
use mcp_types::CallToolResultContent;
|
||||
@@ -66,14 +67,24 @@ pub async fn run_codex_tool_session(
|
||||
.send(codex_event_to_notification(&first_event))
|
||||
.await;
|
||||
|
||||
if let Err(e) = codex
|
||||
.submit(Op::UserInput {
|
||||
// Use the original MCP request ID as the `sub_id` for the Codex submission so that
|
||||
// any events emitted for this tool-call can be correlated with the
|
||||
// originating `tools/call` request.
|
||||
let sub_id = match &id {
|
||||
RequestId::String(s) => s.clone(),
|
||||
RequestId::Integer(n) => n.to_string(),
|
||||
};
|
||||
|
||||
let submission = Submission {
|
||||
id: sub_id,
|
||||
op: Op::UserInput {
|
||||
items: vec![InputItem::Text {
|
||||
text: initial_prompt.clone(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
{
|
||||
},
|
||||
};
|
||||
|
||||
if let Err(e) = codex.submit_with_id(submission).await {
|
||||
tracing::error!("Failed to submit initial prompt: {e}");
|
||||
}
|
||||
|
||||
@@ -160,6 +171,12 @@ pub async fn run_codex_tool_session(
|
||||
EventMsg::SessionConfigured(_) => {
|
||||
tracing::error!("unexpected SessionConfigured event");
|
||||
}
|
||||
EventMsg::AgentMessageDelta(_) => {
|
||||
// TODO: think how we want to support this in the MCP
|
||||
}
|
||||
EventMsg::AgentReasoningDelta(_) => {
|
||||
// TODO: think how we want to support this in the MCP
|
||||
}
|
||||
EventMsg::Error(_)
|
||||
| EventMsg::TaskStarted
|
||||
| EventMsg::TokenCount(_)
|
||||
|
||||
@@ -61,4 +61,5 @@ unicode-segmentation = "1.12.0"
|
||||
uuid = "1"
|
||||
|
||||
[dev-dependencies]
|
||||
insta = "1.43.1"
|
||||
pretty_assertions = "1"
|
||||
|
||||
@@ -98,21 +98,7 @@ impl<'a> App<'a> {
|
||||
scroll_event_helper.scroll_down();
|
||||
}
|
||||
crossterm::event::Event::Paste(pasted) => {
|
||||
use crossterm::event::KeyModifiers;
|
||||
|
||||
for ch in pasted.chars() {
|
||||
let key_event = match ch {
|
||||
'\n' | '\r' => {
|
||||
// Represent newline as <Shift+Enter> so that the bottom
|
||||
// pane treats it as a literal newline instead of a submit
|
||||
// action (submission is only triggered on Enter *without*
|
||||
// any modifiers).
|
||||
KeyEvent::new(KeyCode::Enter, KeyModifiers::SHIFT)
|
||||
}
|
||||
_ => KeyEvent::new(KeyCode::Char(ch), KeyModifiers::empty()),
|
||||
};
|
||||
app_event_tx.send(AppEvent::KeyEvent(key_event));
|
||||
}
|
||||
app_event_tx.send(AppEvent::Paste(pasted));
|
||||
}
|
||||
_ => {
|
||||
// Ignore any other events.
|
||||
@@ -213,7 +199,21 @@ impl<'a> App<'a> {
|
||||
modifiers: crossterm::event::KeyModifiers::CONTROL,
|
||||
..
|
||||
} => {
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
match &mut self.app_state {
|
||||
AppState::Chat { widget } => {
|
||||
if widget.composer_is_empty() {
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
} else {
|
||||
// Treat Ctrl+D as a normal key event when the composer
|
||||
// is not empty so that it doesn't quit the application
|
||||
// prematurely.
|
||||
self.dispatch_key_event(key_event);
|
||||
}
|
||||
}
|
||||
AppState::Login { .. } | AppState::GitWarning { .. } => {
|
||||
self.app_event_tx.send(AppEvent::ExitRequest);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.dispatch_key_event(key_event);
|
||||
@@ -223,6 +223,9 @@ impl<'a> App<'a> {
|
||||
AppEvent::Scroll(scroll_delta) => {
|
||||
self.dispatch_scroll_event(scroll_delta);
|
||||
}
|
||||
AppEvent::Paste(text) => {
|
||||
self.dispatch_paste_event(text);
|
||||
}
|
||||
AppEvent::CodexEvent(event) => {
|
||||
self.dispatch_codex_event(event);
|
||||
}
|
||||
@@ -294,6 +297,8 @@ impl<'a> App<'a> {
|
||||
}
|
||||
|
||||
fn draw_next_frame(&mut self, terminal: &mut tui::Tui) -> Result<()> {
|
||||
// TODO: add a throttle to avoid redrawing too often
|
||||
|
||||
match &mut self.app_state {
|
||||
AppState::Chat { widget } => {
|
||||
terminal.draw(|frame| frame.render_widget_ref(&**widget, frame.area()))?;
|
||||
@@ -343,6 +348,13 @@ impl<'a> App<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn dispatch_paste_event(&mut self, pasted: String) {
|
||||
match &mut self.app_state {
|
||||
AppState::Chat { widget } => widget.handle_paste(pasted),
|
||||
AppState::Login { .. } | AppState::GitWarning { .. } => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn dispatch_scroll_event(&mut self, scroll_delta: i32) {
|
||||
match &mut self.app_state {
|
||||
AppState::Chat { widget } => widget.handle_scroll_delta(scroll_delta),
|
||||
|
||||
@@ -12,6 +12,9 @@ pub(crate) enum AppEvent {
|
||||
|
||||
KeyEvent(KeyEvent),
|
||||
|
||||
/// Text pasted from the terminal clipboard.
|
||||
Paste(String),
|
||||
|
||||
/// Scroll event with a value representing the "scroll delta" as the net
|
||||
/// scroll up/down events within a short time window.
|
||||
Scroll(i32),
|
||||
|
||||
@@ -28,6 +28,9 @@ const MIN_TEXTAREA_ROWS: usize = 1;
|
||||
const BORDER_LINES: u16 = 2;
|
||||
|
||||
const BASE_PLACEHOLDER_TEXT: &str = "send a message";
|
||||
/// If the pasted content exceeds this number of characters, replace it with a
|
||||
/// placeholder in the UI.
|
||||
const LARGE_PASTE_CHAR_THRESHOLD: usize = 1000;
|
||||
|
||||
/// Result returned when the user interacts with the text area.
|
||||
pub enum InputResult {
|
||||
@@ -43,6 +46,7 @@ pub(crate) struct ChatComposer<'a> {
|
||||
ctrl_c_quit_hint: bool,
|
||||
dismissed_file_popup_token: Option<String>,
|
||||
current_file_query: Option<String>,
|
||||
pending_pastes: Vec<(String, String)>,
|
||||
}
|
||||
|
||||
/// Popup state – at most one can be visible at any time.
|
||||
@@ -66,11 +70,17 @@ impl ChatComposer<'_> {
|
||||
ctrl_c_quit_hint: false,
|
||||
dismissed_file_popup_token: None,
|
||||
current_file_query: None,
|
||||
pending_pastes: Vec::new(),
|
||||
};
|
||||
this.update_border(has_input_focus);
|
||||
this
|
||||
}
|
||||
|
||||
/// Returns true if the composer currently contains no user input.
|
||||
pub(crate) fn is_empty(&self) -> bool {
|
||||
self.textarea.is_empty()
|
||||
}
|
||||
|
||||
/// Update the cached *context-left* percentage and refresh the placeholder
|
||||
/// text. The UI relies on the placeholder to convey the remaining
|
||||
/// context when the composer is empty.
|
||||
@@ -126,6 +136,20 @@ impl ChatComposer<'_> {
|
||||
self.update_border(has_focus);
|
||||
}
|
||||
|
||||
pub fn handle_paste(&mut self, pasted: String) -> bool {
|
||||
let char_count = pasted.chars().count();
|
||||
if char_count > LARGE_PASTE_CHAR_THRESHOLD {
|
||||
let placeholder = format!("[Pasted Content {char_count} chars]");
|
||||
self.textarea.insert_str(&placeholder);
|
||||
self.pending_pastes.push((placeholder, pasted));
|
||||
} else {
|
||||
self.textarea.insert_str(&pasted);
|
||||
}
|
||||
self.sync_command_popup();
|
||||
self.sync_file_search_popup();
|
||||
true
|
||||
}
|
||||
|
||||
/// Integrate results from an asynchronous file search.
|
||||
pub(crate) fn on_file_search_result(&mut self, query: String, matches: Vec<FileMatch>) {
|
||||
// Only apply if user is still editing a token starting with `query`.
|
||||
@@ -414,10 +438,18 @@ impl ChatComposer<'_> {
|
||||
alt: false,
|
||||
ctrl: false,
|
||||
} => {
|
||||
let text = self.textarea.lines().join("\n");
|
||||
let mut text = self.textarea.lines().join("\n");
|
||||
self.textarea.select_all();
|
||||
self.textarea.cut();
|
||||
|
||||
// Replace all pending pastes in the text
|
||||
for (placeholder, actual) in &self.pending_pastes {
|
||||
if text.contains(placeholder) {
|
||||
text = text.replace(placeholder, actual);
|
||||
}
|
||||
}
|
||||
self.pending_pastes.clear();
|
||||
|
||||
if text.is_empty() {
|
||||
(InputResult::None, true)
|
||||
} else {
|
||||
@@ -443,10 +475,71 @@ impl ChatComposer<'_> {
|
||||
|
||||
/// Handle generic Input events that modify the textarea content.
|
||||
fn handle_input_basic(&mut self, input: Input) -> (InputResult, bool) {
|
||||
// Special handling for backspace on placeholders
|
||||
if let Input {
|
||||
key: Key::Backspace,
|
||||
..
|
||||
} = input
|
||||
{
|
||||
if self.try_remove_placeholder_at_cursor() {
|
||||
return (InputResult::None, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Normal input handling
|
||||
self.textarea.input(input);
|
||||
let text_after = self.textarea.lines().join("\n");
|
||||
|
||||
// Check if any placeholders were removed and remove their corresponding pending pastes
|
||||
self.pending_pastes
|
||||
.retain(|(placeholder, _)| text_after.contains(placeholder));
|
||||
|
||||
(InputResult::None, true)
|
||||
}
|
||||
|
||||
/// Attempts to remove a placeholder if the cursor is at the end of one.
|
||||
/// Returns true if a placeholder was removed.
|
||||
fn try_remove_placeholder_at_cursor(&mut self) -> bool {
|
||||
let (row, col) = self.textarea.cursor();
|
||||
let line = self
|
||||
.textarea
|
||||
.lines()
|
||||
.get(row)
|
||||
.map(|s| s.as_str())
|
||||
.unwrap_or("");
|
||||
|
||||
// Find any placeholder that ends at the cursor position
|
||||
let placeholder_to_remove = self.pending_pastes.iter().find_map(|(ph, _)| {
|
||||
if col < ph.len() {
|
||||
return None;
|
||||
}
|
||||
let potential_ph_start = col - ph.len();
|
||||
if line[potential_ph_start..col] == *ph {
|
||||
Some(ph.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(placeholder) = placeholder_to_remove {
|
||||
// Remove the entire placeholder from the text
|
||||
let placeholder_len = placeholder.len();
|
||||
for _ in 0..placeholder_len {
|
||||
self.textarea.input(Input {
|
||||
key: Key::Backspace,
|
||||
ctrl: false,
|
||||
alt: false,
|
||||
shift: false,
|
||||
});
|
||||
}
|
||||
// Remove from pending pastes
|
||||
self.pending_pastes.retain(|(ph, _)| ph != &placeholder);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Synchronize `self.command_popup` with the current text in the
|
||||
/// textarea. This must be called after every modification that can change
|
||||
/// the text so the popup is shown/updated/hidden as appropriate.
|
||||
@@ -624,7 +717,10 @@ impl WidgetRef for &ChatComposer<'_> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::bottom_pane::AppEventSender;
|
||||
use crate::bottom_pane::ChatComposer;
|
||||
use crate::bottom_pane::InputResult;
|
||||
use crate::bottom_pane::chat_composer::LARGE_PASTE_CHAR_THRESHOLD;
|
||||
use tui_textarea::TextArea;
|
||||
|
||||
#[test]
|
||||
@@ -770,4 +866,324 @@ mod tests {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn handle_paste_small_inserts_text() {
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
|
||||
let (tx, _rx) = std::sync::mpsc::channel();
|
||||
let sender = AppEventSender::new(tx);
|
||||
let mut composer = ChatComposer::new(true, sender);
|
||||
|
||||
let needs_redraw = composer.handle_paste("hello".to_string());
|
||||
assert!(needs_redraw);
|
||||
assert_eq!(composer.textarea.lines(), ["hello"]);
|
||||
assert!(composer.pending_pastes.is_empty());
|
||||
|
||||
let (result, _) =
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
|
||||
match result {
|
||||
InputResult::Submitted(text) => assert_eq!(text, "hello"),
|
||||
_ => panic!("expected Submitted"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn handle_paste_large_uses_placeholder_and_replaces_on_submit() {
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
|
||||
let (tx, _rx) = std::sync::mpsc::channel();
|
||||
let sender = AppEventSender::new(tx);
|
||||
let mut composer = ChatComposer::new(true, sender);
|
||||
|
||||
let large = "x".repeat(LARGE_PASTE_CHAR_THRESHOLD + 10);
|
||||
let needs_redraw = composer.handle_paste(large.clone());
|
||||
assert!(needs_redraw);
|
||||
let placeholder = format!("[Pasted Content {} chars]", large.chars().count());
|
||||
assert_eq!(composer.textarea.lines(), [placeholder.as_str()]);
|
||||
assert_eq!(composer.pending_pastes.len(), 1);
|
||||
assert_eq!(composer.pending_pastes[0].0, placeholder);
|
||||
assert_eq!(composer.pending_pastes[0].1, large);
|
||||
|
||||
let (result, _) =
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
|
||||
match result {
|
||||
InputResult::Submitted(text) => assert_eq!(text, large),
|
||||
_ => panic!("expected Submitted"),
|
||||
}
|
||||
assert!(composer.pending_pastes.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn edit_clears_pending_paste() {
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
|
||||
let large = "y".repeat(LARGE_PASTE_CHAR_THRESHOLD + 1);
|
||||
let (tx, _rx) = std::sync::mpsc::channel();
|
||||
let sender = AppEventSender::new(tx);
|
||||
let mut composer = ChatComposer::new(true, sender);
|
||||
|
||||
composer.handle_paste(large);
|
||||
assert_eq!(composer.pending_pastes.len(), 1);
|
||||
|
||||
// Any edit that removes the placeholder should clear pending_paste
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
|
||||
assert!(composer.pending_pastes.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ui_snapshots() {
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
use insta::assert_snapshot;
|
||||
use ratatui::Terminal;
|
||||
use ratatui::backend::TestBackend;
|
||||
|
||||
let (tx, _rx) = std::sync::mpsc::channel();
|
||||
let sender = AppEventSender::new(tx);
|
||||
let mut terminal = match Terminal::new(TestBackend::new(100, 10)) {
|
||||
Ok(t) => t,
|
||||
Err(e) => panic!("Failed to create terminal: {e}"),
|
||||
};
|
||||
|
||||
let test_cases = vec![
|
||||
("empty", None),
|
||||
("small", Some("short".to_string())),
|
||||
("large", Some("z".repeat(LARGE_PASTE_CHAR_THRESHOLD + 5))),
|
||||
("multiple_pastes", None),
|
||||
("backspace_after_pastes", None),
|
||||
];
|
||||
|
||||
for (name, input) in test_cases {
|
||||
// Create a fresh composer for each test case
|
||||
let mut composer = ChatComposer::new(true, sender.clone());
|
||||
|
||||
if let Some(text) = input {
|
||||
composer.handle_paste(text);
|
||||
} else if name == "multiple_pastes" {
|
||||
// First large paste
|
||||
composer.handle_paste("x".repeat(LARGE_PASTE_CHAR_THRESHOLD + 3));
|
||||
// Second large paste
|
||||
composer.handle_paste("y".repeat(LARGE_PASTE_CHAR_THRESHOLD + 7));
|
||||
// Small paste
|
||||
composer.handle_paste(" another short paste".to_string());
|
||||
} else if name == "backspace_after_pastes" {
|
||||
// Three large pastes
|
||||
composer.handle_paste("a".repeat(LARGE_PASTE_CHAR_THRESHOLD + 2));
|
||||
composer.handle_paste("b".repeat(LARGE_PASTE_CHAR_THRESHOLD + 4));
|
||||
composer.handle_paste("c".repeat(LARGE_PASTE_CHAR_THRESHOLD + 6));
|
||||
// Move cursor to end and press backspace
|
||||
composer.textarea.move_cursor(tui_textarea::CursorMove::End);
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
|
||||
}
|
||||
|
||||
terminal
|
||||
.draw(|f| f.render_widget_ref(&composer, f.area()))
|
||||
.unwrap_or_else(|e| panic!("Failed to draw {name} composer: {e}"));
|
||||
|
||||
assert_snapshot!(name, terminal.backend());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiple_pastes_submission() {
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
|
||||
let (tx, _rx) = std::sync::mpsc::channel();
|
||||
let sender = AppEventSender::new(tx);
|
||||
let mut composer = ChatComposer::new(true, sender);
|
||||
|
||||
// Define test cases: (paste content, is_large)
|
||||
let test_cases = [
|
||||
("x".repeat(LARGE_PASTE_CHAR_THRESHOLD + 3), true),
|
||||
(" and ".to_string(), false),
|
||||
("y".repeat(LARGE_PASTE_CHAR_THRESHOLD + 7), true),
|
||||
];
|
||||
|
||||
// Expected states after each paste
|
||||
let mut expected_text = String::new();
|
||||
let mut expected_pending_count = 0;
|
||||
|
||||
// Apply all pastes and build expected state
|
||||
let states: Vec<_> = test_cases
|
||||
.iter()
|
||||
.map(|(content, is_large)| {
|
||||
composer.handle_paste(content.clone());
|
||||
if *is_large {
|
||||
let placeholder = format!("[Pasted Content {} chars]", content.chars().count());
|
||||
expected_text.push_str(&placeholder);
|
||||
expected_pending_count += 1;
|
||||
} else {
|
||||
expected_text.push_str(content);
|
||||
}
|
||||
(expected_text.clone(), expected_pending_count)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Verify all intermediate states were correct
|
||||
assert_eq!(
|
||||
states,
|
||||
vec![
|
||||
(
|
||||
format!("[Pasted Content {} chars]", test_cases[0].0.chars().count()),
|
||||
1
|
||||
),
|
||||
(
|
||||
format!(
|
||||
"[Pasted Content {} chars] and ",
|
||||
test_cases[0].0.chars().count()
|
||||
),
|
||||
1
|
||||
),
|
||||
(
|
||||
format!(
|
||||
"[Pasted Content {} chars] and [Pasted Content {} chars]",
|
||||
test_cases[0].0.chars().count(),
|
||||
test_cases[2].0.chars().count()
|
||||
),
|
||||
2
|
||||
),
|
||||
]
|
||||
);
|
||||
|
||||
// Submit and verify final expansion
|
||||
let (result, _) =
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
|
||||
if let InputResult::Submitted(text) = result {
|
||||
assert_eq!(text, format!("{} and {}", test_cases[0].0, test_cases[2].0));
|
||||
} else {
|
||||
panic!("expected Submitted");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_placeholder_deletion() {
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
|
||||
let (tx, _rx) = std::sync::mpsc::channel();
|
||||
let sender = AppEventSender::new(tx);
|
||||
let mut composer = ChatComposer::new(true, sender);
|
||||
|
||||
// Define test cases: (content, is_large)
|
||||
let test_cases = [
|
||||
("a".repeat(LARGE_PASTE_CHAR_THRESHOLD + 5), true),
|
||||
(" and ".to_string(), false),
|
||||
("b".repeat(LARGE_PASTE_CHAR_THRESHOLD + 6), true),
|
||||
];
|
||||
|
||||
// Apply all pastes
|
||||
let mut current_pos = 0;
|
||||
let states: Vec<_> = test_cases
|
||||
.iter()
|
||||
.map(|(content, is_large)| {
|
||||
composer.handle_paste(content.clone());
|
||||
if *is_large {
|
||||
let placeholder = format!("[Pasted Content {} chars]", content.chars().count());
|
||||
current_pos += placeholder.len();
|
||||
} else {
|
||||
current_pos += content.len();
|
||||
}
|
||||
(
|
||||
composer.textarea.lines().join("\n"),
|
||||
composer.pending_pastes.len(),
|
||||
current_pos,
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Delete placeholders one by one and collect states
|
||||
let mut deletion_states = vec![];
|
||||
|
||||
// First deletion
|
||||
composer
|
||||
.textarea
|
||||
.move_cursor(tui_textarea::CursorMove::Jump(0, states[0].2 as u16));
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
|
||||
deletion_states.push((
|
||||
composer.textarea.lines().join("\n"),
|
||||
composer.pending_pastes.len(),
|
||||
));
|
||||
|
||||
// Second deletion
|
||||
composer
|
||||
.textarea
|
||||
.move_cursor(tui_textarea::CursorMove::Jump(
|
||||
0,
|
||||
composer.textarea.lines().join("\n").len() as u16,
|
||||
));
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
|
||||
deletion_states.push((
|
||||
composer.textarea.lines().join("\n"),
|
||||
composer.pending_pastes.len(),
|
||||
));
|
||||
|
||||
// Verify all states
|
||||
assert_eq!(
|
||||
deletion_states,
|
||||
vec![
|
||||
(" and [Pasted Content 1006 chars]".to_string(), 1),
|
||||
(" and ".to_string(), 0),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_partial_placeholder_deletion() {
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyModifiers;
|
||||
|
||||
let (tx, _rx) = std::sync::mpsc::channel();
|
||||
let sender = AppEventSender::new(tx);
|
||||
let mut composer = ChatComposer::new(true, sender);
|
||||
|
||||
// Define test cases: (cursor_position_from_end, expected_pending_count)
|
||||
let test_cases = [
|
||||
5, // Delete from middle - should clear tracking
|
||||
0, // Delete from end - should clear tracking
|
||||
];
|
||||
|
||||
let paste = "x".repeat(LARGE_PASTE_CHAR_THRESHOLD + 4);
|
||||
let placeholder = format!("[Pasted Content {} chars]", paste.chars().count());
|
||||
|
||||
let states: Vec<_> = test_cases
|
||||
.into_iter()
|
||||
.map(|pos_from_end| {
|
||||
composer.handle_paste(paste.clone());
|
||||
composer
|
||||
.textarea
|
||||
.move_cursor(tui_textarea::CursorMove::Jump(
|
||||
0,
|
||||
(placeholder.len() - pos_from_end) as u16,
|
||||
));
|
||||
composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));
|
||||
let result = (
|
||||
composer.textarea.lines().join("\n").contains(&placeholder),
|
||||
composer.pending_pastes.len(),
|
||||
);
|
||||
composer.textarea.select_all();
|
||||
composer.textarea.cut();
|
||||
result
|
||||
})
|
||||
.collect();
|
||||
|
||||
assert_eq!(
|
||||
states,
|
||||
vec![
|
||||
(false, 0), // After deleting from middle
|
||||
(false, 0), // After deleting from end
|
||||
]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -72,8 +72,7 @@ impl ChatComposerHistory {
|
||||
return false;
|
||||
}
|
||||
|
||||
let lines = textarea.lines();
|
||||
if lines.len() == 1 && lines[0].is_empty() {
|
||||
if textarea.is_empty() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -85,6 +84,7 @@ impl ChatComposerHistory {
|
||||
return false;
|
||||
}
|
||||
|
||||
let lines = textarea.lines();
|
||||
matches!(&self.last_history_text, Some(prev) if prev == &lines.join("\n"))
|
||||
}
|
||||
|
||||
|
||||
@@ -82,6 +82,15 @@ impl BottomPane<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn handle_paste(&mut self, pasted: String) {
|
||||
if self.active_view.is_none() {
|
||||
let needs_redraw = self.composer.handle_paste(pasted);
|
||||
if needs_redraw {
|
||||
self.request_redraw();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Update the status indicator text (only when the `StatusIndicatorView` is
|
||||
/// active).
|
||||
pub(crate) fn update_status_text(&mut self, text: String) {
|
||||
@@ -153,6 +162,10 @@ impl BottomPane<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn composer_is_empty(&self) -> bool {
|
||||
self.composer.is_empty()
|
||||
}
|
||||
|
||||
pub(crate) fn is_task_running(&self) -> bool {
|
||||
self.is_task_running
|
||||
}
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tui/src/bottom_pane/chat_composer.rs
|
||||
expression: terminal.backend()
|
||||
---
|
||||
"╭──────────────────────────────────────────────────────────────────────────────────────────────────╮"
|
||||
"│[Pasted Content 1002 chars][Pasted Content 1004 chars] │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"╰───────────────────────────────────────────────Enter to send | Ctrl+D to quit | Ctrl+J for newline╯"
|
||||
@@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tui/src/bottom_pane/chat_composer.rs
|
||||
expression: terminal.backend()
|
||||
---
|
||||
"╭──────────────────────────────────────────────────────────────────────────────────────────────────╮"
|
||||
"│ send a message │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"╰───────────────────────────────────────────────Enter to send | Ctrl+D to quit | Ctrl+J for newline╯"
|
||||
@@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tui/src/bottom_pane/chat_composer.rs
|
||||
expression: terminal.backend()
|
||||
---
|
||||
"╭──────────────────────────────────────────────────────────────────────────────────────────────────╮"
|
||||
"│[Pasted Content 1005 chars] │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"╰───────────────────────────────────────────────Enter to send | Ctrl+D to quit | Ctrl+J for newline╯"
|
||||
@@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tui/src/bottom_pane/chat_composer.rs
|
||||
expression: terminal.backend()
|
||||
---
|
||||
"╭──────────────────────────────────────────────────────────────────────────────────────────────────╮"
|
||||
"│[Pasted Content 1003 chars][Pasted Content 1007 chars] another short paste │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"╰───────────────────────────────────────────────Enter to send | Ctrl+D to quit | Ctrl+J for newline╯"
|
||||
@@ -0,0 +1,14 @@
|
||||
---
|
||||
source: tui/src/bottom_pane/chat_composer.rs
|
||||
expression: terminal.backend()
|
||||
---
|
||||
"╭──────────────────────────────────────────────────────────────────────────────────────────────────╮"
|
||||
"│short │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"│ │"
|
||||
"╰───────────────────────────────────────────────Enter to send | Ctrl+D to quit | Ctrl+J for newline╯"
|
||||
@@ -3,7 +3,9 @@ use std::sync::Arc;
|
||||
|
||||
use codex_core::codex_wrapper::init_codex;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::protocol::AgentMessageDeltaEvent;
|
||||
use codex_core::protocol::AgentMessageEvent;
|
||||
use codex_core::protocol::AgentReasoningDeltaEvent;
|
||||
use codex_core::protocol::AgentReasoningEvent;
|
||||
use codex_core::protocol::ApplyPatchApprovalRequestEvent;
|
||||
use codex_core::protocol::ErrorEvent;
|
||||
@@ -49,6 +51,8 @@ pub(crate) struct ChatWidget<'a> {
|
||||
config: Config,
|
||||
initial_user_message: Option<UserMessage>,
|
||||
token_usage: TokenUsage,
|
||||
reasoning_buffer: String,
|
||||
answer_buffer: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Eq, PartialEq)]
|
||||
@@ -135,6 +139,8 @@ impl ChatWidget<'_> {
|
||||
initial_images,
|
||||
),
|
||||
token_usage: TokenUsage::default(),
|
||||
reasoning_buffer: String::new(),
|
||||
answer_buffer: String::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -174,6 +180,12 @@ impl ChatWidget<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn handle_paste(&mut self, text: String) {
|
||||
if matches!(self.input_focus, InputFocus::BottomPane) {
|
||||
self.bottom_pane.handle_paste(text);
|
||||
}
|
||||
}
|
||||
|
||||
fn submit_user_message(&mut self, user_message: UserMessage) {
|
||||
let UserMessage { text, image_paths } = user_message;
|
||||
let mut items: Vec<InputItem> = Vec::new();
|
||||
@@ -234,16 +246,51 @@ impl ChatWidget<'_> {
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentMessage(AgentMessageEvent { message }) => {
|
||||
// if the answer buffer is empty, this means we haven't received any
|
||||
// delta. Thus, we need to print the message as a new answer.
|
||||
if self.answer_buffer.is_empty() {
|
||||
self.conversation_history
|
||||
.add_agent_message(&self.config, message);
|
||||
} else {
|
||||
self.conversation_history
|
||||
.replace_prev_agent_message(&self.config, message);
|
||||
}
|
||||
self.answer_buffer.clear();
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentMessageDelta(AgentMessageDeltaEvent { delta }) => {
|
||||
if self.answer_buffer.is_empty() {
|
||||
self.conversation_history
|
||||
.add_agent_message(&self.config, "".to_string());
|
||||
}
|
||||
self.answer_buffer.push_str(&delta.clone());
|
||||
self.conversation_history
|
||||
.add_agent_message(&self.config, message);
|
||||
.replace_prev_agent_message(&self.config, self.answer_buffer.clone());
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentReasoningDelta(AgentReasoningDeltaEvent { delta }) => {
|
||||
if self.reasoning_buffer.is_empty() {
|
||||
self.conversation_history
|
||||
.add_agent_reasoning(&self.config, "".to_string());
|
||||
}
|
||||
self.reasoning_buffer.push_str(&delta.clone());
|
||||
self.conversation_history
|
||||
.replace_prev_agent_reasoning(&self.config, self.reasoning_buffer.clone());
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::AgentReasoning(AgentReasoningEvent { text }) => {
|
||||
if !self.config.hide_agent_reasoning {
|
||||
// if the reasoning buffer is empty, this means we haven't received any
|
||||
// delta. Thus, we need to print the message as a new reasoning.
|
||||
if self.reasoning_buffer.is_empty() {
|
||||
self.conversation_history
|
||||
.add_agent_reasoning(&self.config, text);
|
||||
self.request_redraw();
|
||||
.add_agent_reasoning(&self.config, "".to_string());
|
||||
} else {
|
||||
// else, we rerender one last time.
|
||||
self.conversation_history
|
||||
.replace_prev_agent_reasoning(&self.config, text);
|
||||
}
|
||||
self.reasoning_buffer.clear();
|
||||
self.request_redraw();
|
||||
}
|
||||
EventMsg::TaskStarted => {
|
||||
self.bottom_pane.clear_ctrl_c_quit_hint();
|
||||
@@ -426,6 +473,10 @@ impl ChatWidget<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn composer_is_empty(&self) -> bool {
|
||||
self.bottom_pane.composer_is_empty()
|
||||
}
|
||||
|
||||
/// Forward an `Op` directly to codex.
|
||||
pub(crate) fn submit_op(&self, op: Op) {
|
||||
if let Err(e) = self.codex_op_tx.send(op) {
|
||||
|
||||
@@ -202,6 +202,14 @@ impl ConversationHistoryWidget {
|
||||
self.add_to_history(HistoryCell::new_agent_reasoning(config, text));
|
||||
}
|
||||
|
||||
pub fn replace_prev_agent_reasoning(&mut self, config: &Config, text: String) {
|
||||
self.replace_last_agent_reasoning(config, text);
|
||||
}
|
||||
|
||||
pub fn replace_prev_agent_message(&mut self, config: &Config, text: String) {
|
||||
self.replace_last_agent_message(config, text);
|
||||
}
|
||||
|
||||
pub fn add_background_event(&mut self, message: String) {
|
||||
self.add_to_history(HistoryCell::new_background_event(message));
|
||||
}
|
||||
@@ -249,6 +257,42 @@ impl ConversationHistoryWidget {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn replace_last_agent_reasoning(&mut self, config: &Config, text: String) {
|
||||
if let Some(idx) = self
|
||||
.entries
|
||||
.iter()
|
||||
.rposition(|entry| matches!(entry.cell, HistoryCell::AgentReasoning { .. }))
|
||||
{
|
||||
let width = self.cached_width.get();
|
||||
let entry = &mut self.entries[idx];
|
||||
entry.cell = HistoryCell::new_agent_reasoning(config, text);
|
||||
let height = if width > 0 {
|
||||
entry.cell.height(width)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
entry.line_count.set(height);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn replace_last_agent_message(&mut self, config: &Config, text: String) {
|
||||
if let Some(idx) = self
|
||||
.entries
|
||||
.iter()
|
||||
.rposition(|entry| matches!(entry.cell, HistoryCell::AgentMessage { .. }))
|
||||
{
|
||||
let width = self.cached_width.get();
|
||||
let entry = &mut self.entries[idx];
|
||||
entry.cell = HistoryCell::new_agent_message(config, text);
|
||||
let height = if width > 0 {
|
||||
entry.cell.height(width)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
entry.line_count.set(height);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn record_completed_exec_command(
|
||||
&mut self,
|
||||
call_id: String,
|
||||
@@ -454,7 +498,7 @@ impl WidgetRef for ConversationHistoryWidget {
|
||||
|
||||
{
|
||||
// Choose a thumb color that stands out only when this pane has focus so that the
|
||||
// user’s attention is naturally drawn to the active viewport. When unfocused we show
|
||||
// user's attention is naturally drawn to the active viewport. When unfocused we show
|
||||
// a low-contrast thumb so the scrollbar fades into the background without becoming
|
||||
// invisible.
|
||||
let thumb_style = if self.has_input_focus {
|
||||
|
||||