Mirror of https://github.com/openai/codex.git (synced 2026-02-01 22:47:52 +00:00)

Compare commits: rust-v0.45 ... patch-guar (65 commits)
Commits (SHA1):
27018edc50, 8a281cd1f4, e8863b233b, 8fed0b53c4, 00debb6399, 0144fb4fab,
0a0a10d8b3, 13035561cd, 9be704a934, f7b4e29609, d6c5df9a0a, 8662162f45,
57584d6f34, 268a10f917, 5346cc422d, 26f7c46856, 90af046c5c, 961ed31901,
85e7357973, f98fa85b44, ddcaf3dccd, 56296cad82, 95b41dd7f1, bf82353f45,
0308febc23, 7b4a4c2219, 3ddd4d47d0, ca6a0358de, 0026b12615, 4300236681,
ec238a2c39, b6165aee0c, f4bc03d7c0, 3c5e12e2a4, c89229db97, d3820f4782,
e896db1180, 96acb8a74e, 687a13bbe5, fe8122e514, 876d4f450a, f52320be86,
a43ae86b6c, 496cb801e1, abd517091f, b8b04514bc, 0e5d72cc57, 60f9e85c16,
b016a3e7d8, a0d56541cf, 226215f36d, 338c2c873c, 4b0f5eb6a8, 75176dae70,
12fd2b4160, f2555422b9, 27f169bb91, b16c985ed2, 35a770e871, b09f62a1c3,
5833508a17, d73055c5b1, 7e3a272b29, 661663c98a, 721003c552
.github/ISSUE_TEMPLATE/2-bug-report.yml (vendored, 22 lines changed)

@@ -20,6 +20,14 @@ body:
     attributes:
       label: What version of Codex is running?
       description: Copy the output of `codex --version`
     validations:
       required: true
+  - type: input
+    id: plan
+    attributes:
+      label: What subscription do you have?
+    validations:
+      required: true
   - type: input
     id: model
     attributes:

@@ -32,11 +40,18 @@ body:
       description: |
         For MacOS and Linux: copy the output of `uname -mprs`
        For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
+  - type: textarea
+    id: actual
+    attributes:
+      label: What issue are you seeing?
+      description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
+    validations:
+      required: true
   - type: textarea
     id: steps
     attributes:
       label: What steps can reproduce the bug?
-      description: Explain the bug and provide a code snippet that can reproduce it.
+      description: Explain the bug and provide a code snippet that can reproduce it. Please include session id, token limit usage, context window usage if applicable.
     validations:
       required: true
   - type: textarea

@@ -44,11 +59,6 @@ body:
     attributes:
       label: What is the expected behavior?
       description: If possible, please provide text instead of a screenshot.
-  - type: textarea
-    id: actual
-    attributes:
-      label: What do you see instead?
-      description: If possible, please provide text instead of a screenshot.
   - type: textarea
     id: notes
     attributes:
.github/ISSUE_TEMPLATE/5-vs-code-extension.yml (vendored, 24 lines changed)

@@ -14,11 +14,21 @@ body:
     id: version
     attributes:
       label: What version of the VS Code extension are you using?
     validations:
       required: true
+  - type: input
+    id: plan
+    attributes:
+      label: What subscription do you have?
+    validations:
+      required: true
+  - type: input
+    id: ide
+    attributes:
+      label: Which IDE are you using?
+      description: Like `VS Code`, `Cursor`, `Windsurf`, etc.
+    validations:
+      required: true
   - type: input
     id: platform
     attributes:

@@ -26,11 +36,18 @@ body:
       description: |
         For MacOS and Linux: copy the output of `uname -mprs`
        For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
+  - type: textarea
+    id: actual
+    attributes:
+      label: What issue are you seeing?
+      description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
+    validations:
+      required: true
   - type: textarea
     id: steps
     attributes:
       label: What steps can reproduce the bug?
-      description: Explain the bug and provide a code snippet that can reproduce it.
+      description: Explain the bug and provide a code snippet that can reproduce it. Please include session id, token limit usage, context window usage if applicable.
     validations:
       required: true
   - type: textarea

@@ -38,11 +55,6 @@ body:
     attributes:
       label: What is the expected behavior?
       description: If possible, please provide text instead of a screenshot.
-  - type: textarea
-    id: actual
-    attributes:
-      label: What do you see instead?
-      description: If possible, please provide text instead of a screenshot.
   - type: textarea
     id: notes
     attributes:
.github/workflows/issue-deduplicator.yml (vendored, 7 lines changed)

@@ -105,6 +105,10 @@ jobs:
            const issues = Array.isArray(parsed?.issues) ? parsed.issues : [];
            const currentIssueNumber = String(context.payload.issue.number);
+
+           console.log(`Current issue number: ${currentIssueNumber}`);
+           console.log(issues);
+
            const filteredIssues = issues.filter((value) => String(value) !== currentIssueNumber);

            if (filteredIssues.length === 0) {

@@ -113,7 +117,8 @@ jobs:
            }

            const lines = [
-             'Potential duplicates detected:',
+             'Potential duplicates detected. Please review them and close your issue if it is a duplicate.',
              '',
              ...filteredIssues.map((value) => `- #${String(value)}`),
              '',
              '*Powered by [Codex Action](https://github.com/openai/codex-action)*'];
.github/workflows/rust-ci.yml (vendored, 42 lines changed)

@@ -148,15 +148,26 @@ jobs:
          targets: ${{ matrix.target }}
          components: clippy

-     - uses: actions/cache@v4
+     # Explicit cache restore: split cargo home vs target, so we can
+     # avoid caching the large target dir on the gnu-dev job.
+     - name: Restore cargo home cache
+       id: cache_cargo_home_restore
+       uses: actions/cache/restore@v4
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
-           ${{ github.workspace }}/codex-rs/target/
-         key: cargo-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
+         key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
+
+     - name: Restore target cache (except gnu-dev)
+       id: cache_target_restore
+       if: ${{ !(matrix.target == 'x86_64-unknown-linux-gnu' && matrix.profile != 'release') }}
+       uses: actions/cache/restore@v4
+       with:
+         path: ${{ github.workspace }}/codex-rs/target/
+         key: cargo-target-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}

      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Install musl build tools

@@ -194,6 +205,31 @@ jobs:
        env:
          RUST_BACKTRACE: 1

+     # Save caches explicitly; make non-fatal so cache packaging
+     # never fails the overall job. Only save when key wasn't hit.
+     - name: Save cargo home cache
+       if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
+       continue-on-error: true
+       uses: actions/cache/save@v4
+       with:
+         path: |
+           ~/.cargo/bin/
+           ~/.cargo/registry/index/
+           ~/.cargo/registry/cache/
+           ~/.cargo/git/db/
+         key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}
+
+     - name: Save target cache (except gnu-dev)
+       if: >-
+         always() && !cancelled() &&
+         (steps.cache_target_restore.outputs.cache-hit != 'true') &&
+         !(matrix.target == 'x86_64-unknown-linux-gnu' && matrix.profile != 'release')
+       continue-on-error: true
+       uses: actions/cache/save@v4
+       with:
+         path: ${{ github.workspace }}/codex-rs/target/
+         key: cargo-target-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}

      # Fail the job if any of the previous steps failed.
      - name: verify all steps passed
        if: |
.github/workflows/rust-release.yml (vendored, 134 lines changed)

@@ -47,7 +47,7 @@ jobs:

  build:
    needs: tag-check
-   name: ${{ matrix.runner }} - ${{ matrix.target }}
+   name: Build - ${{ matrix.runner }} - ${{ matrix.target }}
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    defaults:

@@ -94,11 +94,118 @@ jobs:
      - if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
        name: Install musl build tools
        run: |
-         sudo apt install -y musl-tools pkg-config
+         sudo apt-get update
+         sudo apt-get install -y musl-tools pkg-config

      - name: Cargo build
        run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy

+     - if: ${{ matrix.runner == 'macos-14' }}
+       name: Configure Apple code signing
+       shell: bash
+       env:
+         KEYCHAIN_PASSWORD: actions
+         APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE_P12 }}
+         APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
+       run: |
+         set -euo pipefail
+
+         if [[ -z "${APPLE_CERTIFICATE:-}" ]]; then
+           echo "APPLE_CERTIFICATE is required for macOS signing"
+           exit 1
+         fi
+
+         if [[ -z "${APPLE_CERTIFICATE_PASSWORD:-}" ]]; then
+           echo "APPLE_CERTIFICATE_PASSWORD is required for macOS signing"
+           exit 1
+         fi
+
+         cert_path="${RUNNER_TEMP}/apple_signing_certificate.p12"
+         echo "$APPLE_CERTIFICATE" | base64 -d > "$cert_path"
+
+         keychain_path="${RUNNER_TEMP}/codex-signing.keychain-db"
+         security create-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
+         security set-keychain-settings -lut 21600 "$keychain_path"
+         security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
+
+         keychain_args=()
+         cleanup_keychain() {
+           if ((${#keychain_args[@]} > 0)); then
+             security list-keychains -s "${keychain_args[@]}" || true
+             security default-keychain -s "${keychain_args[0]}" || true
+           else
+             security list-keychains -s || true
+           fi
+           if [[ -f "$keychain_path" ]]; then
+             security delete-keychain "$keychain_path" || true
+           fi
+         }
+
+         while IFS= read -r keychain; do
+           [[ -n "$keychain" ]] && keychain_args+=("$keychain")
+         done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
+
+         if ((${#keychain_args[@]} > 0)); then
+           security list-keychains -s "$keychain_path" "${keychain_args[@]}"
+         else
+           security list-keychains -s "$keychain_path"
+         fi
+
+         security default-keychain -s "$keychain_path"
+         security import "$cert_path" -k "$keychain_path" -P "$APPLE_CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
+         security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" "$keychain_path" > /dev/null
+
+         codesign_hashes=()
+         while IFS= read -r hash; do
+           [[ -n "$hash" ]] && codesign_hashes+=("$hash")
+         done < <(security find-identity -v -p codesigning "$keychain_path" \
+           | sed -n 's/.*\([0-9A-F]\{40\}\).*/\1/p' \
+           | sort -u)
+
+         if ((${#codesign_hashes[@]} == 0)); then
+           echo "No signing identities found in $keychain_path"
+           cleanup_keychain
+           rm -f "$cert_path"
+           exit 1
+         fi
+
+         if ((${#codesign_hashes[@]} > 1)); then
+           echo "Multiple signing identities found in $keychain_path:"
+           printf ' %s\n' "${codesign_hashes[@]}"
+           cleanup_keychain
+           rm -f "$cert_path"
+           exit 1
+         fi
+
+         APPLE_CODESIGN_IDENTITY="${codesign_hashes[0]}"
+
+         rm -f "$cert_path"
+
+         echo "APPLE_CODESIGN_IDENTITY=$APPLE_CODESIGN_IDENTITY" >> "$GITHUB_ENV"
+         echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
+         echo "::add-mask::$APPLE_CODESIGN_IDENTITY"
+
+     - if: ${{ matrix.runner == 'macos-14' }}
+       name: Sign macOS binaries
+       shell: bash
+       run: |
+         set -euo pipefail
+
+         if [[ -z "${APPLE_CODESIGN_IDENTITY:-}" ]]; then
+           echo "APPLE_CODESIGN_IDENTITY is required for macOS signing"
+           exit 1
+         fi
+
+         keychain_args=()
+         if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
+           keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
+         fi
+
+         for binary in codex codex-responses-api-proxy; do
+           path="target/${{ matrix.target }}/release/${binary}"
+           codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
+         done

      - name: Stage artifacts
        shell: bash
        run: |

@@ -157,6 +264,29 @@ jobs:
            zstd -T0 -19 --rm "$dest/$base"
          done

+     - name: Remove signing keychain
+       if: ${{ always() && matrix.runner == 'macos-14' }}
+       shell: bash
+       env:
+         APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
+       run: |
+         set -euo pipefail
+         if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" ]]; then
+           keychain_args=()
+           while IFS= read -r keychain; do
+             [[ "$keychain" == "$APPLE_CODESIGN_KEYCHAIN" ]] && continue
+             [[ -n "$keychain" ]] && keychain_args+=("$keychain")
+           done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
+           if ((${#keychain_args[@]} > 0)); then
+             security list-keychains -s "${keychain_args[@]}"
+             security default-keychain -s "${keychain_args[0]}"
+           fi
+
+           if [[ -f "$APPLE_CODESIGN_KEYCHAIN" ]]; then
+             security delete-keychain "$APPLE_CODESIGN_KEYCHAIN"
+           fi
+         fi

      - uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.target }}
AGENTS.md (25 lines changed)

@@ -73,3 +73,28 @@ If you don’t have the tool:

+### Test assertions
+
+- Tests should use pretty_assertions::assert_eq for clearer diffs. Import this at the top of the test module if it isn't already.
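A minimal sketch of that assertion convention, outside the diff (the test module and values here are illustrative, not repository code):

```rust
#[cfg(test)]
mod tests {
    // Shadows std's assert_eq! with a variant that prints a diff on failure.
    use pretty_assertions::assert_eq;

    #[test]
    fn diffs_are_readable() {
        let expected = vec!["alpha", "beta"];
        let actual = vec!["alpha", "beta"];
        // On failure this prints a colored, line-by-line diff instead of
        // one flat Debug dump, which is the point of the convention above.
        assert_eq!(actual, expected);
    }
}
```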
+### Integration tests (core)
+
+- Prefer the utilities in `core_test_support::responses` when writing end-to-end Codex tests.
+- All `mount_sse*` helpers return a `ResponseMock`; hold onto it so you can assert against outbound `/responses` POST bodies.
+- Use `ResponseMock::single_request()` when a test should only issue one POST, or `ResponseMock::requests()` to inspect every captured `ResponsesRequest`.
+- `ResponsesRequest` exposes helpers (`body_json`, `input`, `function_call_output`, `custom_tool_call_output`, `call_output`, `header`, `path`, `query_param`) so assertions can target structured payloads instead of manual JSON digging.
+- Build SSE payloads with the provided `ev_*` constructors and the `sse(...)` helper.
+- Typical pattern:
+
+```rust
+let mock = responses::mount_sse_once(&server, responses::sse(vec![
+    responses::ev_response_created("resp-1"),
+    responses::ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
+    responses::ev_completed("resp-1"),
+])).await;
+
+codex.submit(Op::UserTurn { ... }).await?;
+
+// Assert request body if needed.
+let request = mock.single_request();
+// assert using request.function_call_output(call_id) or request.json_body() or other helpers.
+```
@@ -61,7 +61,7 @@ You can also use Codex with an API key, but this requires [additional setup](./d

 ### Model Context Protocol (MCP)

-Codex CLI supports [MCP servers](./docs/advanced.md#model-context-protocol-mcp). Enable by adding an `mcp_servers` section to your `~/.codex/config.toml`.
+Codex can access MCP servers. To configure them, refer to the [config docs](./docs/config.md#mcp_servers).

 ### Configuration
codex-cli/bin/codex.js (Executable file → Normal file, 35 lines changed)

@@ -80,6 +80,32 @@ function getUpdatedPath(newDirs) {
   return updatedPath;
 }

+/**
+ * Use heuristics to detect the package manager that was used to install Codex
+ * in order to give the user a hint about how to update it.
+ */
+function detectPackageManager() {
+  const userAgent = process.env.npm_config_user_agent || "";
+  if (/\bbun\//.test(userAgent)) {
+    return "bun";
+  }
+
+  const execPath = process.env.npm_execpath || "";
+  if (execPath.includes("bun")) {
+    return "bun";
+  }
+
+  if (
+    process.env.BUN_INSTALL ||
+    process.env.BUN_INSTALL_GLOBAL_DIR ||
+    process.env.BUN_INSTALL_BIN_DIR
+  ) {
+    return "bun";
+  }
+
+  return userAgent ? "npm" : null;
+}
+
 const additionalDirs = [];
 const pathDir = path.join(archRoot, "path");
 if (existsSync(pathDir)) {

@@ -87,9 +113,16 @@ if (existsSync(pathDir)) {
 }
 const updatedPath = getUpdatedPath(additionalDirs);

+const env = { ...process.env, PATH: updatedPath };
+const packageManagerEnvVar =
+  detectPackageManager() === "bun"
+    ? "CODEX_MANAGED_BY_BUN"
+    : "CODEX_MANAGED_BY_NPM";
+env[packageManagerEnvVar] = "1";
+
 const child = spawn(binaryPath, process.argv.slice(2), {
   stdio: "inherit",
-  env: { ...process.env, PATH: updatedPath, CODEX_MANAGED_BY_NPM: "1" },
+  env,
 });

 child.on("error", (err) => {
codex-rs/Cargo.lock (generated, 107 lines changed)

@@ -992,7 +992,7 @@ dependencies = [
  "tokio-stream",
  "tracing",
  "tracing-subscriber",
- "unicode-width 0.1.14",
+ "unicode-width 0.2.1",
 ]

 [[package]]

@@ -1051,6 +1051,7 @@ dependencies = [
  "escargot",
  "eventsource-stream",
  "futures",
  "ignore",
  "indexmap 2.10.0",
  "landlock",
  "libc",

@@ -1069,6 +1070,7 @@ dependencies = [
  "serde_json",
  "serial_test",
  "sha1",
  "sha2",
  "shlex",
  "similar",
  "strum_macros 0.27.2",

@@ -1352,6 +1354,7 @@ version = "0.0.0"
 dependencies = [
  "anyhow",
  "axum",
  "codex-protocol",
  "dirs",
  "futures",
  "keyring",

@@ -1424,6 +1427,8 @@ dependencies = [
  "tracing",
  "tracing-appender",
  "tracing-subscriber",
  "tree-sitter-bash",
  "tree-sitter-highlight",
  "unicode-segmentation",
  "unicode-width 0.2.1",
  "url",

@@ -1574,10 +1579,12 @@ dependencies = [
  "anyhow",
  "assert_cmd",
  "codex-core",
  "notify",
  "regex-lite",
  "serde_json",
  "tempfile",
  "tokio",
  "walkdir",
  "wiremock",
 ]

@@ -2368,6 +2375,15 @@ dependencies = [
  "percent-encoding",
 ]

+[[package]]
+name = "fsevent-sys"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
+dependencies = [
+ "libc",
+]

 [[package]]
 name = "futures"
 version = "0.3.31"

@@ -3054,6 +3070,26 @@ version = "2.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"

+[[package]]
+name = "inotify"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
+dependencies = [
+ "bitflags 2.9.1",
+ "inotify-sys",
+ "libc",
+]
+
+[[package]]
+name = "inotify-sys"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
+dependencies = [
+ "libc",
+]

 [[package]]
 name = "inout"
 version = "0.1.4"

@@ -3254,6 +3290,26 @@ dependencies = [
  "zeroize",
 ]

+[[package]]
+name = "kqueue"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a"
+dependencies = [
+ "kqueue-sys",
+ "libc",
+]
+
+[[package]]
+name = "kqueue-sys"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b"
+dependencies = [
+ "bitflags 1.3.2",
+ "libc",
+]

 [[package]]
 name = "lalrpop"
 version = "0.19.12"

@@ -3653,6 +3709,30 @@ version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"

+[[package]]
+name = "notify"
+version = "8.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3"
+dependencies = [
+ "bitflags 2.9.1",
+ "fsevent-sys",
+ "inotify",
+ "kqueue",
+ "libc",
+ "log",
+ "mio",
+ "notify-types",
+ "walkdir",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "notify-types"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d"

 [[package]]
 name = "nu-ansi-term"
 version = "0.50.1"

@@ -4770,9 +4850,9 @@ dependencies = [

 [[package]]
 name = "rmcp"
-version = "0.8.0"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "583d060e99feb3a3683fb48a1e4bf5f8d4a50951f429726f330ee5ff548837f8"
+checksum = "6f35acda8f89fca5fd8c96cae3c6d5b4c38ea0072df4c8030915f3b5ff469c1c"
 dependencies = [
  "base64",
  "bytes",

@@ -4804,9 +4884,9 @@ dependencies = [

 [[package]]
 name = "rmcp-macros"
-version = "0.8.0"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "421d8b0ba302f479214889486f9550e63feca3af310f1190efcf6e2016802693"
+checksum = "c9f1d5220aaa23b79c3d02e18f7a554403b3ccea544bbb6c69d6bcb3e854a274"
 dependencies = [
  "darling 0.21.3",
  "proc-macro2",

@@ -6019,6 +6099,7 @@ dependencies = [
  "bytes",
  "futures-core",
  "futures-sink",
  "futures-util",
  "pin-project-lite",
  "tokio",
 ]

@@ -6260,9 +6341,9 @@ dependencies = [

 [[package]]
 name = "tree-sitter"
-version = "0.25.9"
+version = "0.25.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ccd2a058a86cfece0bf96f7cce1021efef9c8ed0e892ab74639173e5ed7a34fa"
+checksum = "78f873475d258561b06f1c595d93308a7ed124d9977cb26b148c2084a4a3cc87"
 dependencies = [
  "cc",
  "regex",

@@ -6282,6 +6363,18 @@ dependencies = [
  "tree-sitter-language",
 ]

+[[package]]
+name = "tree-sitter-highlight"
+version = "0.25.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "adc5f880ad8d8f94e88cb81c3557024cf1a8b75e3b504c50481ed4f5a6006ff3"
+dependencies = [
+ "regex",
+ "streaming-iterator",
+ "thiserror 2.0.16",
+ "tree-sitter",
+]

 [[package]]
 name = "tree-sitter-language"
 version = "0.1.5"
@@ -122,6 +122,7 @@ log = "0.4"
 maplit = "1.0.2"
 mime_guess = "2.0.5"
 multimap = "0.10.0"
+notify = "8.2.0"
 nucleo-matcher = "0.3.1"
 openssl-sys = "*"
 opentelemetry = "0.30.0"

@@ -175,8 +176,9 @@ tracing = "0.1.41"
 tracing-appender = "0.2.3"
 tracing-subscriber = "0.3.20"
 tracing-test = "0.2.5"
-tree-sitter = "0.25.9"
-tree-sitter-bash = "0.25.0"
+tree-sitter = "0.25.10"
+tree-sitter-bash = "0.25"
+tree-sitter-highlight = "0.25.10"
 ts-rs = "11"
 unicode-segmentation = "1.12.0"
 unicode-width = "0.2"
@@ -23,9 +23,15 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses

 ### Model Context Protocol Support

-Codex CLI functions as an MCP client that can connect to MCP servers on startup. See the [`mcp_servers`](../docs/config.md#mcp_servers) section in the configuration documentation for details.
-It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp-server`. Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:
+#### MCP client
+
+Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#mcp_servers) for details.
+
+#### MCP server (experimental)
+
+Codex can be launched as an MCP _server_ by running `codex mcp-server`. This allows _other_ MCP clients to use Codex as a tool for another agent.
+
+Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:

 ```shell
 npx @modelcontextprotocol/inspector codex mcp-server
 ```
@@ -3,11 +3,30 @@ use ansi_to_tui::IntoText;
 use ratatui::text::Line;
 use ratatui::text::Text;

+// Expand tabs in a best-effort way for transcript rendering.
+// Tabs can interact poorly with left-gutter prefixes in our TUI and CLI
+// transcript views (e.g., `nl` separates line numbers from content with a tab).
+// Replacing tabs with spaces avoids odd visual artifacts without changing
+// semantics for our use cases.
+fn expand_tabs(s: &str) -> std::borrow::Cow<'_, str> {
+    if s.contains('\t') {
+        // Keep it simple: replace each tab with 4 spaces.
+        // We do not try to align to tab stops since most usages (like `nl`)
+        // look acceptable with a fixed substitution and this avoids stateful math
+        // across spans.
+        std::borrow::Cow::Owned(s.replace('\t', "    "))
+    } else {
+        std::borrow::Cow::Borrowed(s)
+    }
+}
+
 /// This function should be used when the contents of `s` are expected to match
 /// a single line. If multiple lines are found, a warning is logged and only the
 /// first line is returned.
 pub fn ansi_escape_line(s: &str) -> Line<'static> {
-    let text = ansi_escape(s);
+    // Normalize tabs to spaces to avoid odd gutter collisions in transcript mode.
+    let s = expand_tabs(s);
+    let text = ansi_escape(&s);
     match text.lines.as_slice() {
         [] => "".into(),
         [only] => only.clone(),
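As a sketch of the net behavior (not part of the diff; the test module, name, and input are hypothetical), the combination above can be exercised like this:

```rust
#[cfg(test)]
mod tab_expansion_tests {
    use super::ansi_escape_line;

    #[test]
    fn tab_becomes_four_spaces() {
        // `nl`-style output: a line number, a tab, then the content.
        let line = ansi_escape_line("1\tfn main() {}");
        // Concatenate the rendered spans back into plain text.
        let rendered: String = line.spans.iter().map(|s| s.content.as_ref()).collect();
        assert_eq!(rendered, "1    fn main() {}");
    }
}
```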
@@ -26,6 +26,8 @@ use supports_color::Stream;
 mod mcp_cmd;

 use crate::mcp_cmd::McpCli;
+use codex_core::config::Config;
+use codex_core::config::ConfigOverrides;

 /// Codex CLI
 ///

@@ -45,6 +47,9 @@ struct MultitoolCli {
     #[clap(flatten)]
     pub config_overrides: CliConfigOverrides,

+    #[clap(flatten)]
+    pub feature_toggles: FeatureToggles,
+
     #[clap(flatten)]
     interactive: TuiCli,

@@ -97,6 +102,9 @@ enum Subcommand {
     /// Internal: run the responses API proxy.
     #[clap(hide = true)]
     ResponsesApiProxy(ResponsesApiProxyArgs),
+
+    /// Inspect feature flags.
+    Features(FeaturesCli),
 }

 #[derive(Debug, Parser)]

@@ -157,9 +165,7 @@ struct LoginCommand {
     )]
     api_key: Option<String>,

-    /// EXPERIMENTAL: Use device code flow (not yet supported)
-    #[arg(long = "experimental_use-device-code", hide = true)]
+    /// This feature is experimental and may changed in future releases.
+    #[arg(long = "device-auth")]
     use_device_code: bool,

     /// EXPERIMENTAL: Use custom OAuth issuer base URL (advanced)

@@ -233,6 +239,53 @@ fn print_exit_messages(exit_info: AppExitInfo) {
     }
 }

+#[derive(Debug, Default, Parser, Clone)]
+struct FeatureToggles {
+    /// Enable a feature (repeatable). Equivalent to `-c features.<name>=true`.
+    #[arg(long = "enable", value_name = "FEATURE", action = clap::ArgAction::Append, global = true)]
+    enable: Vec<String>,
+
+    /// Disable a feature (repeatable). Equivalent to `-c features.<name>=false`.
+    #[arg(long = "disable", value_name = "FEATURE", action = clap::ArgAction::Append, global = true)]
+    disable: Vec<String>,
+}
+
+impl FeatureToggles {
+    fn to_overrides(&self) -> Vec<String> {
+        let mut v = Vec::new();
+        for k in &self.enable {
+            v.push(format!("features.{k}=true"));
+        }
+        for k in &self.disable {
+            v.push(format!("features.{k}=false"));
+        }
+        v
+    }
+}
+
+#[derive(Debug, Parser)]
+struct FeaturesCli {
+    #[command(subcommand)]
+    sub: FeaturesSubcommand,
+}
+
+#[derive(Debug, Parser)]
+enum FeaturesSubcommand {
+    /// List known features with their stage and effective state.
+    List,
+}
+
+fn stage_str(stage: codex_core::features::Stage) -> &'static str {
+    use codex_core::features::Stage;
+    match stage {
+        Stage::Experimental => "experimental",
+        Stage::Beta => "beta",
+        Stage::Stable => "stable",
+        Stage::Deprecated => "deprecated",
+        Stage::Removed => "removed",
+    }
+}
+
 /// As early as possible in the process lifecycle, apply hardening measures. We
 /// skip this in debug builds to avoid interfering with debugging.
 #[ctor::ctor]
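Taken together, the new toggles are just sugar for `-c` overrides. A standalone sketch of the mapping, mirroring `to_overrides` above ("sandbox_v2" and "legacy_ui" are placeholder feature names, not real Codex features):

```rust
// Standalone sketch of the --enable/--disable folding shown in the diff.
struct FeatureToggles {
    enable: Vec<String>,
    disable: Vec<String>,
}

impl FeatureToggles {
    fn to_overrides(&self) -> Vec<String> {
        let mut v = Vec::new();
        for k in &self.enable {
            v.push(format!("features.{k}=true"));
        }
        for k in &self.disable {
            v.push(format!("features.{k}=false"));
        }
        v
    }
}

fn main() {
    // `codex --enable sandbox_v2 --disable legacy_ui ...` becomes the same
    // override stream as `-c features.sandbox_v2=true -c features.legacy_ui=false`.
    let toggles = FeatureToggles {
        enable: vec!["sandbox_v2".to_string()],
        disable: vec!["legacy_ui".to_string()],
    };
    assert_eq!(
        toggles.to_overrides(),
        vec!["features.sandbox_v2=true", "features.legacy_ui=false"]
    );
}
```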
@@ -250,11 +303,17 @@ fn main() -> anyhow::Result<()> {

 async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
     let MultitoolCli {
-        config_overrides: root_config_overrides,
+        config_overrides: mut root_config_overrides,
+        feature_toggles,
         mut interactive,
         subcommand,
     } = MultitoolCli::parse();

+    // Fold --enable/--disable into config overrides so they flow to all subcommands.
+    root_config_overrides
+        .raw_overrides
+        .extend(feature_toggles.to_overrides());
+
     match subcommand {
         None => {
             prepend_config_flags(

@@ -294,7 +353,8 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
                 last,
                 config_overrides,
             );
-            codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
+            let exit_info = codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
+            print_exit_messages(exit_info);
         }
         Some(Subcommand::Login(mut login_cli)) => {
             prepend_config_flags(

@@ -382,6 +442,30 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
         Some(Subcommand::GenerateTs(gen_cli)) => {
             codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
         }
+        Some(Subcommand::Features(FeaturesCli { sub })) => match sub {
+            FeaturesSubcommand::List => {
+                // Respect root-level `-c` overrides plus top-level flags like `--profile`.
+                let cli_kv_overrides = root_config_overrides
+                    .parse_overrides()
+                    .map_err(|e| anyhow::anyhow!(e))?;
+
+                // Thread through relevant top-level flags (at minimum, `--profile`).
+                // Also honor `--search` since it maps to a feature toggle.
+                let overrides = ConfigOverrides {
+                    config_profile: interactive.config_profile.clone(),
+                    tools_web_search_request: interactive.web_search.then_some(true),
+                    ..Default::default()
+                };
+
+                let config = Config::load_with_cli_overrides(cli_kv_overrides, overrides).await?;
+                for def in codex_core::features::FEATURES.iter() {
+                    let name = def.key;
+                    let stage = stage_str(def.stage);
+                    let enabled = config.features.enabled(def.id);
+                    println!("{name}\t{stage}\t{enabled}");
+                }
+            }
+        },
     }

     Ok(())

@@ -485,6 +569,7 @@ mod tests {
         interactive,
         config_overrides: root_overrides,
         subcommand,
+        feature_toggles: _,
     } = cli;

     let Subcommand::Resume(ResumeCommand {
@@ -4,6 +4,7 @@ use anyhow::Context;
 use anyhow::Result;
 use anyhow::anyhow;
 use anyhow::bail;
+use clap::ArgGroup;
 use codex_common::CliConfigOverrides;
 use codex_core::config::Config;
 use codex_core::config::ConfigOverrides;

@@ -12,8 +13,12 @@ use codex_core::config::load_global_mcp_servers;
 use codex_core::config::write_global_mcp_servers;
 use codex_core::config_types::McpServerConfig;
 use codex_core::config_types::McpServerTransportConfig;
+use codex_core::features::Feature;
+use codex_core::mcp::auth::compute_auth_statuses;
+use codex_core::protocol::McpAuthStatus;
 use codex_rmcp_client::delete_oauth_tokens;
 use codex_rmcp_client::perform_oauth_login;
+use codex_rmcp_client::supports_oauth_login;

 /// [experimental] Launch Codex as an MCP server or manage configured MCP servers.
 ///

@@ -77,13 +82,61 @@ pub struct AddArgs {
     /// Name for the MCP server configuration.
     pub name: String,

-    /// Environment variables to set when launching the server.
-    #[arg(long, value_parser = parse_env_pair, value_name = "KEY=VALUE")]
-    pub env: Vec<(String, String)>,
+    #[command(flatten)]
+    pub transport_args: AddMcpTransportArgs,
 }

+#[derive(Debug, clap::Args)]
+#[command(
+    group(
+        ArgGroup::new("transport")
+            .args(["command", "url"])
+            .required(true)
+            .multiple(false)
+    )
+)]
+pub struct AddMcpTransportArgs {
+    #[command(flatten)]
+    pub stdio: Option<AddMcpStdioArgs>,
+
+    #[command(flatten)]
+    pub streamable_http: Option<AddMcpStreamableHttpArgs>,
+}
+
+#[derive(Debug, clap::Args)]
+pub struct AddMcpStdioArgs {
     /// Command to launch the MCP server.
-    #[arg(trailing_var_arg = true, num_args = 1..)]
+    /// Use --url for a streamable HTTP server.
+    #[arg(
+        trailing_var_arg = true,
+        num_args = 0..,
+    )]
+    pub command: Vec<String>,
+
+    /// Environment variables to set when launching the server.
+    /// Only valid with stdio servers.
+    #[arg(
+        long,
+        value_parser = parse_env_pair,
+        value_name = "KEY=VALUE",
+    )]
+    pub env: Vec<(String, String)>,
+}
+
+#[derive(Debug, clap::Args)]
+pub struct AddMcpStreamableHttpArgs {
+    /// URL for a streamable HTTP MCP server.
+    #[arg(long)]
+    pub url: String,
+
+    /// Optional environment variable to read for a bearer token.
+    /// Only valid with streamable HTTP servers.
+    #[arg(
+        long = "bearer-token-env-var",
+        value_name = "ENV_VAR",
+        requires = "url"
+    )]
+    pub bearer_token_env_var: Option<String>,
+}

 #[derive(Debug, clap::Parser)]

@@ -138,39 +191,61 @@ impl McpCli {

 async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<()> {
-    // Validate any provided overrides even though they are not currently applied.
-    config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
+    let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
+    let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
+        .await
+        .context("failed to load configuration")?;

-    let AddArgs { name, env, command } = add_args;
+    let AddArgs {
+        name,
+        transport_args,
+    } = add_args;

     validate_server_name(&name)?;

-    let mut command_parts = command.into_iter();
-    let command_bin = command_parts
-        .next()
-        .ok_or_else(|| anyhow!("command is required"))?;
-    let command_args: Vec<String> = command_parts.collect();
-
-    let env_map = if env.is_empty() {
-        None
-    } else {
-        let mut map = HashMap::new();
-        for (key, value) in env {
-            map.insert(key, value);
-        }
-        Some(map)
-    };
-
     let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
     let mut servers = load_global_mcp_servers(&codex_home)
         .await
         .with_context(|| format!("failed to load MCP servers from {}", codex_home.display()))?;

-    let new_entry = McpServerConfig {
-        transport: McpServerTransportConfig::Stdio {
-            command: command_bin,
-            args: command_args,
-            env: env_map,
+    let transport = match transport_args {
+        AddMcpTransportArgs {
+            stdio: Some(stdio), ..
+        } => {
+            let mut command_parts = stdio.command.into_iter();
+            let command_bin = command_parts
+                .next()
+                .ok_or_else(|| anyhow!("command is required"))?;
+            let command_args: Vec<String> = command_parts.collect();
+
+            let env_map = if stdio.env.is_empty() {
+                None
+            } else {
+                Some(stdio.env.into_iter().collect::<HashMap<_, _>>())
+            };
+            McpServerTransportConfig::Stdio {
+                command: command_bin,
+                args: command_args,
+                env: env_map,
+            }
+        }
+        AddMcpTransportArgs {
+            streamable_http:
+                Some(AddMcpStreamableHttpArgs {
+                    url,
+                    bearer_token_env_var,
+                }),
+            ..
+        } => McpServerTransportConfig::StreamableHttp {
+            url,
+            bearer_token_env_var,
         },
+        AddMcpTransportArgs { .. } => bail!("exactly one of --command or --url must be provided"),
+    };
+
+    let new_entry = McpServerConfig {
+        transport: transport.clone(),
         enabled: true,
         startup_timeout_sec: None,
         tool_timeout_sec: None,
     };

@@ -182,6 +257,17 @@ async fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Re
     println!("Added global MCP server '{name}'.");

+    if let McpServerTransportConfig::StreamableHttp {
+        url,
+        bearer_token_env_var: None,
+    } = transport
+        && matches!(supports_oauth_login(&url).await, Ok(true))
+    {
+        println!("Detected OAuth support. Starting OAuth flow…");
+        perform_oauth_login(&name, &url, config.mcp_oauth_credentials_store_mode).await?;
+        println!("Successfully logged in.");
+    }

     Ok(())
 }

@@ -219,7 +305,7 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
         .await
         .context("failed to load configuration")?;

-    if !config.use_experimental_use_rmcp_client {
+    if !config.features.enabled(Feature::RmcpClient) {
         bail!(
             "OAuth login is only supported when experimental_use_rmcp_client is true in config.toml."
         );

@@ -236,7 +322,7 @@ async fn run_login(config_overrides: &CliConfigOverrides, login_args: LoginArgs)
         _ => bail!("OAuth login is only supported for streamable HTTP servers."),
     };

-    perform_oauth_login(&name, &url).await?;
+    perform_oauth_login(&name, &url, config.mcp_oauth_credentials_store_mode).await?;
     println!("Successfully logged in to MCP server '{name}'.");
     Ok(())
 }

@@ -259,7 +345,7 @@ async fn run_logout(config_overrides: &CliConfigOverrides, logout_args: LogoutAr
         _ => bail!("OAuth logout is only supported for streamable_http transports."),
     };

-    match delete_oauth_tokens(&name, &url) {
+    match delete_oauth_tokens(&name, &url, config.mcp_oauth_credentials_store_mode) {
         Ok(true) => println!("Removed OAuth credentials for '{name}'."),
         Ok(false) => println!("No OAuth credentials stored for '{name}'."),
         Err(err) => return Err(anyhow!("failed to delete OAuth credentials: {err}")),

@@ -276,11 +362,20 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
     let mut entries: Vec<_> = config.mcp_servers.iter().collect();
     entries.sort_by(|(a, _), (b, _)| a.cmp(b));
+    let auth_statuses = compute_auth_statuses(
+        config.mcp_servers.iter(),
+        config.mcp_oauth_credentials_store_mode,
+    )
+    .await;

     if list_args.json {
         let json_entries: Vec<_> = entries
             .into_iter()
             .map(|(name, cfg)| {
+                let auth_status = auth_statuses
+                    .get(name.as_str())
+                    .copied()
+                    .unwrap_or(McpAuthStatus::Unsupported);
                 let transport = match &cfg.transport {
                     McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
                         "type": "stdio",

@@ -288,17 +383,21 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
                         "args": args,
                         "env": env,
                     }),
-                    McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
+                    McpServerTransportConfig::StreamableHttp {
+                        url,
+                        bearer_token_env_var,
+                    } => {
                         serde_json::json!({
                             "type": "streamable_http",
                             "url": url,
-                            "bearer_token": bearer_token,
+                            "bearer_token_env_var": bearer_token_env_var,
                         })
                     }
                 };

                 serde_json::json!({
                     "name": name,
+                    "enabled": cfg.enabled,
                     "transport": transport,
                     "startup_timeout_sec": cfg
                         .startup_timeout_sec

@@ -306,6 +405,7 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
                     "tool_timeout_sec": cfg
                         .tool_timeout_sec
                         .map(|timeout| timeout.as_secs_f64()),
+                    "auth_status": auth_status,
                 })
             })
             .collect();

@@ -319,8 +419,8 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
         return Ok(());
     }

-    let mut stdio_rows: Vec<[String; 4]> = Vec::new();
-    let mut http_rows: Vec<[String; 3]> = Vec::new();
+    let mut stdio_rows: Vec<[String; 6]> = Vec::new();
+    let mut http_rows: Vec<[String; 5]> = Vec::new();

     for (name, cfg) in entries {
         match &cfg.transport {

@@ -343,21 +443,59 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
                         .join(", ")
                 }
             };
+                let status = if cfg.enabled {
+                    "enabled".to_string()
+                } else {
+                    "disabled".to_string()
+                };
+                let auth_status = auth_statuses
+                    .get(name.as_str())
+                    .copied()
+                    .unwrap_or(McpAuthStatus::Unsupported)
+                    .to_string();
-                stdio_rows.push([name.clone(), command.clone(), args_display, env_display]);
+                stdio_rows.push([
+                    name.clone(),
+                    command.clone(),
+                    args_display,
+                    env_display,
+                    status,
+                    auth_status,
+                ]);
             }
-            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
-                let has_bearer = if bearer_token.is_some() {
-                    "True"
-                } else {
-                    "False"
-                };
-                http_rows.push([name.clone(), url.clone(), has_bearer.into()]);
+            McpServerTransportConfig::StreamableHttp {
+                url,
+                bearer_token_env_var,
+            } => {
+                let status = if cfg.enabled {
+                    "enabled".to_string()
+                } else {
+                    "disabled".to_string()
+                };
+                let auth_status = auth_statuses
+                    .get(name.as_str())
+                    .copied()
+                    .unwrap_or(McpAuthStatus::Unsupported)
+                    .to_string();
+                http_rows.push([
+                    name.clone(),
+                    url.clone(),
+                    bearer_token_env_var.clone().unwrap_or("-".to_string()),
+                    status,
+                    auth_status,
+                ]);
             }
         }
     }

     if !stdio_rows.is_empty() {
-        let mut widths = ["Name".len(), "Command".len(), "Args".len(), "Env".len()];
+        let mut widths = [
+            "Name".len(),
+            "Command".len(),
+            "Args".len(),
+            "Env".len(),
+            "Status".len(),
+            "Auth".len(),
+        ];
         for row in &stdio_rows {
             for (i, cell) in row.iter().enumerate() {
                 widths[i] = widths[i].max(cell.len());

@@ -365,28 +503,36 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
         }

         println!(
-            "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
-            "Name",
-            "Command",
-            "Args",
-            "Env",
+            "{name:<name_w$} {command:<cmd_w$} {args:<args_w$} {env:<env_w$} {status:<status_w$} {auth:<auth_w$}",
+            name = "Name",
+            command = "Command",
+            args = "Args",
+            env = "Env",
+            status = "Status",
+            auth = "Auth",
             name_w = widths[0],
             cmd_w = widths[1],
             args_w = widths[2],
             env_w = widths[3],
+            status_w = widths[4],
+            auth_w = widths[5],
         );

         for row in &stdio_rows {
             println!(
-                "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
-                row[0],
-                row[1],
-                row[2],
-                row[3],
+                "{name:<name_w$} {command:<cmd_w$} {args:<args_w$} {env:<env_w$} {status:<status_w$} {auth:<auth_w$}",
+                name = row[0].as_str(),
+                command = row[1].as_str(),
+                args = row[2].as_str(),
+                env = row[3].as_str(),
+                status = row[4].as_str(),
+                auth = row[5].as_str(),
                 name_w = widths[0],
                 cmd_w = widths[1],
                 args_w = widths[2],
                 env_w = widths[3],
+                status_w = widths[4],
+                auth_w = widths[5],
             );
         }
     }

@@ -396,7 +542,13 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
     }

     if !http_rows.is_empty() {
-        let mut widths = ["Name".len(), "Url".len(), "Has Bearer Token".len()];
+        let mut widths = [
+            "Name".len(),
+            "Url".len(),
+            "Bearer Token Env Var".len(),
+            "Status".len(),
+            "Auth".len(),
+        ];
         for row in &http_rows {
             for (i, cell) in row.iter().enumerate() {
                 widths[i] = widths[i].max(cell.len());

@@ -404,24 +556,32 @@ async fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) ->
         }

         println!(
-            "{:<name_w$} {:<url_w$} {:<token_w$}",
-            "Name",
-            "Url",
-            "Has Bearer Token",
+            "{name:<name_w$} {url:<url_w$} {token:<token_w$} {status:<status_w$} {auth:<auth_w$}",
+            name = "Name",
+            url = "Url",
+            token = "Bearer Token Env Var",
+            status = "Status",
+            auth = "Auth",
             name_w = widths[0],
             url_w = widths[1],
             token_w = widths[2],
+            status_w = widths[3],
+            auth_w = widths[4],
         );

         for row in &http_rows {
             println!(
-                "{:<name_w$} {:<url_w$} {:<token_w$}",
-                row[0],
-                row[1],
-                row[2],
+                "{name:<name_w$} {url:<url_w$} {token:<token_w$} {status:<status_w$} {auth:<auth_w$}",
+                name = row[0].as_str(),
+                url = row[1].as_str(),
+                token = row[2].as_str(),
+                status = row[3].as_str(),
+                auth = row[4].as_str(),
                 name_w = widths[0],
                 url_w = widths[1],
                 token_w = widths[2],
+                status_w = widths[3],
+                auth_w = widths[4],
             );
         }
     }

@@ -447,14 +607,18 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
             "args": args,
             "env": env,
         }),
-        McpServerTransportConfig::StreamableHttp { url, bearer_token } => serde_json::json!({
+        McpServerTransportConfig::StreamableHttp {
+            url,
+            bearer_token_env_var,
+        } => serde_json::json!({
             "type": "streamable_http",
             "url": url,
-            "bearer_token": bearer_token,
+            "bearer_token_env_var": bearer_token_env_var,
         }),
     };
     let output = serde_json::to_string_pretty(&serde_json::json!({
         "name": get_args.name,
+        "enabled": server.enabled,
         "transport": transport,
         "startup_timeout_sec": server
             .startup_timeout_sec

@@ -468,6 +632,7 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
     }

     println!("{}", get_args.name);
+    println!("  enabled: {}", server.enabled);
     match &server.transport {
         McpServerTransportConfig::Stdio { command, args, env } => {
             println!("  transport: stdio");

@@ -493,11 +658,14 @@ async fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Re
         };
             println!("  env: {env_display}");
         }
-        McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
+        McpServerTransportConfig::StreamableHttp {
+            url,
+            bearer_token_env_var,
+        } => {
             println!("  transport: streamable_http");
             println!("  url: {url}");
-            let bearer = bearer_token.as_deref().unwrap_or("-");
-            println!("  bearer_token: {bearer}");
+            let env_var = bearer_token_env_var.as_deref().unwrap_or("-");
+            println!("  bearer_token_env_var: {env_var}");
         }
     }
     if let Some(timeout) = server.startup_timeout_sec {
@@ -35,6 +35,7 @@ async fn add_and_remove_server_updates_global_config() -> Result<()> {
         }
         other => panic!("unexpected transport: {other:?}"),
     }
+    assert!(docs.enabled);

     let mut remove_cmd = codex_command(codex_home.path())?;
     remove_cmd

@@ -90,6 +91,122 @@ async fn add_with_env_preserves_key_order_and_values() -> Result<()> {
     assert_eq!(env.len(), 2);
     assert_eq!(env.get("FOO"), Some(&"bar".to_string()));
     assert_eq!(env.get("ALPHA"), Some(&"beta".to_string()));
+    assert!(envy.enabled);

     Ok(())
 }

+#[tokio::test]
+async fn add_streamable_http_without_manual_token() -> Result<()> {
+    let codex_home = TempDir::new()?;
+
+    let mut add_cmd = codex_command(codex_home.path())?;
+    add_cmd
+        .args(["mcp", "add", "github", "--url", "https://example.com/mcp"])
+        .assert()
+        .success();
+
+    let servers = load_global_mcp_servers(codex_home.path()).await?;
+    let github = servers.get("github").expect("github server should exist");
+    match &github.transport {
+        McpServerTransportConfig::StreamableHttp {
+            url,
+            bearer_token_env_var,
+        } => {
+            assert_eq!(url, "https://example.com/mcp");
+            assert!(bearer_token_env_var.is_none());
+        }
+        other => panic!("unexpected transport: {other:?}"),
+    }
+    assert!(github.enabled);
+
+    assert!(!codex_home.path().join(".credentials.json").exists());
+    assert!(!codex_home.path().join(".env").exists());
+
+    Ok(())
+}
+
+#[tokio::test]
+async fn add_streamable_http_with_custom_env_var() -> Result<()> {
+    let codex_home = TempDir::new()?;
+
+    let mut add_cmd = codex_command(codex_home.path())?;
+    add_cmd
+        .args([
+            "mcp",
+            "add",
+            "issues",
+            "--url",
+            "https://example.com/issues",
+            "--bearer-token-env-var",
+            "GITHUB_TOKEN",
+        ])
+        .assert()
+        .success();
+
+    let servers = load_global_mcp_servers(codex_home.path()).await?;
+    let issues = servers.get("issues").expect("issues server should exist");
+    match &issues.transport {
+        McpServerTransportConfig::StreamableHttp {
+            url,
+            bearer_token_env_var,
+        } => {
+            assert_eq!(url, "https://example.com/issues");
+            assert_eq!(bearer_token_env_var.as_deref(), Some("GITHUB_TOKEN"));
+        }
+        other => panic!("unexpected transport: {other:?}"),
+    }
+    assert!(issues.enabled);
+    Ok(())
+}
+
+#[tokio::test]
+async fn add_streamable_http_rejects_removed_flag() -> Result<()> {
+    let codex_home = TempDir::new()?;
+
+    let mut add_cmd = codex_command(codex_home.path())?;
+    add_cmd
+        .args([
+            "mcp",
+            "add",
+            "github",
+            "--url",
+            "https://example.com/mcp",
+            "--with-bearer-token",
+        ])
+        .assert()
+        .failure()
+        .stderr(contains("--with-bearer-token"));
+
+    let servers = load_global_mcp_servers(codex_home.path()).await?;
+    assert!(servers.is_empty());
+
+    Ok(())
+}
+
+#[tokio::test]
+async fn add_cant_add_command_and_url() -> Result<()> {
+    let codex_home = TempDir::new()?;
+
+    let mut add_cmd = codex_command(codex_home.path())?;
+    add_cmd
+        .args([
+            "mcp",
+            "add",
+            "github",
+            "--url",
+            "https://example.com/mcp",
+            "--command",
+            "--",
+            "echo",
+            "hello",
+        ])
+        .assert()
+        .failure()
+        .stderr(contains("unexpected argument '--command' found"));
+
+    let servers = load_global_mcp_servers(codex_home.path()).await?;
+    assert!(servers.is_empty());
+
+    Ok(())
+}
@@ -1,6 +1,7 @@
 use std::path::Path;

 use anyhow::Result;
+use predicates::prelude::PredicateBooleanExt;
 use predicates::str::contains;
 use pretty_assertions::assert_eq;
 use serde_json::Value as JsonValue;

@@ -53,6 +54,10 @@ fn list_and_get_render_expected_output() -> Result<()> {
     assert!(stdout.contains("docs"));
     assert!(stdout.contains("docs-server"));
     assert!(stdout.contains("TOKEN=secret"));
+    assert!(stdout.contains("Status"));
+    assert!(stdout.contains("Auth"));
+    assert!(stdout.contains("enabled"));
+    assert!(stdout.contains("Unsupported"));

     let mut list_json_cmd = codex_command(codex_home.path())?;
     let json_output = list_json_cmd.args(["mcp", "list", "--json"]).output()?;

@@ -64,6 +69,7 @@ fn list_and_get_render_expected_output() -> Result<()> {
         json!([
             {
                 "name": "docs",
+                "enabled": true,
                 "transport": {
                     "type": "stdio",
                     "command": "docs-server",

@@ -76,7 +82,8 @@ fn list_and_get_render_expected_output() -> Result<()> {
                 }
             },
             "startup_timeout_sec": null,
-            "tool_timeout_sec": null
+            "tool_timeout_sec": null,
+            "auth_status": "unsupported"
             }
         ]
     )

@@ -91,6 +98,7 @@ fn list_and_get_render_expected_output() -> Result<()> {
     assert!(stdout.contains("command: docs-server"));
     assert!(stdout.contains("args: --port 4000"));
     assert!(stdout.contains("env: TOKEN=secret"));
+    assert!(stdout.contains("enabled: true"));
     assert!(stdout.contains("remove: codex mcp remove docs"));

     let mut get_json_cmd = codex_command(codex_home.path())?;

@@ -98,7 +106,7 @@ fn list_and_get_render_expected_output() -> Result<()> {
         .args(["mcp", "get", "docs", "--json"])
         .assert()
         .success()
-        .stdout(contains("\"name\": \"docs\""));
+        .stdout(contains("\"name\": \"docs\"").and(contains("\"enabled\": true")));

     Ok(())
 }
@@ -11,10 +11,10 @@ path = "src/lib.rs"
workspace = true

[dependencies]
anyhow = "1"
base64 = "0.22"
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4", features = ["derive"] }
anyhow = { workspace = true }
base64 = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
clap = { workspace = true, features = ["derive"] }
codex-cloud-tasks-client = { path = "../cloud-tasks-client", features = [
    "mock",
    "online",
@@ -23,16 +23,16 @@ codex-common = { path = "../common", features = ["cli"] }
codex-core = { path = "../core" }
codex-login = { path = "../login" }
codex-tui = { path = "../tui" }
crossterm = { version = "0.28.1", features = ["event-stream"] }
ratatui = { version = "0.29.0" }
reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
tokio-stream = "0.1.17"
tracing = { version = "0.1.41", features = ["log"] }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
unicode-width = "0.1"
crossterm = { workspace = true, features = ["event-stream"] }
ratatui = { workspace = true }
reqwest = { workspace = true, features = ["json"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt-multi-thread"] }
tokio-stream = { workspace = true }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
unicode-width = { workspace = true }

[dev-dependencies]
async-trait = "0.1"
async-trait = { workspace = true }
@@ -1,3 +1,4 @@
use clap::Args;
use clap::Parser;
use codex_common::CliConfigOverrides;

@@ -6,4 +7,43 @@ use codex_common::CliConfigOverrides;
pub struct Cli {
    #[clap(skip)]
    pub config_overrides: CliConfigOverrides,

    #[command(subcommand)]
    pub command: Option<Command>,
}

#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Submit a new Codex Cloud task without launching the TUI.
    Exec(ExecCommand),
}

#[derive(Debug, Args)]
pub struct ExecCommand {
    /// Task prompt to run in Codex Cloud.
    #[arg(value_name = "QUERY")]
    pub query: Option<String>,

    /// Target environment identifier (see `codex cloud` to browse).
    #[arg(long = "env", value_name = "ENV_ID")]
    pub environment: String,

    /// Number of assistant attempts (best-of-N).
    #[arg(
        long = "attempts",
        default_value_t = 1usize,
        value_parser = parse_attempts
    )]
    pub attempts: usize,
}

fn parse_attempts(input: &str) -> Result<usize, String> {
    let value: usize = input
        .parse()
        .map_err(|_| "attempts must be an integer between 1 and 4".to_string())?;
    if (1..=4).contains(&value) {
        Ok(value)
    } else {
        Err("attempts must be between 1 and 4".to_string())
    }
}
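Note: because `parse_attempts` is wired in via clap's `value_parser`, the range check runs during argument parsing, so an out-of-range `--attempts` fails with a usage error before any command logic executes. A minimal standalone sketch of the same pattern (hypothetical binary; clap 4 derive):

use clap::Parser;

#[derive(Parser)]
struct Demo {
    /// Accepts only 1..=4; enforced by clap before main body runs.
    #[arg(long, default_value_t = 1, value_parser = parse_attempts)]
    attempts: usize,
}

fn parse_attempts(input: &str) -> Result<usize, String> {
    let value: usize = input
        .parse()
        .map_err(|_| "attempts must be an integer between 1 and 4".to_string())?;
    if (1..=4).contains(&value) {
        Ok(value)
    } else {
        Err("attempts must be between 1 and 4".to_string())
    }
}

fn main() {
    // `demo --attempts 5` exits with a parse error; `demo --attempts 3` prints 3.
    println!("{}", Demo::parse().attempts);
}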
@@ -7,7 +7,9 @@ mod ui;
pub mod util;
pub use cli::Cli;

use anyhow::anyhow;
use std::io::IsTerminal;
use std::io::Read;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
@@ -23,6 +25,175 @@ struct ApplyJob {
    diff_override: Option<String>,
}

struct BackendContext {
    backend: Arc<dyn codex_cloud_tasks_client::CloudBackend>,
    base_url: String,
}

async fn init_backend(user_agent_suffix: &str) -> anyhow::Result<BackendContext> {
    let use_mock = matches!(
        std::env::var("CODEX_CLOUD_TASKS_MODE").ok().as_deref(),
        Some("mock") | Some("MOCK")
    );
    let base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
        .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());

    set_user_agent_suffix(user_agent_suffix);

    if use_mock {
        return Ok(BackendContext {
            backend: Arc::new(codex_cloud_tasks_client::MockClient),
            base_url,
        });
    }

    let ua = codex_core::default_client::get_codex_user_agent();
    let mut http = codex_cloud_tasks_client::HttpClient::new(base_url.clone())?.with_user_agent(ua);
    let style = if base_url.contains("/backend-api") {
        "wham"
    } else {
        "codex-api"
    };
    append_error_log(format!("startup: base_url={base_url} path_style={style}"));

    let auth = match codex_core::config::find_codex_home()
        .ok()
        .map(|home| codex_login::AuthManager::new(home, false))
        .and_then(|am| am.auth())
    {
        Some(auth) => auth,
        None => {
            eprintln!(
                "Not signed in. Please run 'codex login' to sign in with ChatGPT, then re-run 'codex cloud'."
            );
            std::process::exit(1);
        }
    };

    if let Some(acc) = auth.get_account_id() {
        append_error_log(format!("auth: mode=ChatGPT account_id={acc}"));
    }

    let token = match auth.get_token().await {
        Ok(t) if !t.is_empty() => t,
        _ => {
            eprintln!(
                "Not signed in. Please run 'codex login' to sign in with ChatGPT, then re-run 'codex cloud'."
            );
            std::process::exit(1);
        }
    };

    http = http.with_bearer_token(token.clone());
    if let Some(acc) = auth
        .get_account_id()
        .or_else(|| util::extract_chatgpt_account_id(&token))
    {
        append_error_log(format!("auth: set ChatGPT-Account-Id header: {acc}"));
        http = http.with_chatgpt_account_id(acc);
    }

    Ok(BackendContext {
        backend: Arc::new(http),
        base_url,
    })
}

async fn run_exec_command(args: crate::cli::ExecCommand) -> anyhow::Result<()> {
    let crate::cli::ExecCommand {
        query,
        environment,
        attempts,
    } = args;
    let ctx = init_backend("codex_cloud_tasks_exec").await?;
    let prompt = resolve_query_input(query)?;
    let env_id = resolve_environment_id(&ctx, &environment).await?;
    let created = codex_cloud_tasks_client::CloudBackend::create_task(
        &*ctx.backend,
        &env_id,
        &prompt,
        "main",
        false,
        attempts,
    )
    .await?;
    let url = util::task_url(&ctx.base_url, &created.id.0);
    println!("{url}");
    Ok(())
}

async fn resolve_environment_id(ctx: &BackendContext, requested: &str) -> anyhow::Result<String> {
    let trimmed = requested.trim();
    if trimmed.is_empty() {
        return Err(anyhow!("environment id must not be empty"));
    }
    let normalized = util::normalize_base_url(&ctx.base_url);
    let headers = util::build_chatgpt_headers().await;
    let environments = crate::env_detect::list_environments(&normalized, &headers).await?;
    if environments.is_empty() {
        return Err(anyhow!(
            "no cloud environments are available for this workspace"
        ));
    }

    if let Some(row) = environments.iter().find(|row| row.id == trimmed) {
        return Ok(row.id.clone());
    }

    let label_matches = environments
        .iter()
        .filter(|row| {
            row.label
                .as_deref()
                .map(|label| label.eq_ignore_ascii_case(trimmed))
                .unwrap_or(false)
        })
        .collect::<Vec<_>>();
    match label_matches.as_slice() {
        [] => Err(anyhow!(
            "environment '{trimmed}' not found; run `codex cloud` to list available environments"
        )),
        [single] => Ok(single.id.clone()),
        [first, rest @ ..] => {
            let first_id = &first.id;
            if rest.iter().all(|row| row.id == *first_id) {
                Ok(first_id.clone())
            } else {
                Err(anyhow!(
                    "environment label '{trimmed}' is ambiguous; run `codex cloud` to pick the desired environment id"
                ))
            }
        }
    }
}

fn resolve_query_input(query_arg: Option<String>) -> anyhow::Result<String> {
    match query_arg {
        Some(q) if q != "-" => Ok(q),
        maybe_dash => {
            let force_stdin = matches!(maybe_dash.as_deref(), Some("-"));
            if std::io::stdin().is_terminal() && !force_stdin {
                return Err(anyhow!(
                    "no query provided. Pass one as an argument or pipe it via stdin."
                ));
            }
            if !force_stdin {
                eprintln!("Reading query from stdin...");
            }
            let mut buffer = String::new();
            std::io::stdin()
                .read_to_string(&mut buffer)
                .map_err(|e| anyhow!("failed to read query from stdin: {e}"))?;
            if buffer.trim().is_empty() {
                return Err(anyhow!(
                    "no query provided via stdin (received empty input)."
                ));
            }
            Ok(buffer)
        }
    }
}

fn level_from_status(status: codex_cloud_tasks_client::ApplyStatus) -> app::ApplyResultLevel {
    match status {
        codex_cloud_tasks_client::ApplyStatus::Success => app::ApplyResultLevel::Success,
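Note: `resolve_query_input` treats a literal `-` argument as "read stdin" and otherwise refuses to block waiting on an interactive terminal. A minimal standalone sketch of the same convention (std only; names are illustrative, not from the crate):

use std::io::IsTerminal;
use std::io::Read;

fn query_from_arg_or_stdin(arg: Option<String>) -> Result<String, String> {
    match arg {
        // A plain argument wins; a literal `-` falls through to stdin.
        Some(q) if q != "-" => Ok(q),
        maybe_dash => {
            let force_stdin = matches!(maybe_dash.as_deref(), Some("-"));
            if std::io::stdin().is_terminal() && !force_stdin {
                // Don't silently block waiting for a terminal user to type.
                return Err("no query provided".to_string());
            }
            let mut buffer = String::new();
            std::io::stdin()
                .read_to_string(&mut buffer)
                .map_err(|e| e.to_string())?;
            if buffer.trim().is_empty() {
                return Err("empty stdin".to_string());
            }
            Ok(buffer)
        }
    }
}

fn main() {
    // `echo "fix the bug" | demo -` prints the piped prompt.
    println!("{:?}", query_from_arg_or_stdin(std::env::args().nth(1)));
}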
@@ -148,7 +319,14 @@ fn spawn_apply(
// (no standalone patch summarizer needed – UI displays raw diffs)

/// Entry point for the `codex cloud` subcommand.
pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
pub async fn run_main(cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
    if let Some(command) = cli.command {
        return match command {
            crate::cli::Command::Exec(args) => run_exec_command(args).await,
        };
    }
    let Cli { .. } = cli;

    // Very minimal logging setup; mirrors other crates' pattern.
    let default_level = "error";
    let _ = tracing_subscriber::fmt()
@@ -162,72 +340,8 @@ pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option<PathBuf>) -> a
        .try_init();

    info!("Launching Cloud Tasks list UI");
    set_user_agent_suffix("codex_cloud_tasks_tui");

    // Default to online unless explicitly configured to use mock.
    let use_mock = matches!(
        std::env::var("CODEX_CLOUD_TASKS_MODE").ok().as_deref(),
        Some("mock") | Some("MOCK")
    );

    let backend: Arc<dyn codex_cloud_tasks_client::CloudBackend> = if use_mock {
        Arc::new(codex_cloud_tasks_client::MockClient)
    } else {
        // Build an HTTP client against the configured (or default) base URL.
        let base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
            .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
        let ua = codex_core::default_client::get_codex_user_agent();
        let mut http =
            codex_cloud_tasks_client::HttpClient::new(base_url.clone())?.with_user_agent(ua);
        // Log which base URL and path style we're going to use.
        let style = if base_url.contains("/backend-api") {
            "wham"
        } else {
            "codex-api"
        };
        append_error_log(format!("startup: base_url={base_url} path_style={style}"));

        // Require ChatGPT login (SWIC). Exit with a clear message if missing.
        let _token = match codex_core::config::find_codex_home()
            .ok()
            .map(|home| codex_login::AuthManager::new(home, false))
            .and_then(|am| am.auth())
        {
            Some(auth) => {
                // Log account context for debugging workspace selection.
                if let Some(acc) = auth.get_account_id() {
                    append_error_log(format!("auth: mode=ChatGPT account_id={acc}"));
                }
                match auth.get_token().await {
                    Ok(t) if !t.is_empty() => {
                        // Attach token and ChatGPT-Account-Id header if available
                        http = http.with_bearer_token(t.clone());
                        if let Some(acc) = auth
                            .get_account_id()
                            .or_else(|| util::extract_chatgpt_account_id(&t))
                        {
                            append_error_log(format!("auth: set ChatGPT-Account-Id header: {acc}"));
                            http = http.with_chatgpt_account_id(acc);
                        }
                        t
                    }
                    _ => {
                        eprintln!(
                            "Not signed in. Please run 'codex login' to sign in with ChatGPT, then re-run 'codex cloud'."
                        );
                        std::process::exit(1);
                    }
                }
            }
            None => {
                eprintln!(
                    "Not signed in. Please run 'codex login' to sign in with ChatGPT, then re-run 'codex cloud'."
                );
                std::process::exit(1);
            }
        };
        Arc::new(http)
    };
    let BackendContext { backend, .. } = init_backend("codex_cloud_tasks_tui").await?;
    let backend = backend;

    // Terminal setup
    use crossterm::ExecutableCommand;
@@ -91,3 +91,18 @@ pub async fn build_chatgpt_headers() -> HeaderMap {
    }
    headers
}

/// Construct a browser-friendly task URL for the given backend base URL.
pub fn task_url(base_url: &str, task_id: &str) -> String {
    let normalized = normalize_base_url(base_url);
    if let Some(root) = normalized.strip_suffix("/backend-api") {
        return format!("{root}/codex/tasks/{task_id}");
    }
    if let Some(root) = normalized.strip_suffix("/api/codex") {
        return format!("{root}/codex/tasks/{task_id}");
    }
    if normalized.ends_with("/codex") {
        return format!("{normalized}/tasks/{task_id}");
    }
    format!("{normalized}/codex/tasks/{task_id}")
}
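Note: `task_url` maps each backend path style onto the public task page. A standalone sketch of the mapping (here normalization is assumed to just trim a trailing slash; the real `normalize_base_url` is not shown in this diff):

fn task_url(base_url: &str, task_id: &str) -> String {
    // Assumption: normalization only strips a trailing '/'.
    let normalized = base_url.trim_end_matches('/');
    if let Some(root) = normalized.strip_suffix("/backend-api") {
        return format!("{root}/codex/tasks/{task_id}");
    }
    if let Some(root) = normalized.strip_suffix("/api/codex") {
        return format!("{root}/codex/tasks/{task_id}");
    }
    if normalized.ends_with("/codex") {
        return format!("{normalized}/tasks/{task_id}");
    }
    format!("{normalized}/codex/tasks/{task_id}")
}

fn main() {
    // The default base URL lands on chatgpt.com/codex/tasks/<id>.
    assert_eq!(
        task_url("https://chatgpt.com/backend-api", "task_123"),
        "https://chatgpt.com/codex/tasks/task_123"
    );
    assert_eq!(
        task_url("https://example.com/api/codex", "task_123"),
        "https://example.com/codex/tasks/task_123"
    );
}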
@@ -33,6 +33,7 @@ env-flags = { workspace = true }
eventsource-stream = { workspace = true }
futures = { workspace = true }
indexmap = { workspace = true }
ignore = { workspace = true }
libc = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }
@@ -43,6 +44,7 @@ reqwest = { workspace = true, features = ["json", "stream"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha1 = { workspace = true }
sha2 = { workspace = true }
shlex = { workspace = true }
similar = { workspace = true }
strum_macros = { workspace = true }
@@ -61,7 +63,7 @@ tokio = { workspace = true, features = [
    "rt-multi-thread",
    "signal",
] }
tokio-util = { workspace = true }
tokio-util = { workspace = true, features = ["rt"] }
toml = { workspace = true }
toml_edit = { workspace = true }
tracing = { workspace = true, features = ["log"] }
@@ -389,10 +389,12 @@ async fn process_chat_sse<S>(
    let mut reasoning_text = String::new();

    loop {
        let sse = match otel_event_manager
            .log_sse_event(|| timeout(idle_timeout, stream.next()))
            .await
        {
        let start = std::time::Instant::now();
        let response = timeout(idle_timeout, stream.next()).await;
        let duration = start.elapsed();
        otel_event_manager.log_sse_event(&response, duration);

        let sse = match response {
            Ok(Some(Ok(ev))) => ev,
            Ok(Some(Err(e))) => {
                let _ = tx_event
@@ -47,6 +47,7 @@ use crate::openai_tools::create_tools_json_for_responses_api;
use crate::protocol::RateLimitSnapshot;
use crate::protocol::RateLimitWindow;
use crate::protocol::TokenUsage;
use crate::state::TaskKind;
use crate::token_data::PlanType;
use crate::util::backoff;
use codex_otel::otel_event_manager::OtelEventManager;
@@ -123,8 +124,16 @@ impl ModelClient {
    /// the provider config. Public callers always invoke `stream()` – the
    /// specialised helpers are private to avoid accidental misuse.
    pub async fn stream(&self, prompt: &Prompt) -> Result<ResponseStream> {
        self.stream_with_task_kind(prompt, TaskKind::Regular).await
    }

    pub(crate) async fn stream_with_task_kind(
        &self,
        prompt: &Prompt,
        task_kind: TaskKind,
    ) -> Result<ResponseStream> {
        match self.provider.wire_api {
            WireApi::Responses => self.stream_responses(prompt).await,
            WireApi::Responses => self.stream_responses(prompt, task_kind).await,
            WireApi::Chat => {
                // Create the raw streaming connection first.
                let response_stream = stream_chat_completions(
@@ -165,7 +174,11 @@ impl ModelClient {
    }

    /// Implementation for the OpenAI *Responses* experimental API.
    async fn stream_responses(&self, prompt: &Prompt) -> Result<ResponseStream> {
    async fn stream_responses(
        &self,
        prompt: &Prompt,
        task_kind: TaskKind,
    ) -> Result<ResponseStream> {
        if let Some(path) = &*CODEX_RS_SSE_FIXTURE {
            // short circuit for tests
            warn!(path, "Streaming from fixture");
@@ -244,7 +257,7 @@ impl ModelClient {
        let max_attempts = self.provider.request_max_retries();
        for attempt in 0..=max_attempts {
            match self
                .attempt_stream_responses(attempt, &payload_json, &auth_manager)
                .attempt_stream_responses(attempt, &payload_json, &auth_manager, task_kind)
                .await
            {
                Ok(stream) => {
@@ -272,6 +285,7 @@ impl ModelClient {
        attempt: u64,
        payload_json: &Value,
        auth_manager: &Option<Arc<AuthManager>>,
        task_kind: TaskKind,
    ) -> std::result::Result<ResponseStream, StreamAttemptError> {
        // Always fetch the latest auth in case a prior attempt refreshed the token.
        let auth = auth_manager.as_ref().and_then(|m| m.auth());
@@ -294,6 +308,7 @@ impl ModelClient {
            .header("conversation_id", self.conversation_id.to_string())
            .header("session_id", self.conversation_id.to_string())
            .header(reqwest::header::ACCEPT, "text/event-stream")
            .header("Codex-Task-Type", task_kind.header_value())
            .json(payload_json);

        if let Some(auth) = auth.as_ref()
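Note: the task kind rides along as a `Codex-Task-Type` request header so the backend can distinguish regular turns from other task types. `header_value()` itself is not part of this diff; a plausible sketch of its shape, inferred only from the call sites above (the variant set and string values are assumptions):

// Hypothetical; the real enum lives in crate::state and is not shown here.
#[derive(Clone, Copy)]
enum TaskKind {
    Regular,
    Compact,
}

impl TaskKind {
    /// Stable lowercase tokens are a safe choice for an HTTP header value.
    fn header_value(self) -> &'static str {
        match self {
            TaskKind::Regular => "regular",
            TaskKind::Compact => "compact",
        }
    }
}

fn main() {
    assert_eq!(TaskKind::Compact.header_value(), "compact");
}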
@@ -649,10 +664,12 @@ async fn process_sse<S>(
    let mut response_error: Option<CodexErr> = None;

    loop {
        let sse = match otel_event_manager
            .log_sse_event(|| timeout(idle_timeout, stream.next()))
            .await
        {
        let start = std::time::Instant::now();
        let response = timeout(idle_timeout, stream.next()).await;
        let duration = start.elapsed();
        otel_event_manager.log_sse_event(&response, duration);

        let sse = match response {
            Ok(Some(Ok(sse))) => sse,
            Ok(Some(Err(e))) => {
                debug!("SSE Error: {e:#}");
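Note: both SSE loops now measure the poll themselves and pass the outcome plus elapsed time to the OTel event manager, rather than wrapping the future in a logging closure. A minimal standalone sketch of the pattern (tokio with the "time", "macros", and "rt" features; the logging sink is a stand-in):

use std::time::Duration;
use std::time::Instant;

use tokio::time::timeout;
use tokio_stream::StreamExt;

#[tokio::main]
async fn main() {
    let mut stream = tokio_stream::iter(vec!["event-1", "event-2"]);
    let idle_timeout = Duration::from_secs(5);

    loop {
        // Time exactly the wait on the stream (or the idle timeout).
        let start = Instant::now();
        let response = timeout(idle_timeout, stream.next()).await;
        let duration = start.elapsed();
        // Stand-in for otel_event_manager.log_sse_event(&response, duration).
        println!("polled in {duration:?}: {response:?}");

        match response {
            Ok(Some(ev)) => println!("got {ev}"),
            Ok(None) => break, // stream closed
            Err(_) => break,   // idle timeout elapsed
        }
    }
}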
168 codex-rs/core/src/codebase_change_notice.rs Normal file
@@ -0,0 +1,168 @@
use std::fmt::Write;

use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;

use crate::codebase_snapshot::SnapshotDiff;

pub(crate) const CODEBASE_CHANGE_NOTICE_MAX_PATHS: usize = 40;

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct CodebaseChangeNotice {
    added: Vec<String>,
    removed: Vec<String>,
    modified: Vec<String>,
    truncated: bool,
}

impl CodebaseChangeNotice {
    pub(crate) fn new(diff: SnapshotDiff, limit: usize) -> Self {
        let mut remaining = limit;
        let mut truncated = false;

        let added = take_paths(diff.added, &mut remaining, &mut truncated);
        let removed = take_paths(diff.removed, &mut remaining, &mut truncated);
        let modified = take_paths(diff.modified, &mut remaining, &mut truncated);

        Self {
            added,
            removed,
            modified,
            truncated,
        }
    }

    pub(crate) fn is_empty(&self) -> bool {
        self.added.is_empty() && self.removed.is_empty() && self.modified.is_empty()
    }

    pub(crate) fn serialize_to_xml(&self) -> String {
        let mut output = String::new();
        if self.truncated {
            let _ = writeln!(output, "<codebase_changes truncated=\"true\">");
        } else {
            let _ = writeln!(output, "<codebase_changes>");
        }

        let mut summary_parts = Vec::new();
        if !self.added.is_empty() {
            summary_parts.push(format!("added {}", self.added.len()));
        }
        if !self.removed.is_empty() {
            summary_parts.push(format!("removed {}", self.removed.len()));
        }
        if !self.modified.is_empty() {
            summary_parts.push(format!("modified {}", self.modified.len()));
        }

        if summary_parts.is_empty() {
            let _ = writeln!(output, " <summary>no changes</summary>");
        } else {
            let summary = summary_parts.join(", ");
            let _ = writeln!(output, " <summary>{summary}</summary>");
        }

        serialize_section(&mut output, "added", &self.added);
        serialize_section(&mut output, "removed", &self.removed);
        serialize_section(&mut output, "modified", &self.modified);
        if self.truncated {
            let _ = writeln!(output, " <note>additional paths omitted</note>");
        }

        let _ = writeln!(output, "</codebase_changes>");
        output
    }
}

fn take_paths(mut paths: Vec<String>, remaining: &mut usize, truncated: &mut bool) -> Vec<String> {
    if *remaining == 0 {
        if !paths.is_empty() {
            *truncated = true;
        }
        return Vec::new();
    }

    if paths.len() > *remaining {
        paths.truncate(*remaining);
        *truncated = true;
    }

    *remaining -= paths.len();
    paths
}

fn serialize_section(output: &mut String, tag: &str, paths: &[String]) {
    if paths.is_empty() {
        return;
    }

    let _ = writeln!(output, " <{tag}>");
    for path in paths {
        let _ = writeln!(output, " <path>{}</path>", escape_xml(path));
    }
    let _ = writeln!(output, " </{tag}>");
}

fn escape_xml(value: &str) -> String {
    let mut escaped = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            '&' => escaped.push_str("&amp;"),
            '<' => escaped.push_str("&lt;"),
            '>' => escaped.push_str("&gt;"),
            '"' => escaped.push_str("&quot;"),
            '\'' => escaped.push_str("&apos;"),
            other => escaped.push(other),
        }
    }
    escaped
}

impl From<CodebaseChangeNotice> for ResponseItem {
    fn from(notice: CodebaseChangeNotice) -> Self {
        ResponseItem::Message {
            id: None,
            role: "user".to_string(),
            content: vec![ContentItem::InputText {
                text: notice.serialize_to_xml(),
            }],
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn constructs_notice_with_limit() {
        let diff = SnapshotDiff {
            added: vec!["a.rs".to_string(), "b.rs".to_string()],
            removed: vec!["c.rs".to_string()],
            modified: vec!["d.rs".to_string(), "e.rs".to_string()],
        };

        let notice = CodebaseChangeNotice::new(diff, 3);
        assert!(notice.truncated);
        assert_eq!(
            notice.added.len() + notice.removed.len() + notice.modified.len(),
            3
        );
    }

    #[test]
    fn serializes_notice() {
        let diff = SnapshotDiff {
            added: vec!["src/lib.rs".to_string()],
            removed: Vec::new(),
            modified: vec!["src/main.rs".to_string()],
        };
        let notice = CodebaseChangeNotice::new(diff, CODEBASE_CHANGE_NOTICE_MAX_PATHS);
        let xml = notice.serialize_to_xml();
        assert!(xml.contains("<added>"));
        assert!(xml.contains("<modified>"));
        assert!(xml.contains("src/lib.rs"));
        assert!(xml.contains("src/main.rs"));
    }
}
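Note: for a small diff, `serialize_to_xml` above yields a compact, line-oriented XML block. A worked example of the expected output, derived from the `writeln!` literals as written (the single-space indents mirror those literals; XML entities in paths follow `escape_xml`):

// Expected notice for added = ["src/lib.rs"], modified = ["src/main.rs"].
const EXAMPLE_NOTICE: &str = concat!(
    "<codebase_changes>\n",
    " <summary>added 1, modified 1</summary>\n",
    " <added>\n",
    " <path>src/lib.rs</path>\n",
    " </added>\n",
    " <modified>\n",
    " <path>src/main.rs</path>\n",
    " </modified>\n",
    "</codebase_changes>\n",
);

fn main() {
    // The notice is wrapped in a user-role ResponseItem, so the model sees it
    // as ordinary conversation input.
    println!("{EXAMPLE_NOTICE}");
}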
278 codex-rs/core/src/codebase_snapshot.rs Normal file
@@ -0,0 +1,278 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::path::PathBuf;
use std::time::SystemTime;

use anyhow::Context;
use anyhow::Result;
use ignore::WalkBuilder;
use sha2::Digest;
use sha2::Sha256;
use tokio::task;
use tracing::warn;

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct CodebaseSnapshot {
    root: PathBuf,
    entries: BTreeMap<String, EntryFingerprint>,
    root_digest: DigestBytes,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct EntryFingerprint {
    pub kind: EntryKind,
    pub digest: DigestBytes,
    pub size: u64,
    pub modified_millis: Option<u128>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub(crate) enum EntryKind {
    File,
    Symlink,
}

#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub(crate) struct SnapshotDiff {
    pub added: Vec<String>,
    pub removed: Vec<String>,
    pub modified: Vec<String>,
}

impl SnapshotDiff {
    pub fn is_empty(&self) -> bool {
        self.added.is_empty() && self.removed.is_empty() && self.modified.is_empty()
    }
}

pub(crate) type DigestBytes = [u8; 32];

impl CodebaseSnapshot {
    pub(crate) async fn capture(root: PathBuf) -> Result<Self> {
        task::spawn_blocking(move || Self::from_disk(&root))
            .await
            .map_err(|e| anyhow::anyhow!("codebase snapshot task failed: {e}"))?
    }

    pub(crate) fn from_disk(root: &Path) -> Result<Self> {
        if !root.exists() {
            return Ok(Self::empty(root));
        }

        let mut entries: BTreeMap<String, EntryFingerprint> = BTreeMap::new();

        let mut walker = WalkBuilder::new(root);
        walker
            .hidden(false)
            .git_ignore(true)
            .git_exclude(true)
            .parents(true)
            .ignore(true)
            .follow_links(false);

        for result in walker.build() {
            let entry = match result {
                Ok(entry) => entry,
                Err(err) => {
                    warn!("codebase snapshot failed to read entry: {err}");
                    continue;
                }
            };

            let path = entry.path();
            if entry.depth() == 0 {
                continue;
            }

            let relative = match path.strip_prefix(root) {
                Ok(rel) => rel,
                Err(_) => continue,
            };
            if relative.as_os_str().is_empty() {
                continue;
            }
            let rel_string = normalize_rel_path(relative);

            let file_type = match entry.file_type() {
                Some(file_type) => file_type,
                None => continue,
            };

            if file_type.is_dir() {
                continue;
            }

            if file_type.is_file() {
                match fingerprint_file(path) {
                    Ok(fp) => {
                        entries.insert(rel_string, fp);
                    }
                    Err(err) => {
                        warn!(
                            "codebase snapshot failed to hash file {}: {err}",
                            path.display()
                        );
                    }
                }
                continue;
            }

            if file_type.is_symlink() {
                match fingerprint_symlink(path) {
                    Ok(fp) => {
                        entries.insert(rel_string, fp);
                    }
                    Err(err) => {
                        warn!(
                            "codebase snapshot failed to hash symlink {}: {err}",
                            path.display()
                        );
                    }
                }
                continue;
            }
        }

        let root_digest = compute_root_digest(&entries);

        Ok(Self {
            root: root.to_path_buf(),
            entries,
            root_digest,
        })
    }

    pub(crate) fn diff(&self, newer: &CodebaseSnapshot) -> SnapshotDiff {
        let mut diff = SnapshotDiff::default();

        for (path, fingerprint) in &newer.entries {
            match self.entries.get(path) {
                None => diff.added.push(path.clone()),
                Some(existing) if existing != fingerprint => diff.modified.push(path.clone()),
                _ => {}
            }
        }

        for path in self.entries.keys() {
            if !newer.entries.contains_key(path) {
                diff.removed.push(path.clone());
            }
        }

        diff
    }

    pub(crate) fn root(&self) -> &Path {
        &self.root
    }

    fn empty(root: &Path) -> Self {
        Self {
            root: root.to_path_buf(),
            entries: BTreeMap::new(),
            root_digest: Sha256::digest(b"").into(),
        }
    }
}

fn fingerprint_file(path: &Path) -> Result<EntryFingerprint> {
    let metadata = path
        .metadata()
        .with_context(|| format!("metadata {}", path.display()))?;
    let mut file = File::open(path).with_context(|| format!("open {}", path.display()))?;

    let mut hasher = Sha256::new();
    let mut buf = [0u8; 64 * 1024];
    loop {
        let read = file.read(&mut buf)?;
        if read == 0 {
            break;
        }
        hasher.update(&buf[..read]);
    }

    Ok(EntryFingerprint {
        kind: EntryKind::File,
        digest: hasher.finalize().into(),
        size: metadata.len(),
        modified_millis: metadata.modified().ok().and_then(system_time_to_millis),
    })
}

fn fingerprint_symlink(path: &Path) -> Result<EntryFingerprint> {
    let target =
        std::fs::read_link(path).with_context(|| format!("read_link {}", path.display()))?;
    let mut hasher = Sha256::new();
    let target_str = normalize_rel_path(&target);
    hasher.update(target_str.as_bytes());
    Ok(EntryFingerprint {
        kind: EntryKind::Symlink,
        digest: hasher.finalize().into(),
        size: 0,
        modified_millis: None,
    })
}

fn compute_root_digest(entries: &BTreeMap<String, EntryFingerprint>) -> DigestBytes {
    let mut hasher = Sha256::new();
    for (path, fingerprint) in entries {
        hasher.update(path.as_bytes());
        hasher.update(fingerprint.digest);
        hasher.update([fingerprint.kind as u8]);
        hasher.update(fingerprint.size.to_le_bytes());
        if let Some(modified) = fingerprint.modified_millis {
            hasher.update(modified.to_le_bytes());
        }
    }
    hasher.finalize().into()
}

fn normalize_rel_path(path: &Path) -> String {
    let s = path_to_cow(path);
    if s.is_empty() {
        String::new()
    } else {
        s.replace('\\', "/")
    }
}

fn path_to_cow(path: &Path) -> Cow<'_, str> {
    path.to_string_lossy()
}

fn system_time_to_millis(ts: SystemTime) -> Option<u128> {
    ts.duration_since(SystemTime::UNIX_EPOCH)
        .map(|duration| duration.as_millis())
        .ok()
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use tempfile::tempdir;

    #[test]
    fn diff_tracks_added_modified_removed() {
        let dir = tempdir().unwrap();
        let root = dir.path();

        std::fs::write(root.join("file_a.txt"), "alpha").unwrap();
        std::fs::write(root.join("file_b.txt"), "bravo").unwrap();
        let snapshot_one = CodebaseSnapshot::from_disk(root).unwrap();

        std::fs::write(root.join("file_a.txt"), "alpha-updated").unwrap();
        std::fs::remove_file(root.join("file_b.txt")).unwrap();
        std::fs::write(root.join("file_c.txt"), "charlie").unwrap();
        let snapshot_two = CodebaseSnapshot::from_disk(root).unwrap();

        let diff = snapshot_one.diff(&snapshot_two);
        assert_eq!(diff.added, vec!["file_c.txt".to_string()]);
        assert_eq!(diff.modified, vec!["file_a.txt".to_string()]);
        assert_eq!(diff.removed, vec!["file_b.txt".to_string()]);
    }
}
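Note: because `compute_root_digest` iterates a BTreeMap, entries hash in sorted path order and the root digest is independent of filesystem walk order. A standalone sketch of that property (sha2 crate; fingerprints reduced to raw digests):

use std::collections::BTreeMap;

use sha2::Digest;
use sha2::Sha256;

fn root_digest(entries: &BTreeMap<String, [u8; 32]>) -> [u8; 32] {
    let mut hasher = Sha256::new();
    // BTreeMap iteration is ordered by key, so insertion order cannot
    // change the result.
    for (path, digest) in entries {
        hasher.update(path.as_bytes());
        hasher.update(digest);
    }
    hasher.finalize().into()
}

fn main() {
    let mut a: BTreeMap<String, [u8; 32]> = BTreeMap::new();
    a.insert("src/lib.rs".to_string(), [1u8; 32]);
    a.insert("README.md".to_string(), [2u8; 32]);

    let mut b: BTreeMap<String, [u8; 32]> = BTreeMap::new();
    b.insert("README.md".to_string(), [2u8; 32]);
    b.insert("src/lib.rs".to_string(), [1u8; 32]);

    // Same contents, different insertion order, identical digest.
    assert_eq!(root_digest(&a), root_digest(&b));
}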
@@ -1,5 +1,6 @@
use std::borrow::Cow;
use std::fmt::Debug;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicU64;
@@ -17,13 +18,16 @@ use codex_apply_patch::ApplyPatchAction;
use codex_protocol::ConversationId;
use codex_protocol::protocol::ConversationPathResponseEvent;
use codex_protocol::protocol::ExitedReviewModeEvent;
use codex_protocol::protocol::McpAuthStatus;
use codex_protocol::protocol::ReviewRequest;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::TaskStartedEvent;
use codex_protocol::protocol::TurnAbortReason;
use codex_protocol::protocol::TurnContextItem;
use futures::future::BoxFuture;
use futures::prelude::*;
use futures::stream::FuturesOrdered;
use mcp_types::CallToolResult;
use serde_json;
use serde_json::Value;
@@ -40,6 +44,9 @@ use crate::apply_patch::convert_apply_patch_to_protocol;
use crate::client::ModelClient;
use crate::client_common::Prompt;
use crate::client_common::ResponseEvent;
use crate::codebase_change_notice::CODEBASE_CHANGE_NOTICE_MAX_PATHS;
use crate::codebase_change_notice::CodebaseChangeNotice;
use crate::codebase_snapshot::CodebaseSnapshot;
use crate::config::Config;
use crate::config_types::ShellEnvironmentPolicy;
use crate::conversation_history::ConversationHistory;
@@ -55,6 +62,7 @@ use crate::exec_command::WriteStdinParams;
use crate::executor::Executor;
use crate::executor::ExecutorConfig;
use crate::executor::normalize_exec_result;
use crate::mcp::auth::compute_auth_statuses;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::model_family::find_family_for_model;
use crate::openai_model_info::get_model_info;
@@ -96,6 +104,7 @@ use crate::rollout::RolloutRecorderParams;
use crate::shell;
use crate::state::ActiveTurn;
use crate::state::SessionServices;
use crate::state::TaskKind;
use crate::tasks::CompactTask;
use crate::tasks::RegularTask;
use crate::tasks::ReviewTask;
@@ -361,14 +370,32 @@ impl Session {

        let mcp_fut = McpConnectionManager::new(
            config.mcp_servers.clone(),
            config.use_experimental_use_rmcp_client,
            config
                .features
                .enabled(crate::features::Feature::RmcpClient),
            config.mcp_oauth_credentials_store_mode,
        );
        let default_shell_fut = shell::default_user_shell();
        let history_meta_fut = crate::message_history::history_metadata(&config);
        let auth_statuses_fut = compute_auth_statuses(
            config.mcp_servers.iter(),
            config.mcp_oauth_credentials_store_mode,
        );

        // Join all independent futures.
        let (rollout_recorder, mcp_res, default_shell, (history_log_id, history_entry_count)) =
            tokio::join!(rollout_fut, mcp_fut, default_shell_fut, history_meta_fut);
        let (
            rollout_recorder,
            mcp_res,
            default_shell,
            (history_log_id, history_entry_count),
            auth_statuses,
        ) = tokio::join!(
            rollout_fut,
            mcp_fut,
            default_shell_fut,
            history_meta_fut,
            auth_statuses_fut
        );

        let rollout_recorder = rollout_recorder.map_err(|e| {
            error!("failed to initialize rollout recorder: {e:#}");
@@ -395,11 +422,24 @@ impl Session {
        // Surface individual client start-up failures to the user.
        if !failed_clients.is_empty() {
            for (server_name, err) in failed_clients {
                let message = format!("MCP client for `{server_name}` failed to start: {err:#}");
                error!("{message}");
                let log_message =
                    format!("MCP client for `{server_name}` failed to start: {err:#}");
                error!("{log_message}");
                let display_message = if matches!(
                    auth_statuses.get(&server_name),
                    Some(McpAuthStatus::NotLoggedIn)
                ) {
                    format!(
                        "The {server_name} MCP server is not logged in. Run `codex mcp login {server_name}` to log in."
                    )
                } else {
                    log_message
                };
                post_session_configured_error_events.push(Event {
                    id: INITIAL_SUBMIT_ID.to_owned(),
                    msg: EventMsg::Error(ErrorEvent { message }),
                    msg: EventMsg::Error(ErrorEvent {
                        message: display_message,
                    }),
                });
            }
        }
@@ -442,12 +482,7 @@ impl Session {
            client,
            tools_config: ToolsConfig::new(&ToolsConfigParams {
                model_family: &config.model_family,
                include_plan_tool: config.include_plan_tool,
                include_apply_patch_tool: config.include_apply_patch_tool,
                include_web_search_request: config.tools_web_search_request,
                use_streamable_shell_tool: config.use_experimental_streamable_shell_tool,
                include_view_image_tool: config.include_view_image_tool,
                experimental_unified_exec_tool: config.use_experimental_unified_exec_tool,
                features: &config.features,
            }),
            user_instructions,
            base_instructions,
@@ -715,6 +750,73 @@ impl Session {
        self.persist_rollout_items(&rollout_items).await;
    }

    async fn stored_snapshot_for_root(&self, root: &Path) -> Option<CodebaseSnapshot> {
        let state = self.state.lock().await;
        state
            .codebase_snapshot
            .as_ref()
            .filter(|snapshot| snapshot.root() == root)
            .cloned()
    }

    async fn set_codebase_snapshot(&self, snapshot: CodebaseSnapshot) {
        let mut state = self.state.lock().await;
        state.codebase_snapshot = Some(snapshot);
    }

    pub(crate) async fn emit_codebase_delta_if_changed(
        &self,
        turn_context: &TurnContext,
        sub_id: &str,
    ) -> anyhow::Result<()> {
        let cwd = turn_context.cwd.clone();
        let previous = self.stored_snapshot_for_root(&cwd).await;
        let latest = CodebaseSnapshot::capture(cwd.clone()).await?;

        if let Some(previous_snapshot) = previous {
            let diff = previous_snapshot.diff(&latest);
            if diff.is_empty() {
                self.set_codebase_snapshot(latest).await;
                return Ok(());
            }

            let notice = CodebaseChangeNotice::new(diff, CODEBASE_CHANGE_NOTICE_MAX_PATHS);
            if notice.is_empty() {
                self.set_codebase_snapshot(latest).await;
                return Ok(());
            }

            let response_item: ResponseItem = notice.into();
            self.record_conversation_items(std::slice::from_ref(&response_item))
                .await;

            for msg in
                map_response_item_to_event_messages(&response_item, self.show_raw_agent_reasoning())
            {
                let event = Event {
                    id: sub_id.to_string(),
                    msg,
                };
                self.send_event(event).await;
            }

            self.set_codebase_snapshot(latest).await;
            return Ok(());
        }

        self.set_codebase_snapshot(latest).await;
        Ok(())
    }

    pub(crate) async fn refresh_codebase_snapshot(
        &self,
        turn_context: &TurnContext,
    ) -> anyhow::Result<()> {
        let snapshot = CodebaseSnapshot::capture(turn_context.cwd.clone()).await?;
        self.set_codebase_snapshot(snapshot).await;
        Ok(())
    }

    pub(crate) fn build_initial_context(&self, turn_context: &TurnContext) -> Vec<ResponseItem> {
        let mut items = Vec::<ResponseItem>::with_capacity(2);
        if let Some(user_instructions) = turn_context.user_instructions.as_deref() {
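Note: at the start of each non-review turn the session compares its stored snapshot against a fresh capture of the cwd, and only injects an XML notice into history when the diff is non-empty. A minimal standalone sketch of the diff step itself (std only; fingerprints reduced to version strings):

use std::collections::BTreeMap;

#[derive(Default, Debug)]
struct Delta {
    added: Vec<String>,
    removed: Vec<String>,
    modified: Vec<String>,
}

fn diff(old: &BTreeMap<String, String>, new: &BTreeMap<String, String>) -> Delta {
    let mut delta = Delta::default();
    for (path, fingerprint) in new {
        match old.get(path) {
            None => delta.added.push(path.clone()),
            Some(existing) if existing != fingerprint => delta.modified.push(path.clone()),
            _ => {}
        }
    }
    for path in old.keys() {
        if !new.contains_key(path) {
            delta.removed.push(path.clone());
        }
    }
    delta
}

fn main() {
    let old: BTreeMap<String, String> =
        BTreeMap::from([("a.rs".to_string(), "v1".to_string()), ("b.rs".to_string(), "v1".to_string())]);
    let new: BTreeMap<String, String> =
        BTreeMap::from([("a.rs".to_string(), "v2".to_string()), ("c.rs".to_string(), "v1".to_string())]);
    // a.rs modified, b.rs removed, c.rs added -> a notice would be emitted.
    println!("{:?}", diff(&old, &new));
}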
@@ -1191,12 +1293,7 @@ async fn submission_loop(

                let tools_config = ToolsConfig::new(&ToolsConfigParams {
                    model_family: &effective_family,
                    include_plan_tool: config.include_plan_tool,
                    include_apply_patch_tool: config.include_apply_patch_tool,
                    include_web_search_request: config.tools_web_search_request,
                    use_streamable_shell_tool: config.use_experimental_streamable_shell_tool,
                    include_view_image_tool: config.include_view_image_tool,
                    experimental_unified_exec_tool: config.use_experimental_unified_exec_tool,
                    features: &config.features,
                });

                let new_turn_context = TurnContext {
@@ -1293,14 +1390,7 @@ async fn submission_loop(
                    client,
                    tools_config: ToolsConfig::new(&ToolsConfigParams {
                        model_family: &model_family,
                        include_plan_tool: config.include_plan_tool,
                        include_apply_patch_tool: config.include_apply_patch_tool,
                        include_web_search_request: config.tools_web_search_request,
                        use_streamable_shell_tool: config
                            .use_experimental_streamable_shell_tool,
                        include_view_image_tool: config.include_view_image_tool,
                        experimental_unified_exec_tool: config
                            .use_experimental_unified_exec_tool,
                        features: &config.features,
                    }),
                    user_instructions: turn_context.user_instructions.clone(),
                    base_instructions: turn_context.base_instructions.clone(),
@@ -1400,10 +1490,18 @@ async fn submission_loop(

                // This is a cheap lookup from the connection manager's cache.
                let tools = sess.services.mcp_connection_manager.list_all_tools();
                let auth_statuses = compute_auth_statuses(
                    config.mcp_servers.iter(),
                    config.mcp_oauth_credentials_store_mode,
                )
                .await;
                let event = Event {
                    id: sub_id,
                    msg: EventMsg::McpListToolsResponse(
                        crate::protocol::McpListToolsResponseEvent { tools },
                        crate::protocol::McpListToolsResponseEvent {
                            tools,
                            auth_statuses,
                        },
                    ),
                };
                sess.send_event(event).await;
@@ -1524,14 +1622,15 @@ async fn spawn_review_thread(
    let model = config.review_model.clone();
    let review_model_family = find_family_for_model(&model)
        .unwrap_or_else(|| parent_turn_context.client.get_model_family());
    // For reviews, disable plan, web_search, view_image regardless of global settings.
    let mut review_features = config.features.clone();
    review_features.disable(crate::features::Feature::PlanTool);
    review_features.disable(crate::features::Feature::WebSearchRequest);
    review_features.disable(crate::features::Feature::ViewImageTool);
    review_features.disable(crate::features::Feature::StreamableShell);
    let tools_config = ToolsConfig::new(&ToolsConfigParams {
        model_family: &review_model_family,
        include_plan_tool: false,
        include_apply_patch_tool: config.include_apply_patch_tool,
        include_web_search_request: false,
        use_streamable_shell_tool: false,
        include_view_image_tool: false,
        experimental_unified_exec_tool: config.use_experimental_unified_exec_tool,
        features: &review_features,
    });

    let base_instructions = REVIEW_PROMPT.to_string();
@@ -1584,7 +1683,7 @@ async fn spawn_review_thread(

    // Seed the child task with the review prompt as the initial user message.
    let input: Vec<InputItem> = vec![InputItem::Text {
        text: format!("{base_instructions}\n\n---\n\nNow, here's your task: {review_prompt}"),
        text: review_prompt,
    }];
    let tc = Arc::new(review_turn_context);

@@ -1622,6 +1721,7 @@ pub(crate) async fn run_task(
    turn_context: Arc<TurnContext>,
    sub_id: String,
    input: Vec<InputItem>,
    task_kind: TaskKind,
) -> Option<String> {
    if input.is_empty() {
        return None;
@@ -1649,6 +1749,14 @@ pub(crate) async fn run_task(
        .await;
    }

    if !is_review_mode
        && let Err(err) = sess
            .emit_codebase_delta_if_changed(turn_context.as_ref(), &sub_id)
            .await
    {
        warn!(error = ?err, "failed to compute codebase changes");
    }

    let mut last_agent_message: Option<String> = None;
    // Although from the perspective of codex.rs, TurnDiffTracker has the lifecycle of a Task which contains
    // many turns, from the perspective of the user, it is a single turn.
@@ -1705,6 +1813,7 @@ pub(crate) async fn run_task(
            Arc::clone(&turn_diff_tracker),
            sub_id.clone(),
            turn_input,
            task_kind,
        )
        .await
        {
@@ -1857,6 +1966,7 @@ pub(crate) async fn run_task(
    );
    sess.notifier()
        .notify(&UserNotification::AgentTurnComplete {
            thread_id: sess.conversation_id.to_string(),
            turn_id: sub_id.clone(),
            input_messages: turn_input_messages,
            last_assistant_message: last_agent_message.clone(),
@@ -1880,6 +1990,11 @@ pub(crate) async fn run_task(
        }
    }

    if !is_review_mode && let Err(err) = sess.refresh_codebase_snapshot(turn_context.as_ref()).await
    {
        warn!(error = ?err, "failed to refresh codebase snapshot");
    }

    // If this was a review thread and we have a final assistant message,
    // try to parse it as a ReviewOutput.
    //
@@ -1930,6 +2045,7 @@ async fn run_turn(
    turn_diff_tracker: SharedTurnDiffTracker,
    sub_id: String,
    input: Vec<ResponseItem>,
    task_kind: TaskKind,
) -> CodexResult<TurnRunResult> {
    let mcp_tools = sess.services.mcp_connection_manager.list_all_tools();
    let router = Arc::new(ToolRouter::from_config(
@@ -1959,6 +2075,7 @@ async fn run_turn(
            Arc::clone(&turn_diff_tracker),
            &sub_id,
            &prompt,
            task_kind,
        )
        .await
        {
@@ -1996,9 +2113,7 @@ async fn run_turn(
                    // at a seemingly frozen screen.
                    sess.notify_stream_error(
                        &sub_id,
                        format!(
                            "stream error: {e}; retrying {retries}/{max_retries} in {delay:?}…"
                        ),
                        format!("Re-connecting... {retries}/{max_retries}"),
                    )
                    .await;

@@ -2034,6 +2149,7 @@ async fn try_run_turn(
    turn_diff_tracker: SharedTurnDiffTracker,
    sub_id: &str,
    prompt: &Prompt,
    task_kind: TaskKind,
) -> CodexResult<TurnRunResult> {
    // call_ids that are part of this response.
    let completed_call_ids = prompt
@@ -2099,16 +2215,21 @@ async fn try_run_turn(
        summary: turn_context.client.get_reasoning_summary(),
    });
    sess.persist_rollout_items(&[rollout_item]).await;
    let mut stream = turn_context.client.clone().stream(&prompt).await?;
    let mut stream = turn_context
        .client
        .clone()
        .stream_with_task_kind(prompt.as_ref(), task_kind)
        .await?;

    let mut output = Vec::new();
    let mut tool_runtime = ToolCallRuntime::new(
    let tool_runtime = ToolCallRuntime::new(
        Arc::clone(&router),
        Arc::clone(&sess),
        Arc::clone(&turn_context),
        Arc::clone(&turn_diff_tracker),
        sub_id.to_string(),
    );
    let mut output: FuturesOrdered<BoxFuture<CodexResult<ProcessedResponseItem>>> =
        FuturesOrdered::new();

    loop {
        // Poll the next item from the model stream. We must inspect *both* Ok and Err
@@ -2116,9 +2237,8 @@ async fn try_run_turn(
        // `response.completed`) bubble up and trigger the caller's retry logic.
        let event = stream.next().await;
        let event = match event {
            Some(event) => event,
            Some(res) => res?,
            None => {
                tool_runtime.abort_all();
                return Err(CodexErr::Stream(
                    "stream closed before response.completed".into(),
                    None,
@@ -2126,14 +2246,8 @@ async fn try_run_turn(
            }
        };

        let event = match event {
            Ok(ev) => ev,
            Err(e) => {
                tool_runtime.abort_all();
                // Propagate the underlying stream error to the caller (run_turn), which
                // will apply the configured `stream_max_retries` policy.
                return Err(e);
            }
        let add_completed = &mut |response_item: ProcessedResponseItem| {
            output.push_back(future::ready(Ok(response_item)).boxed());
        };

        match event {
@@ -2143,14 +2257,18 @@ async fn try_run_turn(
                Ok(Some(call)) => {
                    let payload_preview = call.payload.log_payload().into_owned();
                    tracing::info!("ToolCall: {} {}", call.tool_name, payload_preview);
                    let index = output.len();
                    output.push(ProcessedResponseItem {
                        item,
                        response: None,
                    });
                    tool_runtime
                        .handle_tool_call(call, index, output.as_mut_slice())
                        .await?;

                    let response = tool_runtime.handle_tool_call(call);

                    output.push_back(
                        async move {
                            Ok(ProcessedResponseItem {
                                item,
                                response: Some(response.await?),
                            })
                        }
                        .boxed(),
                    );
                }
                Ok(None) => {
                    let response = handle_non_tool_response_item(
@@ -2160,7 +2278,7 @@ async fn try_run_turn(
                        item.clone(),
                    )
                    .await?;
                    output.push(ProcessedResponseItem { item, response });
                    add_completed(ProcessedResponseItem { item, response });
                }
                Err(FunctionCallError::MissingLocalShellCallId) => {
                    let msg = "LocalShellCall without call_id or id";
@@ -2177,7 +2295,7 @@ async fn try_run_turn(
                            success: None,
                        },
                    };
                    output.push(ProcessedResponseItem {
                    add_completed(ProcessedResponseItem {
                        item,
                        response: Some(response),
                    });
@@ -2190,7 +2308,7 @@ async fn try_run_turn(
                            success: None,
                        },
                    };
                    output.push(ProcessedResponseItem {
                    add_completed(ProcessedResponseItem {
                        item,
                        response: Some(response),
                    });
@@ -2221,7 +2339,7 @@ async fn try_run_turn(
                sess.update_token_usage_info(sub_id, turn_context.as_ref(), token_usage.as_ref())
                    .await;

                tool_runtime.resolve_pending(output.as_mut_slice()).await?;
                let processed_items: Vec<ProcessedResponseItem> = output.try_collect().await?;

                let unified_diff = {
                    let mut tracker = turn_diff_tracker.lock().await;
@@ -2237,7 +2355,7 @@ async fn try_run_turn(
                }

                let result = TurnRunResult {
                    processed_items: output,
                    processed_items,
                    total_token_usage: token_usage.clone(),
                };

@@ -2740,12 +2858,7 @@ mod tests {
        );
        let tools_config = ToolsConfig::new(&ToolsConfigParams {
            model_family: &config.model_family,
            include_plan_tool: config.include_plan_tool,
            include_apply_patch_tool: config.include_apply_patch_tool,
            include_web_search_request: config.tools_web_search_request,
            use_streamable_shell_tool: config.use_experimental_streamable_shell_tool,
            include_view_image_tool: config.include_view_image_tool,
            experimental_unified_exec_tool: config.use_experimental_unified_exec_tool,
            features: &config.features,
        });
        let turn_context = TurnContext {
            client,
@@ -2813,12 +2926,7 @@ mod tests {
        );
        let tools_config = ToolsConfig::new(&ToolsConfigParams {
            model_family: &config.model_family,
            include_plan_tool: config.include_plan_tool,
            include_apply_patch_tool: config.include_apply_patch_tool,
            include_web_search_request: config.tools_web_search_request,
            use_streamable_shell_tool: config.use_experimental_streamable_shell_tool,
            include_view_image_tool: config.include_view_image_tool,
            experimental_unified_exec_tool: config.use_experimental_unified_exec_tool,
            features: &config.features,
        });
        let turn_context = Arc::new(TurnContext {
            client,
@@ -16,6 +16,7 @@ use crate::protocol::InputItem;
use crate::protocol::InputMessageKind;
use crate::protocol::TaskStartedEvent;
use crate::protocol::TurnContextItem;
use crate::state::TaskKind;
use crate::truncate::truncate_middle;
use crate::util::backoff;
use askama::Template;
@@ -70,14 +71,10 @@ async fn run_compact_task_inner(
    input: Vec<InputItem>,
) {
    let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);
    let turn_input = sess
    let mut turn_input = sess
        .turn_input_with_history(vec![initial_input_for_turn.clone().into()])
        .await;

    let prompt = Prompt {
        input: turn_input,
        ..Default::default()
    };
    let mut truncated_count = 0usize;

    let max_retries = turn_context.client.get_provider().stream_max_retries();
    let mut retries = 0;
@@ -93,17 +90,36 @@ async fn run_compact_task_inner(
    sess.persist_rollout_items(&[rollout_item]).await;

    loop {
        let prompt = Prompt {
            input: turn_input.clone(),
            ..Default::default()
        };
        let attempt_result =
            drain_to_completed(&sess, turn_context.as_ref(), &sub_id, &prompt).await;

        match attempt_result {
            Ok(()) => {
                if truncated_count > 0 {
                    sess.notify_background_event(
                        &sub_id,
                        format!(
                            "Trimmed {truncated_count} older conversation item(s) before compacting so the prompt fits the model context window."
                        ),
                    )
                    .await;
                }
                break;
            }
            Err(CodexErr::Interrupted) => {
                return;
            }
            Err(e @ CodexErr::ContextWindowExceeded) => {
                if turn_input.len() > 1 {
                    turn_input.remove(0);
                    truncated_count += 1;
                    retries = 0;
                    continue;
                }
                sess.set_total_tokens_full(&sub_id, turn_context.as_ref())
                    .await;
                let event = Event {
@@ -121,9 +137,7 @@ async fn run_compact_task_inner(
                let delay = backoff(retries);
                sess.notify_stream_error(
                    &sub_id,
                    format!(
                        "stream error: {e}; retrying {retries}/{max_retries} in {delay:?}…"
                    ),
                    format!("Re-connecting... {retries}/{max_retries}"),
                )
                .await;
                tokio::time::sleep(delay).await;
@@ -245,7 +259,11 @@ async fn drain_to_completed(
    sub_id: &str,
    prompt: &Prompt,
) -> CodexResult<()> {
    let mut stream = turn_context.client.clone().stream(prompt).await?;
    let mut stream = turn_context
        .client
        .clone()
        .stream_with_task_kind(prompt, TaskKind::Compact)
        .await?;
    loop {
        let maybe_event = stream.next().await;
        let Some(event) = maybe_event else {
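Note: on ContextWindowExceeded the compact task now drops the oldest history item, resets the retry counter, and retries, only surfacing the error once a single item remains. A minimal standalone sketch of that trim-and-retry loop (plain Rust; the "model call" is simulated by a capacity check):

#[derive(Debug)]
enum AttemptError {
    ContextWindowExceeded,
}

fn attempt(items: &[&str], capacity: usize) -> Result<(), AttemptError> {
    // Stand-in for the streaming request: fail while the prompt is too large.
    let total: usize = items.iter().map(|s| s.len()).sum();
    if total > capacity {
        Err(AttemptError::ContextWindowExceeded)
    } else {
        Ok(())
    }
}

fn main() {
    let mut turn_input = vec!["oldest item", "middle item", "newest item"];
    let mut truncated_count = 0usize;

    loop {
        match attempt(&turn_input, 24) {
            Ok(()) => break,
            Err(AttemptError::ContextWindowExceeded) if turn_input.len() > 1 => {
                // Drop the oldest item and try again from scratch.
                turn_input.remove(0);
                truncated_count += 1;
            }
            Err(e) => panic!("cannot trim further: {e:?}"),
        }
    }
    println!("trimmed {truncated_count} item(s) before compacting");
}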
@@ -17,6 +17,10 @@ use crate::config_types::ShellEnvironmentPolicy;
|
||||
use crate::config_types::ShellEnvironmentPolicyToml;
|
||||
use crate::config_types::Tui;
|
||||
use crate::config_types::UriBasedFileOpener;
|
||||
use crate::features::Feature;
|
||||
use crate::features::FeatureOverrides;
|
||||
use crate::features::Features;
|
||||
use crate::features::FeaturesToml;
|
||||
use crate::git_info::resolve_root_git_project_for_trust;
|
||||
use crate::model_family::ModelFamily;
|
||||
use crate::model_family::derive_default_model_family;
|
||||
@@ -33,12 +37,15 @@ use codex_protocol::config_types::ReasoningEffort;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
use codex_protocol::config_types::Verbosity;
|
||||
use codex_rmcp_client::OAuthCredentialsStoreMode;
|
||||
use dirs::home_dir;
|
||||
use serde::Deserialize;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use tempfile::NamedTempFile;
|
||||
use toml::Value as TomlValue;
|
||||
use toml_edit::Array as TomlArray;
|
||||
@@ -142,6 +149,15 @@ pub struct Config {
    /// Definition for MCP servers that Codex can reach out to for tool calls.
    pub mcp_servers: HashMap<String, McpServerConfig>,

    /// Preferred store for MCP OAuth credentials.
    /// keyring: Use an OS-specific keyring service.
    /// Credentials stored in the keyring will only be readable by Codex unless the user explicitly grants access via OS-level keyring access.
    /// https://github.com/openai/codex/blob/main/codex-rs/rmcp-client/src/oauth.rs#L2
    /// file: CODEX_HOME/.credentials.json
    /// This file will be readable to Codex and other applications running as the same user.
    /// auto (default): keyring if available, otherwise file.
    pub mcp_oauth_credentials_store_mode: OAuthCredentialsStoreMode,

    /// Combined provider map (defaults merged with user-defined overrides).
    pub model_providers: HashMap<String, ModelProviderInfo>,

@@ -206,6 +222,9 @@ pub struct Config {
    /// Include the `view_image` tool that lets the agent attach a local image path to context.
    pub include_view_image_tool: bool,

    /// Centralized feature flags; source of truth for feature gating.
    pub features: Features,

    /// The active profile name used to derive this `Config` (if any).
    pub active_profile: Option<String>,

@@ -301,12 +320,35 @@ pub async fn load_global_mcp_servers(
        return Ok(BTreeMap::new());
    };

    ensure_no_inline_bearer_tokens(servers_value)?;

    servers_value
        .clone()
        .try_into()
        .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))
}

/// We briefly allowed plain text bearer_token fields in MCP server configs.
/// We want to warn people who recently added these fields but can remove this after a few months.
fn ensure_no_inline_bearer_tokens(value: &TomlValue) -> std::io::Result<()> {
    let Some(servers_table) = value.as_table() else {
        return Ok(());
    };

    for (server_name, server_value) in servers_table {
        if let Some(server_table) = server_value.as_table()
            && server_table.contains_key("bearer_token")
        {
            let message = format!(
                "mcp_servers.{server_name} uses unsupported `bearer_token`; set `bearer_token_env_var`."
            );
            return Err(std::io::Error::new(ErrorKind::InvalidData, message));
        }
    }

    Ok(())
}

pub fn write_global_mcp_servers(
    codex_home: &Path,
    servers: &BTreeMap<String, McpServerConfig>,
@@ -355,14 +397,21 @@ pub fn write_global_mcp_servers(
                    entry["env"] = TomlItem::Table(env_table);
                }
            }
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
            McpServerTransportConfig::StreamableHttp {
                url,
                bearer_token_env_var,
            } => {
                entry["url"] = toml_edit::value(url.clone());
                if let Some(token) = bearer_token {
                    entry["bearer_token"] = toml_edit::value(token.clone());
                if let Some(env_var) = bearer_token_env_var {
                    entry["bearer_token_env_var"] = toml_edit::value(env_var.clone());
                }
            }
        }

        if !config.enabled {
            entry["enabled"] = toml_edit::value(false);
        }

        if let Some(timeout) = config.startup_timeout_sec {
            entry["startup_timeout_sec"] = toml_edit::value(timeout.as_secs_f64());
        }
@@ -694,6 +743,14 @@ pub struct ConfigToml {
    #[serde(default)]
    pub mcp_servers: HashMap<String, McpServerConfig>,

    /// Preferred backend for storing MCP OAuth credentials.
    /// keyring: Use an OS-specific keyring service.
    /// https://github.com/openai/codex/blob/main/codex-rs/rmcp-client/src/oauth.rs#L2
    /// file: Use a file in the Codex home directory.
    /// auto (default): Use the OS-specific keyring service if available, otherwise use a file.
    #[serde(default)]
    pub mcp_oauth_credentials_store: Option<OAuthCredentialsStoreMode>,

    /// User-defined provider entries that extend/override the built-in list.
    #[serde(default)]
    pub model_providers: HashMap<String, ModelProviderInfo>,
@@ -744,19 +801,15 @@ pub struct ConfigToml {
    /// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
    pub chatgpt_base_url: Option<String>,

    /// Experimental path to a file whose contents replace the built-in BASE_INSTRUCTIONS.
    pub experimental_instructions_file: Option<PathBuf>,

    pub experimental_use_exec_command_tool: Option<bool>,
    pub experimental_use_unified_exec_tool: Option<bool>,
    pub experimental_use_rmcp_client: Option<bool>,
    pub experimental_use_freeform_apply_patch: Option<bool>,

    pub projects: Option<HashMap<String, ProjectConfig>>,

    /// Nested tools section for feature toggles
    pub tools: Option<ToolsToml>,

    /// Centralized feature flags (new). Prefer this over individual toggles.
    #[serde(default)]
    pub features: Option<FeaturesToml>,

    /// When true, disables burst-paste detection for typed input entirely.
    /// All characters are inserted as they are received, and no buffering
    /// or placeholder replacement will occur for fast keypress bursts.
@@ -767,6 +820,13 @@ pub struct ConfigToml {

    /// Tracks whether the Windows onboarding screen has been acknowledged.
    pub windows_wsl_setup_acknowledged: Option<bool>,

    /// Legacy, now use features
    pub experimental_instructions_file: Option<PathBuf>,
    pub experimental_use_exec_command_tool: Option<bool>,
    pub experimental_use_unified_exec_tool: Option<bool>,
    pub experimental_use_rmcp_client: Option<bool>,
    pub experimental_use_freeform_apply_patch: Option<bool>,
}

impl From<ConfigToml> for UserSavedConfig {
@@ -930,9 +990,9 @@ impl Config {
            config_profile: config_profile_key,
            codex_linux_sandbox_exe,
            base_instructions,
            include_plan_tool,
            include_apply_patch_tool,
            include_view_image_tool,
            include_plan_tool: include_plan_tool_override,
            include_apply_patch_tool: include_apply_patch_tool_override,
            include_view_image_tool: include_view_image_tool_override,
            show_raw_agent_reasoning,
            tools_web_search_request: override_tools_web_search_request,
        } = overrides;
@@ -955,6 +1015,15 @@ impl Config {
            None => ConfigProfile::default(),
        };

        let feature_overrides = FeatureOverrides {
            include_plan_tool: include_plan_tool_override,
            include_apply_patch_tool: include_apply_patch_tool_override,
            include_view_image_tool: include_view_image_tool_override,
            web_search_request: override_tools_web_search_request,
        };

        let features = Features::from_config(&cfg, &config_profile, feature_overrides);

        let sandbox_policy = cfg.derive_sandbox_policy(sandbox_mode);

        let mut model_providers = built_in_model_providers();
@@ -1000,13 +1069,13 @@ impl Config {

        let history = cfg.history.unwrap_or_default();

        let tools_web_search_request = override_tools_web_search_request
            .or(cfg.tools.as_ref().and_then(|t| t.web_search))
            .unwrap_or(false);

        let include_view_image_tool = include_view_image_tool
            .or(cfg.tools.as_ref().and_then(|t| t.view_image))
            .unwrap_or(true);
        let include_plan_tool_flag = features.enabled(Feature::PlanTool);
        let include_apply_patch_tool_flag = features.enabled(Feature::ApplyPatchFreeform);
        let include_view_image_tool_flag = features.enabled(Feature::ViewImageTool);
        let tools_web_search_request = features.enabled(Feature::WebSearchRequest);
        let use_experimental_streamable_shell_tool = features.enabled(Feature::StreamableShell);
        let use_experimental_unified_exec_tool = features.enabled(Feature::UnifiedExec);
        let use_experimental_use_rmcp_client = features.enabled(Feature::RmcpClient);

        let model = model
            .or(config_profile.model)
@@ -1074,6 +1143,9 @@ impl Config {
            user_instructions,
            base_instructions,
            mcp_servers: cfg.mcp_servers,
            // The config.toml omits "_mode" because it's a config file. However, "_mode"
            // is important in code to differentiate the mode from the store implementation.
            mcp_oauth_credentials_store_mode: cfg.mcp_oauth_credentials_store.unwrap_or_default(),
            model_providers,
            project_doc_max_bytes: cfg.project_doc_max_bytes.unwrap_or(PROJECT_DOC_MAX_BYTES),
            project_doc_fallback_filenames: cfg
@@ -1111,19 +1183,14 @@ impl Config {
                .chatgpt_base_url
                .or(cfg.chatgpt_base_url)
                .unwrap_or("https://chatgpt.com/backend-api/".to_string()),
            include_plan_tool: include_plan_tool.unwrap_or(false),
            include_apply_patch_tool: include_apply_patch_tool
                .or(cfg.experimental_use_freeform_apply_patch)
                .unwrap_or(false),
            include_plan_tool: include_plan_tool_flag,
            include_apply_patch_tool: include_apply_patch_tool_flag,
            tools_web_search_request,
            use_experimental_streamable_shell_tool: cfg
                .experimental_use_exec_command_tool
                .unwrap_or(false),
            use_experimental_unified_exec_tool: cfg
                .experimental_use_unified_exec_tool
                .unwrap_or(false),
            use_experimental_use_rmcp_client: cfg.experimental_use_rmcp_client.unwrap_or(false),
            include_view_image_tool,
            use_experimental_streamable_shell_tool,
            use_experimental_unified_exec_tool,
            use_experimental_use_rmcp_client,
            include_view_image_tool: include_view_image_tool_flag,
            features,
            active_profile: active_profile_name,
            windows_wsl_setup_acknowledged: cfg.windows_wsl_setup_acknowledged.unwrap_or(false),
            disable_paste_burst: cfg.disable_paste_burst.unwrap_or(false),
@@ -1256,6 +1323,7 @@ pub fn log_dir(cfg: &Config) -> std::io::Result<PathBuf> {
mod tests {
    use crate::config_types::HistoryPersistence;
    use crate::config_types::Notifications;
    use crate::features::Feature;

    use super::*;
    use pretty_assertions::assert_eq;
@@ -1364,6 +1432,172 @@ exclude_slash_tmp = true
        );
    }

    #[test]
    fn config_defaults_to_auto_oauth_store_mode() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
        let cfg = ConfigToml::default();

        let config = Config::load_from_base_config_with_overrides(
            cfg,
            ConfigOverrides::default(),
            codex_home.path().to_path_buf(),
        )?;

        assert_eq!(
            config.mcp_oauth_credentials_store_mode,
            OAuthCredentialsStoreMode::Auto,
        );

        Ok(())
    }

    #[test]
    fn profile_legacy_toggles_override_base() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
        let mut profiles = HashMap::new();
        profiles.insert(
            "work".to_string(),
            ConfigProfile {
                include_plan_tool: Some(true),
                include_view_image_tool: Some(false),
                ..Default::default()
            },
        );
        let cfg = ConfigToml {
            profiles,
            profile: Some("work".to_string()),
            ..Default::default()
        };

        let config = Config::load_from_base_config_with_overrides(
            cfg,
            ConfigOverrides::default(),
            codex_home.path().to_path_buf(),
        )?;

        assert!(config.features.enabled(Feature::PlanTool));
        assert!(!config.features.enabled(Feature::ViewImageTool));
        assert!(config.include_plan_tool);
        assert!(!config.include_view_image_tool);

        Ok(())
    }

    #[test]
    fn feature_table_overrides_legacy_flags() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
        let mut entries = BTreeMap::new();
        entries.insert("plan_tool".to_string(), false);
        entries.insert("apply_patch_freeform".to_string(), false);
        let cfg = ConfigToml {
            features: Some(crate::features::FeaturesToml { entries }),
            ..Default::default()
        };

        let config = Config::load_from_base_config_with_overrides(
            cfg,
            ConfigOverrides::default(),
            codex_home.path().to_path_buf(),
        )?;

        assert!(!config.features.enabled(Feature::PlanTool));
        assert!(!config.features.enabled(Feature::ApplyPatchFreeform));
        assert!(!config.include_plan_tool);
        assert!(!config.include_apply_patch_tool);

        Ok(())
    }

    #[test]
    fn legacy_toggles_map_to_features() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
        let cfg = ConfigToml {
            experimental_use_exec_command_tool: Some(true),
            experimental_use_unified_exec_tool: Some(true),
            experimental_use_rmcp_client: Some(true),
            experimental_use_freeform_apply_patch: Some(true),
            ..Default::default()
        };

        let config = Config::load_from_base_config_with_overrides(
            cfg,
            ConfigOverrides::default(),
            codex_home.path().to_path_buf(),
        )?;

        assert!(config.features.enabled(Feature::ApplyPatchFreeform));
        assert!(config.features.enabled(Feature::StreamableShell));
        assert!(config.features.enabled(Feature::UnifiedExec));
        assert!(config.features.enabled(Feature::RmcpClient));

        assert!(config.include_apply_patch_tool);
        assert!(config.use_experimental_streamable_shell_tool);
        assert!(config.use_experimental_unified_exec_tool);
        assert!(config.use_experimental_use_rmcp_client);

        Ok(())
    }

    #[test]
    fn config_honors_explicit_file_oauth_store_mode() -> std::io::Result<()> {
        let codex_home = TempDir::new()?;
        let cfg = ConfigToml {
            mcp_oauth_credentials_store: Some(OAuthCredentialsStoreMode::File),
            ..Default::default()
        };

        let config = Config::load_from_base_config_with_overrides(
            cfg,
            ConfigOverrides::default(),
            codex_home.path().to_path_buf(),
        )?;

        assert_eq!(
            config.mcp_oauth_credentials_store_mode,
            OAuthCredentialsStoreMode::File,
        );

        Ok(())
    }

    #[tokio::test]
    async fn managed_config_overrides_oauth_store_mode() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;
        let managed_path = codex_home.path().join("managed_config.toml");
        let config_path = codex_home.path().join(CONFIG_TOML_FILE);

        std::fs::write(&config_path, "mcp_oauth_credentials_store = \"file\"\n")?;
        std::fs::write(&managed_path, "mcp_oauth_credentials_store = \"keyring\"\n")?;

        let overrides = crate::config_loader::LoaderOverrides {
            managed_config_path: Some(managed_path.clone()),
            #[cfg(target_os = "macos")]
            managed_preferences_base64: None,
        };

        let root_value = load_resolved_config(codex_home.path(), Vec::new(), overrides).await?;
        let cfg: ConfigToml = root_value.try_into().map_err(|e| {
            tracing::error!("Failed to deserialize overridden config: {e}");
            std::io::Error::new(std::io::ErrorKind::InvalidData, e)
        })?;
        assert_eq!(
            cfg.mcp_oauth_credentials_store,
            Some(OAuthCredentialsStoreMode::Keyring),
        );

        let final_config = Config::load_from_base_config_with_overrides(
            cfg,
            ConfigOverrides::default(),
            codex_home.path().to_path_buf(),
        )?;
        assert_eq!(
            final_config.mcp_oauth_credentials_store_mode,
            OAuthCredentialsStoreMode::Keyring,
        );

        Ok(())
    }

    #[tokio::test]
    async fn load_global_mcp_servers_returns_empty_if_missing() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;
@@ -1387,6 +1621,7 @@ exclude_slash_tmp = true
                    args: vec!["hello".to_string()],
                    env: None,
                },
                enabled: true,
                startup_timeout_sec: Some(Duration::from_secs(3)),
                tool_timeout_sec: Some(Duration::from_secs(5)),
            },
@@ -1407,6 +1642,7 @@ exclude_slash_tmp = true
        }
        assert_eq!(docs.startup_timeout_sec, Some(Duration::from_secs(3)));
        assert_eq!(docs.tool_timeout_sec, Some(Duration::from_secs(5)));
        assert!(docs.enabled);

        let empty = BTreeMap::new();
        write_global_mcp_servers(codex_home.path(), &empty)?;
@@ -1471,6 +1707,31 @@ startup_timeout_ms = 2500
        Ok(())
    }

    #[tokio::test]
    async fn load_global_mcp_servers_rejects_inline_bearer_token() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;
        let config_path = codex_home.path().join(CONFIG_TOML_FILE);

        std::fs::write(
            &config_path,
            r#"
[mcp_servers.docs]
url = "https://example.com/mcp"
bearer_token = "secret"
"#,
        )?;

        let err = load_global_mcp_servers(codex_home.path())
            .await
            .expect_err("bearer_token entries should be rejected");

        assert_eq!(err.kind(), std::io::ErrorKind::InvalidData);
        assert!(err.to_string().contains("bearer_token"));
        assert!(err.to_string().contains("bearer_token_env_var"));

        Ok(())
    }

    #[tokio::test]
    async fn write_global_mcp_servers_serializes_env_sorted() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;
@@ -1486,6 +1747,7 @@ startup_timeout_ms = 2500
                    ("ALPHA_VAR".to_string(), "1".to_string()),
                ])),
            },
            enabled: true,
            startup_timeout_sec: None,
            tool_timeout_sec: None,
        },
@@ -1534,8 +1796,9 @@ ZIG_VAR = "3"
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url: "https://example.com/mcp".to_string(),
                    bearer_token: Some("secret-token".to_string()),
                    bearer_token_env_var: Some("MCP_TOKEN".to_string()),
                },
                enabled: true,
                startup_timeout_sec: Some(Duration::from_secs(2)),
                tool_timeout_sec: None,
            },
@@ -1549,7 +1812,7 @@ ZIG_VAR = "3"
            serialized,
            r#"[mcp_servers.docs]
url = "https://example.com/mcp"
bearer_token = "secret-token"
bearer_token_env_var = "MCP_TOKEN"
startup_timeout_sec = 2.0
"#
        );
@@ -1557,9 +1820,12 @@ startup_timeout_sec = 2.0
        let loaded = load_global_mcp_servers(codex_home.path()).await?;
        let docs = loaded.get("docs").expect("docs entry");
        match &docs.transport {
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
            McpServerTransportConfig::StreamableHttp {
                url,
                bearer_token_env_var,
            } => {
                assert_eq!(url, "https://example.com/mcp");
                assert_eq!(bearer_token.as_deref(), Some("secret-token"));
                assert_eq!(bearer_token_env_var.as_deref(), Some("MCP_TOKEN"));
            }
            other => panic!("unexpected transport {other:?}"),
        }
@@ -1570,8 +1836,9 @@ startup_timeout_sec = 2.0
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url: "https://example.com/mcp".to_string(),
                    bearer_token: None,
                    bearer_token_env_var: None,
                },
                enabled: true,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
            },
@@ -1589,9 +1856,12 @@ url = "https://example.com/mcp"
        let loaded = load_global_mcp_servers(codex_home.path()).await?;
        let docs = loaded.get("docs").expect("docs entry");
        match &docs.transport {
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
            McpServerTransportConfig::StreamableHttp {
                url,
                bearer_token_env_var,
            } => {
                assert_eq!(url, "https://example.com/mcp");
                assert!(bearer_token.is_none());
                assert!(bearer_token_env_var.is_none());
            }
            other => panic!("unexpected transport {other:?}"),
        }
@@ -1599,6 +1869,40 @@ url = "https://example.com/mcp"
        Ok(())
    }

    #[tokio::test]
    async fn write_global_mcp_servers_serializes_disabled_flag() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;

        let servers = BTreeMap::from([(
            "docs".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::Stdio {
                    command: "docs-server".to_string(),
                    args: Vec::new(),
                    env: None,
                },
                enabled: false,
                startup_timeout_sec: None,
                tool_timeout_sec: None,
            },
        )]);

        write_global_mcp_servers(codex_home.path(), &servers)?;

        let config_path = codex_home.path().join(CONFIG_TOML_FILE);
        let serialized = std::fs::read_to_string(&config_path)?;
        assert!(
            serialized.contains("enabled = false"),
            "serialized config missing disabled flag:\n{serialized}"
        );

        let loaded = load_global_mcp_servers(codex_home.path()).await?;
        let docs = loaded.get("docs").expect("docs entry");
        assert!(!docs.enabled);

        Ok(())
    }

    #[tokio::test]
    async fn persist_model_selection_updates_defaults() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;
@@ -1896,6 +2200,7 @@ model_verbosity = "high"
                notify: None,
                cwd: fixture.cwd(),
                mcp_servers: HashMap::new(),
                mcp_oauth_credentials_store_mode: Default::default(),
                model_providers: fixture.model_provider_map.clone(),
                project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
                project_doc_fallback_filenames: Vec::new(),
@@ -1917,6 +2222,7 @@ model_verbosity = "high"
                use_experimental_unified_exec_tool: false,
                use_experimental_use_rmcp_client: false,
                include_view_image_tool: true,
                features: Features::with_defaults(),
                active_profile: Some("o3".to_string()),
                windows_wsl_setup_acknowledged: false,
                disable_paste_burst: false,
@@ -1958,6 +2264,7 @@ model_verbosity = "high"
                notify: None,
                cwd: fixture.cwd(),
                mcp_servers: HashMap::new(),
                mcp_oauth_credentials_store_mode: Default::default(),
                model_providers: fixture.model_provider_map.clone(),
                project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
                project_doc_fallback_filenames: Vec::new(),
@@ -1979,6 +2286,7 @@ model_verbosity = "high"
                use_experimental_unified_exec_tool: false,
                use_experimental_use_rmcp_client: false,
                include_view_image_tool: true,
                features: Features::with_defaults(),
                active_profile: Some("gpt3".to_string()),
                windows_wsl_setup_acknowledged: false,
                disable_paste_burst: false,
@@ -2035,6 +2343,7 @@ model_verbosity = "high"
                notify: None,
                cwd: fixture.cwd(),
                mcp_servers: HashMap::new(),
                mcp_oauth_credentials_store_mode: Default::default(),
                model_providers: fixture.model_provider_map.clone(),
                project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
                project_doc_fallback_filenames: Vec::new(),
@@ -2056,6 +2365,7 @@ model_verbosity = "high"
                use_experimental_unified_exec_tool: false,
                use_experimental_use_rmcp_client: false,
                include_view_image_tool: true,
                features: Features::with_defaults(),
                active_profile: Some("zdr".to_string()),
                windows_wsl_setup_acknowledged: false,
                disable_paste_burst: false,
@@ -2098,6 +2408,7 @@ model_verbosity = "high"
                notify: None,
                cwd: fixture.cwd(),
                mcp_servers: HashMap::new(),
                mcp_oauth_credentials_store_mode: Default::default(),
                model_providers: fixture.model_provider_map.clone(),
                project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
                project_doc_fallback_filenames: Vec::new(),
@@ -2119,6 +2430,7 @@ model_verbosity = "high"
                use_experimental_unified_exec_tool: false,
                use_experimental_use_rmcp_client: false,
                include_view_image_tool: true,
                features: Features::with_defaults(),
                active_profile: Some("gpt5".to_string()),
                windows_wsl_setup_acknowledged: false,
                disable_paste_burst: false,

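Note: a small sketch of the store-mode fallback the tests above pin down; `resolve_store` is a hypothetical helper, while `OAuthCredentialsStoreMode` and its `Auto` default come from `codex_rmcp_client` as used in the diff:

use codex_rmcp_client::OAuthCredentialsStoreMode;

// An absent `mcp_oauth_credentials_store` key in config.toml falls back to the
// enum's Default, which the tests above assert is `Auto` (keyring if
// available, otherwise a credentials file under CODEX_HOME).
fn resolve_store(configured: Option<OAuthCredentialsStoreMode>) -> OAuthCredentialsStoreMode {
    configured.unwrap_or_default()
}

// resolve_store(None) == OAuthCredentialsStoreMode::Auto
// resolve_store(Some(OAuthCredentialsStoreMode::File)) == OAuthCredentialsStoreMode::File
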
@@ -20,6 +20,18 @@ pub struct ConfigProfile {
    pub model_verbosity: Option<Verbosity>,
    pub chatgpt_base_url: Option<String>,
    pub experimental_instructions_file: Option<PathBuf>,
    pub include_plan_tool: Option<bool>,
    pub include_apply_patch_tool: Option<bool>,
    pub include_view_image_tool: Option<bool>,
    pub experimental_use_unified_exec_tool: Option<bool>,
    pub experimental_use_exec_command_tool: Option<bool>,
    pub experimental_use_rmcp_client: Option<bool>,
    pub experimental_use_freeform_apply_patch: Option<bool>,
    pub tools_web_search: Option<bool>,
    pub tools_view_image: Option<bool>,
    /// Optional feature toggles scoped to this profile.
    #[serde(default)]
    pub features: Option<crate::features::FeaturesToml>,
}

impl From<ConfigProfile> for codex_app_server_protocol::Profile {

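Note: per-profile toggles layer on top of the base config. Drawing on the `profile_legacy_toggles_override_base` test earlier in the diff, a profile that flips two legacy flags looks like this (field values are illustrative):

let work_profile = ConfigProfile {
    include_plan_tool: Some(true),        // maps to Feature::PlanTool
    include_view_image_tool: Some(false), // maps to Feature::ViewImageTool
    ..Default::default()
};
// Features::from_config applies profile toggles after the base config's,
// so these Some(..) values win over whatever the base config.toml set.
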
@@ -20,6 +20,10 @@ pub struct McpServerConfig {
    #[serde(flatten)]
    pub transport: McpServerTransportConfig,

    /// When `false`, Codex skips initializing this MCP server.
    #[serde(default = "default_enabled")]
    pub enabled: bool,

    /// Startup timeout in seconds for initializing MCP server & initially listing tools.
    #[serde(
        default,
@@ -48,6 +52,7 @@ impl<'de> Deserialize<'de> for McpServerConfig {

            url: Option<String>,
            bearer_token: Option<String>,
            bearer_token_env_var: Option<String>,

            #[serde(default)]
            startup_timeout_sec: Option<f64>,
@@ -55,6 +60,8 @@ impl<'de> Deserialize<'de> for McpServerConfig {
            startup_timeout_ms: Option<u64>,
            #[serde(default, with = "option_duration_secs")]
            tool_timeout_sec: Option<Duration>,
            #[serde(default)]
            enabled: Option<bool>,
        }

        let raw = RawMcpServerConfig::deserialize(deserializer)?;
@@ -86,11 +93,15 @@ impl<'de> Deserialize<'de> for McpServerConfig {
                args,
                env,
                url,
                bearer_token,
                bearer_token_env_var,
                ..
            } => {
                throw_if_set("stdio", "url", url.as_ref())?;
                throw_if_set("stdio", "bearer_token", bearer_token.as_ref())?;
                throw_if_set(
                    "stdio",
                    "bearer_token_env_var",
                    bearer_token_env_var.as_ref(),
                )?;
                McpServerTransportConfig::Stdio {
                    command,
                    args: args.unwrap_or_default(),
@@ -100,6 +111,7 @@ impl<'de> Deserialize<'de> for McpServerConfig {
            RawMcpServerConfig {
                url: Some(url),
                bearer_token,
                bearer_token_env_var,
                command,
                args,
                env,
@@ -108,7 +120,11 @@ impl<'de> Deserialize<'de> for McpServerConfig {
                throw_if_set("streamable_http", "command", command.as_ref())?;
                throw_if_set("streamable_http", "args", args.as_ref())?;
                throw_if_set("streamable_http", "env", env.as_ref())?;
                McpServerTransportConfig::StreamableHttp { url, bearer_token }
                throw_if_set("streamable_http", "bearer_token", bearer_token.as_ref())?;
                McpServerTransportConfig::StreamableHttp {
                    url,
                    bearer_token_env_var,
                }
            }
            _ => return Err(SerdeError::custom("invalid transport")),
        };
@@ -117,10 +133,15 @@ impl<'de> Deserialize<'de> for McpServerConfig {
            transport,
            startup_timeout_sec,
            tool_timeout_sec: raw.tool_timeout_sec,
            enabled: raw.enabled.unwrap_or_else(default_enabled),
        })
    }
}

const fn default_enabled() -> bool {
    true
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[serde(untagged, deny_unknown_fields, rename_all = "snake_case")]
pub enum McpServerTransportConfig {
@@ -135,11 +156,11 @@ pub enum McpServerTransportConfig {
    /// https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http
    StreamableHttp {
        url: String,
        /// A plain text bearer token to use for authentication.
        /// This bearer token will be included in the HTTP request header as an `Authorization: Bearer <token>` header.
        /// This should be used with caution because it lives on disk in clear text.
        /// Name of the environment variable to read for an HTTP bearer token.
        /// When set, requests will include the token via `Authorization: Bearer <token>`.
        /// The actual secret value must be provided via the environment.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        bearer_token: Option<String>,
        bearer_token_env_var: Option<String>,
    },
}

@@ -450,6 +471,7 @@ mod tests {
                env: None
            }
        );
        assert!(cfg.enabled);
    }

    #[test]
@@ -470,6 +492,7 @@ mod tests {
                env: None
            }
        );
        assert!(cfg.enabled);
    }

    #[test]
@@ -491,6 +514,20 @@ mod tests {
                env: Some(HashMap::from([("FOO".to_string(), "BAR".to_string())]))
            }
        );
        assert!(cfg.enabled);
    }

    #[test]
    fn deserialize_disabled_server_config() {
        let cfg: McpServerConfig = toml::from_str(
            r#"
command = "echo"
enabled = false
"#,
        )
        .expect("should deserialize disabled server config");

        assert!(!cfg.enabled);
    }

    #[test]
@@ -506,17 +543,18 @@ mod tests {
            cfg.transport,
            McpServerTransportConfig::StreamableHttp {
                url: "https://example.com/mcp".to_string(),
                bearer_token: None
                bearer_token_env_var: None
            }
        );
        assert!(cfg.enabled);
    }

    #[test]
    fn deserialize_streamable_http_server_config_with_bearer_token() {
    fn deserialize_streamable_http_server_config_with_env_var() {
        let cfg: McpServerConfig = toml::from_str(
            r#"
url = "https://example.com/mcp"
bearer_token = "secret"
bearer_token_env_var = "GITHUB_TOKEN"
"#,
        )
        .expect("should deserialize http config");
@@ -525,9 +563,10 @@ mod tests {
            cfg.transport,
            McpServerTransportConfig::StreamableHttp {
                url: "https://example.com/mcp".to_string(),
                bearer_token: Some("secret".to_string())
                bearer_token_env_var: Some("GITHUB_TOKEN".to_string())
            }
        );
        assert!(cfg.enabled);
    }

    #[test]
@@ -553,13 +592,18 @@ mod tests {
    }

    #[test]
    fn deserialize_rejects_bearer_token_for_stdio_transport() {
        toml::from_str::<McpServerConfig>(
    fn deserialize_rejects_inline_bearer_token_field() {
        let err = toml::from_str::<McpServerConfig>(
            r#"
command = "echo"
url = "https://example.com"
bearer_token = "secret"
"#,
        )
        .expect_err("should reject bearer token for stdio transport");
        .expect_err("should reject bearer_token field");

        assert!(
            err.to_string().contains("bearer_token is not supported"),
            "unexpected error: {err}"
        );
    }
}

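Note: the new field names an environment variable instead of embedding the secret. A hypothetical sketch of the indirection (the crate's actual lookup lives in the rmcp client and is not part of this diff):

use reqwest::header::HeaderValue;

// Config supplies the variable name (e.g. "GITHUB_TOKEN"); the process
// environment supplies the secret at runtime, so nothing lands in config.toml.
fn bearer_header(bearer_token_env_var: &str) -> Option<HeaderValue> {
    let token = std::env::var(bearer_token_env_var).ok()?;
    HeaderValue::from_str(&format!("Bearer {token}")).ok()
}
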
@@ -20,7 +20,7 @@ use std::sync::OnceLock;
/// The full user agent string is returned from the mcp initialize response.
/// Parentheses will be added by Codex. This should only specify what goes inside the parentheses.
pub static USER_AGENT_SUFFIX: LazyLock<Mutex<Option<String>>> = LazyLock::new(|| Mutex::new(None));

pub const DEFAULT_ORIGINATOR: &str = "codex_cli_rs";
pub const CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR: &str = "CODEX_INTERNAL_ORIGINATOR_OVERRIDE";
#[derive(Debug, Clone)]
pub struct Originator {
@@ -35,10 +35,11 @@ pub enum SetOriginatorError {
    AlreadyInitialized,
}

fn init_originator_from_env() -> Originator {
    let default = "codex_cli_rs";
fn get_originator_value(provided: Option<String>) -> Originator {
    let value = std::env::var(CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR)
        .unwrap_or_else(|_| default.to_string());
        .ok()
        .or(provided)
        .unwrap_or(DEFAULT_ORIGINATOR.to_string());

    match HeaderValue::from_str(&value) {
        Ok(header_value) => Originator {
@@ -48,31 +49,22 @@ fn init_originator_from_env() -> Originator {
        Err(e) => {
            tracing::error!("Unable to turn originator override {value} into header value: {e}");
            Originator {
                value: default.to_string(),
                header_value: HeaderValue::from_static(default),
                value: DEFAULT_ORIGINATOR.to_string(),
                header_value: HeaderValue::from_static(DEFAULT_ORIGINATOR),
            }
        }
    }
}

fn build_originator(value: String) -> Result<Originator, SetOriginatorError> {
    let header_value =
        HeaderValue::from_str(&value).map_err(|_| SetOriginatorError::InvalidHeaderValue)?;
    Ok(Originator {
        value,
        header_value,
    })
}

pub fn set_default_originator(value: &str) -> Result<(), SetOriginatorError> {
    let originator = build_originator(value.to_string())?;
pub fn set_default_originator(value: String) -> Result<(), SetOriginatorError> {
    let originator = get_originator_value(Some(value));
    ORIGINATOR
        .set(originator)
        .map_err(|_| SetOriginatorError::AlreadyInitialized)
}

pub fn originator() -> &'static Originator {
    ORIGINATOR.get_or_init(init_originator_from_env)
    ORIGINATOR.get_or_init(|| get_originator_value(None))
}

pub fn get_codex_user_agent() -> String {

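Note: `get_originator_value` above gives the environment override priority over an explicitly provided value, then falls back to the default. The same precedence, restated as a standalone sketch:

const DEFAULT_ORIGINATOR: &str = "codex_cli_rs";
const ENV_VAR: &str = "CODEX_INTERNAL_ORIGINATOR_OVERRIDE";

fn effective_originator(provided: Option<String>) -> String {
    std::env::var(ENV_VAR) // 1. env override wins,
        .ok()
        .or(provided) // 2. then the value passed to set_default_originator,
        .unwrap_or_else(|| DEFAULT_ORIGINATOR.to_string()) // 3. then the default
}

fn main() {
    // Assuming the env var is unset in this process:
    assert_eq!(effective_originator(None), "codex_cli_rs");
    assert_eq!(effective_originator(Some("my_tool".into())), "my_tool");
}
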
@@ -1,6 +1,7 @@
use crate::exec::ExecToolCallOutput;
use crate::token_data::KnownPlan;
use crate::token_data::PlanType;
use crate::truncate::truncate_middle;
use codex_protocol::ConversationId;
use codex_protocol::protocol::RateLimitSnapshot;
use reqwest::StatusCode;
@@ -12,6 +13,9 @@ use tokio::task::JoinError;

pub type Result<T> = std::result::Result<T, CodexErr>;

/// Limit UI error messages to a reasonable size while keeping useful context.
const ERROR_MESSAGE_UI_MAX_BYTES: usize = 2 * 1024; // 2 KiB

#[derive(Error, Debug)]
pub enum SandboxErr {
    /// Error from sandbox execution
@@ -304,21 +308,44 @@ impl CodexErr {
}

pub fn get_error_message_ui(e: &CodexErr) -> String {
    match e {
        CodexErr::Sandbox(SandboxErr::Denied { output }) => output.stderr.text.clone(),
    let message = match e {
        CodexErr::Sandbox(SandboxErr::Denied { output }) => {
            let aggregated = output.aggregated_output.text.trim();
            if !aggregated.is_empty() {
                output.aggregated_output.text.clone()
            } else {
                let stderr = output.stderr.text.trim();
                let stdout = output.stdout.text.trim();
                match (stderr.is_empty(), stdout.is_empty()) {
                    (false, false) => format!("{stderr}\n{stdout}"),
                    (false, true) => output.stderr.text.clone(),
                    (true, false) => output.stdout.text.clone(),
                    (true, true) => format!(
                        "command failed inside sandbox with exit code {}",
                        output.exit_code
                    ),
                }
            }
        }
        // Timeouts are not sandbox errors from a UX perspective; present them plainly
        CodexErr::Sandbox(SandboxErr::Timeout { output }) => format!(
            "error: command timed out after {} ms",
            output.duration.as_millis()
        ),
        CodexErr::Sandbox(SandboxErr::Timeout { output }) => {
            format!(
                "error: command timed out after {} ms",
                output.duration.as_millis()
            )
        }
        _ => e.to_string(),
    }
    };

    truncate_middle(&message, ERROR_MESSAGE_UI_MAX_BYTES).0
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::exec::StreamOutput;
    use codex_protocol::protocol::RateLimitWindow;
    use pretty_assertions::assert_eq;

    fn rate_limit_snapshot() -> RateLimitSnapshot {
        RateLimitSnapshot {
@@ -348,6 +375,73 @@ mod tests {
        );
    }

    #[test]
    fn sandbox_denied_uses_aggregated_output_when_stderr_empty() {
        let output = ExecToolCallOutput {
            exit_code: 77,
            stdout: StreamOutput::new(String::new()),
            stderr: StreamOutput::new(String::new()),
            aggregated_output: StreamOutput::new("aggregate detail".to_string()),
            duration: Duration::from_millis(10),
            timed_out: false,
        };
        let err = CodexErr::Sandbox(SandboxErr::Denied {
            output: Box::new(output),
        });
        assert_eq!(get_error_message_ui(&err), "aggregate detail");
    }

    #[test]
    fn sandbox_denied_reports_both_streams_when_available() {
        let output = ExecToolCallOutput {
            exit_code: 9,
            stdout: StreamOutput::new("stdout detail".to_string()),
            stderr: StreamOutput::new("stderr detail".to_string()),
            aggregated_output: StreamOutput::new(String::new()),
            duration: Duration::from_millis(10),
            timed_out: false,
        };
        let err = CodexErr::Sandbox(SandboxErr::Denied {
            output: Box::new(output),
        });
        assert_eq!(get_error_message_ui(&err), "stderr detail\nstdout detail");
    }

    #[test]
    fn sandbox_denied_reports_stdout_when_no_stderr() {
        let output = ExecToolCallOutput {
            exit_code: 11,
            stdout: StreamOutput::new("stdout only".to_string()),
            stderr: StreamOutput::new(String::new()),
            aggregated_output: StreamOutput::new(String::new()),
            duration: Duration::from_millis(8),
            timed_out: false,
        };
        let err = CodexErr::Sandbox(SandboxErr::Denied {
            output: Box::new(output),
        });
        assert_eq!(get_error_message_ui(&err), "stdout only");
    }

    #[test]
    fn sandbox_denied_reports_exit_code_when_no_output_available() {
        let output = ExecToolCallOutput {
            exit_code: 13,
            stdout: StreamOutput::new(String::new()),
            stderr: StreamOutput::new(String::new()),
            aggregated_output: StreamOutput::new(String::new()),
            duration: Duration::from_millis(5),
            timed_out: false,
        };
        let err = CodexErr::Sandbox(SandboxErr::Denied {
            output: Box::new(output),
        });
        assert_eq!(
            get_error_message_ui(&err),
            "command failed inside sandbox with exit code 13"
        );
    }

    #[test]
    fn usage_limit_reached_error_formats_free_plan() {
        let err = UsageLimitReachedError {

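Note: `get_error_message_ui` now caps its result at `ERROR_MESSAGE_UI_MAX_BYTES` via `truncate_middle`, keeping the head and tail of long output. An independent sketch of middle truncation (not the crate's `truncate_middle` implementation, whose exact behavior this diff does not show):

fn truncate_middle_sketch(s: &str, max_bytes: usize) -> String {
    if s.len() <= max_bytes {
        return s.to_string();
    }
    let chars: Vec<char> = s.chars().collect();
    // Approximate budget: counts chars rather than bytes; fine for a sketch.
    let keep = (max_bytes.saturating_sub(3) / 2).min(chars.len() / 2);
    let head: String = chars[..keep].iter().collect();
    let tail: String = chars[chars.len() - keep..].iter().collect();
    // Head and tail both survive, so the failing command and the final error
    // lines stay visible in the UI.
    format!("{head}...{tail}")
}
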
@@ -177,7 +177,7 @@ pub async fn process_exec_tool_call(
        }));
    }

    if exit_code != 0 && is_likely_sandbox_denied(sandbox_type, exit_code) {
    if is_likely_sandbox_denied(sandbox_type, &exec_output) {
        return Err(CodexErr::Sandbox(SandboxErr::Denied {
            output: Box::new(exec_output),
        }));
@@ -195,21 +195,57 @@ pub async fn process_exec_tool_call(
/// We don't have a fully deterministic way to tell if our command failed
/// because of the sandbox - a command in the user's zshrc file might hit an
/// error, but the command itself might fail or succeed for other reasons.
/// For now, we conservatively check for 'command not found' (exit code 127),
/// and can add additional cases as necessary.
fn is_likely_sandbox_denied(sandbox_type: SandboxType, exit_code: i32) -> bool {
    if sandbox_type == SandboxType::None {
/// For now, we conservatively check for well known command failure exit codes and
/// also look for common sandbox denial keywords in the command output.
fn is_likely_sandbox_denied(sandbox_type: SandboxType, exec_output: &ExecToolCallOutput) -> bool {
    if sandbox_type == SandboxType::None || exec_output.exit_code == 0 {
        return false;
    }

    // Quick rejects: well-known non-sandbox shell exit codes
    // 127: command not found, 2: misuse of shell builtins
    if exit_code == 127 {
    // 2: misuse of shell builtins
    // 126: permission denied
    // 127: command not found
    const QUICK_REJECT_EXIT_CODES: [i32; 3] = [2, 126, 127];
    if QUICK_REJECT_EXIT_CODES.contains(&exec_output.exit_code) {
        return false;
    }

    // For all other cases, we assume the sandbox is the cause
    true
    const SANDBOX_DENIED_KEYWORDS: [&str; 6] = [
        "operation not permitted",
        "permission denied",
        "read-only file system",
        "seccomp",
        "sandbox",
        "landlock",
    ];

    if [
        &exec_output.stderr.text,
        &exec_output.stdout.text,
        &exec_output.aggregated_output.text,
    ]
    .into_iter()
    .any(|section| {
        let lower = section.to_lowercase();
        SANDBOX_DENIED_KEYWORDS
            .iter()
            .any(|needle| lower.contains(needle))
    }) {
        return true;
    }

    #[cfg(unix)]
    {
        const SIGSYS_CODE: i32 = libc::SIGSYS;
        if sandbox_type == SandboxType::LinuxSeccomp
            && exec_output.exit_code == EXIT_CODE_SIGNAL_BASE + SIGSYS_CODE
        {
            return true;
        }
    }

    false
}

#[derive(Debug)]
@@ -436,3 +472,77 @@ fn synthetic_exit_status(code: i32) -> ExitStatus {
    #[expect(clippy::unwrap_used)]
    std::process::ExitStatus::from_raw(code.try_into().unwrap())
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;

    fn make_exec_output(
        exit_code: i32,
        stdout: &str,
        stderr: &str,
        aggregated: &str,
    ) -> ExecToolCallOutput {
        ExecToolCallOutput {
            exit_code,
            stdout: StreamOutput::new(stdout.to_string()),
            stderr: StreamOutput::new(stderr.to_string()),
            aggregated_output: StreamOutput::new(aggregated.to_string()),
            duration: Duration::from_millis(1),
            timed_out: false,
        }
    }

    #[test]
    fn sandbox_detection_requires_keywords() {
        let output = make_exec_output(1, "", "", "");
        assert!(!is_likely_sandbox_denied(
            SandboxType::LinuxSeccomp,
            &output
        ));
    }

    #[test]
    fn sandbox_detection_identifies_keyword_in_stderr() {
        let output = make_exec_output(1, "", "Operation not permitted", "");
        assert!(is_likely_sandbox_denied(SandboxType::LinuxSeccomp, &output));
    }

    #[test]
    fn sandbox_detection_respects_quick_reject_exit_codes() {
        let output = make_exec_output(127, "", "command not found", "");
        assert!(!is_likely_sandbox_denied(
            SandboxType::LinuxSeccomp,
            &output
        ));
    }

    #[test]
    fn sandbox_detection_ignores_non_sandbox_mode() {
        let output = make_exec_output(1, "", "Operation not permitted", "");
        assert!(!is_likely_sandbox_denied(SandboxType::None, &output));
    }

    #[test]
    fn sandbox_detection_uses_aggregated_output() {
        let output = make_exec_output(
            101,
            "",
            "",
            "cargo failed: Read-only file system when writing target",
        );
        assert!(is_likely_sandbox_denied(
            SandboxType::MacosSeatbelt,
            &output
        ));
    }

    #[cfg(unix)]
    #[test]
    fn sandbox_detection_flags_sigsys_exit_code() {
        let exit_code = EXIT_CODE_SIGNAL_BASE + libc::SIGSYS;
        let output = make_exec_output(exit_code, "", "", "");
        assert!(is_likely_sandbox_denied(SandboxType::LinuxSeccomp, &output));
    }
}

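Note: the SIGSYS branch above relies on the shell convention of reporting signal deaths as 128 + signal number, which is presumably what `EXIT_CODE_SIGNAL_BASE` encodes (the constant's definition is outside this diff). A minimal illustration:

// A seccomp filter kills a sandboxed process with SIGSYS on a denied syscall;
// under the 128 + N convention that surfaces as this exit code.
#[cfg(unix)]
fn sigsys_exit_code() -> i32 {
    128 + libc::SIGSYS
}
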
@@ -6,6 +6,7 @@ use async_trait::async_trait;
use crate::CODEX_APPLY_PATCH_ARG1;
use crate::apply_patch::ApplyPatchExec;
use crate::exec::ExecParams;
use crate::executor::ExecutorConfig;
use crate::function_tool::FunctionCallError;

pub(crate) enum ExecutionMode {
@@ -22,6 +23,7 @@ pub(crate) trait ExecutionBackend: Send + Sync {
        params: ExecParams,
        // Required for downcasting the apply_patch.
        mode: &ExecutionMode,
        config: &ExecutorConfig,
    ) -> Result<ExecParams, FunctionCallError>;

    fn stream_stdout(&self, _mode: &ExecutionMode) -> bool {
@@ -47,6 +49,7 @@ impl ExecutionBackend for ShellBackend {
        &self,
        params: ExecParams,
        mode: &ExecutionMode,
        _config: &ExecutorConfig,
    ) -> Result<ExecParams, FunctionCallError> {
        match mode {
            ExecutionMode::Shell => Ok(params),
@@ -65,17 +68,22 @@ impl ExecutionBackend for ApplyPatchBackend {
        &self,
        params: ExecParams,
        mode: &ExecutionMode,
        config: &ExecutorConfig,
    ) -> Result<ExecParams, FunctionCallError> {
        match mode {
            ExecutionMode::ApplyPatch(exec) => {
                let path_to_codex = env::current_exe()
                    .ok()
                    .map(|p| p.to_string_lossy().to_string())
                    .ok_or_else(|| {
                        FunctionCallError::RespondToModel(
                            "failed to determine path to codex executable".to_string(),
                        )
                    })?;
                let path_to_codex = if let Some(exe_path) = &config.codex_exe {
                    exe_path.to_string_lossy().to_string()
                } else {
                    env::current_exe()
                        .ok()
                        .map(|p| p.to_string_lossy().to_string())
                        .ok_or_else(|| {
                            FunctionCallError::RespondToModel(
                                "failed to determine path to codex executable".to_string(),
                            )
                        })?
                };

                let patch = exec.action.patch.clone();
                Ok(ExecParams {

@@ -30,19 +30,19 @@ use codex_otel::otel_event_manager::ToolDecisionSource;
pub(crate) struct ExecutorConfig {
    pub(crate) sandbox_policy: SandboxPolicy,
    pub(crate) sandbox_cwd: PathBuf,
    codex_linux_sandbox_exe: Option<PathBuf>,
    pub(crate) codex_exe: Option<PathBuf>,
}

impl ExecutorConfig {
    pub(crate) fn new(
        sandbox_policy: SandboxPolicy,
        sandbox_cwd: PathBuf,
        codex_linux_sandbox_exe: Option<PathBuf>,
        codex_exe: Option<PathBuf>,
    ) -> Self {
        Self {
            sandbox_policy,
            sandbox_cwd,
            codex_linux_sandbox_exe,
            codex_exe,
        }
    }
}
@@ -86,7 +86,14 @@ impl Executor {
                maybe_translate_shell_command(request.params, session, request.use_shell_profile);
        }

        // Step 1: Normalise parameters via the selected backend.
        // Step 1: Snapshot sandbox configuration so it stays stable for this run.
        let config = self
            .config
            .read()
            .map_err(|_| ExecError::rejection("executor config poisoned"))?
            .clone();

        // Step 2: Normalise parameters via the selected backend.
        let backend = backend_for_mode(&request.mode);
        let stdout_stream = if backend.stream_stdout(&request.mode) {
            request.stdout_stream.clone()
@@ -94,16 +101,9 @@ impl Executor {
            None
        };
        request.params = backend
            .prepare(request.params, &request.mode)
            .prepare(request.params, &request.mode, &config)
            .map_err(ExecError::from)?;

        // Step 2: Snapshot sandbox configuration so it stays stable for this run.
        let config = self
            .config
            .read()
            .map_err(|_| ExecError::rejection("executor config poisoned"))?
            .clone();

        // Step 3: Decide sandbox placement, prompting for approval when needed.
        let sandbox_decision = select_sandbox(
            &request,
@@ -227,7 +227,7 @@ impl Executor {
            sandbox,
            &config.sandbox_policy,
            &config.sandbox_cwd,
            &config.codex_linux_sandbox_exe,
            &config.codex_exe,
            stdout_stream,
        )
        .await
@@ -380,6 +380,23 @@ mod tests {
        assert_eq!(message, "failed in sandbox: sandbox stderr");
    }

    #[test]
    fn sandbox_failure_message_falls_back_to_aggregated_output() {
        let output = ExecToolCallOutput {
            exit_code: 101,
            stdout: StreamOutput::new(String::new()),
            stderr: StreamOutput::new(String::new()),
            aggregated_output: StreamOutput::new("aggregate text".to_string()),
            duration: Duration::from_millis(10),
            timed_out: false,
        };
        let err = SandboxErr::Denied {
            output: Box::new(output),
        };
        let message = sandbox_failure_message(err);
        assert_eq!(message, "failed in sandbox: aggregate text");
    }

    #[test]
    fn normalize_function_error_synthesizes_payload() {
        let err = FunctionCallError::RespondToModel("boom".to_string());

250
codex-rs/core/src/features.rs
Normal file
@@ -0,0 +1,250 @@
//! Centralized feature flags and metadata.
//!
//! This module defines a small set of toggles that gate experimental and
//! optional behavior across the codebase. Instead of wiring individual
//! booleans through multiple types, call sites consult a single `Features`
//! container attached to `Config`.

use crate::config::ConfigToml;
use crate::config_profile::ConfigProfile;
use serde::Deserialize;
use std::collections::BTreeMap;
use std::collections::BTreeSet;

mod legacy;
pub(crate) use legacy::LegacyFeatureToggles;

/// High-level lifecycle stage for a feature.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Stage {
    Experimental,
    Beta,
    Stable,
    Deprecated,
    Removed,
}

/// Unique features toggled via configuration.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Feature {
    /// Use the single unified PTY-backed exec tool.
    UnifiedExec,
    /// Use the streamable exec-command/write-stdin tool pair.
    StreamableShell,
    /// Use the official Rust MCP client (rmcp).
    RmcpClient,
    /// Include the plan tool.
    PlanTool,
    /// Include the freeform apply_patch tool.
    ApplyPatchFreeform,
    /// Include the view_image tool.
    ViewImageTool,
    /// Allow the model to request web searches.
    WebSearchRequest,
}

impl Feature {
    pub fn key(self) -> &'static str {
        self.info().key
    }

    pub fn stage(self) -> Stage {
        self.info().stage
    }

    pub fn default_enabled(self) -> bool {
        self.info().default_enabled
    }

    fn info(self) -> &'static FeatureSpec {
        FEATURES
            .iter()
            .find(|spec| spec.id == self)
            .unwrap_or_else(|| unreachable!("missing FeatureSpec for {:?}", self))
    }
}

/// Holds the effective set of enabled features.
#[derive(Debug, Clone, Default, PartialEq)]
pub struct Features {
    enabled: BTreeSet<Feature>,
}

#[derive(Debug, Clone, Default)]
pub struct FeatureOverrides {
    pub include_plan_tool: Option<bool>,
    pub include_apply_patch_tool: Option<bool>,
    pub include_view_image_tool: Option<bool>,
    pub web_search_request: Option<bool>,
}

impl FeatureOverrides {
    fn apply(self, features: &mut Features) {
        LegacyFeatureToggles {
            include_plan_tool: self.include_plan_tool,
            include_apply_patch_tool: self.include_apply_patch_tool,
            include_view_image_tool: self.include_view_image_tool,
            tools_web_search: self.web_search_request,
            ..Default::default()
        }
        .apply(features);
    }
}

impl Features {
|
||||
/// Starts with built-in defaults.
|
||||
pub fn with_defaults() -> Self {
|
||||
let mut set = BTreeSet::new();
|
||||
for spec in FEATURES {
|
||||
if spec.default_enabled {
|
||||
set.insert(spec.id);
|
||||
}
|
||||
}
|
||||
Self { enabled: set }
|
||||
}
|
||||
|
||||
pub fn enabled(&self, f: Feature) -> bool {
|
||||
self.enabled.contains(&f)
|
||||
}
|
||||
|
||||
pub fn enable(&mut self, f: Feature) {
|
||||
self.enabled.insert(f);
|
||||
}
|
||||
|
||||
pub fn disable(&mut self, f: Feature) {
|
||||
self.enabled.remove(&f);
|
||||
}
|
||||
|
||||
/// Apply a table of key -> bool toggles (e.g. from TOML).
|
||||
pub fn apply_map(&mut self, m: &BTreeMap<String, bool>) {
|
||||
for (k, v) in m {
|
||||
match feature_for_key(k) {
|
||||
Some(feat) => {
|
||||
if *v {
|
||||
self.enable(feat);
|
||||
} else {
|
||||
self.disable(feat);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
tracing::warn!("unknown feature key in config: {k}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_config(
|
||||
cfg: &ConfigToml,
|
||||
config_profile: &ConfigProfile,
|
||||
overrides: FeatureOverrides,
|
||||
) -> Self {
|
||||
let mut features = Features::with_defaults();
|
||||
|
||||
let base_legacy = LegacyFeatureToggles {
|
||||
experimental_use_freeform_apply_patch: cfg.experimental_use_freeform_apply_patch,
|
||||
experimental_use_exec_command_tool: cfg.experimental_use_exec_command_tool,
|
||||
experimental_use_unified_exec_tool: cfg.experimental_use_unified_exec_tool,
|
||||
experimental_use_rmcp_client: cfg.experimental_use_rmcp_client,
|
||||
tools_web_search: cfg.tools.as_ref().and_then(|t| t.web_search),
|
||||
tools_view_image: cfg.tools.as_ref().and_then(|t| t.view_image),
|
||||
..Default::default()
|
||||
};
|
||||
base_legacy.apply(&mut features);
|
||||
|
||||
if let Some(base_features) = cfg.features.as_ref() {
|
||||
features.apply_map(&base_features.entries);
|
||||
}
|
||||
|
||||
let profile_legacy = LegacyFeatureToggles {
|
||||
include_plan_tool: config_profile.include_plan_tool,
|
||||
include_apply_patch_tool: config_profile.include_apply_patch_tool,
|
||||
include_view_image_tool: config_profile.include_view_image_tool,
|
||||
experimental_use_freeform_apply_patch: config_profile
|
||||
.experimental_use_freeform_apply_patch,
|
||||
experimental_use_exec_command_tool: config_profile.experimental_use_exec_command_tool,
|
||||
experimental_use_unified_exec_tool: config_profile.experimental_use_unified_exec_tool,
|
||||
experimental_use_rmcp_client: config_profile.experimental_use_rmcp_client,
|
||||
tools_web_search: config_profile.tools_web_search,
|
||||
tools_view_image: config_profile.tools_view_image,
|
||||
};
|
||||
profile_legacy.apply(&mut features);
|
||||
if let Some(profile_features) = config_profile.features.as_ref() {
|
||||
features.apply_map(&profile_features.entries);
|
||||
}
|
||||
|
||||
overrides.apply(&mut features);
|
||||
|
||||
features
|
||||
}
|
||||
}
|
||||
/// Keys accepted in `[features]` tables.
fn feature_for_key(key: &str) -> Option<Feature> {
    for spec in FEATURES {
        if spec.key == key {
            return Some(spec.id);
        }
    }
    legacy::feature_for_key(key)
}

/// Deserializable features table for TOML.
#[derive(Deserialize, Debug, Clone, Default, PartialEq)]
pub struct FeaturesToml {
    #[serde(flatten)]
    pub entries: BTreeMap<String, bool>,
}

/// Single, easy-to-read registry of all feature definitions.
#[derive(Debug, Clone, Copy)]
pub struct FeatureSpec {
    pub id: Feature,
    pub key: &'static str,
    pub stage: Stage,
    pub default_enabled: bool,
}

pub const FEATURES: &[FeatureSpec] = &[
    FeatureSpec {
        id: Feature::UnifiedExec,
        key: "unified_exec",
        stage: Stage::Experimental,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::StreamableShell,
        key: "streamable_shell",
        stage: Stage::Experimental,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::RmcpClient,
        key: "rmcp_client",
        stage: Stage::Experimental,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::PlanTool,
        key: "plan_tool",
        stage: Stage::Stable,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ApplyPatchFreeform,
        key: "apply_patch_freeform",
        stage: Stage::Beta,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ViewImageTool,
        key: "view_image_tool",
        stage: Stage::Stable,
        default_enabled: true,
    },
    FeatureSpec {
        id: Feature::WebSearchRequest,
        key: "web_search_request",
        stage: Stage::Stable,
        default_enabled: false,
    },
];
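
Since `feature_for_key` falls through to `legacy::feature_for_key`, canonical and legacy spellings resolve to the same feature id. A quick sketch (assumes `Feature` implements `PartialEq`, as the registry comparisons suggest):

// Sketch: canonical and legacy keys resolve to the same feature.
assert_eq!(feature_for_key("unified_exec"), Some(Feature::UnifiedExec));
// Legacy alias (defined in features/legacy.rs below) maps to the same id,
// logging a deprecation hint along the way.
assert_eq!(
    feature_for_key("experimental_use_unified_exec_tool"),
    Some(Feature::UnifiedExec)
);
assert_eq!(feature_for_key("no_such_key"), None);
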
codex-rs/core/src/features/legacy.rs (new file, +158 lines)
@@ -0,0 +1,158 @@
use super::Feature;
use super::Features;
use tracing::info;

#[derive(Clone, Copy)]
struct Alias {
    legacy_key: &'static str,
    feature: Feature,
}

const ALIASES: &[Alias] = &[
    Alias {
        legacy_key: "experimental_use_unified_exec_tool",
        feature: Feature::UnifiedExec,
    },
    Alias {
        legacy_key: "experimental_use_exec_command_tool",
        feature: Feature::StreamableShell,
    },
    Alias {
        legacy_key: "experimental_use_rmcp_client",
        feature: Feature::RmcpClient,
    },
    Alias {
        legacy_key: "experimental_use_freeform_apply_patch",
        feature: Feature::ApplyPatchFreeform,
    },
    Alias {
        legacy_key: "include_apply_patch_tool",
        feature: Feature::ApplyPatchFreeform,
    },
    Alias {
        legacy_key: "include_plan_tool",
        feature: Feature::PlanTool,
    },
    Alias {
        legacy_key: "include_view_image_tool",
        feature: Feature::ViewImageTool,
    },
    Alias {
        legacy_key: "web_search",
        feature: Feature::WebSearchRequest,
    },
];

pub(crate) fn feature_for_key(key: &str) -> Option<Feature> {
    ALIASES
        .iter()
        .find(|alias| alias.legacy_key == key)
        .map(|alias| {
            log_alias(alias.legacy_key, alias.feature);
            alias.feature
        })
}

#[derive(Debug, Default)]
pub struct LegacyFeatureToggles {
    pub include_plan_tool: Option<bool>,
    pub include_apply_patch_tool: Option<bool>,
    pub include_view_image_tool: Option<bool>,
    pub experimental_use_freeform_apply_patch: Option<bool>,
    pub experimental_use_exec_command_tool: Option<bool>,
    pub experimental_use_unified_exec_tool: Option<bool>,
    pub experimental_use_rmcp_client: Option<bool>,
    pub tools_web_search: Option<bool>,
    pub tools_view_image: Option<bool>,
}

impl LegacyFeatureToggles {
    pub fn apply(self, features: &mut Features) {
        set_if_some(
            features,
            Feature::PlanTool,
            self.include_plan_tool,
            "include_plan_tool",
        );
        set_if_some(
            features,
            Feature::ApplyPatchFreeform,
            self.include_apply_patch_tool,
            "include_apply_patch_tool",
        );
        set_if_some(
            features,
            Feature::ApplyPatchFreeform,
            self.experimental_use_freeform_apply_patch,
            "experimental_use_freeform_apply_patch",
        );
        set_if_some(
            features,
            Feature::StreamableShell,
            self.experimental_use_exec_command_tool,
            "experimental_use_exec_command_tool",
        );
        set_if_some(
            features,
            Feature::UnifiedExec,
            self.experimental_use_unified_exec_tool,
            "experimental_use_unified_exec_tool",
        );
        set_if_some(
            features,
            Feature::RmcpClient,
            self.experimental_use_rmcp_client,
            "experimental_use_rmcp_client",
        );
        set_if_some(
            features,
            Feature::WebSearchRequest,
            self.tools_web_search,
            "tools.web_search",
        );
        set_if_some(
            features,
            Feature::ViewImageTool,
            self.include_view_image_tool,
            "include_view_image_tool",
        );
        set_if_some(
            features,
            Feature::ViewImageTool,
            self.tools_view_image,
            "tools.view_image",
        );
    }
}

fn set_if_some(
    features: &mut Features,
    feature: Feature,
    maybe_value: Option<bool>,
    alias_key: &'static str,
) {
    if let Some(enabled) = maybe_value {
        set_feature(features, feature, enabled);
        log_alias(alias_key, feature);
    }
}

fn set_feature(features: &mut Features, feature: Feature, enabled: bool) {
    if enabled {
        features.enable(feature);
    } else {
        features.disable(feature);
    }
}

fn log_alias(alias: &str, feature: Feature) {
    let canonical = feature.key();
    if alias == canonical {
        return;
    }
    info!(
        %alias,
        canonical,
        "legacy feature toggle detected; prefer `[features].{canonical}`"
    );
}
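
The toggles are tri-state: `None` leaves the registry default alone, while `Some(_)` forces the feature and logs the alias hint. A small sketch, assuming the types above:

let mut features = Features::with_defaults();

// view_image_tool defaults to enabled; a default struct (all None) changes nothing.
LegacyFeatureToggles::default().apply(&mut features);
assert!(features.enabled(Feature::ViewImageTool));

// Some(false) forces it off and logs `prefer [features].view_image_tool`.
LegacyFeatureToggles {
    tools_view_image: Some(false),
    ..Default::default()
}
.apply(&mut features);
assert!(!features.enabled(Feature::ViewImageTool));
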
@@ -11,6 +11,8 @@ pub mod bash;
 mod chat_completions;
 mod client;
 mod client_common;
+mod codebase_change_notice;
+mod codebase_snapshot;
 pub mod codex;
 mod codex_conversation;
 pub mod token_data;
@@ -29,9 +31,11 @@ pub mod exec;
 mod exec_command;
 pub mod exec_env;
 pub mod executor;
+pub mod features;
 mod flags;
 pub mod git_info;
 pub mod landlock;
+pub mod mcp;
 mod mcp_connection_manager;
 mod mcp_tool_call;
 mod message_history;
codex-rs/core/src/mcp/auth.rs (new file, +58 lines)
@@ -0,0 +1,58 @@
use std::collections::HashMap;

use anyhow::Result;
use codex_protocol::protocol::McpAuthStatus;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_rmcp_client::determine_streamable_http_auth_status;
use futures::future::join_all;
use tracing::warn;

use crate::config_types::McpServerConfig;
use crate::config_types::McpServerTransportConfig;

pub async fn compute_auth_statuses<'a, I>(
    servers: I,
    store_mode: OAuthCredentialsStoreMode,
) -> HashMap<String, McpAuthStatus>
where
    I: IntoIterator<Item = (&'a String, &'a McpServerConfig)>,
{
    let futures = servers.into_iter().map(|(name, config)| {
        let name = name.clone();
        let config = config.clone();
        async move {
            let status = match compute_auth_status(&name, &config, store_mode).await {
                Ok(status) => status,
                Err(error) => {
                    warn!("failed to determine auth status for MCP server `{name}`: {error:?}");
                    McpAuthStatus::Unsupported
                }
            };
            (name, status)
        }
    });

    join_all(futures).await.into_iter().collect()
}

async fn compute_auth_status(
    server_name: &str,
    config: &McpServerConfig,
    store_mode: OAuthCredentialsStoreMode,
) -> Result<McpAuthStatus> {
    match &config.transport {
        McpServerTransportConfig::Stdio { .. } => Ok(McpAuthStatus::Unsupported),
        McpServerTransportConfig::StreamableHttp {
            url,
            bearer_token_env_var,
        } => {
            determine_streamable_http_auth_status(
                server_name,
                url,
                bearer_token_env_var.as_deref(),
                store_mode,
            )
            .await
        }
    }
}
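
Statuses are computed concurrently, and a per-server failure degrades to `Unsupported` rather than failing the whole map. A hedged usage sketch (the `servers` map and `store_mode` value are hypothetical inputs; assumes `McpAuthStatus` implements `Debug`):

// servers: HashMap<String, McpServerConfig>, store_mode: OAuthCredentialsStoreMode
let statuses: HashMap<String, McpAuthStatus> =
    compute_auth_statuses(servers.iter(), store_mode).await;
for (name, status) in &statuses {
    println!("{name}: {status:?}"); // stdio servers report Unsupported
}
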
codex-rs/core/src/mcp/mod.rs (new file, +1 line)
@@ -0,0 +1 @@
pub mod auth;
@@ -8,6 +8,7 @@
 use std::collections::HashMap;
 use std::collections::HashSet;
+use std::env;
 use std::ffi::OsString;
 use std::sync::Arc;
 use std::time::Duration;
@@ -16,6 +17,7 @@ use anyhow::Context;
 use anyhow::Result;
 use anyhow::anyhow;
 use codex_mcp_client::McpClient;
+use codex_rmcp_client::OAuthCredentialsStoreMode;
 use codex_rmcp_client::RmcpClient;
 use mcp_types::ClientCapabilities;
 use mcp_types::Implementation;
@@ -125,9 +127,11 @@ impl McpClientAdapter {
         bearer_token: Option<String>,
         params: mcp_types::InitializeRequestParams,
         startup_timeout: Duration,
+        store_mode: OAuthCredentialsStoreMode,
     ) -> Result<Self> {
         let client = Arc::new(
-            RmcpClient::new_streamable_http_client(&server_name, &url, bearer_token).await?,
+            RmcpClient::new_streamable_http_client(&server_name, &url, bearer_token, store_mode)
+                .await?,
         );
         client.initialize(params, Some(startup_timeout)).await?;
         Ok(McpClientAdapter::Rmcp(client))
@@ -182,6 +186,7 @@ impl McpConnectionManager {
     pub async fn new(
         mcp_servers: HashMap<String, McpServerConfig>,
         use_rmcp_client: bool,
+        store_mode: OAuthCredentialsStoreMode,
     ) -> Result<(Self, ClientStartErrors)> {
         // Early exit if no servers are configured.
         if mcp_servers.is_empty() {
@@ -202,9 +207,21 @@ impl McpConnectionManager {
                 continue;
             }
 
+            if !cfg.enabled {
+                continue;
+            }
+
             let startup_timeout = cfg.startup_timeout_sec.unwrap_or(DEFAULT_STARTUP_TIMEOUT);
             let tool_timeout = cfg.tool_timeout_sec.unwrap_or(DEFAULT_TOOL_TIMEOUT);
 
+            let resolved_bearer_token = match &cfg.transport {
+                McpServerTransportConfig::StreamableHttp {
+                    bearer_token_env_var,
+                    ..
+                } => resolve_bearer_token(&server_name, bearer_token_env_var.as_deref()),
+                _ => Ok(None),
+            };
+
             join_set.spawn(async move {
                 let McpServerConfig { transport, .. } = cfg;
                 let params = mcp_types::InitializeRequestParams {
@@ -242,13 +259,14 @@ impl McpConnectionManager {
                     )
                     .await
                 }
-                McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
+                McpServerTransportConfig::StreamableHttp { url, .. } => {
                     McpClientAdapter::new_streamable_http_client(
                         server_name.clone(),
                         url,
-                        bearer_token,
+                        resolved_bearer_token.unwrap_or_default(),
                         params,
                         startup_timeout,
+                        store_mode,
                     )
                     .await
                 }
@@ -336,6 +354,33 @@ impl McpConnectionManager {
     }
 }
 
+fn resolve_bearer_token(
+    server_name: &str,
+    bearer_token_env_var: Option<&str>,
+) -> Result<Option<String>> {
+    let Some(env_var) = bearer_token_env_var else {
+        return Ok(None);
+    };
+
+    match env::var(env_var) {
+        Ok(value) => {
+            if value.is_empty() {
+                Err(anyhow!(
+                    "Environment variable {env_var} for MCP server '{server_name}' is empty"
+                ))
+            } else {
+                Ok(Some(value))
+            }
+        }
+        Err(env::VarError::NotPresent) => Err(anyhow!(
+            "Environment variable {env_var} for MCP server '{server_name}' is not set"
+        )),
+        Err(env::VarError::NotUnicode(_)) => Err(anyhow!(
+            "Environment variable {env_var} for MCP server '{server_name}' contains invalid Unicode"
+        )),
+    }
+}
+
 /// Query every server for its available tools and return a single map that
 /// contains **all** tools. Each key is the fully-qualified name for the tool.
 async fn list_all_tools(clients: &HashMap<String, ManagedClient>) -> Result<Vec<ToolInfo>> {
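
`resolve_bearer_token` distinguishes "no env var configured" (fine, no token) from "configured but unusable" (a hard error naming the server and variable). A sketch of the contract, assuming the function above (the variable name is hypothetical):

// No env var configured: Ok(None) -- the server simply sends no bearer token.
assert_eq!(resolve_bearer_token("docs", None).unwrap(), None);

// Configured but unset, empty, or non-unicode: an error that names both the
// server and the variable, so the misconfiguration surfaces at startup.
assert!(resolve_bearer_token("docs", Some("CODEX_DOCS_MCP_TOKEN")).is_err());
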
@@ -119,8 +119,10 @@ pub fn find_family_for_model(mut slug: &str) -> Option<ModelFamily> {
         reasoning_summary_format: ReasoningSummaryFormat::Experimental,
         base_instructions: GPT_5_CODEX_INSTRUCTIONS.to_string(),
         experimental_supported_tools: vec![
+            "grep_files".to_string(),
+            "list_dir".to_string(),
             "read_file".to_string(),
-            "test_sync_tool".to_string()
+            "test_sync_tool".to_string(),
         ],
         supports_parallel_tool_calls: true,
     )
@@ -133,7 +135,11 @@ pub fn find_family_for_model(mut slug: &str) -> Option<ModelFamily> {
         reasoning_summary_format: ReasoningSummaryFormat::Experimental,
         base_instructions: GPT_5_CODEX_INSTRUCTIONS.to_string(),
         apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
-        experimental_supported_tools: vec!["read_file".to_string()],
+        experimental_supported_tools: vec![
+            "grep_files".to_string(),
+            "list_dir".to_string(),
+            "read_file".to_string(),
+        ],
         supports_parallel_tool_calls: true,
     )

@@ -2,6 +2,7 @@
 
 use codex_protocol::models::ResponseItem;
 
+use crate::codebase_snapshot::CodebaseSnapshot;
 use crate::conversation_history::ConversationHistory;
 use crate::protocol::RateLimitSnapshot;
 use crate::protocol::TokenUsage;
@@ -13,6 +14,7 @@ pub(crate) struct SessionState {
     pub(crate) history: ConversationHistory,
     pub(crate) token_info: Option<TokenUsageInfo>,
     pub(crate) latest_rate_limits: Option<RateLimitSnapshot>,
+    pub(crate) codebase_snapshot: Option<CodebaseSnapshot>,
 }
 
 impl SessionState {

@@ -34,6 +34,16 @@ pub(crate) enum TaskKind {
     Compact,
 }
 
+impl TaskKind {
+    pub(crate) fn header_value(self) -> &'static str {
+        match self {
+            TaskKind::Regular => "standard",
+            TaskKind::Review => "review",
+            TaskKind::Compact => "compact",
+        }
+    }
+}
+
 #[derive(Clone)]
 pub(crate) struct RunningTask {
     pub(crate) handle: AbortHandle,
@@ -113,3 +123,15 @@ impl ActiveTurn {
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::TaskKind;
+
+    #[test]
+    fn header_value_matches_expected_labels() {
+        assert_eq!(TaskKind::Regular.header_value(), "standard");
+        assert_eq!(TaskKind::Review.header_value(), "review");
+        assert_eq!(TaskKind::Compact.header_value(), "compact");
+    }
+}

@@ -27,6 +27,6 @@ impl SessionTask for RegularTask {
         input: Vec<InputItem>,
     ) -> Option<String> {
         let sess = session.clone_session();
-        run_task(sess, ctx, sub_id, input).await
+        run_task(sess, ctx, sub_id, input, TaskKind::Regular).await
     }
 }

@@ -28,7 +28,7 @@ impl SessionTask for ReviewTask {
         input: Vec<InputItem>,
     ) -> Option<String> {
         let sess = session.clone_session();
-        run_task(sess, ctx, sub_id, input).await
+        run_task(sess, ctx, sub_id, input, TaskKind::Review).await
     }
 
     async fn abort(&self, session: Arc<SessionTaskContext>, sub_id: &str) {
codex-rs/core/src/tools/handlers/grep_files.rs (new file, +272 lines)
@@ -0,0 +1,272 @@
use std::path::Path;
use std::time::Duration;

use async_trait::async_trait;
use serde::Deserialize;
use tokio::process::Command;
use tokio::time::timeout;

use crate::function_tool::FunctionCallError;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolOutput;
use crate::tools::context::ToolPayload;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;

pub struct GrepFilesHandler;

const DEFAULT_LIMIT: usize = 100;
const MAX_LIMIT: usize = 2000;
const COMMAND_TIMEOUT: Duration = Duration::from_secs(30);

fn default_limit() -> usize {
    DEFAULT_LIMIT
}

#[derive(Deserialize)]
struct GrepFilesArgs {
    pattern: String,
    #[serde(default)]
    include: Option<String>,
    #[serde(default)]
    path: Option<String>,
    #[serde(default = "default_limit")]
    limit: usize,
}

#[async_trait]
impl ToolHandler for GrepFilesHandler {
    fn kind(&self) -> ToolKind {
        ToolKind::Function
    }

    async fn handle(&self, invocation: ToolInvocation) -> Result<ToolOutput, FunctionCallError> {
        let ToolInvocation { payload, turn, .. } = invocation;

        let arguments = match payload {
            ToolPayload::Function { arguments } => arguments,
            _ => {
                return Err(FunctionCallError::RespondToModel(
                    "grep_files handler received unsupported payload".to_string(),
                ));
            }
        };

        let args: GrepFilesArgs = serde_json::from_str(&arguments).map_err(|err| {
            FunctionCallError::RespondToModel(format!(
                "failed to parse function arguments: {err:?}"
            ))
        })?;

        let pattern = args.pattern.trim();
        if pattern.is_empty() {
            return Err(FunctionCallError::RespondToModel(
                "pattern must not be empty".to_string(),
            ));
        }

        if args.limit == 0 {
            return Err(FunctionCallError::RespondToModel(
                "limit must be greater than zero".to_string(),
            ));
        }

        let limit = args.limit.min(MAX_LIMIT);
        let search_path = turn.resolve_path(args.path.clone());

        verify_path_exists(&search_path).await?;

        let include = args.include.as_deref().map(str::trim).and_then(|val| {
            if val.is_empty() {
                None
            } else {
                Some(val.to_string())
            }
        });

        let search_results =
            run_rg_search(pattern, include.as_deref(), &search_path, limit, &turn.cwd).await?;

        if search_results.is_empty() {
            Ok(ToolOutput::Function {
                content: "No matches found.".to_string(),
                success: Some(false),
            })
        } else {
            Ok(ToolOutput::Function {
                content: search_results.join("\n"),
                success: Some(true),
            })
        }
    }
}

async fn verify_path_exists(path: &Path) -> Result<(), FunctionCallError> {
    tokio::fs::metadata(path).await.map_err(|err| {
        FunctionCallError::RespondToModel(format!("unable to access `{}`: {err}", path.display()))
    })?;
    Ok(())
}

async fn run_rg_search(
    pattern: &str,
    include: Option<&str>,
    search_path: &Path,
    limit: usize,
    cwd: &Path,
) -> Result<Vec<String>, FunctionCallError> {
    let mut command = Command::new("rg");
    command
        .current_dir(cwd)
        .arg("--files-with-matches")
        .arg("--sortr=modified")
        .arg("--regexp")
        .arg(pattern)
        .arg("--no-messages");

    if let Some(glob) = include {
        command.arg("--glob").arg(glob);
    }

    command.arg("--").arg(search_path);

    let output = timeout(COMMAND_TIMEOUT, command.output())
        .await
        .map_err(|_| {
            FunctionCallError::RespondToModel("rg timed out after 30 seconds".to_string())
        })?
        .map_err(|err| {
            FunctionCallError::RespondToModel(format!(
                "failed to launch rg: {err}. Ensure ripgrep is installed and on PATH."
            ))
        })?;

    match output.status.code() {
        Some(0) => Ok(parse_results(&output.stdout, limit)),
        Some(1) => Ok(Vec::new()),
        _ => {
            let stderr = String::from_utf8_lossy(&output.stderr);
            Err(FunctionCallError::RespondToModel(format!(
                "rg failed: {stderr}"
            )))
        }
    }
}

fn parse_results(stdout: &[u8], limit: usize) -> Vec<String> {
    let mut results = Vec::new();
    for line in stdout.split(|byte| *byte == b'\n') {
        if line.is_empty() {
            continue;
        }
        if let Ok(text) = std::str::from_utf8(line) {
            if text.is_empty() {
                continue;
            }
            results.push(text.to_string());
            if results.len() == limit {
                break;
            }
        }
    }
    results
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::process::Command as StdCommand;
    use tempfile::tempdir;

    #[test]
    fn parses_basic_results() {
        let stdout = b"/tmp/file_a.rs\n/tmp/file_b.rs\n";
        let parsed = parse_results(stdout, 10);
        assert_eq!(
            parsed,
            vec!["/tmp/file_a.rs".to_string(), "/tmp/file_b.rs".to_string()]
        );
    }

    #[test]
    fn parse_truncates_after_limit() {
        let stdout = b"/tmp/file_a.rs\n/tmp/file_b.rs\n/tmp/file_c.rs\n";
        let parsed = parse_results(stdout, 2);
        assert_eq!(
            parsed,
            vec!["/tmp/file_a.rs".to_string(), "/tmp/file_b.rs".to_string()]
        );
    }

    #[tokio::test]
    async fn run_search_returns_results() -> anyhow::Result<()> {
        if !rg_available() {
            return Ok(());
        }
        let temp = tempdir().expect("create temp dir");
        let dir = temp.path();
        std::fs::write(dir.join("match_one.txt"), "alpha beta gamma").unwrap();
        std::fs::write(dir.join("match_two.txt"), "alpha delta").unwrap();
        std::fs::write(dir.join("other.txt"), "omega").unwrap();

        let results = run_rg_search("alpha", None, dir, 10, dir).await?;
        assert_eq!(results.len(), 2);
        assert!(results.iter().any(|path| path.ends_with("match_one.txt")));
        assert!(results.iter().any(|path| path.ends_with("match_two.txt")));
        Ok(())
    }

    #[tokio::test]
    async fn run_search_with_glob_filter() -> anyhow::Result<()> {
        if !rg_available() {
            return Ok(());
        }
        let temp = tempdir().expect("create temp dir");
        let dir = temp.path();
        std::fs::write(dir.join("match_one.rs"), "alpha beta gamma").unwrap();
        std::fs::write(dir.join("match_two.txt"), "alpha delta").unwrap();

        let results = run_rg_search("alpha", Some("*.rs"), dir, 10, dir).await?;
        assert_eq!(results.len(), 1);
        assert!(results.iter().all(|path| path.ends_with("match_one.rs")));
        Ok(())
    }

    #[tokio::test]
    async fn run_search_respects_limit() -> anyhow::Result<()> {
        if !rg_available() {
            return Ok(());
        }
        let temp = tempdir().expect("create temp dir");
        let dir = temp.path();
        std::fs::write(dir.join("one.txt"), "alpha one").unwrap();
        std::fs::write(dir.join("two.txt"), "alpha two").unwrap();
        std::fs::write(dir.join("three.txt"), "alpha three").unwrap();

        let results = run_rg_search("alpha", None, dir, 2, dir).await?;
        assert_eq!(results.len(), 2);
        Ok(())
    }

    #[tokio::test]
    async fn run_search_handles_no_matches() -> anyhow::Result<()> {
        if !rg_available() {
            return Ok(());
        }
        let temp = tempdir().expect("create temp dir");
        let dir = temp.path();
        std::fs::write(dir.join("one.txt"), "omega").unwrap();

        let results = run_rg_search("alpha", None, dir, 5, dir).await?;
        assert!(results.is_empty());
        Ok(())
    }

    fn rg_available() -> bool {
        StdCommand::new("rg")
            .arg("--version")
            .output()
            .map(|output| output.status.success())
            .unwrap_or(false)
    }
}
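
The handler receives its arguments as a JSON string. A sketch of a typical call and how it deserializes into `GrepFilesArgs` (the values are illustrative):

let arguments = r#"{"pattern": "fn main", "include": "*.rs", "limit": 5}"#;
let args: GrepFilesArgs = serde_json::from_str(arguments).unwrap();
assert_eq!(args.pattern, "fn main");
assert_eq!(args.include.as_deref(), Some("*.rs"));
assert_eq!(args.limit, 5);    // explicit here; otherwise defaults to 100
assert!(args.path.is_none()); // omitted, so the search uses the session cwd
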
codex-rs/core/src/tools/handlers/list_dir.rs (new file, +476 lines)
@@ -0,0 +1,476 @@
use std::collections::VecDeque;
use std::ffi::OsStr;
use std::fs::FileType;
use std::path::Path;
use std::path::PathBuf;

use async_trait::async_trait;
use codex_utils_string::take_bytes_at_char_boundary;
use serde::Deserialize;
use tokio::fs;

use crate::function_tool::FunctionCallError;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolOutput;
use crate::tools::context::ToolPayload;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;

pub struct ListDirHandler;

const MAX_ENTRY_LENGTH: usize = 500;
const INDENTATION_SPACES: usize = 2;

fn default_offset() -> usize {
    1
}

fn default_limit() -> usize {
    25
}

fn default_depth() -> usize {
    2
}

#[derive(Deserialize)]
struct ListDirArgs {
    dir_path: String,
    #[serde(default = "default_offset")]
    offset: usize,
    #[serde(default = "default_limit")]
    limit: usize,
    #[serde(default = "default_depth")]
    depth: usize,
}

#[async_trait]
impl ToolHandler for ListDirHandler {
    fn kind(&self) -> ToolKind {
        ToolKind::Function
    }

    async fn handle(&self, invocation: ToolInvocation) -> Result<ToolOutput, FunctionCallError> {
        let ToolInvocation { payload, .. } = invocation;

        let arguments = match payload {
            ToolPayload::Function { arguments } => arguments,
            _ => {
                return Err(FunctionCallError::RespondToModel(
                    "list_dir handler received unsupported payload".to_string(),
                ));
            }
        };

        let args: ListDirArgs = serde_json::from_str(&arguments).map_err(|err| {
            FunctionCallError::RespondToModel(format!(
                "failed to parse function arguments: {err:?}"
            ))
        })?;

        let ListDirArgs {
            dir_path,
            offset,
            limit,
            depth,
        } = args;

        if offset == 0 {
            return Err(FunctionCallError::RespondToModel(
                "offset must be a 1-indexed entry number".to_string(),
            ));
        }

        if limit == 0 {
            return Err(FunctionCallError::RespondToModel(
                "limit must be greater than zero".to_string(),
            ));
        }

        if depth == 0 {
            return Err(FunctionCallError::RespondToModel(
                "depth must be greater than zero".to_string(),
            ));
        }

        let path = PathBuf::from(&dir_path);
        if !path.is_absolute() {
            return Err(FunctionCallError::RespondToModel(
                "dir_path must be an absolute path".to_string(),
            ));
        }

        let entries = list_dir_slice(&path, offset, limit, depth).await?;
        let mut output = Vec::with_capacity(entries.len() + 1);
        output.push(format!("Absolute path: {}", path.display()));
        output.extend(entries);
        Ok(ToolOutput::Function {
            content: output.join("\n"),
            success: Some(true),
        })
    }
}

async fn list_dir_slice(
    path: &Path,
    offset: usize,
    limit: usize,
    depth: usize,
) -> Result<Vec<String>, FunctionCallError> {
    let mut entries = Vec::new();
    collect_entries(path, Path::new(""), depth, &mut entries).await?;

    if entries.is_empty() {
        return Ok(Vec::new());
    }

    let start_index = offset - 1;
    if start_index >= entries.len() {
        return Err(FunctionCallError::RespondToModel(
            "offset exceeds directory entry count".to_string(),
        ));
    }

    let remaining_entries = entries.len() - start_index;
    let capped_limit = limit.min(remaining_entries);
    let end_index = start_index + capped_limit;
    let mut selected_entries = entries[start_index..end_index].to_vec();
    selected_entries.sort_unstable_by(|a, b| a.name.cmp(&b.name));
    let mut formatted = Vec::with_capacity(selected_entries.len());

    for entry in &selected_entries {
        formatted.push(format_entry_line(entry));
    }

    if end_index < entries.len() {
        formatted.push(format!("More than {capped_limit} entries found"));
    }

    Ok(formatted)
}

async fn collect_entries(
    dir_path: &Path,
    relative_prefix: &Path,
    depth: usize,
    entries: &mut Vec<DirEntry>,
) -> Result<(), FunctionCallError> {
    let mut queue = VecDeque::new();
    queue.push_back((dir_path.to_path_buf(), relative_prefix.to_path_buf(), depth));

    while let Some((current_dir, prefix, remaining_depth)) = queue.pop_front() {
        let mut read_dir = fs::read_dir(&current_dir).await.map_err(|err| {
            FunctionCallError::RespondToModel(format!("failed to read directory: {err}"))
        })?;

        let mut dir_entries = Vec::new();

        while let Some(entry) = read_dir.next_entry().await.map_err(|err| {
            FunctionCallError::RespondToModel(format!("failed to read directory: {err}"))
        })? {
            let file_type = entry.file_type().await.map_err(|err| {
                FunctionCallError::RespondToModel(format!("failed to inspect entry: {err}"))
            })?;

            let file_name = entry.file_name();
            let relative_path = if prefix.as_os_str().is_empty() {
                PathBuf::from(&file_name)
            } else {
                prefix.join(&file_name)
            };

            let display_name = format_entry_component(&file_name);
            let display_depth = prefix.components().count();
            let sort_key = format_entry_name(&relative_path);
            let kind = DirEntryKind::from(&file_type);
            dir_entries.push((
                entry.path(),
                relative_path,
                kind,
                DirEntry {
                    name: sort_key,
                    display_name,
                    depth: display_depth,
                    kind,
                },
            ));
        }

        dir_entries.sort_unstable_by(|a, b| a.3.name.cmp(&b.3.name));

        for (entry_path, relative_path, kind, dir_entry) in dir_entries {
            if kind == DirEntryKind::Directory && remaining_depth > 1 {
                queue.push_back((entry_path, relative_path, remaining_depth - 1));
            }
            entries.push(dir_entry);
        }
    }

    Ok(())
}

fn format_entry_name(path: &Path) -> String {
    let normalized = path.to_string_lossy().replace("\\", "/");
    if normalized.len() > MAX_ENTRY_LENGTH {
        take_bytes_at_char_boundary(&normalized, MAX_ENTRY_LENGTH).to_string()
    } else {
        normalized
    }
}

fn format_entry_component(name: &OsStr) -> String {
    let normalized = name.to_string_lossy();
    if normalized.len() > MAX_ENTRY_LENGTH {
        take_bytes_at_char_boundary(&normalized, MAX_ENTRY_LENGTH).to_string()
    } else {
        normalized.to_string()
    }
}

fn format_entry_line(entry: &DirEntry) -> String {
    let indent = " ".repeat(entry.depth * INDENTATION_SPACES);
    let mut name = entry.display_name.clone();
    match entry.kind {
        DirEntryKind::Directory => name.push('/'),
        DirEntryKind::Symlink => name.push('@'),
        DirEntryKind::Other => name.push('?'),
        DirEntryKind::File => {}
    }
    format!("{indent}{name}")
}

#[derive(Clone)]
struct DirEntry {
    name: String,
    display_name: String,
    depth: usize,
    kind: DirEntryKind,
}

#[derive(Clone, Copy, PartialEq, Eq)]
enum DirEntryKind {
    Directory,
    File,
    Symlink,
    Other,
}

impl From<&FileType> for DirEntryKind {
    fn from(file_type: &FileType) -> Self {
        if file_type.is_symlink() {
            DirEntryKind::Symlink
        } else if file_type.is_dir() {
            DirEntryKind::Directory
        } else if file_type.is_file() {
            DirEntryKind::File
        } else {
            DirEntryKind::Other
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[tokio::test]
    async fn lists_directory_entries() {
        let temp = tempdir().expect("create tempdir");
        let dir_path = temp.path();

        let sub_dir = dir_path.join("nested");
        tokio::fs::create_dir(&sub_dir)
            .await
            .expect("create sub dir");

        let deeper_dir = sub_dir.join("deeper");
        tokio::fs::create_dir(&deeper_dir)
            .await
            .expect("create deeper dir");

        tokio::fs::write(dir_path.join("entry.txt"), b"content")
            .await
            .expect("write file");
        tokio::fs::write(sub_dir.join("child.txt"), b"child")
            .await
            .expect("write child");
        tokio::fs::write(deeper_dir.join("grandchild.txt"), b"grandchild")
            .await
            .expect("write grandchild");

        #[cfg(unix)]
        {
            use std::os::unix::fs::symlink;
            let link_path = dir_path.join("link");
            symlink(dir_path.join("entry.txt"), &link_path).expect("create symlink");
        }

        let entries = list_dir_slice(dir_path, 1, 20, 3)
            .await
            .expect("list directory");

        #[cfg(unix)]
        let expected = vec![
            "entry.txt".to_string(),
            "link@".to_string(),
            "nested/".to_string(),
            "  child.txt".to_string(),
            "  deeper/".to_string(),
            "    grandchild.txt".to_string(),
        ];

        #[cfg(not(unix))]
        let expected = vec![
            "entry.txt".to_string(),
            "nested/".to_string(),
            "  child.txt".to_string(),
            "  deeper/".to_string(),
            "    grandchild.txt".to_string(),
        ];

        assert_eq!(entries, expected);
    }

    #[tokio::test]
    async fn errors_when_offset_exceeds_entries() {
        let temp = tempdir().expect("create tempdir");
        let dir_path = temp.path();
        tokio::fs::create_dir(dir_path.join("nested"))
            .await
            .expect("create sub dir");

        let err = list_dir_slice(dir_path, 10, 1, 2)
            .await
            .expect_err("offset exceeds entries");
        assert_eq!(
            err,
            FunctionCallError::RespondToModel("offset exceeds directory entry count".to_string())
        );
    }

    #[tokio::test]
    async fn respects_depth_parameter() {
        let temp = tempdir().expect("create tempdir");
        let dir_path = temp.path();
        let nested = dir_path.join("nested");
        let deeper = nested.join("deeper");
        tokio::fs::create_dir(&nested).await.expect("create nested");
        tokio::fs::create_dir(&deeper).await.expect("create deeper");
        tokio::fs::write(dir_path.join("root.txt"), b"root")
            .await
            .expect("write root");
        tokio::fs::write(nested.join("child.txt"), b"child")
            .await
            .expect("write nested");
        tokio::fs::write(deeper.join("grandchild.txt"), b"deep")
            .await
            .expect("write deeper");

        let entries_depth_one = list_dir_slice(dir_path, 1, 10, 1)
            .await
            .expect("list depth 1");
        assert_eq!(
            entries_depth_one,
            vec!["nested/".to_string(), "root.txt".to_string()]
        );

        let entries_depth_two = list_dir_slice(dir_path, 1, 20, 2)
            .await
            .expect("list depth 2");
        assert_eq!(
            entries_depth_two,
            vec![
                "nested/".to_string(),
                "  child.txt".to_string(),
                "  deeper/".to_string(),
                "root.txt".to_string(),
            ]
        );

        let entries_depth_three = list_dir_slice(dir_path, 1, 30, 3)
            .await
            .expect("list depth 3");
        assert_eq!(
            entries_depth_three,
            vec![
                "nested/".to_string(),
                "  child.txt".to_string(),
                "  deeper/".to_string(),
                "    grandchild.txt".to_string(),
                "root.txt".to_string(),
            ]
        );
    }

    #[tokio::test]
    async fn handles_large_limit_without_overflow() {
        let temp = tempdir().expect("create tempdir");
        let dir_path = temp.path();
        tokio::fs::write(dir_path.join("alpha.txt"), b"alpha")
            .await
            .expect("write alpha");
        tokio::fs::write(dir_path.join("beta.txt"), b"beta")
            .await
            .expect("write beta");
        tokio::fs::write(dir_path.join("gamma.txt"), b"gamma")
            .await
            .expect("write gamma");

        let entries = list_dir_slice(dir_path, 2, usize::MAX, 1)
            .await
            .expect("list without overflow");
        assert_eq!(
            entries,
            vec!["beta.txt".to_string(), "gamma.txt".to_string()]
        );
    }

    #[tokio::test]
    async fn indicates_truncated_results() {
        let temp = tempdir().expect("create tempdir");
        let dir_path = temp.path();

        for idx in 0..40 {
            let file = dir_path.join(format!("file_{idx:02}.txt"));
            tokio::fs::write(file, b"content")
                .await
                .expect("write file");
        }

        let entries = list_dir_slice(dir_path, 1, 25, 1)
            .await
            .expect("list directory");
        assert_eq!(entries.len(), 26);
        assert_eq!(
            entries.last(),
            Some(&"More than 25 entries found".to_string())
        );
    }

    #[tokio::test]
    async fn bfs_truncation() -> anyhow::Result<()> {
        let temp = tempdir()?;
        let dir_path = temp.path();
        let nested = dir_path.join("nested");
        let deeper = nested.join("deeper");
        tokio::fs::create_dir(&nested).await?;
        tokio::fs::create_dir(&deeper).await?;
        tokio::fs::write(dir_path.join("root.txt"), b"root").await?;
        tokio::fs::write(nested.join("child.txt"), b"child").await?;
        tokio::fs::write(deeper.join("grandchild.txt"), b"deep").await?;

        let entries_depth_three = list_dir_slice(dir_path, 1, 3, 3).await?;
        assert_eq!(
            entries_depth_three,
            vec![
                "nested/".to_string(),
                "  child.txt".to_string(),
                "root.txt".to_string(),
                "More than 3 entries found".to_string()
            ]
        );

        Ok(())
    }
}
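
Defaults mirror the serde attributes (`offset` 1, `limit` 25, `depth` 2), and entries are gathered breadth-first before the 1-indexed offset/limit window is applied. A sketch of argument parsing (values illustrative):

let arguments = r#"{"dir_path": "/workspace/project", "depth": 3}"#;
let args: ListDirArgs = serde_json::from_str(arguments).unwrap();
assert_eq!(args.offset, 1); // default: start at the first entry
assert_eq!(args.limit, 25); // default page size
assert_eq!(args.depth, 3);
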
@@ -1,5 +1,7 @@
 pub mod apply_patch;
 mod exec_stream;
+mod grep_files;
+mod list_dir;
 mod mcp;
 mod plan;
 mod read_file;
@@ -12,6 +14,8 @@ pub use plan::PLAN_TOOL;
 
 pub use apply_patch::ApplyPatchHandler;
 pub use exec_stream::ExecStreamHandler;
+pub use grep_files::GrepFilesHandler;
+pub use list_dir::ListDirHandler;
 pub use mcp::McpHandler;
 pub use plan::PlanHandler;
 pub use read_file::ReadFileHandler;

File diff suppressed because it is too large.
@@ -1,6 +1,8 @@
 use std::sync::Arc;
 
-use tokio::task::JoinHandle;
+use tokio::sync::RwLock;
+use tokio_util::either::Either;
+use tokio_util::task::AbortOnDropHandle;
 
 use crate::codex::Session;
 use crate::codex::TurnContext;
@@ -11,20 +13,13 @@ use crate::tools::router::ToolCall;
 use crate::tools::router::ToolRouter;
 use codex_protocol::models::ResponseInputItem;
 
-use crate::codex::ProcessedResponseItem;
-
-struct PendingToolCall {
-    index: usize,
-    handle: JoinHandle<Result<ResponseInputItem, FunctionCallError>>,
-}
-
 pub(crate) struct ToolCallRuntime {
     router: Arc<ToolRouter>,
     session: Arc<Session>,
     turn_context: Arc<TurnContext>,
     tracker: SharedTurnDiffTracker,
    sub_id: String,
-    pending_calls: Vec<PendingToolCall>,
+    parallel_execution: Arc<RwLock<()>>,
 }
 
 impl ToolCallRuntime {
@@ -41,97 +36,45 @@ impl ToolCallRuntime {
             turn_context,
             tracker,
             sub_id,
-            pending_calls: Vec::new(),
+            parallel_execution: Arc::new(RwLock::new(())),
         }
     }
 
-    pub(crate) async fn handle_tool_call(
-        &mut self,
+    pub(crate) fn handle_tool_call(
+        &self,
         call: ToolCall,
-        output_index: usize,
-        output: &mut [ProcessedResponseItem],
-    ) -> Result<(), CodexErr> {
+    ) -> impl std::future::Future<Output = Result<ResponseInputItem, CodexErr>> {
         let supports_parallel = self.router.tool_supports_parallel(&call.tool_name);
-        if supports_parallel {
-            self.spawn_parallel(call, output_index);
-        } else {
-            self.resolve_pending(output).await?;
-            let response = self.dispatch_serial(call).await?;
-            let slot = output.get_mut(output_index).ok_or_else(|| {
-                CodexErr::Fatal(format!("tool output index {output_index} out of bounds"))
-            })?;
-            slot.response = Some(response);
-        }
-
-        Ok(())
-    }
-
-    pub(crate) fn abort_all(&mut self) {
-        while let Some(pending) = self.pending_calls.pop() {
-            pending.handle.abort();
-        }
-    }
-
-    pub(crate) async fn resolve_pending(
-        &mut self,
-        output: &mut [ProcessedResponseItem],
-    ) -> Result<(), CodexErr> {
-        while let Some(PendingToolCall { index, handle }) = self.pending_calls.pop() {
-            match handle.await {
-                Ok(Ok(response)) => {
-                    if let Some(slot) = output.get_mut(index) {
-                        slot.response = Some(response);
-                    }
-                }
-                Ok(Err(FunctionCallError::Fatal(message))) => {
-                    self.abort_all();
-                    return Err(CodexErr::Fatal(message));
-                }
-                Ok(Err(other)) => {
-                    self.abort_all();
-                    return Err(CodexErr::Fatal(other.to_string()));
-                }
-                Err(join_err) => {
-                    self.abort_all();
-                    return Err(CodexErr::Fatal(format!(
-                        "tool task failed to join: {join_err}"
-                    )));
-                }
-            }
-        }
-
-        Ok(())
-    }
-
-    fn spawn_parallel(&mut self, call: ToolCall, index: usize) {
         let router = Arc::clone(&self.router);
         let session = Arc::clone(&self.session);
         let turn = Arc::clone(&self.turn_context);
         let tracker = Arc::clone(&self.tracker);
        let sub_id = self.sub_id.clone();
-        let handle = tokio::spawn(async move {
-            router
-                .dispatch_tool_call(session, turn, tracker, sub_id, call)
-                .await
-        });
-        self.pending_calls.push(PendingToolCall { index, handle });
-    }
+        let lock = Arc::clone(&self.parallel_execution);
 
-    async fn dispatch_serial(&self, call: ToolCall) -> Result<ResponseInputItem, CodexErr> {
-        match self
-            .router
-            .dispatch_tool_call(
-                Arc::clone(&self.session),
-                Arc::clone(&self.turn_context),
-                Arc::clone(&self.tracker),
-                self.sub_id.clone(),
-                call,
-            )
-            .await
-        {
-            Ok(response) => Ok(response),
-            Err(FunctionCallError::Fatal(message)) => Err(CodexErr::Fatal(message)),
-            Err(other) => Err(CodexErr::Fatal(other.to_string())),
+        let handle: AbortOnDropHandle<Result<ResponseInputItem, FunctionCallError>> =
+            AbortOnDropHandle::new(tokio::spawn(async move {
+                let _guard = if supports_parallel {
+                    Either::Left(lock.read().await)
+                } else {
+                    Either::Right(lock.write().await)
+                };
+
+                router
+                    .dispatch_tool_call(session, turn, tracker, sub_id, call)
+                    .await
+            }));
+
+        async move {
+            match handle.await {
+                Ok(Ok(response)) => Ok(response),
+                Ok(Err(FunctionCallError::Fatal(message))) => Err(CodexErr::Fatal(message)),
+                Ok(Err(other)) => Err(CodexErr::Fatal(other.to_string())),
+                Err(err) => Err(CodexErr::Fatal(format!(
+                    "tool task failed to receive: {err:?}"
+                ))),
+            }
         }
     }
 }
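
The rewrite swaps manual pending-call bookkeeping for a single `RwLock<()>` gate: parallel-safe tools take read guards and may overlap, serial tools take the write guard and run alone, and `AbortOnDropHandle` cancels a spawned call if its future is dropped. A standalone sketch of the gating idea (plain tokio, not the codex types):

use std::sync::Arc;
use tokio::sync::RwLock;

#[tokio::main]
async fn main() {
    let gate = Arc::new(RwLock::new(()));

    // Parallel-safe tools: read guards coexist, so A and B can run concurrently.
    let (a, b) = (Arc::clone(&gate), Arc::clone(&gate));
    let t1 = tokio::spawn(async move { let _g = a.read().await; /* tool A */ });
    let t2 = tokio::spawn(async move { let _g = b.read().await; /* tool B */ });
    let _ = tokio::join!(t1, t2);

    // Serial tool: the write guard waits out all readers, then excludes everyone.
    let _exclusive = gate.write().await; /* tool C runs alone */
}
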
@@ -1,5 +1,7 @@
 use crate::client_common::tools::ResponsesApiTool;
 use crate::client_common::tools::ToolSpec;
+use crate::features::Feature;
+use crate::features::Features;
 use crate::model_family::ModelFamily;
 use crate::tools::handlers::PLAN_TOOL;
 use crate::tools::handlers::apply_patch::ApplyPatchToolType;
@@ -33,26 +35,23 @@ pub(crate) struct ToolsConfig {
 
 pub(crate) struct ToolsConfigParams<'a> {
     pub(crate) model_family: &'a ModelFamily,
-    pub(crate) include_plan_tool: bool,
-    pub(crate) include_apply_patch_tool: bool,
-    pub(crate) include_web_search_request: bool,
-    pub(crate) use_streamable_shell_tool: bool,
-    pub(crate) include_view_image_tool: bool,
-    pub(crate) experimental_unified_exec_tool: bool,
+    pub(crate) features: &'a Features,
 }
 
 impl ToolsConfig {
     pub fn new(params: &ToolsConfigParams) -> Self {
         let ToolsConfigParams {
             model_family,
-            include_plan_tool,
-            include_apply_patch_tool,
-            include_web_search_request,
-            use_streamable_shell_tool,
-            include_view_image_tool,
-            experimental_unified_exec_tool,
+            features,
         } = params;
-        let shell_type = if *use_streamable_shell_tool {
+        let use_streamable_shell_tool = features.enabled(Feature::StreamableShell);
+        let experimental_unified_exec_tool = features.enabled(Feature::UnifiedExec);
+        let include_plan_tool = features.enabled(Feature::PlanTool);
+        let include_apply_patch_tool = features.enabled(Feature::ApplyPatchFreeform);
+        let include_web_search_request = features.enabled(Feature::WebSearchRequest);
+        let include_view_image_tool = features.enabled(Feature::ViewImageTool);
+
+        let shell_type = if use_streamable_shell_tool {
             ConfigShellToolType::Streamable
         } else if model_family.uses_local_shell_tool {
             ConfigShellToolType::Local
@@ -64,7 +63,7 @@ impl ToolsConfig {
             Some(ApplyPatchToolType::Freeform) => Some(ApplyPatchToolType::Freeform),
             Some(ApplyPatchToolType::Function) => Some(ApplyPatchToolType::Function),
             None => {
-                if *include_apply_patch_tool {
+                if include_apply_patch_tool {
                     Some(ApplyPatchToolType::Freeform)
                 } else {
                     None
@@ -74,11 +73,11 @@ impl ToolsConfig {
 
         Self {
             shell_type,
-            plan_tool: *include_plan_tool,
+            plan_tool: include_plan_tool,
             apply_patch_tool_type,
-            web_search_request: *include_web_search_request,
-            include_view_image_tool: *include_view_image_tool,
-            experimental_unified_exec_tool: *experimental_unified_exec_tool,
+            web_search_request: include_web_search_request,
+            include_view_image_tool,
+            experimental_unified_exec_tool,
             experimental_supported_tools: model_family.experimental_supported_tools.clone(),
         }
     }
@@ -320,6 +319,56 @@ fn create_test_sync_tool() -> ToolSpec {
     })
 }
 
+fn create_grep_files_tool() -> ToolSpec {
+    let mut properties = BTreeMap::new();
+    properties.insert(
+        "pattern".to_string(),
+        JsonSchema::String {
+            description: Some("Regular expression pattern to search for.".to_string()),
+        },
+    );
+    properties.insert(
+        "include".to_string(),
+        JsonSchema::String {
+            description: Some(
+                "Optional glob that limits which files are searched (e.g. \"*.rs\" or \
+                 \"*.{ts,tsx}\")."
+                    .to_string(),
+            ),
+        },
+    );
+    properties.insert(
+        "path".to_string(),
+        JsonSchema::String {
+            description: Some(
+                "Directory or file path to search. Defaults to the session's working directory."
+                    .to_string(),
+            ),
+        },
+    );
+    properties.insert(
+        "limit".to_string(),
+        JsonSchema::Number {
+            description: Some(
+                "Maximum number of file paths to return (defaults to 100).".to_string(),
+            ),
+        },
+    );
+
+    ToolSpec::Function(ResponsesApiTool {
+        name: "grep_files".to_string(),
+        description: "Finds files whose contents match the pattern and lists them by modification \
+                      time."
+            .to_string(),
+        strict: false,
+        parameters: JsonSchema::Object {
+            properties,
+            required: Some(vec!["pattern".to_string()]),
+            additional_properties: Some(false.into()),
+        },
+    })
+}
+
 fn create_read_file_tool() -> ToolSpec {
     let mut properties = BTreeMap::new();
     properties.insert(
@@ -342,11 +391,72 @@ fn create_read_file_tool() -> ToolSpec {
             description: Some("The maximum number of lines to return.".to_string()),
         },
     );
+    properties.insert(
+        "mode".to_string(),
+        JsonSchema::String {
+            description: Some(
+                "Optional mode selector: \"slice\" for simple ranges (default) or \"indentation\" \
+                 to expand around an anchor line."
+                    .to_string(),
+            ),
+        },
+    );
+
+    let mut indentation_properties = BTreeMap::new();
+    indentation_properties.insert(
+        "anchor_line".to_string(),
+        JsonSchema::Number {
+            description: Some(
+                "Anchor line to center the indentation lookup on (defaults to offset).".to_string(),
+            ),
+        },
+    );
+    indentation_properties.insert(
+        "max_levels".to_string(),
+        JsonSchema::Number {
+            description: Some(
+                "How many parent indentation levels (smaller indents) to include.".to_string(),
+            ),
+        },
+    );
+    indentation_properties.insert(
+        "include_siblings".to_string(),
+        JsonSchema::Boolean {
+            description: Some(
+                "When true, include additional blocks that share the anchor indentation."
+                    .to_string(),
+            ),
+        },
+    );
+    indentation_properties.insert(
+        "include_header".to_string(),
+        JsonSchema::Boolean {
+            description: Some(
+                "Include doc comments or attributes directly above the selected block.".to_string(),
+            ),
+        },
+    );
+    indentation_properties.insert(
+        "max_lines".to_string(),
+        JsonSchema::Number {
+            description: Some(
+                "Hard cap on the number of lines returned when using indentation mode.".to_string(),
+            ),
+        },
+    );
+    properties.insert(
+        "indentation".to_string(),
+        JsonSchema::Object {
+            properties: indentation_properties,
+            required: None,
+            additional_properties: Some(false.into()),
+        },
+    );
 
     ToolSpec::Function(ResponsesApiTool {
         name: "read_file".to_string(),
         description:
-            "Reads a local file with 1-indexed line numbers and returns up to the requested number of lines."
+            "Reads a local file with 1-indexed line numbers, supporting slice and indentation-aware block modes."
                 .to_string(),
         strict: false,
        parameters: JsonSchema::Object {
@@ -356,6 +466,51 @@ fn create_read_file_tool() -> ToolSpec {
         },
     })
 }
+
+fn create_list_dir_tool() -> ToolSpec {
+    let mut properties = BTreeMap::new();
+    properties.insert(
+        "dir_path".to_string(),
+        JsonSchema::String {
+            description: Some("Absolute path to the directory to list.".to_string()),
+        },
+    );
+    properties.insert(
+        "offset".to_string(),
+        JsonSchema::Number {
+            description: Some(
+                "The entry number to start listing from. Must be 1 or greater.".to_string(),
+            ),
+        },
+    );
+    properties.insert(
+        "limit".to_string(),
+        JsonSchema::Number {
+            description: Some("The maximum number of entries to return.".to_string()),
+        },
+    );
+    properties.insert(
+        "depth".to_string(),
+        JsonSchema::Number {
+            description: Some(
+                "The maximum directory depth to traverse. Must be 1 or greater.".to_string(),
+            ),
+        },
+    );
+
+    ToolSpec::Function(ResponsesApiTool {
+        name: "list_dir".to_string(),
+        description:
+            "Lists entries in a local directory with 1-indexed entry numbers and simple type labels."
+                .to_string(),
+        strict: false,
+        parameters: JsonSchema::Object {
+            properties,
+            required: Some(vec!["dir_path".to_string()]),
+            additional_properties: Some(false.into()),
+        },
+    })
+}
 /// TODO(dylan): deprecate once we get rid of json tool
 #[derive(Serialize, Deserialize)]
 pub(crate) struct ApplyPatchToolArgs {
@@ -565,6 +720,8 @@ pub(crate) fn build_specs(
     use crate::exec_command::create_write_stdin_tool_for_responses_api;
     use crate::tools::handlers::ApplyPatchHandler;
     use crate::tools::handlers::ExecStreamHandler;
+    use crate::tools::handlers::GrepFilesHandler;
+    use crate::tools::handlers::ListDirHandler;
     use crate::tools::handlers::McpHandler;
     use crate::tools::handlers::PlanHandler;
     use crate::tools::handlers::ReadFileHandler;
@@ -632,8 +789,16 @@ pub(crate) fn build_specs(
 
     if config
         .experimental_supported_tools
-        .iter()
-        .any(|tool| tool == "read_file")
+        .contains(&"grep_files".to_string())
     {
+        let grep_files_handler = Arc::new(GrepFilesHandler);
+        builder.push_spec_with_parallel_support(create_grep_files_tool(), true);
+        builder.register_handler("grep_files", grep_files_handler);
+    }
+
+    if config
+        .experimental_supported_tools
+        .contains(&"read_file".to_string())
+    {
         let read_file_handler = Arc::new(ReadFileHandler);
         builder.push_spec_with_parallel_support(create_read_file_tool(), true);
@@ -643,7 +808,16 @@ pub(crate) fn build_specs(
     if config
         .experimental_supported_tools
         .iter()
-        .any(|tool| tool == "test_sync_tool")
+        .any(|tool| tool == "list_dir")
     {
+        let list_dir_handler = Arc::new(ListDirHandler);
+        builder.push_spec_with_parallel_support(create_list_dir_tool(), true);
+        builder.register_handler("list_dir", list_dir_handler);
+    }
+
+    if config
+        .experimental_supported_tools
+        .contains(&"test_sync_tool".to_string())
+    {
         let test_sync_handler = Arc::new(TestSyncHandler);
         builder.push_spec_with_parallel_support(create_test_sync_tool(), true);
@@ -731,14 +905,13 @@ mod tests {
     fn test_build_specs() {
         let model_family = find_family_for_model("codex-mini-latest")
            .expect("codex-mini-latest should be a valid model family");
+        let mut features = Features::with_defaults();
+        features.enable(Feature::PlanTool);
+        features.enable(Feature::WebSearchRequest);
+        features.enable(Feature::UnifiedExec);
         let config = ToolsConfig::new(&ToolsConfigParams {
             model_family: &model_family,
-            include_plan_tool: true,
-            include_apply_patch_tool: false,
-            include_web_search_request: true,
-            use_streamable_shell_tool: false,
-            include_view_image_tool: true,
-            experimental_unified_exec_tool: true,
+            features: &features,
         });
         let (tools, _) = build_specs(&config, Some(HashMap::new())).build();
 
@@ -751,14 +924,13 @@ mod tests {
     #[test]
     fn test_build_specs_default_shell() {
         let model_family = find_family_for_model("o3").expect("o3 should be a valid model family");
+        let mut features = Features::with_defaults();
+        features.enable(Feature::PlanTool);
+        features.enable(Feature::WebSearchRequest);
+        features.enable(Feature::UnifiedExec);
         let config = ToolsConfig::new(&ToolsConfigParams {
             model_family: &model_family,
-            include_plan_tool: true,
-            include_apply_patch_tool: false,
-            include_web_search_request: true,
-            use_streamable_shell_tool: false,
-            include_view_image_tool: true,
-            experimental_unified_exec_tool: true,
+            features: &features,
         });
         let (tools, _) = build_specs(&config, Some(HashMap::new())).build();
 
@@ -773,18 +945,18 @@ mod tests {
     fn test_parallel_support_flags() {
         let model_family = find_family_for_model("gpt-5-codex")
             .expect("codex-mini-latest should be a valid model family");
+        let mut features = Features::with_defaults();
+        features.disable(Feature::ViewImageTool);
+        features.enable(Feature::UnifiedExec);
         let config = ToolsConfig::new(&ToolsConfigParams {
             model_family: &model_family,
-            include_plan_tool: false,
-            include_apply_patch_tool: false,
-            include_web_search_request: false,
-            use_streamable_shell_tool: false,
-            include_view_image_tool: false,
-            experimental_unified_exec_tool: true,
+            features: &features,
        });
         let (tools, _) = build_specs(&config, None).build();
 
         assert!(!find_tool(&tools, "unified_exec").supports_parallel_tool_calls);
+        assert!(find_tool(&tools, "grep_files").supports_parallel_tool_calls);
+        assert!(find_tool(&tools, "list_dir").supports_parallel_tool_calls);
         assert!(find_tool(&tools, "read_file").supports_parallel_tool_calls);
     }
 
@@ -792,14 +964,11 @@ mod tests {
    fn test_test_model_family_includes_sync_tool() {
         let model_family = find_family_for_model("test-gpt-5-codex")
             .expect("test-gpt-5-codex should be a valid model family");
+        let mut features = Features::with_defaults();
+        features.disable(Feature::ViewImageTool);
         let config = ToolsConfig::new(&ToolsConfigParams {
             model_family: &model_family,
-            include_plan_tool: false,
-            include_apply_patch_tool: false,
-            include_web_search_request: false,
-            use_streamable_shell_tool: false,
-            include_view_image_tool: false,
-            experimental_unified_exec_tool: false,
+            features: &features,
         });
         let (tools, _) = build_specs(&config, None).build();
 
@@ -813,19 +982,23 @@ mod tests {
             .iter()
             .any(|tool| tool_name(&tool.spec) == "read_file")
         );
+        assert!(
+            tools
+                .iter()
+                .any(|tool| tool_name(&tool.spec) == "grep_files")
+        );
+        assert!(tools.iter().any(|tool| tool_name(&tool.spec) == "list_dir"));
     }
 
     #[test]
     fn test_build_specs_mcp_tools() {
         let model_family = find_family_for_model("o3").expect("o3 should be a valid model family");
+        let mut features = Features::with_defaults();
+        features.enable(Feature::UnifiedExec);
+        features.enable(Feature::WebSearchRequest);
         let config = ToolsConfig::new(&ToolsConfigParams {
             model_family: &model_family,
-            include_plan_tool: false,
-            include_apply_patch_tool: false,
-            include_web_search_request: true,
-            use_streamable_shell_tool: false,
-            include_view_image_tool: true,
-            experimental_unified_exec_tool: true,
+            features: &features,
         });
         let (tools, _) = build_specs(
             &config,
@@ -923,14 +1096,11 @@ mod tests {
     #[test]
     fn test_build_specs_mcp_tools_sorted_by_name() {
         let model_family = find_family_for_model("o3").expect("o3 should be a valid model family");
+        let mut features = Features::with_defaults();
+        features.enable(Feature::UnifiedExec);
         let config = ToolsConfig::new(&ToolsConfigParams {
             model_family: &model_family,
-            include_plan_tool: false,
-            include_apply_patch_tool: false,
-            include_web_search_request: false,
-            use_streamable_shell_tool: false,
-            include_view_image_tool: true,
-            experimental_unified_exec_tool: true,
+            features: &features,
         });
 
         // Intentionally construct a map with keys that would sort alphabetically.
@@ -1000,14 +1170,12 @@ mod tests {
     fn test_mcp_tool_property_missing_type_defaults_to_string() {
         let model_family = find_family_for_model("gpt-5-codex")
             .expect("gpt-5-codex should be a valid model family");
+        let mut features = Features::with_defaults();
+        features.enable(Feature::UnifiedExec);
+        features.enable(Feature::WebSearchRequest);
         let config = ToolsConfig::new(&ToolsConfigParams {
            model_family: &model_family,
-            include_plan_tool: false,
-            include_apply_patch_tool: false,
-            include_web_search_request: true,
-            use_streamable_shell_tool: false,
-            include_view_image_tool: true,
-            experimental_unified_exec_tool: true,
+            features: &features,
         });
 
         let (tools, _) = build_specs(
@@ -1069,14 +1237,12 @@ mod tests {
     fn test_mcp_tool_integer_normalized_to_number() {
         let model_family = find_family_for_model("gpt-5-codex")
             .expect("gpt-5-codex should be a valid model family");
+        let mut features = Features::with_defaults();
+        features.enable(Feature::UnifiedExec);
+        features.enable(Feature::WebSearchRequest);
         let config = ToolsConfig::new(&ToolsConfigParams {
             model_family: &model_family,
-            include_plan_tool: false,
-            include_apply_patch_tool: false,
-            include_web_search_request: true,
-            use_streamable_shell_tool: false,
-            include_view_image_tool: true,
-            experimental_unified_exec_tool: true,
+            features: &features,
|
||||
});
|
||||
|
||||
let (tools, _) = build_specs(
|
||||
@@ -1133,14 +1299,13 @@ mod tests {
|
||||
fn test_mcp_tool_array_without_items_gets_default_string_items() {
|
||||
let model_family = find_family_for_model("gpt-5-codex")
|
||||
.expect("gpt-5-codex should be a valid model family");
|
||||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
features.enable(Feature::WebSearchRequest);
|
||||
features.enable(Feature::ApplyPatchFreeform);
|
||||
let config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_family: &model_family,
|
||||
include_plan_tool: false,
|
||||
include_apply_patch_tool: true,
|
||||
include_web_search_request: true,
|
||||
use_streamable_shell_tool: false,
|
||||
include_view_image_tool: true,
|
||||
experimental_unified_exec_tool: true,
|
||||
features: &features,
|
||||
});
|
||||
|
||||
let (tools, _) = build_specs(
|
||||
@@ -1200,14 +1365,12 @@ mod tests {
|
||||
fn test_mcp_tool_anyof_defaults_to_string() {
|
||||
let model_family = find_family_for_model("gpt-5-codex")
|
||||
.expect("gpt-5-codex should be a valid model family");
|
||||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
features.enable(Feature::WebSearchRequest);
|
||||
let config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_family: &model_family,
|
||||
include_plan_tool: false,
|
||||
include_apply_patch_tool: false,
|
||||
include_web_search_request: true,
|
||||
use_streamable_shell_tool: false,
|
||||
include_view_image_tool: true,
|
||||
experimental_unified_exec_tool: true,
|
||||
features: &features,
|
||||
});
|
||||
|
||||
let (tools, _) = build_specs(
|
||||
@@ -1279,14 +1442,12 @@ mod tests {
|
||||
fn test_get_openai_tools_mcp_tools_with_additional_properties_schema() {
|
||||
let model_family = find_family_for_model("gpt-5-codex")
|
||||
.expect("gpt-5-codex should be a valid model family");
|
||||
let mut features = Features::with_defaults();
|
||||
features.enable(Feature::UnifiedExec);
|
||||
features.enable(Feature::WebSearchRequest);
|
||||
let config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_family: &model_family,
|
||||
include_plan_tool: false,
|
||||
include_apply_patch_tool: false,
|
||||
include_web_search_request: true,
|
||||
use_streamable_shell_tool: false,
|
||||
include_view_image_tool: true,
|
||||
experimental_unified_exec_tool: true,
|
||||
features: &features,
|
||||
});
|
||||
let (tools, _) = build_specs(
|
||||
&config,
|
||||
|
||||
@@ -110,11 +110,22 @@ impl ManagedUnifiedExecSession {
        let buffer_clone = Arc::clone(&output_buffer);
        let notify_clone = Arc::clone(&output_notify);
        let output_task = tokio::spawn(async move {
-           while let Ok(chunk) = receiver.recv().await {
-               let mut guard = buffer_clone.lock().await;
-               guard.push_chunk(chunk);
-               drop(guard);
-               notify_clone.notify_waiters();
+           loop {
+               match receiver.recv().await {
+                   Ok(chunk) => {
+                       let mut guard = buffer_clone.lock().await;
+                       guard.push_chunk(chunk);
+                       drop(guard);
+                       notify_clone.notify_waiters();
+                   }
+                   // If we lag behind the broadcast buffer, skip missed
+                   // messages but keep the task alive to continue streaming.
+                   Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
+                       continue;
+                   }
+                   // When the sender closes, exit the task.
+                   Err(tokio::sync::broadcast::error::RecvError::Closed) => break,
+               }
            }
        });
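The Lagged arm is the point of this hunk: a tokio broadcast channel drops the oldest messages for a slow receiver and reports that as RecvError::Lagged, which the previous `while let Ok(..)` loop treated as a fatal error and stopped streaming. A minimal, self-contained sketch of the same recovery pattern (illustrative only, not part of the diff):

use tokio::sync::broadcast;

#[tokio::main]
async fn main() {
    // Capacity 2 so a reader that starts late is guaranteed to lag.
    let (tx, mut rx) = broadcast::channel::<u32>(2);
    for n in 0..5 {
        tx.send(n).unwrap();
    }
    drop(tx); // close the channel so the reader eventually sees Closed
    loop {
        match rx.recv().await {
            Ok(chunk) => println!("got {chunk}"),
            // Skip over the dropped backlog instead of exiting the loop.
            Err(broadcast::error::RecvError::Lagged(skipped)) => {
                eprintln!("lagged, skipped {skipped} messages");
                continue;
            }
            Err(broadcast::error::RecvError::Closed) => break,
        }
    }
}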
@@ -49,6 +49,7 @@ impl UserNotifier {
pub(crate) enum UserNotification {
    #[serde(rename_all = "kebab-case")]
    AgentTurnComplete {
+       thread_id: String,
        turn_id: String,

        /// Messages that the user sent to the agent to initiate the turn.

@@ -67,6 +68,7 @@ mod tests {
    #[test]
    fn test_user_notification() -> Result<()> {
        let notification = UserNotification::AgentTurnComplete {
+           thread_id: "b5f6c1c2-1111-2222-3333-444455556666".to_string(),
            turn_id: "12345".to_string(),
            input_messages: vec!["Rename `foo` to `bar` and update the callsites.".to_string()],
            last_assistant_message: Some(

@@ -76,7 +78,7 @@ mod tests {
        let serialized = serde_json::to_string(&notification)?;
        assert_eq!(
            serialized,
-           r#"{"type":"agent-turn-complete","turn-id":"12345","input-messages":["Rename `foo` to `bar` and update the callsites."],"last-assistant-message":"Rename complete and verified `cargo build` succeeds."}"#
+           r#"{"type":"agent-turn-complete","thread-id":"b5f6c1c2-1111-2222-3333-444455556666","turn-id":"12345","input-messages":["Rename `foo` to `bar` and update the callsites."],"last-assistant-message":"Rename complete and verified `cargo build` succeeds."}"#
        );
        Ok(())
    }
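For context on the JSON being asserted: the variant-level `#[serde(rename_all = "kebab-case")]` renames the fields (thread_id becomes thread-id), while the enum's own tagging supplies the `type` field. A reduced sketch assuming an internally tagged enum (the real enum's remaining serde attributes are outside this hunk):

use serde::Serialize;

#[derive(Serialize)]
#[serde(tag = "type", rename_all = "kebab-case")]
enum UserNotification {
    #[serde(rename_all = "kebab-case")]
    AgentTurnComplete { thread_id: String, turn_id: String },
}

fn main() {
    let n = UserNotification::AgentTurnComplete {
        thread_id: "t-1".into(),
        turn_id: "12345".into(),
    };
    // Prints: {"type":"agent-turn-complete","thread-id":"t-1","turn-id":"12345"}
    println!("{}", serde_json::to_string(&n).unwrap());
}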
@@ -10,8 +10,10 @@ path = "lib.rs"
anyhow = { workspace = true }
assert_cmd = { workspace = true }
codex-core = { workspace = true }
+notify = { workspace = true }
regex-lite = { workspace = true }
serde_json = { workspace = true }
tempfile = { workspace = true }
tokio = { workspace = true, features = ["time"] }
+walkdir = { workspace = true }
wiremock = { workspace = true }
@@ -164,6 +164,149 @@ pub fn sandbox_network_env_var() -> &'static str {
    codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
}

pub mod fs_wait {
    use anyhow::Result;
    use anyhow::anyhow;
    use notify::RecursiveMode;
    use notify::Watcher;
    use std::path::Path;
    use std::path::PathBuf;
    use std::sync::mpsc;
    use std::sync::mpsc::RecvTimeoutError;
    use std::time::Duration;
    use std::time::Instant;
    use tokio::task;
    use walkdir::WalkDir;

    pub async fn wait_for_path_exists(
        path: impl Into<PathBuf>,
        timeout: Duration,
    ) -> Result<PathBuf> {
        let path = path.into();
        task::spawn_blocking(move || wait_for_path_exists_blocking(path, timeout)).await?
    }

    pub async fn wait_for_matching_file(
        root: impl Into<PathBuf>,
        timeout: Duration,
        predicate: impl FnMut(&Path) -> bool + Send + 'static,
    ) -> Result<PathBuf> {
        let root = root.into();
        task::spawn_blocking(move || {
            let mut predicate = predicate;
            blocking_find_matching_file(root, timeout, &mut predicate)
        })
        .await?
    }

    fn wait_for_path_exists_blocking(path: PathBuf, timeout: Duration) -> Result<PathBuf> {
        if path.exists() {
            return Ok(path);
        }

        let watch_root = nearest_existing_ancestor(&path);
        let (tx, rx) = mpsc::channel();
        let mut watcher = notify::recommended_watcher(move |res| {
            let _ = tx.send(res);
        })?;
        watcher.watch(&watch_root, RecursiveMode::Recursive)?;

        let deadline = Instant::now() + timeout;
        loop {
            if path.exists() {
                return Ok(path.clone());
            }
            let now = Instant::now();
            if now >= deadline {
                break;
            }
            let remaining = deadline.saturating_duration_since(now);
            match rx.recv_timeout(remaining) {
                Ok(Ok(_event)) => {
                    if path.exists() {
                        return Ok(path.clone());
                    }
                }
                Ok(Err(err)) => return Err(err.into()),
                Err(RecvTimeoutError::Timeout) => break,
                Err(RecvTimeoutError::Disconnected) => break,
            }
        }

        if path.exists() {
            Ok(path)
        } else {
            Err(anyhow!("timed out waiting for {:?}", path))
        }
    }

    fn blocking_find_matching_file(
        root: PathBuf,
        timeout: Duration,
        predicate: &mut impl FnMut(&Path) -> bool,
    ) -> Result<PathBuf> {
        let root = wait_for_path_exists_blocking(root, timeout)?;

        if let Some(found) = scan_for_match(&root, predicate) {
            return Ok(found);
        }

        let (tx, rx) = mpsc::channel();
        let mut watcher = notify::recommended_watcher(move |res| {
            let _ = tx.send(res);
        })?;
        watcher.watch(&root, RecursiveMode::Recursive)?;

        let deadline = Instant::now() + timeout;

        while Instant::now() < deadline {
            let remaining = deadline.saturating_duration_since(Instant::now());
            match rx.recv_timeout(remaining) {
                Ok(Ok(_event)) => {
                    if let Some(found) = scan_for_match(&root, predicate) {
                        return Ok(found);
                    }
                }
                Ok(Err(err)) => return Err(err.into()),
                Err(RecvTimeoutError::Timeout) => break,
                Err(RecvTimeoutError::Disconnected) => break,
            }
        }

        if let Some(found) = scan_for_match(&root, predicate) {
            Ok(found)
        } else {
            Err(anyhow!("timed out waiting for matching file in {:?}", root))
        }
    }

    fn scan_for_match(root: &Path, predicate: &mut impl FnMut(&Path) -> bool) -> Option<PathBuf> {
        for entry in WalkDir::new(root).into_iter().filter_map(Result::ok) {
            let path = entry.path();
            if !entry.file_type().is_file() {
                continue;
            }
            if predicate(path) {
                return Some(path.to_path_buf());
            }
        }
        None
    }

    fn nearest_existing_ancestor(path: &Path) -> PathBuf {
        let mut current = path;
        loop {
            if current.exists() {
                return current.to_path_buf();
            }
            match current.parent() {
                Some(parent) => current = parent,
                None => return PathBuf::from("."),
            }
        }
    }
}
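A usage sketch for these helpers as they might appear in a caller (the paths and marker string are illustrative; the real call sites are in the rewritten integration test later in this diff):

use std::time::Duration;

use core_test_support::fs_wait;

#[tokio::test]
async fn finds_session_log() -> anyhow::Result<()> {
    let home = tempfile::TempDir::new()?;
    let sessions_dir = home.path().join("sessions");

    // ... run the code under test that eventually creates the directory ...

    // Block (off the async runtime, via spawn_blocking) until the dir exists.
    fs_wait::wait_for_path_exists(&sessions_dir, Duration::from_secs(5)).await?;

    // Then wait for any .jsonl file whose contents mention a marker.
    let marker = "integration-test-marker";
    let path = fs_wait::wait_for_matching_file(&sessions_dir, Duration::from_secs(10), move |p| {
        p.extension().and_then(|e| e.to_str()) == Some("jsonl")
            && std::fs::read_to_string(p)
                .map(|c| c.contains(marker))
                .unwrap_or(false)
    })
    .await?;
    println!("found {}", path.display());
    Ok(())
}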

#[macro_export]
macro_rules! skip_if_sandbox {
    () => {{
@@ -1,11 +1,105 @@
use std::sync::Arc;
use std::sync::Mutex;

use serde_json::Value;
use wiremock::BodyPrintLimit;
use wiremock::Match;
use wiremock::Mock;
use wiremock::MockBuilder;
use wiremock::MockServer;
use wiremock::Respond;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
use wiremock::matchers::path_regex;

#[derive(Debug, Clone)]
pub struct ResponseMock {
    requests: Arc<Mutex<Vec<ResponsesRequest>>>,
}

impl ResponseMock {
    fn new() -> Self {
        Self {
            requests: Arc::new(Mutex::new(Vec::new())),
        }
    }

    pub fn single_request(&self) -> ResponsesRequest {
        let requests = self.requests.lock().unwrap();
        if requests.len() != 1 {
            panic!("expected 1 request, got {}", requests.len());
        }
        requests.first().unwrap().clone()
    }

    pub fn requests(&self) -> Vec<ResponsesRequest> {
        self.requests.lock().unwrap().clone()
    }
}

#[derive(Debug, Clone)]
pub struct ResponsesRequest(wiremock::Request);

impl ResponsesRequest {
    pub fn body_json(&self) -> Value {
        self.0.body_json().unwrap()
    }

    pub fn input(&self) -> Vec<Value> {
        self.0.body_json::<Value>().unwrap()["input"]
            .as_array()
            .expect("input array not found in request")
            .clone()
    }

    pub fn function_call_output(&self, call_id: &str) -> Value {
        self.call_output(call_id, "function_call_output")
    }

    pub fn custom_tool_call_output(&self, call_id: &str) -> Value {
        self.call_output(call_id, "custom_tool_call_output")
    }

    pub fn call_output(&self, call_id: &str, call_type: &str) -> Value {
        self.input()
            .iter()
            .find(|item| {
                item.get("type").unwrap() == call_type && item.get("call_id").unwrap() == call_id
            })
            .cloned()
            .unwrap_or_else(|| panic!("function call output {call_id} item not found in request"))
    }

    pub fn header(&self, name: &str) -> Option<String> {
        self.0
            .headers
            .get(name)
            .and_then(|v| v.to_str().ok())
            .map(str::to_string)
    }

    pub fn path(&self) -> String {
        self.0.url.path().to_string()
    }

    pub fn query_param(&self, name: &str) -> Option<String> {
        self.0
            .url
            .query_pairs()
            .find(|(k, _)| k == name)
            .map(|(_, v)| v.to_string())
    }
}

impl Match for ResponseMock {
    fn matches(&self, request: &wiremock::Request) -> bool {
        self.requests
            .lock()
            .unwrap()
            .push(ResponsesRequest(request.clone()));
        true
    }
}
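Taken together: `ResponseMock` is a wiremock `Match` implementation that always matches while recording a clone of every request, and `ResponsesRequest` wraps one recorded request with JSON accessors. A hedged sketch of how a test might interrogate a recorded request (the call id is illustrative):

fn inspect(mock: &ResponseMock) {
    // Panics unless exactly one request was recorded.
    let request = mock.single_request();

    // Full JSON body and the `input` items the client sent.
    let body = request.body_json();
    assert!(body.get("input").is_some());

    // The recorded output item for a given tool call id.
    let output = request.function_call_output("call-1");
    assert_eq!(output["call_id"], "call-1");
}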
/// Build an SSE stream body from a list of JSON events.
pub fn sse(events: Vec<Value>) -> String {

@@ -161,34 +255,40 @@ pub fn sse_response(body: String) -> ResponseTemplate {
        .set_body_raw(body, "text/event-stream")
}

-pub async fn mount_sse_once_match<M>(server: &MockServer, matcher: M, body: String)
+fn base_mock() -> (MockBuilder, ResponseMock) {
+   let response_mock = ResponseMock::new();
+   let mock = Mock::given(method("POST"))
+       .and(path_regex(".*/responses$"))
+       .and(response_mock.clone());
+   (mock, response_mock)
+}
+
+pub async fn mount_sse_once_match<M>(server: &MockServer, matcher: M, body: String) -> ResponseMock
where
    M: wiremock::Match + Send + Sync + 'static,
{
-   Mock::given(method("POST"))
-       .and(path("/v1/responses"))
-       .and(matcher)
+   let (mock, response_mock) = base_mock();
+   mock.and(matcher)
        .respond_with(sse_response(body))
        .up_to_n_times(1)
        .mount(server)
        .await;
+   response_mock
}

-pub async fn mount_sse_once(server: &MockServer, body: String) {
-   Mock::given(method("POST"))
-       .and(path("/v1/responses"))
-       .respond_with(sse_response(body))
-       .expect(1)
+pub async fn mount_sse_once(server: &MockServer, body: String) -> ResponseMock {
+   let (mock, response_mock) = base_mock();
+   mock.respond_with(sse_response(body))
        .up_to_n_times(1)
        .mount(server)
        .await;
+   response_mock
}

-pub async fn mount_sse(server: &MockServer, body: String) {
-   Mock::given(method("POST"))
-       .and(path("/v1/responses"))
-       .respond_with(sse_response(body))
-       .mount(server)
-       .await;
+pub async fn mount_sse(server: &MockServer, body: String) -> ResponseMock {
+   let (mock, response_mock) = base_mock();
+   mock.respond_with(sse_response(body)).mount(server).await;
+   response_mock
}

pub async fn start_mock_server() -> MockServer {

@@ -201,7 +301,7 @@ pub async fn start_mock_server() -> MockServer {
/// Mounts a sequence of SSE response bodies and serves them in order for each
/// POST to `/v1/responses`. Panics if more requests are received than bodies
/// provided. Also asserts the exact number of expected calls.
-pub async fn mount_sse_sequence(server: &MockServer, bodies: Vec<String>) {
+pub async fn mount_sse_sequence(server: &MockServer, bodies: Vec<String>) -> ResponseMock {
    use std::sync::atomic::AtomicUsize;
    use std::sync::atomic::Ordering;

@@ -228,10 +328,11 @@ pub async fn mount_sse_sequence(server: &MockServer, bodies: Vec<String>) {
        responses: bodies,
    };

-   Mock::given(method("POST"))
-       .and(path("/v1/responses"))
-       .respond_with(responder)
+   let (mock, response_mock) = base_mock();
+   mock.respond_with(responder)
        .expect(num_calls as u64)
        .mount(server)
        .await;

+   response_mock
}
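The net effect of this rewrite: every mount_* helper funnels through base_mock and hands the ResponseMock back to the caller, so tests assert on captured traffic instead of digging through server.received_requests(). A sketch of the intended round trip (the SSE event content is illustrative):

async fn round_trip() {
    let server = start_mock_server().await;
    let body = sse(vec![serde_json::json!({
        "type": "response.completed",
        "response": {"id": "r1"}
    })]);
    let mock = mount_sse_once(&server, body).await;

    // ... drive the client under test against server.uri() so that it
    // POSTs exactly once to .../responses ...

    let request = mock.single_request();
    assert_eq!(request.path(), "/v1/responses");
}

Note the switch from .expect(1) to .up_to_n_times(1) in mount_sse_once: the mock still answers at most once, but a test that never issues the request fails on its own assertions rather than on wiremock's drop-time verification.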
@@ -1,4 +1,5 @@
use std::mem::swap;
+use std::path::PathBuf;
use std::sync::Arc;

use codex_core::CodexAuth;

@@ -39,6 +40,12 @@ impl TestCodexBuilder {
        let mut config = load_default_config_for_test(&home);
        config.cwd = cwd.path().to_path_buf();
        config.model_provider = model_provider;
+       config.codex_linux_sandbox_exe = Some(PathBuf::from(
+           assert_cmd::Command::cargo_bin("codex")?
+               .get_program()
+               .to_os_string(),
+       ));
+
        let mut mutators = vec![];
        swap(&mut self.config_mutators, &mut mutators);
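One small idiom worth calling out in this hunk: swapping the builder's mutator list into a local Vec takes ownership of the closures without cloning them, leaving an empty Vec behind on self; std::mem::take expresses the same thing. A standalone sketch (types are illustrative, not the builder's real ones):

use std::mem::swap;

fn drain_mutators(mutators: &mut Vec<Box<dyn FnOnce(&mut u32)>>) -> Vec<Box<dyn FnOnce(&mut u32)>> {
    let mut taken = vec![];
    swap(mutators, &mut taken);
    taken // the caller's Vec is now empty
}

fn main() {
    let mut ms: Vec<Box<dyn FnOnce(&mut u32)>> = vec![Box::new(|v| *v += 1)];
    let mut value = 0;
    for m in drain_mutators(&mut ms) {
        m(&mut value);
    }
    assert_eq!(value, 1);
    assert!(ms.is_empty());
}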
102 codex-rs/core/tests/responses_headers.rs Normal file
@@ -0,0 +1,102 @@
use std::sync::Arc;

use codex_app_server_protocol::AuthMode;
use codex_core::ContentItem;
use codex_core::ModelClient;
use codex_core::ModelProviderInfo;
use codex_core::Prompt;
use codex_core::ResponseEvent;
use codex_core::ResponseItem;
use codex_core::WireApi;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_protocol::ConversationId;
use core_test_support::load_default_config_for_test;
use core_test_support::responses;
use futures::StreamExt;
use tempfile::TempDir;
use wiremock::matchers::header;

#[tokio::test]
async fn responses_stream_includes_task_type_header() {
    core_test_support::skip_if_no_network!();

    let server = responses::start_mock_server().await;
    let response_body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_completed("resp-1"),
    ]);

    let request_recorder = responses::mount_sse_once_match(
        &server,
        header("Codex-Task-Type", "standard"),
        response_body,
    )
    .await;

    let provider = ModelProviderInfo {
        name: "mock".into(),
        base_url: Some(format!("{}/v1", server.uri())),
        env_key: None,
        env_key_instructions: None,
        wire_api: WireApi::Responses,
        query_params: None,
        http_headers: None,
        env_http_headers: None,
        request_max_retries: Some(0),
        stream_max_retries: Some(0),
        stream_idle_timeout_ms: Some(5_000),
        requires_openai_auth: false,
    };

    let codex_home = TempDir::new().expect("failed to create TempDir");
    let mut config = load_default_config_for_test(&codex_home);
    config.model_provider_id = provider.name.clone();
    config.model_provider = provider.clone();
    let effort = config.model_reasoning_effort;
    let summary = config.model_reasoning_summary;
    let config = Arc::new(config);

    let conversation_id = ConversationId::new();

    let otel_event_manager = OtelEventManager::new(
        conversation_id,
        config.model.as_str(),
        config.model_family.slug.as_str(),
        None,
        Some(AuthMode::ChatGPT),
        false,
        "test".to_string(),
    );

    let client = ModelClient::new(
        Arc::clone(&config),
        None,
        otel_event_manager,
        provider,
        effort,
        summary,
        conversation_id,
    );

    let mut prompt = Prompt::default();
    prompt.input = vec![ResponseItem::Message {
        id: None,
        role: "user".into(),
        content: vec![ContentItem::InputText {
            text: "hello".into(),
        }],
    }];

    let mut stream = client.stream(&prompt).await.expect("stream failed");
    while let Some(event) = stream.next().await {
        if matches!(event, Ok(ResponseEvent::Completed { .. })) {
            break;
        }
    }

    let request = request_recorder.single_request();
    assert_eq!(
        request.header("Codex-Task-Type").as_deref(),
        Some("standard")
    );
}
@@ -1,12 +1,11 @@
use assert_cmd::Command as AssertCommand;
use codex_core::RolloutRecorder;
use codex_core::protocol::GitInfo;
+use core_test_support::fs_wait;
use core_test_support::skip_if_no_network;
use std::time::Duration;
-use std::time::Instant;
use tempfile::TempDir;
use uuid::Uuid;
-use walkdir::WalkDir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;

@@ -106,16 +105,12 @@ async fn exec_cli_applies_experimental_instructions_file() {
        "data: {\"type\":\"response.created\",\"response\":{}}\n\n",
        "data: {\"type\":\"response.completed\",\"response\":{\"id\":\"r1\"}}\n\n"
    );
-   Mock::given(method("POST"))
-       .and(path("/v1/responses"))
-       .respond_with(
-           ResponseTemplate::new(200)
-               .insert_header("content-type", "text/event-stream")
-               .set_body_raw(sse, "text/event-stream"),
-       )
-       .expect(1)
-       .mount(&server)
-       .await;
+   let resp_mock = core_test_support::responses::mount_sse_once_match(
+       &server,
+       path("/v1/responses"),
+       sse.to_string(),
+   )
+   .await;

    // Create a temporary instructions file with a unique marker we can assert
    // appears in the outbound request payload.

@@ -164,8 +159,8 @@ async fn exec_cli_applies_experimental_instructions_file() {

    // Inspect the captured request and verify our custom base instructions were
    // included in the `instructions` field.
-   let request = &server.received_requests().await.unwrap()[0];
-   let body = request.body_json::<serde_json::Value>().unwrap();
+   let request = resp_mock.single_request();
+   let body = request.body_json();
    let instructions = body
        .get("instructions")
        .and_then(|v| v.as_str())

@@ -215,12 +210,12 @@ async fn responses_api_stream_cli() {

/// End-to-end: create a session (writes rollout), verify the file, then resume and confirm append.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn integration_creates_and_checks_session_file() {
+async fn integration_creates_and_checks_session_file() -> anyhow::Result<()> {
    // Honor sandbox network restrictions for CI parity with the other tests.
-   skip_if_no_network!();
+   skip_if_no_network!(Ok(()));

    // 1. Temp home so we read/write isolated session files.
-   let home = TempDir::new().unwrap();
+   let home = TempDir::new()?;

    // 2. Unique marker we'll look for in the session log.
    let marker = format!("integration-test-{}", Uuid::new_v4());

@@ -258,63 +253,20 @@ async fn integration_creates_and_checks_session_file() {

    // Wait for sessions dir to appear.
    let sessions_dir = home.path().join("sessions");
-   let dir_deadline = Instant::now() + Duration::from_secs(5);
-   while !sessions_dir.exists() && Instant::now() < dir_deadline {
-       std::thread::sleep(Duration::from_millis(50));
-   }
-   assert!(sessions_dir.exists(), "sessions directory never appeared");
+   fs_wait::wait_for_path_exists(&sessions_dir, Duration::from_secs(5)).await?;

    // Find the session file that contains `marker`.
-   let deadline = Instant::now() + Duration::from_secs(10);
-   let mut matching_path: Option<std::path::PathBuf> = None;
-   while Instant::now() < deadline && matching_path.is_none() {
-       for entry in WalkDir::new(&sessions_dir) {
-           let entry = match entry {
-               Ok(e) => e,
-               Err(_) => continue,
-           };
-           if !entry.file_type().is_file() {
-               continue;
-           }
-           if !entry.file_name().to_string_lossy().ends_with(".jsonl") {
-               continue;
-           }
-           let path = entry.path();
-           let Ok(content) = std::fs::read_to_string(path) else {
-               continue;
-           };
-           let mut lines = content.lines();
-           if lines.next().is_none() {
-               continue;
-           }
-           for line in lines {
-               if line.trim().is_empty() {
-                   continue;
-               }
-               let item: serde_json::Value = match serde_json::from_str(line) {
-                   Ok(v) => v,
-                   Err(_) => continue,
-               };
-               if item.get("type").and_then(|t| t.as_str()) == Some("response_item")
-                   && let Some(payload) = item.get("payload")
-                   && payload.get("type").and_then(|t| t.as_str()) == Some("message")
-                   && let Some(c) = payload.get("content")
-                   && c.to_string().contains(&marker)
-               {
-                   matching_path = Some(path.to_path_buf());
-                   break;
-               }
-           }
+   let marker_clone = marker.clone();
+   let path = fs_wait::wait_for_matching_file(&sessions_dir, Duration::from_secs(10), move |p| {
+       if p.extension().and_then(|ext| ext.to_str()) != Some("jsonl") {
+           return false;
+       }
-       if matching_path.is_none() {
-           std::thread::sleep(Duration::from_millis(50));
-       }
-   }
-
-   let path = match matching_path {
-       Some(p) => p,
-       None => panic!("No session file containing the marker was found"),
-   };
+       let Ok(content) = std::fs::read_to_string(p) else {
+           return false;
+       };
+       content.contains(&marker_clone)
+   })
+   .await?;

    // Basic sanity checks on location and metadata.
    let rel = match path.strip_prefix(&sessions_dir) {

@@ -422,42 +374,25 @@ async fn integration_creates_and_checks_session_file() {
    assert!(output2.status.success(), "resume codex-cli run failed");

    // Find the new session file containing the resumed marker.
-   let deadline = Instant::now() + Duration::from_secs(10);
-   let mut resumed_path: Option<std::path::PathBuf> = None;
-   while Instant::now() < deadline && resumed_path.is_none() {
-       for entry in WalkDir::new(&sessions_dir) {
-           let entry = match entry {
-               Ok(e) => e,
-               Err(_) => continue,
-           };
-           if !entry.file_type().is_file() {
-               continue;
+   let marker2_clone = marker2.clone();
+   let resumed_path =
+       fs_wait::wait_for_matching_file(&sessions_dir, Duration::from_secs(10), move |p| {
+           if p.extension().and_then(|ext| ext.to_str()) != Some("jsonl") {
+               return false;
            }
-           if !entry.file_name().to_string_lossy().ends_with(".jsonl") {
-               continue;
-           }
-           let p = entry.path();
-           let Ok(c) = std::fs::read_to_string(p) else {
-               continue;
-           };
-           if c.contains(&marker2) {
-               resumed_path = Some(p.to_path_buf());
-               break;
-           }
-       }
-       if resumed_path.is_none() {
-           std::thread::sleep(Duration::from_millis(50));
-       }
-   }
+           std::fs::read_to_string(p)
+               .map(|content| content.contains(&marker2_clone))
+               .unwrap_or(false)
+       })
+       .await?;

-   let resumed_path = resumed_path.expect("No resumed session file found containing the marker2");
+   // Resume should write to the existing log file.
    assert_eq!(
        resumed_path, path,
        "resume should create a new session file"
    );

-   let resumed_content = std::fs::read_to_string(&resumed_path).unwrap();
+   let resumed_content = std::fs::read_to_string(&resumed_path)?;
    assert!(
        resumed_content.contains(&marker),
        "resumed file missing original marker"

@@ -466,6 +401,7 @@ async fn integration_creates_and_checks_session_file() {
        resumed_content.contains(&marker2),
        "resumed file missing resumed marker"
    );
+   Ok(())
}

/// Integration test to verify git info is collected and recorded in session files.
@@ -223,15 +223,9 @@ async fn resume_includes_initial_messages_and_sends_prior_items() {

    // Mock server that will receive the resumed request
    let server = MockServer::start().await;
-   let first = ResponseTemplate::new(200)
-       .insert_header("content-type", "text/event-stream")
-       .set_body_raw(sse_completed("resp1"), "text/event-stream");
-   Mock::given(method("POST"))
-       .and(path("/v1/responses"))
-       .respond_with(first)
-       .expect(1)
-       .mount(&server)
-       .await;
+   let resp_mock =
+       responses::mount_sse_once_match(&server, path("/v1/responses"), sse_completed("resp1"))
+           .await;

    // Configure Codex to resume from our file
    let model_provider = ModelProviderInfo {

@@ -277,8 +271,8 @@ async fn resume_includes_initial_messages_and_sends_prior_items() {
        .unwrap();
    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

-   let request = &server.received_requests().await.unwrap()[0];
-   let request_body = request.body_json::<serde_json::Value>().unwrap();
+   let request = resp_mock.single_request();
+   let request_body = request.body_json();
    let expected_input = json!([
        {
            "type": "message",

@@ -372,18 +366,9 @@ async fn includes_base_instructions_override_in_request() {
    skip_if_no_network!();
    // Mock server
    let server = MockServer::start().await;

-   // First request – must NOT include `previous_response_id`.
-   let first = ResponseTemplate::new(200)
-       .insert_header("content-type", "text/event-stream")
-       .set_body_raw(sse_completed("resp1"), "text/event-stream");
-
-   Mock::given(method("POST"))
-       .and(path("/v1/responses"))
-       .respond_with(first)
-       .expect(1)
-       .mount(&server)
-       .await;
+   let resp_mock =
+       responses::mount_sse_once_match(&server, path("/v1/responses"), sse_completed("resp1"))
+           .await;

    let model_provider = ModelProviderInfo {
        base_url: Some(format!("{}/v1", server.uri())),

@@ -414,8 +399,8 @@ async fn includes_base_instructions_override_in_request() {

    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

-   let request = &server.received_requests().await.unwrap()[0];
-   let request_body = request.body_json::<serde_json::Value>().unwrap();
+   let request = resp_mock.single_request();
+   let request_body = request.body_json();

    assert!(
        request_body["instructions"]

@@ -570,16 +555,9 @@ async fn includes_user_instructions_message_in_request() {
    skip_if_no_network!();
    let server = MockServer::start().await;

-   let first = ResponseTemplate::new(200)
-       .insert_header("content-type", "text/event-stream")
-       .set_body_raw(sse_completed("resp1"), "text/event-stream");
-
-   Mock::given(method("POST"))
-       .and(path("/v1/responses"))
-       .respond_with(first)
-       .expect(1)
-       .mount(&server)
-       .await;
+   let resp_mock =
+       responses::mount_sse_once_match(&server, path("/v1/responses"), sse_completed("resp1"))
+           .await;

    let model_provider = ModelProviderInfo {
        base_url: Some(format!("{}/v1", server.uri())),

@@ -610,8 +588,8 @@ async fn includes_user_instructions_message_in_request() {

    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

-   let request = &server.received_requests().await.unwrap()[0];
-   let request_body = request.body_json::<serde_json::Value>().unwrap();
+   let request = resp_mock.single_request();
+   let request_body = request.body_json();

    assert!(
        !request_body["instructions"]
@@ -22,6 +22,7 @@ use core_test_support::responses::ev_function_call;
use core_test_support::responses::mount_sse_once_match;
use core_test_support::responses::mount_sse_sequence;
use core_test_support::responses::sse;
use core_test_support::responses::sse_failed;
use core_test_support::responses::start_mock_server;
use pretty_assertions::assert_eq;
// --- Test helpers -----------------------------------------------------------

@@ -38,6 +39,8 @@ const SECOND_LARGE_REPLY: &str = "SECOND_LARGE_REPLY";
const FIRST_AUTO_SUMMARY: &str = "FIRST_AUTO_SUMMARY";
const SECOND_AUTO_SUMMARY: &str = "SECOND_AUTO_SUMMARY";
const FINAL_REPLY: &str = "FINAL_REPLY";
+const CONTEXT_LIMIT_MESSAGE: &str =
+   "Your input exceeds the context window of this model. Please adjust your input and try again.";
const DUMMY_FUNCTION_NAME: &str = "unsupported_tool";
const DUMMY_CALL_ID: &str = "call-multi-auto";

@@ -622,6 +625,130 @@ async fn auto_compact_stops_after_failed_attempt() {
    );
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn manual_compact_retries_after_context_window_error() {
    skip_if_no_network!();

    let server = start_mock_server().await;

    let user_turn = sse(vec![
        ev_assistant_message("m1", FIRST_REPLY),
        ev_completed("r1"),
    ]);
    let compact_failed = sse_failed(
        "resp-fail",
        "context_length_exceeded",
        CONTEXT_LIMIT_MESSAGE,
    );
    let compact_succeeds = sse(vec![
        ev_assistant_message("m2", SUMMARY_TEXT),
        ev_completed("r2"),
    ]);

    let request_log = mount_sse_sequence(
        &server,
        vec![
            user_turn.clone(),
            compact_failed.clone(),
            compact_succeeds.clone(),
        ],
    )
    .await;

    let model_provider = ModelProviderInfo {
        base_url: Some(format!("{}/v1", server.uri())),
        ..built_in_model_providers()["openai"].clone()
    };

    let home = TempDir::new().unwrap();
    let mut config = load_default_config_for_test(&home);
    config.model_provider = model_provider;
    config.model_auto_compact_token_limit = Some(200_000);
    let codex = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"))
        .new_conversation(config)
        .await
        .unwrap()
        .conversation;

    codex
        .submit(Op::UserInput {
            items: vec![InputItem::Text {
                text: "first turn".into(),
            }],
        })
        .await
        .unwrap();
    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    codex.submit(Op::Compact).await.unwrap();

    let EventMsg::BackgroundEvent(event) =
        wait_for_event(&codex, |ev| matches!(ev, EventMsg::BackgroundEvent(_))).await
    else {
        panic!("expected background event after compact retry");
    };
    assert!(
        event.message.contains("Trimmed 1 older conversation item"),
        "background event should mention trimmed item count: {}",
        event.message
    );
    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    let requests = request_log.requests();
    assert_eq!(
        requests.len(),
        3,
        "expected user turn and two compact attempts"
    );

    let compact_attempt = requests[1].body_json();
    let retry_attempt = requests[2].body_json();

    let compact_input = compact_attempt["input"]
        .as_array()
        .unwrap_or_else(|| panic!("compact attempt missing input array: {compact_attempt}"));
    let retry_input = retry_attempt["input"]
        .as_array()
        .unwrap_or_else(|| panic!("retry attempt missing input array: {retry_attempt}"));
    assert_eq!(
        compact_input
            .last()
            .and_then(|item| item.get("content"))
            .and_then(|v| v.as_array())
            .and_then(|items| items.first())
            .and_then(|entry| entry.get("text"))
            .and_then(|text| text.as_str()),
        Some(SUMMARIZATION_PROMPT),
        "compact attempt should include summarization prompt"
    );
    assert_eq!(
        retry_input
            .last()
            .and_then(|item| item.get("content"))
            .and_then(|v| v.as_array())
            .and_then(|items| items.first())
            .and_then(|entry| entry.get("text"))
            .and_then(|text| text.as_str()),
        Some(SUMMARIZATION_PROMPT),
        "retry attempt should include summarization prompt"
    );
    assert_eq!(
        retry_input.len(),
        compact_input.len().saturating_sub(1),
        "retry should drop exactly one history item (before {} vs after {})",
        compact_input.len(),
        retry_input.len()
    );
    if let (Some(first_before), Some(first_after)) = (compact_input.first(), retry_input.first()) {
        assert_ne!(
            first_before, first_after,
            "retry should drop the oldest conversation item"
        );
    } else {
        panic!("expected non-empty compact inputs");
    }
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn auto_compact_allows_multiple_attempts_when_interleaved_with_other_turn_events() {
    skip_if_no_network!();
237 codex-rs/core/tests/suite/grep_files.rs Normal file
@@ -0,0 +1,237 @@
#![cfg(not(target_os = "windows"))]

use anyhow::Result;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::ReasoningSummary;
use core_test_support::responses;
use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::ev_response_created;
use core_test_support::responses::sse;
use core_test_support::responses::start_mock_server;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use serde_json::Value;
use std::collections::HashSet;
use std::path::Path;
use std::process::Command as StdCommand;
use wiremock::matchers::any;

const MODEL_WITH_TOOL: &str = "test-gpt-5-codex";

fn ripgrep_available() -> bool {
    StdCommand::new("rg")
        .arg("--version")
        .output()
        .map(|output| output.status.success())
        .unwrap_or(false)
}

macro_rules! skip_if_ripgrep_missing {
    ($ret:expr $(,)?) => {{
        if !ripgrep_available() {
            eprintln!("rg not available in PATH; skipping test");
            return $ret;
        }
    }};
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn grep_files_tool_collects_matches() -> Result<()> {
    skip_if_no_network!(Ok(()));
    skip_if_ripgrep_missing!(Ok(()));

    let server = start_mock_server().await;
    let test = build_test_codex(&server).await?;

    let search_dir = test.cwd.path().join("src");
    std::fs::create_dir_all(&search_dir)?;
    let alpha = search_dir.join("alpha.rs");
    let beta = search_dir.join("beta.rs");
    let gamma = search_dir.join("gamma.txt");
    std::fs::write(&alpha, "alpha needle\n")?;
    std::fs::write(&beta, "beta needle\n")?;
    std::fs::write(&gamma, "needle in text but excluded\n")?;

    let call_id = "grep-files-collect";
    let arguments = serde_json::json!({
        "pattern": "needle",
        "path": search_dir.to_string_lossy(),
        "include": "*.rs",
    })
    .to_string();

    mount_tool_sequence(&server, call_id, &arguments, "grep_files").await;
    submit_turn(&test, "please find uses of needle").await?;

    let bodies = recorded_bodies(&server).await?;
    let tool_output = find_tool_output(&bodies, call_id).expect("tool output present");
    let payload = tool_output.get("output").expect("output field present");
    let (content_opt, success_opt) = extract_content_and_success(payload);
    let content = content_opt.expect("content present");
    let success = success_opt.unwrap_or(true);
    assert!(success, "expected success for matches, got {payload:?}");

    let entries = collect_file_names(content);
    assert_eq!(entries.len(), 2, "content: {content}");
    assert!(
        entries.contains("alpha.rs"),
        "missing alpha.rs in {entries:?}"
    );
    assert!(
        entries.contains("beta.rs"),
        "missing beta.rs in {entries:?}"
    );
    assert!(
        !entries.contains("gamma.txt"),
        "txt file should be filtered out: {entries:?}"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn grep_files_tool_reports_empty_results() -> Result<()> {
    skip_if_no_network!(Ok(()));
    skip_if_ripgrep_missing!(Ok(()));

    let server = start_mock_server().await;
    let test = build_test_codex(&server).await?;

    let search_dir = test.cwd.path().join("logs");
    std::fs::create_dir_all(&search_dir)?;
    std::fs::write(search_dir.join("output.txt"), "no hits here")?;

    let call_id = "grep-files-empty";
    let arguments = serde_json::json!({
        "pattern": "needle",
        "path": search_dir.to_string_lossy(),
        "limit": 5,
    })
    .to_string();

    mount_tool_sequence(&server, call_id, &arguments, "grep_files").await;
    submit_turn(&test, "search again").await?;

    let bodies = recorded_bodies(&server).await?;
    let tool_output = find_tool_output(&bodies, call_id).expect("tool output present");
    let payload = tool_output.get("output").expect("output field present");
    let (content_opt, success_opt) = extract_content_and_success(payload);
    let content = content_opt.expect("content present");
    if let Some(success) = success_opt {
        assert!(!success, "expected success=false payload: {payload:?}");
    }
    assert_eq!(content, "No matches found.");

    Ok(())
}

#[allow(clippy::expect_used)]
async fn build_test_codex(server: &wiremock::MockServer) -> Result<TestCodex> {
    let mut builder = test_codex().with_config(|config| {
        config.model = MODEL_WITH_TOOL.to_string();
        config.model_family =
            find_family_for_model(MODEL_WITH_TOOL).expect("model family for test model");
    });
    builder.build(server).await
}

async fn submit_turn(test: &TestCodex, prompt: &str) -> Result<()> {
    let session_model = test.session_configured.model.clone();

    test.codex
        .submit(Op::UserTurn {
            items: vec![InputItem::Text {
                text: prompt.into(),
            }],
            final_output_json_schema: None,
            cwd: test.cwd.path().to_path_buf(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: session_model,
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;

    wait_for_event(&test.codex, |event| {
        matches!(event, EventMsg::TaskComplete(_))
    })
    .await;
    Ok(())
}

async fn mount_tool_sequence(
    server: &wiremock::MockServer,
    call_id: &str,
    arguments: &str,
    tool_name: &str,
) {
    let first_response = sse(vec![
        ev_response_created("resp-1"),
        ev_function_call(call_id, tool_name, arguments),
        ev_completed("resp-1"),
    ]);
    responses::mount_sse_once_match(server, any(), first_response).await;

    let second_response = sse(vec![
        ev_assistant_message("msg-1", "done"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(server, any(), second_response).await;
}

#[allow(clippy::expect_used)]
async fn recorded_bodies(server: &wiremock::MockServer) -> Result<Vec<Value>> {
    let requests = server.received_requests().await.expect("requests recorded");
    Ok(requests
        .iter()
        .map(|req| req.body_json::<Value>().expect("request json"))
        .collect())
}

fn find_tool_output<'a>(requests: &'a [Value], call_id: &str) -> Option<&'a Value> {
    requests.iter().find_map(|body| {
        body.get("input")
            .and_then(Value::as_array)
            .and_then(|items| {
                items.iter().find(|item| {
                    item.get("type").and_then(Value::as_str) == Some("function_call_output")
                        && item.get("call_id").and_then(Value::as_str) == Some(call_id)
                })
            })
    })
}

fn collect_file_names(content: &str) -> HashSet<String> {
    content
        .lines()
        .filter_map(|line| {
            if line.trim().is_empty() {
                return None;
            }
            Path::new(line)
                .file_name()
                .map(|name| name.to_string_lossy().into_owned())
        })
        .collect()
}

fn extract_content_and_success(value: &Value) -> (Option<&str>, Option<bool>) {
    match value {
        Value::String(text) => (Some(text.as_str()), None),
        Value::Object(obj) => (
            obj.get("content").and_then(Value::as_str),
            obj.get("success").and_then(Value::as_bool),
        ),
        _ => (None, None),
    }
}
460 codex-rs/core/tests/suite/list_dir.rs Normal file
@@ -0,0 +1,460 @@
#![cfg(not(target_os = "windows"))]

use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::ReasoningSummary;
use core_test_support::responses;
use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::ev_response_created;
use core_test_support::responses::sse;
use core_test_support::responses::start_mock_server;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use pretty_assertions::assert_eq;
use serde_json::Value;
use wiremock::matchers::any;

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_returns_entries() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;

    let TestCodex {
        codex,
        cwd,
        session_configured,
        ..
    } = test_codex().build(&server).await?;

    let dir_path = cwd.path().join("sample_dir");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "first file")?;
    std::fs::create_dir(dir_path.join("nested"))?;
    let dir_path = dir_path.to_string_lossy().to_string();

    let call_id = "list-dir-call";
    let arguments = serde_json::json!({
        "dir_path": dir_path,
        "offset": 1,
        "limit": 2,
    })
    .to_string();

    let first_response = sse(vec![
        ev_response_created("resp-1"),
        ev_function_call(call_id, "list_dir", &arguments),
        ev_completed("resp-1"),
    ]);
    responses::mount_sse_once_match(&server, any(), first_response).await;

    let second_response = sse(vec![
        ev_assistant_message("msg-1", "done"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(&server, any(), second_response).await;

    let session_model = session_configured.model.clone();

    codex
        .submit(Op::UserTurn {
            items: vec![InputItem::Text {
                text: "list directory contents".into(),
            }],
            final_output_json_schema: None,
            cwd: cwd.path().to_path_buf(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: session_model,
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;

    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    let requests = server.received_requests().await.expect("recorded requests");
    let request_bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().unwrap())
        .collect::<Vec<_>>();
    assert!(
        !request_bodies.is_empty(),
        "expected at least one request body"
    );

    let tool_output_item = request_bodies
        .iter()
        .find_map(|body| {
            body.get("input")
                .and_then(Value::as_array)
                .and_then(|items| {
                    items.iter().find(|item| {
                        item.get("type").and_then(Value::as_str) == Some("function_call_output")
                    })
                })
        })
        .unwrap_or_else(|| {
            panic!("function_call_output item not found in requests: {request_bodies:#?}")
        });

    assert_eq!(
        tool_output_item.get("call_id").and_then(Value::as_str),
        Some(call_id)
    );

    let output_text = tool_output_item
        .get("output")
        .and_then(|value| match value {
            Value::String(text) => Some(text.as_str()),
            Value::Object(obj) => obj.get("content").and_then(Value::as_str),
            _ => None,
        })
        .expect("output text present");
    assert_eq!(output_text, "E1: [file] alpha.txt\nE2: [dir] nested");

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_depth_one_omits_children() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;

    let TestCodex {
        codex,
        cwd,
        session_configured,
        ..
    } = test_codex().build(&server).await?;

    let dir_path = cwd.path().join("depth_one");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "alpha")?;
    std::fs::create_dir(dir_path.join("nested"))?;
    std::fs::write(dir_path.join("nested").join("beta.txt"), "beta")?;
    let dir_path = dir_path.to_string_lossy().to_string();

    let call_id = "list-dir-depth1";
    let arguments = serde_json::json!({
        "dir_path": dir_path,
        "offset": 1,
        "limit": 10,
        "depth": 1,
    })
    .to_string();

    let first_response = sse(vec![
        ev_response_created("resp-1"),
        ev_function_call(call_id, "list_dir", &arguments),
        ev_completed("resp-1"),
    ]);
    responses::mount_sse_once_match(&server, any(), first_response).await;

    let second_response = sse(vec![
        ev_assistant_message("msg-1", "done"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(&server, any(), second_response).await;

    let session_model = session_configured.model.clone();

    codex
        .submit(Op::UserTurn {
            items: vec![InputItem::Text {
                text: "list directory contents depth one".into(),
            }],
            final_output_json_schema: None,
            cwd: cwd.path().to_path_buf(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: session_model,
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;

    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    let requests = server.received_requests().await.expect("recorded requests");
    let request_bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().unwrap())
        .collect::<Vec<_>>();
    assert!(
        !request_bodies.is_empty(),
        "expected at least one request body"
    );

    let tool_output_item = request_bodies
        .iter()
        .find_map(|body| {
            body.get("input")
                .and_then(Value::as_array)
                .and_then(|items| {
                    items.iter().find(|item| {
                        item.get("type").and_then(Value::as_str) == Some("function_call_output")
                    })
                })
        })
        .unwrap_or_else(|| {
            panic!("function_call_output item not found in requests: {request_bodies:#?}")
        });

    assert_eq!(
        tool_output_item.get("call_id").and_then(Value::as_str),
        Some(call_id)
    );

    let output_text = tool_output_item
        .get("output")
        .and_then(|value| match value {
            Value::String(text) => Some(text.as_str()),
            Value::Object(obj) => obj.get("content").and_then(Value::as_str),
            _ => None,
        })
        .expect("output text present");
    assert_eq!(output_text, "E1: [file] alpha.txt\nE2: [dir] nested");

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[ignore = "disabled until we enable list_dir tool"]
async fn list_dir_tool_depth_two_includes_children_only() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;

    let TestCodex {
        codex,
        cwd,
        session_configured,
        ..
    } = test_codex().build(&server).await?;

    let dir_path = cwd.path().join("depth_two");
    std::fs::create_dir(&dir_path)?;
    std::fs::write(dir_path.join("alpha.txt"), "alpha")?;
    let nested = dir_path.join("nested");
    std::fs::create_dir(&nested)?;
    std::fs::write(nested.join("beta.txt"), "beta")?;
    let deeper = nested.join("grand");
    std::fs::create_dir(&deeper)?;
    std::fs::write(deeper.join("gamma.txt"), "gamma")?;
    let dir_path_string = dir_path.to_string_lossy().to_string();

    let call_id = "list-dir-depth2";
    let arguments = serde_json::json!({
        "dir_path": dir_path_string,
        "offset": 1,
        "limit": 10,
        "depth": 2,
    })
    .to_string();

    let first_response = sse(vec![
        serde_json::json!({
            "type": "response.created",
            "response": {"id": "resp-1"}
        }),
        ev_function_call(call_id, "list_dir", &arguments),
        ev_completed("resp-1"),
    ]);
    responses::mount_sse_once_match(&server, any(), first_response).await;

    let second_response = sse(vec![
        ev_assistant_message("msg-1", "done"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(&server, any(), second_response).await;

    let session_model = session_configured.model.clone();

    codex
        .submit(Op::UserTurn {
            items: vec![InputItem::Text {
                text: "list directory contents depth two".into(),
            }],
            final_output_json_schema: None,
            cwd: cwd.path().to_path_buf(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: session_model,
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;

    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    let requests = server.received_requests().await.expect("recorded requests");
    let request_bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().unwrap())
        .collect::<Vec<_>>();
    assert!(
        !request_bodies.is_empty(),
        "expected at least one request body"
    );

    let tool_output_item = request_bodies
        .iter()
        .find_map(|body| {
            body.get("input")
                .and_then(Value::as_array)
                .and_then(|items| {
                    items.iter().find(|item| {
                        item.get("type").and_then(Value::as_str) == Some("function_call_output")
                    })
                })
        })
        .unwrap_or_else(|| {
            panic!("function_call_output item not found in requests: {request_bodies:#?}")
        });

    assert_eq!(
        tool_output_item.get("call_id").and_then(Value::as_str),
        Some(call_id)
    );

    let output_text = tool_output_item
        .get("output")
        .and_then(|value| match value {
            Value::String(text) => Some(text.as_str()),
            Value::Object(obj) => obj.get("content").and_then(Value::as_str),
            _ => None,
        })
        .expect("output text present");
    assert_eq!(
        output_text,
        "E1: [file] alpha.txt\nE2: [dir] nested\nE3: [file] nested/beta.txt\nE4: [dir] nested/grand"
    );
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
#[ignore = "disabled until we enable list_dir tool"]
|
||||
async fn list_dir_tool_depth_three_includes_grandchildren() -> anyhow::Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = start_mock_server().await;
|
||||
|
||||
let TestCodex {
|
||||
codex,
|
||||
cwd,
|
||||
session_configured,
|
||||
..
|
||||
} = test_codex().build(&server).await?;
|
||||
|
||||
let dir_path = cwd.path().join("depth_three");
|
||||
std::fs::create_dir(&dir_path)?;
|
||||
std::fs::write(dir_path.join("alpha.txt"), "alpha")?;
|
||||
let nested = dir_path.join("nested");
|
||||
std::fs::create_dir(&nested)?;
|
||||
std::fs::write(nested.join("beta.txt"), "beta")?;
|
||||
let deeper = nested.join("grand");
|
||||
std::fs::create_dir(&deeper)?;
|
||||
std::fs::write(deeper.join("gamma.txt"), "gamma")?;
|
||||
let dir_path_string = dir_path.to_string_lossy().to_string();
|
||||
|
||||
let call_id = "list-dir-depth3";
|
||||
let arguments = serde_json::json!({
|
||||
"dir_path": dir_path_string,
|
||||
"offset": 1,
|
||||
"limit": 10,
|
||||
"depth": 3,
|
||||
})
|
||||
.to_string();
|
||||
|
||||
let first_response = sse(vec![
|
||||
serde_json::json!({
|
||||
"type": "response.created",
|
||||
"response": {"id": "resp-1"}
|
||||
}),
|
||||
ev_function_call(call_id, "list_dir", &arguments),
|
||||
ev_completed("resp-1"),
|
||||
]);
|
||||
responses::mount_sse_once_match(&server, any(), first_response).await;
|
||||
|
||||
let second_response = sse(vec![
|
||||
ev_assistant_message("msg-1", "done"),
|
||||
ev_completed("resp-2"),
|
||||
]);
|
||||
responses::mount_sse_once_match(&server, any(), second_response).await;
|
||||
|
||||
let session_model = session_configured.model.clone();
|
||||
|
||||
codex
|
||||
.submit(Op::UserTurn {
|
||||
items: vec![InputItem::Text {
|
||||
text: "list directory contents depth three".into(),
|
||||
}],
|
||||
final_output_json_schema: None,
|
||||
cwd: cwd.path().to_path_buf(),
|
||||
approval_policy: AskForApproval::Never,
|
||||
sandbox_policy: SandboxPolicy::DangerFullAccess,
|
||||
model: session_model,
|
||||
effort: None,
|
||||
summary: ReasoningSummary::Auto,
|
||||
})
|
||||
.await?;
|
||||
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
let requests = server.received_requests().await.expect("recorded requests");
|
||||
let request_bodies = requests
|
||||
.iter()
|
||||
.map(|req| req.body_json::<Value>().unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
assert!(
|
||||
!request_bodies.is_empty(),
|
||||
"expected at least one request body"
|
||||
);
|
||||
|
||||
let tool_output_item = request_bodies
|
||||
.iter()
|
||||
.find_map(|body| {
|
||||
body.get("input")
|
||||
.and_then(Value::as_array)
|
||||
.and_then(|items| {
|
||||
items.iter().find(|item| {
|
||||
item.get("type").and_then(Value::as_str) == Some("function_call_output")
|
||||
})
|
||||
})
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
panic!("function_call_output item not found in requests: {request_bodies:#?}")
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
tool_output_item.get("call_id").and_then(Value::as_str),
|
||||
Some(call_id)
|
||||
);
|
||||
|
||||
let output_text = tool_output_item
|
||||
.get("output")
|
||||
.and_then(|value| match value {
|
||||
Value::String(text) => Some(text.as_str()),
|
||||
Value::Object(obj) => obj.get("content").and_then(Value::as_str),
|
||||
_ => None,
|
||||
})
|
||||
.expect("output text present");
|
||||
assert_eq!(
|
||||
output_text,
|
||||
"E1: [file] alpha.txt\nE2: [dir] nested\nE3: [file] nested/beta.txt\nE4: [dir] nested/grand\nE5: [file] nested/grand/gamma.txt"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
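
// The assertions in these tests expect each listing entry rendered as
// "E{n}: [kind] relative/path". A minimal illustrative sketch of that rendering
// (hypothetical helper; not the actual list_dir implementation):
fn render_entries(entries: &[(bool, String)]) -> String {
    // `entries` holds (is_dir, path relative to the listed root), already
    // filtered to the requested depth and offset/limit window.
    entries
        .iter()
        .enumerate()
        .map(|(i, (is_dir, path))| {
            let kind = if *is_dir { "dir" } else { "file" };
            format!("E{}: [{kind}] {path}", i + 1)
        })
        .collect::<Vec<_>>()
        .join("\n")
}
// e.g. [(false, "alpha.txt"), (true, "nested")] renders to
// "E1: [file] alpha.txt\nE2: [dir] nested", matching the depth-one assertion.
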
@@ -9,7 +9,9 @@ mod compact_resume_fork;
mod exec;
mod exec_stream_events;
mod fork_conversation;
mod grep_files;
mod json_result;
mod list_dir;
mod live_cli;
mod model_overrides;
mod model_tools;

@@ -4,20 +4,18 @@ use codex_core::CodexAuth;
use codex_core::ConversationManager;
use codex_core::ModelProviderInfo;
use codex_core::built_in_model_providers;
use codex_core::features::Feature;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use core_test_support::load_default_config_for_test;
use core_test_support::load_sse_fixture_with_id;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use core_test_support::wait_for_event;
use tempfile::TempDir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;

fn sse_completed(id: &str) -> String {
    load_sse_fixture_with_id("tests/fixtures/completed_template.json", id)
@@ -44,16 +42,7 @@ async fn collect_tool_identifiers_for_model(model: &str) -> Vec<String> {
    let server = MockServer::start().await;

    let sse = sse_completed(model);
    let template = ResponseTemplate::new(200)
        .insert_header("content-type", "text/event-stream")
        .set_body_raw(sse, "text/event-stream");

    Mock::given(method("POST"))
        .and(path("/v1/responses"))
        .respond_with(template)
        .expect(1)
        .mount(&server)
        .await;
    let resp_mock = responses::mount_sse_once_match(&server, wiremock::matchers::any(), sse).await;

    let model_provider = ModelProviderInfo {
        base_url: Some(format!("{}/v1", server.uri())),
@@ -68,12 +57,12 @@ async fn collect_tool_identifiers_for_model(model: &str) -> Vec<String> {
    config.model = model.to_string();
    config.model_family =
        find_family_for_model(model).unwrap_or_else(|| panic!("unknown model family for {model}"));
    config.include_plan_tool = false;
    config.include_apply_patch_tool = false;
    config.include_view_image_tool = false;
    config.tools_web_search_request = false;
    config.use_experimental_streamable_shell_tool = false;
    config.use_experimental_unified_exec_tool = false;
    config.features.disable(Feature::PlanTool);
    config.features.disable(Feature::ApplyPatchFreeform);
    config.features.disable(Feature::ViewImageTool);
    config.features.disable(Feature::WebSearchRequest);
    config.features.disable(Feature::StreamableShell);
    config.features.disable(Feature::UnifiedExec);

    let conversation_manager =
        ConversationManager::with_auth(CodexAuth::from_api_key("Test API Key"));
@@ -93,13 +82,7 @@ async fn collect_tool_identifiers_for_model(model: &str) -> Vec<String> {
        .unwrap();
    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    let requests = server.received_requests().await.unwrap();
    assert_eq!(
        requests.len(),
        1,
        "expected a single request for model {model}"
    );
    let body = requests[0].body_json::<serde_json::Value>().unwrap();
    let body = resp_mock.single_request().body_json();
    tool_identifiers(&body)
}
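
// A sketch of the test-helper pattern this refactor adopts: mount the SSE body once
// via `responses::mount_sse_once_match` and read the captured request back from the
// returned mock handle, instead of hand-rolling a wiremock Mock and scanning
// `server.received_requests()`. Illustrative usage only, based on the call sites in
// this diff (the handle type and exact signatures live in core_test_support):
//
//     let resp_mock = responses::mount_sse_once_match(&server, any(), sse_body).await;
//     // ... drive the turn against the mock server ...
//     let body = resp_mock.single_request().body_json();
//     let tools = tool_identifiers(&body);
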
@@ -5,6 +5,7 @@ use codex_core::ConversationManager;
use codex_core::ModelProviderInfo;
use codex_core::built_in_model_providers;
use codex_core::config::OPENAI_DEFAULT_MODEL;
use codex_core::features::Feature;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
@@ -99,10 +100,10 @@ async fn codex_mini_latest_tools() {
    config.cwd = cwd.path().to_path_buf();
    config.model_provider = model_provider;
    config.user_instructions = Some("be consistent and helpful".to_string());
    config.features.disable(Feature::ApplyPatchFreeform);

    let conversation_manager =
        ConversationManager::with_auth(CodexAuth::from_api_key("Test API Key"));
    config.include_apply_patch_tool = false;
    config.model = "codex-mini-latest".to_string();
    config.model_family = find_family_for_model("codex-mini-latest").unwrap();

@@ -185,7 +186,7 @@ async fn prompt_tools_are_consistent_across_requests() {
    config.cwd = cwd.path().to_path_buf();
    config.model_provider = model_provider;
    config.user_instructions = Some("be consistent and helpful".to_string());
    config.include_plan_tool = true;
    config.features.enable(Feature::PlanTool);

    let conversation_manager =
        ConversationManager::with_auth(CodexAuth::from_api_key("Test API Key"));

@@ -58,7 +58,7 @@ async fn read_file_tool_returns_requested_lines() -> anyhow::Result<()> {
        ev_assistant_message("msg-1", "done"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(&server, any(), second_response).await;
    let second_mock = responses::mount_sse_once_match(&server, any(), second_response).await;

    let session_model = session_configured.model.clone();

@@ -79,36 +79,12 @@ async fn read_file_tool_returns_requested_lines() -> anyhow::Result<()> {

    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    let requests = server.received_requests().await.expect("recorded requests");
    let request_bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().unwrap())
        .collect::<Vec<_>>();
    assert!(
        !request_bodies.is_empty(),
        "expected at least one request body"
    );

    let tool_output_item = request_bodies
        .iter()
        .find_map(|body| {
            body.get("input")
                .and_then(Value::as_array)
                .and_then(|items| {
                    items.iter().find(|item| {
                        item.get("type").and_then(Value::as_str) == Some("function_call_output")
                    })
                })
        })
        .unwrap_or_else(|| {
            panic!("function_call_output item not found in requests: {request_bodies:#?}")
        });

    let req = second_mock.single_request();
    let tool_output_item = req.function_call_output(call_id);
    assert_eq!(
        tool_output_item.get("call_id").and_then(Value::as_str),
        Some(call_id)
    );

    let output_text = tool_output_item
        .get("output")
        .and_then(|value| match value {

@@ -445,7 +445,7 @@ async fn review_input_isolated_from_parent_history() {
        .await;
    let _complete = wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    // Assert the request `input` contains the environment context followed by the review prompt.
    // Assert the request `input` contains the environment context followed by the user review prompt.
    let request = &server.received_requests().await.unwrap()[0];
    let body = request.body_json::<serde_json::Value>().unwrap();
    let input = body["input"].as_array().expect("input array");
@@ -473,9 +473,14 @@ async fn review_input_isolated_from_parent_history() {
    assert_eq!(review_msg["role"].as_str().unwrap(), "user");
    assert_eq!(
        review_msg["content"][0]["text"].as_str().unwrap(),
        format!("{REVIEW_PROMPT}\n\n---\n\nNow, here's your task: Please review only this",)
        review_prompt,
        "user message should only contain the raw review prompt"
    );

    // Ensure the REVIEW_PROMPT rubric is sent via instructions.
    let instructions = body["instructions"].as_str().expect("instructions string");
    assert_eq!(instructions, REVIEW_PROMPT);

    // Also verify that a user interruption note was recorded in the rollout.
    codex.submit(Op::GetPath).await.unwrap();
    let history_event =

@@ -9,6 +9,7 @@ use std::time::UNIX_EPOCH;

use codex_core::config_types::McpServerConfig;
use codex_core::config_types::McpServerTransportConfig;
use codex_core::features::Feature;

use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
@@ -74,7 +75,7 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {

    let fixture = test_codex()
        .with_config(move |config| {
            config.use_experimental_use_rmcp_client = true;
            config.features.enable(Feature::RmcpClient);
            config.mcp_servers.insert(
                server_name.to_string(),
                McpServerConfig {
@@ -86,6 +87,7 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
                        expected_env_value.to_string(),
                    )])),
                },
                enabled: true,
                startup_timeout_sec: Some(Duration::from_secs(10)),
                tool_timeout_sec: None,
            },
@@ -226,14 +228,15 @@ async fn streamable_http_tool_call_round_trip() -> anyhow::Result<()> {

    let fixture = test_codex()
        .with_config(move |config| {
            config.use_experimental_use_rmcp_client = true;
            config.features.enable(Feature::RmcpClient);
            config.mcp_servers.insert(
                server_name.to_string(),
                McpServerConfig {
                    transport: McpServerTransportConfig::StreamableHttp {
                        url: server_url,
                        bearer_token: None,
                        bearer_token_env_var: None,
                    },
                    enabled: true,
                    startup_timeout_sec: Some(Duration::from_secs(10)),
                    tool_timeout_sec: None,
                },
@@ -406,14 +409,15 @@ async fn streamable_http_with_oauth_round_trip() -> anyhow::Result<()> {

    let fixture = test_codex()
        .with_config(move |config| {
            config.use_experimental_use_rmcp_client = true;
            config.features.enable(Feature::RmcpClient);
            config.mcp_servers.insert(
                server_name.to_string(),
                McpServerConfig {
                    transport: McpServerTransportConfig::StreamableHttp {
                        url: server_url,
                        bearer_token: None,
                        bearer_token_env_var: None,
                    },
                    enabled: true,
                    startup_timeout_sec: Some(Duration::from_secs(10)),
                    tool_timeout_sec: None,
                },

@@ -1,6 +1,7 @@
#![cfg(not(target_os = "windows"))]

use anyhow::Result;
use codex_core::features::Feature;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
@@ -9,9 +10,12 @@ use codex_core::protocol::Op;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::ReasoningSummary;
use core_test_support::assert_regex_match;
use core_test_support::responses::ev_apply_patch_function_call;
use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_custom_tool_call;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::ev_local_shell_call;
use core_test_support::responses::ev_response_created;
use core_test_support::responses::mount_sse_sequence;
use core_test_support::responses::sse;
@@ -20,8 +24,11 @@ use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use pretty_assertions::assert_eq;
use regex_lite::Regex;
use serde_json::Value;
use serde_json::json;
use std::fs;

async fn submit_turn(test: &TestCodex, prompt: &str, sandbox_policy: SandboxPolicy) -> Result<()> {
    let session_model = test.session_configured.model.clone();
@@ -71,13 +78,28 @@ fn find_function_call_output<'a>(bodies: &'a [Value], call_id: &str) -> Option<&
    None
}

fn find_custom_tool_call_output<'a>(bodies: &'a [Value], call_id: &str) -> Option<&'a Value> {
    for body in bodies {
        if let Some(items) = body.get("input").and_then(Value::as_array) {
            for item in items {
                if item.get("type").and_then(Value::as_str) == Some("custom_tool_call_output")
                    && item.get("call_id").and_then(Value::as_str) == Some(call_id)
                {
                    return Some(item);
                }
            }
        }
    }
    None
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_output_stays_json_without_freeform_apply_patch() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = false;
        config.features.disable(Feature::ApplyPatchFreeform);
        config.model = "gpt-5".to_string();
        config.model_family = find_family_for_model("gpt-5").expect("gpt-5 is a model family");
    });
@@ -119,7 +141,12 @@ async fn shell_output_stays_json_without_freeform_apply_patch() -> Result<()> {
        .and_then(Value::as_str)
        .expect("shell output string");

    let parsed: Value = serde_json::from_str(output)?;
    let mut parsed: Value = serde_json::from_str(output)?;
    if let Some(metadata) = parsed.get_mut("metadata").and_then(Value::as_object_mut) {
        // duration_seconds is non-deterministic; remove it for deep equality
        let _ = metadata.remove("duration_seconds");
    }

    assert_eq!(
        parsed
            .get("metadata")
@@ -143,7 +170,7 @@ async fn shell_output_is_structured_with_freeform_apply_patch() -> Result<()> {

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = true;
        config.features.enable(Feature::ApplyPatchFreeform);
    });
    let test = builder.build(&server).await?;

@@ -198,6 +225,83 @@ freeform shell
    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_output_for_freeform_tool_records_duration() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = true;
    });
    let test = builder.build(&server).await?;

    #[cfg(target_os = "linux")]
    let sleep_cmd = vec!["/bin/bash", "-c", "sleep 1"];

    #[cfg(target_os = "macos")]
    let sleep_cmd = vec!["/bin/bash", "-c", "sleep 1"];

    #[cfg(windows)]
    let sleep_cmd = "timeout 1";

    let call_id = "shell-structured";
    let args = json!({
        "command": sleep_cmd,
        "timeout_ms": 2_000,
    });
    let responses = vec![
        sse(vec![
            json!({"type": "response.created", "response": {"id": "resp-1"}}),
            ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            ev_completed("resp-1"),
        ]),
        sse(vec![
            ev_assistant_message("msg-1", "done"),
            ev_completed("resp-2"),
        ]),
    ];
    mount_sse_sequence(&server, responses).await;

    submit_turn(
        &test,
        "run the structured shell command",
        SandboxPolicy::DangerFullAccess,
    )
    .await?;

    let requests = server
        .received_requests()
        .await
        .expect("recorded requests present");
    let bodies = request_bodies(&requests)?;
    let output_item =
        find_function_call_output(&bodies, call_id).expect("structured output present");
    let output = output_item
        .get("output")
        .and_then(Value::as_str)
        .expect("structured output string");

    let expected_pattern = r#"(?s)^Exit code: 0
Wall time: [0-9]+(?:\.[0-9]+)? seconds
Output:
$"#;
    assert_regex_match(expected_pattern, output);

    let wall_time_regex = Regex::new(r"(?m)^Wall (?:time|Clock): ([0-9]+(?:\.[0-9]+)?) seconds$")
        .expect("compile wall time regex");
    let wall_time_seconds = wall_time_regex
        .captures(output)
        .and_then(|caps| caps.get(1))
        .and_then(|value| value.as_str().parse::<f32>().ok())
        .expect("expected structured shell output to contain wall time seconds");
    assert!(
        wall_time_seconds > 0.5,
        "expected wall time to be greater than zero seconds, got {wall_time_seconds}"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_output_reserializes_truncated_content() -> Result<()> {
    skip_if_no_network!(Ok(()));
@@ -213,7 +317,7 @@ async fn shell_output_reserializes_truncated_content() -> Result<()> {
    let call_id = "shell-truncated";
    let args = json!({
        "command": ["/bin/sh", "-c", "seq 1 400"],
        "timeout_ms": 1_000,
        "timeout_ms": 5_000,
    });
    let responses = vec![
        sse(vec![
@@ -275,3 +379,428 @@ $"#;

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn apply_patch_custom_tool_output_is_structured() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = true;
    });
    let test = builder.build(&server).await?;

    let call_id = "apply-patch-structured";
    let file_name = "structured.txt";
    let patch = format!(
        r#"*** Begin Patch
*** Add File: {file_name}
+from custom tool
*** End Patch
"#
    );
    let responses = vec![
        sse(vec![
            json!({"type": "response.created", "response": {"id": "resp-1"}}),
            ev_custom_tool_call(call_id, "apply_patch", &patch),
            ev_completed("resp-1"),
        ]),
        sse(vec![
            ev_assistant_message("msg-1", "done"),
            ev_completed("resp-2"),
        ]),
    ];
    mount_sse_sequence(&server, responses).await;

    submit_turn(
        &test,
        "apply the patch via custom tool",
        SandboxPolicy::DangerFullAccess,
    )
    .await?;

    let requests = server
        .received_requests()
        .await
        .expect("recorded requests present");
    let bodies = request_bodies(&requests)?;
    let output_item =
        find_custom_tool_call_output(&bodies, call_id).expect("apply_patch output present");
    let output = output_item
        .get("output")
        .and_then(Value::as_str)
        .expect("apply_patch output string");

    let expected_pattern = format!(
        r"(?s)^Exit code: 0
Wall time: [0-9]+(?:\.[0-9]+)? seconds
Output:
Success. Updated the following files:
A {file_name}
?$"
    );
    assert_regex_match(&expected_pattern, output);

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn apply_patch_custom_tool_call_creates_file() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = true;
    });
    let test = builder.build(&server).await?;

    let call_id = "apply-patch-add-file";
    let file_name = "custom_tool_apply_patch.txt";
    let patch = format!(
        "*** Begin Patch\n*** Add File: {file_name}\n+custom tool content\n*** End Patch\n"
    );
    let responses = vec![
        sse(vec![
            json!({"type": "response.created", "response": {"id": "resp-1"}}),
            ev_custom_tool_call(call_id, "apply_patch", &patch),
            ev_completed("resp-1"),
        ]),
        sse(vec![
            ev_assistant_message("msg-1", "apply_patch done"),
            ev_completed("resp-2"),
        ]),
    ];
    mount_sse_sequence(&server, responses).await;

    submit_turn(
        &test,
        "apply the patch via custom tool to create a file",
        SandboxPolicy::DangerFullAccess,
    )
    .await?;

    let requests = server
        .received_requests()
        .await
        .expect("recorded requests present");
    let bodies = request_bodies(&requests)?;
    let output_item =
        find_custom_tool_call_output(&bodies, call_id).expect("apply_patch output present");
    let output = output_item
        .get("output")
        .and_then(Value::as_str)
        .expect("apply_patch output string");

    let expected_pattern = format!(
        r"(?s)^Exit code: 0
Wall time: [0-9]+(?:\.[0-9]+)? seconds
Output:
Success. Updated the following files:
A {file_name}
?$"
    );
    assert_regex_match(&expected_pattern, output);

    let new_file_path = test.cwd.path().join(file_name);
    let created_contents = fs::read_to_string(&new_file_path)?;
    assert_eq!(
        created_contents, "custom tool content\n",
        "expected file contents for {file_name}"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn apply_patch_custom_tool_call_updates_existing_file() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = true;
    });
    let test = builder.build(&server).await?;

    let call_id = "apply-patch-update-file";
    let file_name = "custom_tool_apply_patch_existing.txt";
    let file_path = test.cwd.path().join(file_name);
    fs::write(&file_path, "before\n")?;
    let patch = format!(
        "*** Begin Patch\n*** Update File: {file_name}\n@@\n-before\n+after\n*** End Patch\n"
    );
    let responses = vec![
        sse(vec![
            json!({"type": "response.created", "response": {"id": "resp-1"}}),
            ev_custom_tool_call(call_id, "apply_patch", &patch),
            ev_completed("resp-1"),
        ]),
        sse(vec![
            ev_assistant_message("msg-1", "apply_patch update done"),
            ev_completed("resp-2"),
        ]),
    ];
    mount_sse_sequence(&server, responses).await;

    submit_turn(
        &test,
        "apply the patch via custom tool to update a file",
        SandboxPolicy::DangerFullAccess,
    )
    .await?;

    let requests = server
        .received_requests()
        .await
        .expect("recorded requests present");
    let bodies = request_bodies(&requests)?;
    let output_item =
        find_custom_tool_call_output(&bodies, call_id).expect("apply_patch output present");
    let output = output_item
        .get("output")
        .and_then(Value::as_str)
        .expect("apply_patch output string");

    let expected_pattern = format!(
        r"(?s)^Exit code: 0
Wall time: [0-9]+(?:\.[0-9]+)? seconds
Output:
Success. Updated the following files:
M {file_name}
?$"
    );
    assert_regex_match(&expected_pattern, output);

    let updated_contents = fs::read_to_string(file_path)?;
    assert_eq!(updated_contents, "after\n", "expected updated file content");

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn apply_patch_custom_tool_call_reports_failure_output() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = true;
    });
    let test = builder.build(&server).await?;

    let call_id = "apply-patch-failure";
    let missing_file = "missing_custom_tool_apply_patch.txt";
    let patch = format!(
        "*** Begin Patch\n*** Update File: {missing_file}\n@@\n-before\n+after\n*** End Patch\n"
    );
    let responses = vec![
        sse(vec![
            json!({"type": "response.created", "response": {"id": "resp-1"}}),
            ev_custom_tool_call(call_id, "apply_patch", &patch),
            ev_completed("resp-1"),
        ]),
        sse(vec![
            ev_assistant_message("msg-1", "apply_patch failure done"),
            ev_completed("resp-2"),
        ]),
    ];
    mount_sse_sequence(&server, responses).await;

    submit_turn(
        &test,
        "attempt a failing apply_patch via custom tool",
        SandboxPolicy::DangerFullAccess,
    )
    .await?;

    let requests = server
        .received_requests()
        .await
        .expect("recorded requests present");
    let bodies = request_bodies(&requests)?;
    let output_item =
        find_custom_tool_call_output(&bodies, call_id).expect("apply_patch output present");
    let output = output_item
        .get("output")
        .and_then(Value::as_str)
        .expect("apply_patch output string");

    let expected_output = format!(
        "apply_patch verification failed: Failed to read file to update {}/{missing_file}: No such file or directory (os error 2)",
        test.cwd.path().to_string_lossy()
    );
    assert_eq!(output, expected_output);

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn apply_patch_function_call_output_is_structured() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = true;
    });
    let test = builder.build(&server).await?;

    let call_id = "apply-patch-function";
    let file_name = "function_apply_patch.txt";
    let patch =
        format!("*** Begin Patch\n*** Add File: {file_name}\n+via function call\n*** End Patch\n");
    let responses = vec![
        sse(vec![
            json!({"type": "response.created", "response": {"id": "resp-1"}}),
            ev_apply_patch_function_call(call_id, &patch),
            ev_completed("resp-1"),
        ]),
        sse(vec![
            ev_assistant_message("msg-1", "apply_patch function done"),
            ev_completed("resp-2"),
        ]),
    ];
    mount_sse_sequence(&server, responses).await;

    submit_turn(
        &test,
        "apply the patch via function-call apply_patch",
        SandboxPolicy::DangerFullAccess,
    )
    .await?;

    let requests = server
        .received_requests()
        .await
        .expect("recorded requests present");
    let bodies = request_bodies(&requests)?;
    let output_item =
        find_function_call_output(&bodies, call_id).expect("apply_patch function output present");
    let output = output_item
        .get("output")
        .and_then(Value::as_str)
        .expect("apply_patch output string");

    let expected_pattern = format!(
        r"(?s)^Exit code: 0
Wall time: [0-9]+(?:\.[0-9]+)? seconds
Output:
Success. Updated the following files:
A {file_name}
?$"
    );
    assert_regex_match(&expected_pattern, output);

    Ok(())
}
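
// All of the structured-output assertions above match one rendered layout. A minimal
// illustrative sketch of that formatting (hypothetical helper; the real rendering is
// done by the exec tooling in codex-core and may differ in detail):
fn render_exec_output(exit_code: i32, wall_time_secs: f32, output: &str) -> String {
    // Produces text matched by the `(?s)^Exit code: ...` patterns in these tests.
    format!("Exit code: {exit_code}\nWall time: {wall_time_secs} seconds\nOutput:\n{output}")
}
// e.g. render_exec_output(0, 1.2, "Success. Updated the following files:\nA structured.txt\n")
// yields a string accepted by the success patterns above.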

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_output_is_structured_for_nonzero_exit() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.model = "gpt-5-codex".to_string();
        config.model_family =
            find_family_for_model("gpt-5-codex").expect("gpt-5-codex is a model family");
        config.include_apply_patch_tool = true;
    });
    let test = builder.build(&server).await?;

    let call_id = "shell-nonzero-exit";
    let args = json!({
        "command": ["/bin/sh", "-c", "exit 42"],
        "timeout_ms": 1_000,
    });
    let responses = vec![
        sse(vec![
            json!({"type": "response.created", "response": {"id": "resp-1"}}),
            ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            ev_completed("resp-1"),
        ]),
        sse(vec![
            ev_assistant_message("msg-1", "shell failure handled"),
            ev_completed("resp-2"),
        ]),
    ];
    mount_sse_sequence(&server, responses).await;

    submit_turn(
        &test,
        "run the failing shell command",
        SandboxPolicy::DangerFullAccess,
    )
    .await?;

    let requests = server
        .received_requests()
        .await
        .expect("recorded requests present");
    let bodies = request_bodies(&requests)?;
    let output_item = find_function_call_output(&bodies, call_id).expect("shell output present");
    let output = output_item
        .get("output")
        .and_then(Value::as_str)
        .expect("shell output string");

    let expected_pattern = r"(?s)^Exit code: 42
Wall time: [0-9]+(?:\.[0-9]+)? seconds
Output:
?$";
    assert_regex_match(expected_pattern, output);

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn local_shell_call_output_is_structured() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(|config| {
        config.model = "gpt-5-codex".to_string();
        config.model_family =
            find_family_for_model("gpt-5-codex").expect("gpt-5-codex is a model family");
        config.include_apply_patch_tool = true;
    });
    let test = builder.build(&server).await?;

    let call_id = "local-shell-call";
    let responses = vec![
        sse(vec![
            json!({"type": "response.created", "response": {"id": "resp-1"}}),
            ev_local_shell_call(call_id, "completed", vec!["/bin/echo", "local shell"]),
            ev_completed("resp-1"),
        ]),
        sse(vec![
            ev_assistant_message("msg-1", "local shell done"),
            ev_completed("resp-2"),
        ]),
    ];
    mount_sse_sequence(&server, responses).await;

    submit_turn(
        &test,
        "run the local shell command",
        SandboxPolicy::DangerFullAccess,
    )
    .await?;

    let requests = server
        .received_requests()
        .await
        .expect("recorded requests present");
    let bodies = request_bodies(&requests)?;
    let output_item =
        find_function_call_output(&bodies, call_id).expect("local shell output present");
    let output = output_item
        .get("output")
        .and_then(Value::as_str)
        .expect("local shell output string");

    let expected_pattern = r"(?s)^Exit code: 0
Wall time: [0-9]+(?:\.[0-9]+)? seconds
Output:
local shell
?$";
    assert_regex_match(expected_pattern, output);

    Ok(())
}

@@ -1,6 +1,9 @@
#![cfg(not(target_os = "windows"))]

use std::fs;

use assert_matches::assert_matches;
use codex_core::features::Feature;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
@@ -27,16 +30,6 @@ use serde_json::Value;
use serde_json::json;
use wiremock::matchers::any;

fn function_call_output(body: &Value) -> Option<&Value> {
    body.get("input")
        .and_then(Value::as_array)
        .and_then(|items| {
            items.iter().find(|item| {
                item.get("type").and_then(Value::as_str) == Some("function_call_output")
            })
        })
}

fn extract_output_text(item: &Value) -> Option<&str> {
    item.get("output").and_then(|value| match value {
        Value::String(text) => Some(text.as_str()),
@@ -45,12 +38,6 @@ fn extract_output_text(item: &Value) -> Option<&str> {
    })
}

fn find_request_with_function_call_output(requests: &[Value]) -> Option<&Value> {
    requests
        .iter()
        .find(|body| function_call_output(body).is_some())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_tool_executes_command_and_streams_output() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
@@ -81,7 +68,7 @@ async fn shell_tool_executes_command_and_streams_output() -> anyhow::Result<()>
        ev_assistant_message("msg-1", "all done"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(&server, any(), second_response).await;
    let second_mock = responses::mount_sse_once_match(&server, any(), second_response).await;

    let session_model = session_configured.model.clone();

@@ -102,18 +89,9 @@ async fn shell_tool_executes_command_and_streams_output() -> anyhow::Result<()>

    wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;

    let requests = server.received_requests().await.expect("recorded requests");
    assert!(!requests.is_empty(), "expected at least one POST request");

    let request_bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().expect("request json"))
        .collect::<Vec<_>>();

    let body_with_tool_output = find_request_with_function_call_output(&request_bodies)
        .expect("function_call_output item not found in requests");
    let output_item = function_call_output(body_with_tool_output).expect("tool output item");
    let output_text = extract_output_text(output_item).expect("output text present");
    let req = second_mock.single_request();
    let output_item = req.function_call_output(call_id);
    let output_text = extract_output_text(&output_item).expect("output text present");
    let exec_output: Value = serde_json::from_str(output_text)?;
    assert_eq!(exec_output["metadata"]["exit_code"], 0);
    let stdout = exec_output["output"].as_str().expect("stdout field");
@@ -129,7 +107,7 @@ async fn update_plan_tool_emits_plan_update_event() -> anyhow::Result<()> {
    let server = start_mock_server().await;

    let mut builder = test_codex().with_config(|config| {
        config.include_plan_tool = true;
        config.features.enable(Feature::PlanTool);
    });
    let TestCodex {
        codex,
@@ -159,7 +137,7 @@ async fn update_plan_tool_emits_plan_update_event() -> anyhow::Result<()> {
        ev_assistant_message("msg-1", "plan acknowledged"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(&server, any(), second_response).await;
    let second_mock = responses::mount_sse_once_match(&server, any(), second_response).await;

    let session_model = session_configured.model.clone();

@@ -197,22 +175,13 @@ async fn update_plan_tool_emits_plan_update_event() -> anyhow::Result<()> {

    assert!(saw_plan_update, "expected PlanUpdate event");

    let requests = server.received_requests().await.expect("recorded requests");
    assert!(!requests.is_empty(), "expected at least one POST request");

    let request_bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().expect("request json"))
        .collect::<Vec<_>>();

    let body_with_tool_output = find_request_with_function_call_output(&request_bodies)
        .expect("function_call_output item not found in requests");
    let output_item = function_call_output(body_with_tool_output).expect("tool output item");
    let req = second_mock.single_request();
    let output_item = req.function_call_output(call_id);
    assert_eq!(
        output_item.get("call_id").and_then(Value::as_str),
        Some(call_id)
    );
    let output_text = extract_output_text(output_item).expect("output text present");
    let output_text = extract_output_text(&output_item).expect("output text present");
    assert_eq!(output_text, "Plan updated");

    Ok(())
@@ -225,7 +194,7 @@ async fn update_plan_tool_rejects_malformed_payload() -> anyhow::Result<()> {
    let server = start_mock_server().await;

    let mut builder = test_codex().with_config(|config| {
        config.include_plan_tool = true;
        config.features.enable(Feature::PlanTool);
    });
    let TestCodex {
        codex,
@@ -251,7 +220,7 @@ async fn update_plan_tool_rejects_malformed_payload() -> anyhow::Result<()> {
        ev_assistant_message("msg-1", "malformed plan payload"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(&server, any(), second_response).await;
    let second_mock = responses::mount_sse_once_match(&server, any(), second_response).await;

    let session_model = session_configured.model.clone();

@@ -286,22 +255,13 @@ async fn update_plan_tool_rejects_malformed_payload() -> anyhow::Result<()> {
        "did not expect PlanUpdate event for malformed payload"
    );

    let requests = server.received_requests().await.expect("recorded requests");
    assert!(!requests.is_empty(), "expected at least one POST request");

    let request_bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().expect("request json"))
        .collect::<Vec<_>>();

    let body_with_tool_output = find_request_with_function_call_output(&request_bodies)
        .expect("function_call_output item not found in requests");
    let output_item = function_call_output(body_with_tool_output).expect("tool output item");
    let req = second_mock.single_request();
    let output_item = req.function_call_output(call_id);
    assert_eq!(
        output_item.get("call_id").and_then(Value::as_str),
        Some(call_id)
    );
    let output_text = extract_output_text(output_item).expect("output text present");
    let output_text = extract_output_text(&output_item).expect("output text present");
    assert!(
        output_text.contains("failed to parse function arguments"),
        "expected parse error message in output text, got {output_text:?}"
@@ -328,7 +288,7 @@ async fn apply_patch_tool_executes_and_emits_patch_events() -> anyhow::Result<()
    let server = start_mock_server().await;

    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = true;
        config.features.enable(Feature::ApplyPatchFreeform);
    });
    let TestCodex {
        codex,
@@ -337,15 +297,19 @@ async fn apply_patch_tool_executes_and_emits_patch_events() -> anyhow::Result<()
        ..
    } = builder.build(&server).await?;

    let file_name = "notes.txt";
    let file_path = cwd.path().join(file_name);
    let call_id = "apply-patch-call";
    let patch_content = r#"*** Begin Patch
*** Add File: notes.txt
    let patch_content = format!(
        r#"*** Begin Patch
*** Add File: {file_name}
+Tool harness apply patch
*** End Patch"#;
*** End Patch"#
    );

    let first_response = sse(vec![
        ev_response_created("resp-1"),
        ev_apply_patch_function_call(call_id, patch_content),
        ev_apply_patch_function_call(call_id, &patch_content),
        ev_completed("resp-1"),
    ]);
    responses::mount_sse_once_match(&server, any(), first_response).await;
@@ -354,7 +318,7 @@ async fn apply_patch_tool_executes_and_emits_patch_events() -> anyhow::Result<()
        ev_assistant_message("msg-1", "patch complete"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(&server, any(), second_response).await;
    let second_mock = responses::mount_sse_once_match(&server, any(), second_response).await;

    let session_model = session_configured.model.clone();

@@ -394,56 +358,31 @@ async fn apply_patch_tool_executes_and_emits_patch_events() -> anyhow::Result<()
    assert!(saw_patch_begin, "expected PatchApplyBegin event");
    let patch_end_success =
        patch_end_success.expect("expected PatchApplyEnd event to capture success flag");
    assert!(patch_end_success);

    let requests = server.received_requests().await.expect("recorded requests");
    assert!(!requests.is_empty(), "expected at least one POST request");

    let request_bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().expect("request json"))
        .collect::<Vec<_>>();

    let body_with_tool_output = find_request_with_function_call_output(&request_bodies)
        .expect("function_call_output item not found in requests");
    let output_item = function_call_output(body_with_tool_output).expect("tool output item");
    let req = second_mock.single_request();
    let output_item = req.function_call_output(call_id);
    assert_eq!(
        output_item.get("call_id").and_then(Value::as_str),
        Some(call_id)
    );
    let output_text = extract_output_text(output_item).expect("output text present");
    let output_text = extract_output_text(&output_item).expect("output text present");

    if let Ok(exec_output) = serde_json::from_str::<Value>(output_text) {
        let exit_code = exec_output["metadata"]["exit_code"]
            .as_i64()
            .expect("exit_code present");
        let summary = exec_output["output"].as_str().expect("output field");
        assert_eq!(
            exit_code, 0,
            "expected apply_patch exit_code=0, got {exit_code}, summary: {summary:?}"
        );
        assert!(
            patch_end_success,
            "expected PatchApplyEnd success flag, summary: {summary:?}"
        );
        assert!(
            summary.contains("Success."),
            "expected apply_patch summary to note success, got {summary:?}"
        );
    let expected_pattern = format!(
        r"(?s)^Exit code: 0
Wall time: [0-9]+(?:\.[0-9]+)? seconds
Output:
Success. Updated the following files:
A {file_name}
?$"
    );
    assert_regex_match(&expected_pattern, output_text);

        let patched_path = cwd.path().join("notes.txt");
        let contents = std::fs::read_to_string(&patched_path)
            .unwrap_or_else(|e| panic!("failed reading {}: {e}", patched_path.display()));
        assert_eq!(contents, "Tool harness apply patch\n");
    } else {
        assert!(
            output_text.contains("codex-run-as-apply-patch"),
            "expected apply_patch failure message to mention codex-run-as-apply-patch, got {output_text:?}"
        );
        assert!(
            !patch_end_success,
            "expected PatchApplyEnd to report success=false when apply_patch invocation fails"
        );
    }
    let updated_contents = fs::read_to_string(file_path)?;
    assert_eq!(
        updated_contents, "Tool harness apply patch\n",
        "expected updated file content"
    );

    Ok(())
}
@@ -455,7 +394,7 @@ async fn apply_patch_reports_parse_diagnostics() -> anyhow::Result<()> {
    let server = start_mock_server().await;

    let mut builder = test_codex().with_config(|config| {
        config.include_apply_patch_tool = true;
        config.features.enable(Feature::ApplyPatchFreeform);
    });
    let TestCodex {
        codex,
@@ -480,7 +419,7 @@ async fn apply_patch_reports_parse_diagnostics() -> anyhow::Result<()> {
        ev_assistant_message("msg-1", "failed"),
        ev_completed("resp-2"),
    ]);
    responses::mount_sse_once_match(&server, any(), second_response).await;
    let second_mock = responses::mount_sse_once_match(&server, any(), second_response).await;

    let session_model = session_configured.model.clone();

@@ -501,22 +440,13 @@ async fn apply_patch_reports_parse_diagnostics() -> anyhow::Result<()> {

    wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;

    let requests = server.received_requests().await.expect("recorded requests");
    assert!(!requests.is_empty(), "expected at least one POST request");

    let request_bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().expect("request json"))
        .collect::<Vec<_>>();

    let body_with_tool_output = find_request_with_function_call_output(&request_bodies)
        .expect("function_call_output item not found in requests");
    let output_item = function_call_output(body_with_tool_output).expect("tool output item");
    let req = second_mock.single_request();
    let output_item = req.function_call_output(call_id);
    assert_eq!(
        output_item.get("call_id").and_then(Value::as_str),
        Some(call_id)
    );
    let output_text = extract_output_text(output_item).expect("output text present");
    let output_text = extract_output_text(&output_item).expect("output text present");

    assert!(
        output_text.contains("apply_patch verification failed"),

@@ -63,8 +63,9 @@ async fn build_codex_with_test_tool(server: &wiremock::MockServer) -> anyhow::Re
}

fn assert_parallel_duration(actual: Duration) {
    // Allow headroom for runtime overhead while still differentiating from serial execution.
    assert!(
        actual < Duration::from_millis(500),
        actual < Duration::from_millis(750),
        "expected parallel execution to finish quickly, got {actual:?}"
    );
}
@@ -83,6 +84,16 @@ async fn read_file_tools_run_in_parallel() -> anyhow::Result<()> {
    let server = start_mock_server().await;
    let test = build_codex_with_test_tool(&server).await?;

    let warmup_args = json!({
        "sleep_after_ms": 10,
        "barrier": {
            "id": "parallel-test-sync-warmup",
            "participants": 2,
            "timeout_ms": 1_000,
        }
    })
    .to_string();

    let parallel_args = json!({
        "sleep_after_ms": 300,
        "barrier": {
@@ -93,6 +104,17 @@ async fn read_file_tools_run_in_parallel() -> anyhow::Result<()> {
    })
    .to_string();

    let warmup_first = sse(vec![
        json!({"type": "response.created", "response": {"id": "resp-warm-1"}}),
        ev_function_call("warm-call-1", "test_sync_tool", &warmup_args),
        ev_function_call("warm-call-2", "test_sync_tool", &warmup_args),
        ev_completed("resp-warm-1"),
    ]);
    let warmup_second = sse(vec![
        ev_assistant_message("warm-msg-1", "warmup complete"),
        ev_completed("resp-warm-2"),
    ]);

    let first_response = sse(vec![
        json!({"type": "response.created", "response": {"id": "resp-1"}}),
        ev_function_call("call-1", "test_sync_tool", &parallel_args),
@@ -103,7 +125,13 @@ async fn read_file_tools_run_in_parallel() -> anyhow::Result<()> {
        ev_assistant_message("msg-1", "done"),
        ev_completed("resp-2"),
    ]);
    mount_sse_sequence(&server, vec![first_response, second_response]).await;
    mount_sse_sequence(
        &server,
        vec![warmup_first, warmup_second, first_response, second_response],
    )
    .await;

    run_turn(&test, "warm up parallel tool").await?;

    let duration = run_turn_and_measure(&test, "exercise sync tool").await?;
    assert_parallel_duration(duration);
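
// The warmup turn and the shared `barrier` argument make the timing assertion above
// robust: both tool calls must rendezvous before either starts its sleep, so a serial
// harness would stall at the barrier timeout instead of merely running slowly.
// A minimal illustrative sketch of the same rendezvous idea with tokio primitives
// (assumed illustration, not the actual test_sync_tool implementation):
use std::sync::Arc;
use tokio::sync::Barrier;

async fn sync_tool(barrier: Arc<Barrier>, sleep_after_ms: u64) {
    // Every participant blocks here until all have arrived, so the sleeps below
    // can only overlap if the harness truly dispatches the calls concurrently.
    barrier.wait().await;
    tokio::time::sleep(std::time::Duration::from_millis(sleep_after_ms)).await;
}
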
@@ -2,6 +2,7 @@
#![allow(clippy::unwrap_used, clippy::expect_used)]

use anyhow::Result;
use codex_core::features::Feature;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
@@ -15,6 +16,7 @@ use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_custom_tool_call;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::ev_response_created;
use core_test_support::responses::mount_sse_once;
use core_test_support::responses::mount_sse_sequence;
use core_test_support::responses::sse;
use core_test_support::responses::start_mock_server;
@@ -25,7 +27,6 @@ use core_test_support::wait_for_event;
use regex_lite::Regex;
use serde_json::Value;
use serde_json::json;
use wiremock::Request;

async fn submit_turn(
test: &TestCodex,
@@ -58,27 +59,6 @@ async fn submit_turn(
Ok(())
}

fn request_bodies(requests: &[Request]) -> Result<Vec<Value>> {
requests
.iter()
.map(|req| Ok(serde_json::from_slice::<Value>(&req.body)?))
.collect()
}

fn collect_output_items<'a>(bodies: &'a [Value], ty: &str) -> Vec<&'a Value> {
let mut out = Vec::new();
for body in bodies {
if let Some(items) = body.get("input").and_then(Value::as_array) {
for item in items {
if item.get("type").and_then(Value::as_str) == Some(ty) {
out.push(item);
}
}
}
}
out
}

fn tool_names(body: &Value) -> Vec<String> {
body.get("tools")
.and_then(Value::as_array)
@@ -107,18 +87,23 @@ async fn custom_tool_unknown_returns_custom_output_error() -> Result<()> {
let call_id = "custom-unsupported";
let tool_name = "unsupported_tool";

let responses = vec![
mount_sse_once(
&server,
sse(vec![
ev_response_created("resp-1"),
ev_custom_tool_call(call_id, tool_name, "\"payload\""),
ev_completed("resp-1"),
]),
)
.await;
let mock = mount_sse_once(
&server,
sse(vec![
ev_assistant_message("msg-1", "done"),
ev_completed("resp-2"),
]),
];
mount_sse_sequence(&server, responses).await;
)
.await;

submit_turn(
&test,
@@ -128,13 +113,7 @@ async fn custom_tool_unknown_returns_custom_output_error() -> Result<()> {
)
.await?;

let requests = server.received_requests().await.expect("recorded requests");
let bodies = request_bodies(&requests)?;
let custom_items = collect_output_items(&bodies, "custom_tool_call_output");
assert_eq!(custom_items.len(), 1, "expected single custom tool output");
let item = custom_items[0];
assert_eq!(item.get("call_id").and_then(Value::as_str), Some(call_id));

let item = mock.single_request().custom_tool_call_output(call_id);
let output = item
.get("output")
.and_then(Value::as_str)
@@ -170,7 +149,8 @@ async fn shell_escalated_permissions_rejected_then_ok() -> Result<()> {
"timeout_ms": 1_000,
});

let responses = vec![
mount_sse_once(
&server,
sse(vec![
ev_response_created("resp-1"),
ev_function_call(
@@ -180,6 +160,10 @@ async fn shell_escalated_permissions_rejected_then_ok() -> Result<()> {
),
ev_completed("resp-1"),
]),
)
.await;
let second_mock = mount_sse_once(
&server,
sse(vec![
ev_response_created("resp-2"),
ev_function_call(
@@ -189,12 +173,16 @@ async fn shell_escalated_permissions_rejected_then_ok() -> Result<()> {
),
ev_completed("resp-2"),
]),
)
.await;
let third_mock = mount_sse_once(
&server,
sse(vec![
ev_assistant_message("msg-1", "done"),
ev_completed("resp-3"),
]),
];
mount_sse_sequence(&server, responses).await;
)
.await;

submit_turn(
&test,
@@ -204,46 +192,23 @@ async fn shell_escalated_permissions_rejected_then_ok() -> Result<()> {
)
.await?;

let requests = server.received_requests().await.expect("recorded requests");
let bodies = request_bodies(&requests)?;
let function_outputs = collect_output_items(&bodies, "function_call_output");
for item in &function_outputs {
let call_id = item
.get("call_id")
.and_then(Value::as_str)
.unwrap_or_default();
assert!(
call_id == call_id_blocked || call_id == call_id_success,
"unexpected call id {call_id}"
);
}

let policy = AskForApproval::Never;
let expected_message = format!(
"approval policy is {policy:?}; reject command — you should not ask for escalated permissions if the approval policy is {policy:?}"
);

let blocked_outputs: Vec<&Value> = function_outputs
.iter()
.filter(|item| item.get("call_id").and_then(Value::as_str) == Some(call_id_blocked))
.copied()
.collect();
assert!(
!blocked_outputs.is_empty(),
"expected at least one rejection output for {call_id_blocked}"
let blocked_item = second_mock
.single_request()
.function_call_output(call_id_blocked);
assert_eq!(
blocked_item.get("output").and_then(Value::as_str),
Some(expected_message.as_str()),
"unexpected rejection message"
);
for item in blocked_outputs {
assert_eq!(
item.get("output").and_then(Value::as_str),
Some(expected_message.as_str()),
"unexpected rejection message"
);
}

let success_item = function_outputs
.iter()
.find(|item| item.get("call_id").and_then(Value::as_str) == Some(call_id_success))
.expect("success output present");
let success_item = third_mock
.single_request()
.function_call_output(call_id_success);
let output_json: Value = serde_json::from_str(
success_item
.get("output")
@@ -282,18 +247,23 @@ async fn local_shell_missing_ids_maps_to_function_output_error() -> Result<()> {
}
});

let responses = vec![
mount_sse_once(
&server,
sse(vec![
ev_response_created("resp-1"),
local_shell_event,
ev_completed("resp-1"),
]),
)
.await;
let second_mock = mount_sse_once(
&server,
sse(vec![
ev_assistant_message("msg-1", "done"),
ev_completed("resp-2"),
]),
];
mount_sse_sequence(&server, responses).await;
)
.await;

submit_turn(
&test,
@@ -303,15 +273,7 @@ async fn local_shell_missing_ids_maps_to_function_output_error() -> Result<()> {
)
.await?;

let requests = server.received_requests().await.expect("recorded requests");
let bodies = request_bodies(&requests)?;
let function_outputs = collect_output_items(&bodies, "function_call_output");
assert_eq!(
function_outputs.len(),
1,
"expected a single function output"
);
let item = function_outputs[0];
let item = second_mock.single_request().function_call_output("");
assert_eq!(item.get("call_id").and_then(Value::as_str), Some(""));
assert_eq!(
item.get("output").and_then(Value::as_str),
@@ -329,10 +291,14 @@ async fn collect_tools(use_unified_exec: bool) -> Result<Vec<String>> {
ev_assistant_message("msg-1", "done"),
ev_completed("resp-1"),
])];
mount_sse_sequence(&server, responses).await;
let mock = mount_sse_sequence(&server, responses).await;

let mut builder = test_codex().with_config(move |config| {
config.use_experimental_unified_exec_tool = use_unified_exec;
if use_unified_exec {
config.features.enable(Feature::UnifiedExec);
} else {
config.features.disable(Feature::UnifiedExec);
}
});
let test = builder.build(&server).await?;

@@ -344,15 +310,8 @@ async fn collect_tools(use_unified_exec: bool) -> Result<Vec<String>> {
)
.await?;

let requests = server.received_requests().await.expect("recorded requests");
assert_eq!(
requests.len(),
1,
"expected a single request for tools collection"
);
let bodies = request_bodies(&requests)?;
let first_body = bodies.first().expect("request body present");
Ok(tool_names(first_body))
let first_body = mock.single_request().body_json();
Ok(tool_names(&first_body))
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -392,18 +351,23 @@ async fn shell_timeout_includes_timeout_prefix_and_metadata() -> Result<()> {
"timeout_ms": timeout_ms,
});

let responses = vec![
mount_sse_once(
&server,
sse(vec![
ev_response_created("resp-1"),
ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
ev_completed("resp-1"),
]),
)
.await;
let second_mock = mount_sse_once(
&server,
sse(vec![
ev_assistant_message("msg-1", "done"),
ev_completed("resp-2"),
]),
];
mount_sse_sequence(&server, responses).await;
)
.await;

submit_turn(
&test,
@@ -413,13 +377,7 @@ async fn shell_timeout_includes_timeout_prefix_and_metadata() -> Result<()> {
)
.await?;

let requests = server.received_requests().await.expect("recorded requests");
let bodies = request_bodies(&requests)?;
let function_outputs = collect_output_items(&bodies, "function_call_output");
let timeout_item = function_outputs
.iter()
.find(|item| item.get("call_id").and_then(Value::as_str) == Some(call_id))
.expect("timeout output present");
let timeout_item = second_mock.single_request().function_call_output(call_id);

let output_str = timeout_item
.get("output")
@@ -437,19 +395,9 @@ async fn shell_timeout_includes_timeout_prefix_and_metadata() -> Result<()> {
);

let stdout = output_json["output"].as_str().unwrap_or_default();
let timeout_pattern = r"(?s)^Total output lines: \d+

command timed out after (?P<ms>\d+) milliseconds
line
.*$";
let captures = assert_regex_match(timeout_pattern, stdout);
let duration_ms = captures
.name("ms")
.and_then(|m| m.as_str().parse::<u64>().ok())
.unwrap_or_default();
assert!(
duration_ms >= timeout_ms,
"expected duration >= configured timeout, got {duration_ms} (timeout {timeout_ms})"
stdout.contains("command timed out"),
"timeout output missing `command timed out`: {stdout}"
);
} else {
// Fallback: accept the signal classification path to deflake the test.
@@ -460,80 +408,6 @@ line
Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_sandbox_denied_truncates_error_output() -> Result<()> {
skip_if_no_network!(Ok(()));

let server = start_mock_server().await;
let mut builder = test_codex();
let test = builder.build(&server).await?;

let call_id = "shell-denied";
let long_line = "this is a long stderr line that should trigger truncation 0123456789abcdefghijklmnopqrstuvwxyz";
let script = format!(
"for i in $(seq 1 500); do >&2 echo '{long_line}'; done; cat <<'EOF' > denied.txt\ncontent\nEOF",
);
let args = json!({
"command": ["/bin/sh", "-c", script],
"timeout_ms": 1_000,
});

let responses = vec![
sse(vec![
ev_response_created("resp-1"),
ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
ev_completed("resp-1"),
]),
sse(vec![
ev_assistant_message("msg-1", "done"),
ev_completed("resp-2"),
]),
];
mount_sse_sequence(&server, responses).await;

submit_turn(
&test,
"attempt to write in read-only sandbox",
AskForApproval::Never,
SandboxPolicy::ReadOnly,
)
.await?;

let requests = server.received_requests().await.expect("recorded requests");
let bodies = request_bodies(&requests)?;
let function_outputs = collect_output_items(&bodies, "function_call_output");
let denied_item = function_outputs
.iter()
.find(|item| item.get("call_id").and_then(Value::as_str) == Some(call_id))
.expect("denied output present");

let output = denied_item
.get("output")
.and_then(Value::as_str)
.expect("denied output string");

let sandbox_pattern = r#"(?s)^Exit code: -?\d+
Wall time: [0-9]+(?:\.[0-9]+)? seconds
Total output lines: \d+
Output:

failed in sandbox: .*?(?:Operation not permitted|Permission denied|Read-only file system).*?
\[\.{3} omitted \d+ of \d+ lines \.{3}\]
.*this is a long stderr line that should trigger truncation 0123456789abcdefghijklmnopqrstuvwxyz.*
\n?$"#;
let sandbox_regex = Regex::new(sandbox_pattern)?;
if !sandbox_regex.is_match(output) {
let fallback_pattern = r#"(?s)^Total output lines: \d+

failed in sandbox: this is a long stderr line that should trigger truncation 0123456789abcdefghijklmnopqrstuvwxyz
.*this is a long stderr line that should trigger truncation 0123456789abcdefghijklmnopqrstuvwxyz.*
.*(?:Operation not permitted|Permission denied|Read-only file system).*$"#;
assert_regex_match(fallback_pattern, output);
}

Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_spawn_failure_truncates_exec_error() -> Result<()> {
skip_if_no_network!(Ok(()));
@@ -558,18 +432,23 @@ async fn shell_spawn_failure_truncates_exec_error() -> Result<()> {
"timeout_ms": 1_000,
});

let responses = vec![
mount_sse_once(
&server,
sse(vec![
ev_response_created("resp-1"),
ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
ev_completed("resp-1"),
]),
)
.await;
let second_mock = mount_sse_once(
&server,
sse(vec![
ev_assistant_message("msg-1", "done"),
ev_completed("resp-2"),
]),
];
mount_sse_sequence(&server, responses).await;
)
.await;

submit_turn(
&test,
@@ -579,13 +458,7 @@ async fn shell_spawn_failure_truncates_exec_error() -> Result<()> {
)
.await?;

let requests = server.received_requests().await.expect("recorded requests");
let bodies = request_bodies(&requests)?;
let function_outputs = collect_output_items(&bodies, "function_call_output");
let failure_item = function_outputs
.iter()
.find(|item| item.get("call_id").and_then(Value::as_str) == Some(call_id))
.expect("spawn failure output present");
let failure_item = second_mock.single_request().function_call_output(call_id);

let output = failure_item
.get("output")

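Throughout this file, the hand-rolled scraping (`request_bodies` plus `collect_output_items`) is replaced by per-mock accessors such as `second_mock.single_request().function_call_output(call_id)`, which pins each assertion to the exact request that followed a given mounted response. A rough sketch of what such an accessor might do internally, assuming the recorded body is the JSON shape asserted above; the struct and method below are illustrative, not the actual core_test_support implementation:

```rust
use serde_json::Value;

/// Illustrative stand-in for a recorded mock request body.
struct RecordedRequest {
    body: Value,
}

impl RecordedRequest {
    /// Find the `function_call_output` input item with the given call id,
    /// panicking (as a test helper would) when it is missing.
    fn function_call_output(&self, call_id: &str) -> Value {
        self.body
            .get("input")
            .and_then(Value::as_array)
            .and_then(|items| {
                items.iter().find(|item| {
                    item.get("type").and_then(Value::as_str) == Some("function_call_output")
                        && item.get("call_id").and_then(Value::as_str) == Some(call_id)
                })
            })
            .cloned()
            .expect("function_call_output item present")
    }
}

fn main() {
    let request = RecordedRequest {
        body: serde_json::json!({
            "input": [
                { "type": "function_call_output", "call_id": "call-1", "output": "ok" }
            ]
        }),
    };
    let item = request.function_call_output("call-1");
    assert_eq!(item.get("output").and_then(Value::as_str), Some("ok"));
}
```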
@@ -3,6 +3,7 @@
use std::collections::HashMap;

use anyhow::Result;
use codex_core::features::Feature;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
@@ -42,7 +43,13 @@ fn collect_tool_outputs(bodies: &[Value]) -> Result<HashMap<String, Value>> {
if let Some(call_id) = item.get("call_id").and_then(Value::as_str) {
let content = extract_output_text(item)
.ok_or_else(|| anyhow::anyhow!("missing tool output content"))?;
let parsed: Value = serde_json::from_str(content)?;
let trimmed = content.trim();
if trimmed.is_empty() {
continue;
}
let parsed: Value = serde_json::from_str(trimmed).map_err(|err| {
anyhow::anyhow!("failed to parse tool output content {trimmed:?}: {err}")
})?;
outputs.insert(call_id.to_string(), parsed);
}
}
@@ -59,7 +66,7 @@ async fn unified_exec_reuses_session_via_stdin() -> Result<()> {
let server = start_mock_server().await;

let mut builder = test_codex().with_config(|config| {
config.use_experimental_unified_exec_tool = true;
config.features.enable(Feature::UnifiedExec);
});
let TestCodex {
codex,
@@ -168,7 +175,7 @@ async fn unified_exec_reuses_session_via_stdin() -> Result<()> {
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn unified_exec_timeout_and_followup_poll() -> Result<()> {
async fn unified_exec_streams_after_lagged_output() -> Result<()> {
skip_if_no_network!(Ok(()));
skip_if_sandbox!(Ok(()));

@@ -176,6 +183,132 @@ async fn unified_exec_timeout_and_followup_poll() -> Result<()> {

let mut builder = test_codex().with_config(|config| {
config.use_experimental_unified_exec_tool = true;
config.features.enable(Feature::UnifiedExec);
});
let TestCodex {
codex,
cwd,
session_configured,
..
} = builder.build(&server).await?;

let script = r#"python3 - <<'PY'
import sys
import time

chunk = b'x' * (1 << 20)
for _ in range(4):
sys.stdout.buffer.write(chunk)
sys.stdout.flush()

time.sleep(0.2)
for _ in range(5):
sys.stdout.write("TAIL-MARKER\n")
sys.stdout.flush()
time.sleep(0.05)

time.sleep(0.2)
PY
"#;

let first_call_id = "uexec-lag-start";
let first_args = serde_json::json!({
"input": ["/bin/sh", "-c", script],
"timeout_ms": 25,
});

let second_call_id = "uexec-lag-poll";
let second_args = serde_json::json!({
"input": Vec::<String>::new(),
"session_id": "0",
"timeout_ms": 2_000,
});

let responses = vec![
sse(vec![
ev_response_created("resp-1"),
ev_function_call(
first_call_id,
"unified_exec",
&serde_json::to_string(&first_args)?,
),
ev_completed("resp-1"),
]),
sse(vec![
ev_response_created("resp-2"),
ev_function_call(
second_call_id,
"unified_exec",
&serde_json::to_string(&second_args)?,
),
ev_completed("resp-2"),
]),
sse(vec![
ev_assistant_message("msg-1", "lag handled"),
ev_completed("resp-3"),
]),
];
mount_sse_sequence(&server, responses).await;

let session_model = session_configured.model.clone();

codex
.submit(Op::UserTurn {
items: vec![InputItem::Text {
text: "exercise lag handling".into(),
}],
final_output_json_schema: None,
cwd: cwd.path().to_path_buf(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::DangerFullAccess,
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
})
.await?;

wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;

let requests = server.received_requests().await.expect("recorded requests");
assert!(!requests.is_empty(), "expected at least one POST request");

let bodies = requests
.iter()
.map(|req| req.body_json::<Value>().expect("request json"))
.collect::<Vec<_>>();

let outputs = collect_tool_outputs(&bodies)?;

let start_output = outputs
.get(first_call_id)
.expect("missing initial unified_exec output");
let session_id = start_output["session_id"].as_str().unwrap_or_default();
assert!(
!session_id.is_empty(),
"expected session id from initial unified_exec response"
);

let poll_output = outputs
.get(second_call_id)
.expect("missing poll unified_exec output");
let poll_text = poll_output["output"].as_str().unwrap_or_default();
assert!(
poll_text.contains("TAIL-MARKER"),
"expected poll output to contain tail marker, got {poll_text:?}"
);

Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn unified_exec_timeout_and_followup_poll() -> Result<()> {
skip_if_no_network!(Ok(()));
skip_if_sandbox!(Ok(()));

let server = start_mock_server().await;

let mut builder = test_codex().with_config(|config| {
config.features.enable(Feature::UnifiedExec);
});
let TestCodex {
codex,

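The new `unified_exec_streams_after_lagged_output` test floods a session with roughly 4 MiB of output and then checks that a follow-up poll still sees the trailing TAIL-MARKER lines. One plausible failure mode it guards against is a bounded internal channel dropping a slow reader; with tokio's broadcast channel, a resilient reader resynchronizes on `Lagged` instead of giving up. This is an assumption about the underlying mechanism, shown purely as an illustration of the pattern:

```rust
use tokio::sync::broadcast::{self, error::RecvError};

#[tokio::main]
async fn main() {
    let (tx, mut rx) = broadcast::channel::<String>(8);

    // Producer overruns the 8-slot buffer, then emits the tail marker.
    for i in 0..32 {
        let _ = tx.send(format!("chunk-{i}"));
    }
    let _ = tx.send("TAIL-MARKER".to_string());
    drop(tx);

    let mut saw_marker = false;
    loop {
        match rx.recv().await {
            Ok(line) => saw_marker |= line.contains("TAIL-MARKER"),
            // Some messages were skipped; keep reading from the new position.
            Err(RecvError::Lagged(skipped)) => eprintln!("lagged, skipped {skipped}"),
            Err(RecvError::Closed) => break,
        }
    }
    assert!(saw_marker, "tail marker should survive the lag");
}
```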
@@ -5,6 +5,7 @@ use std::os::unix::fs::PermissionsExt;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use core_test_support::fs_wait;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::TestCodex;
@@ -17,8 +18,7 @@ use responses::ev_assistant_message;
use responses::ev_completed;
use responses::sse;
use responses::start_mock_server;
use tokio::time::Duration;
use tokio::time::sleep;
use std::time::Duration;

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn summarize_context_three_requests_and_instructions() -> anyhow::Result<()> {
@@ -60,14 +60,7 @@ echo -n "${@: -1}" > $(dirname "${0}")/notify.txt"#,
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

// We fork the notify script, so we need to wait for it to write to the file.
for _ in 0..100u32 {
if notify_file.exists() {
break;
}
sleep(Duration::from_millis(100)).await;
}

assert!(notify_file.exists());
fs_wait::wait_for_path_exists(&notify_file, Duration::from_secs(5)).await?;

Ok(())
}

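The ad-hoc 100-iteration poll loop above is replaced by `fs_wait::wait_for_path_exists` with an explicit deadline. A minimal sketch of how such a helper can be written, assuming a simple poll-until-deadline contract; the real core_test_support implementation may differ in interval and error handling:

```rust
use std::path::Path;
use std::time::Duration;
use tokio::time::{Instant, sleep};

/// Poll until `path` exists or `timeout` elapses.
async fn wait_for_path_exists(path: &Path, timeout: Duration) -> anyhow::Result<()> {
    let deadline = Instant::now() + timeout;
    while Instant::now() < deadline {
        if path.exists() {
            return Ok(());
        }
        sleep(Duration::from_millis(25)).await;
    }
    anyhow::bail!("path {} did not appear within {timeout:?}", path.display())
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Usage example: the temp dir already exists, so this returns immediately.
    wait_for_path_exists(&std::env::temp_dir(), Duration::from_secs(1)).await
}
```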
@@ -22,16 +22,6 @@ use core_test_support::wait_for_event;
use serde_json::Value;
use wiremock::matchers::any;

fn function_call_output(body: &Value) -> Option<&Value> {
body.get("input")
.and_then(Value::as_array)
.and_then(|items| {
items.iter().find(|item| {
item.get("type").and_then(Value::as_str) == Some("function_call_output")
})
})
}

fn find_image_message(body: &Value) -> Option<&Value> {
body.get("input")
.and_then(Value::as_array)
@@ -59,12 +49,6 @@ fn extract_output_text(item: &Value) -> Option<&str> {
})
}

fn find_request_with_function_call_output(requests: &[Value]) -> Option<&Value> {
requests
.iter()
.find(|body| function_call_output(body).is_some())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));
@@ -100,7 +84,7 @@ async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
ev_assistant_message("msg-1", "done"),
ev_completed("resp-2"),
]);
responses::mount_sse_once_match(&server, any(), second_response).await;
let mock = responses::mount_sse_once_match(&server, any(), second_response).await;

let session_model = session_configured.model.clone();

@@ -137,25 +121,14 @@ async fn view_image_tool_attaches_local_image() -> anyhow::Result<()> {
assert_eq!(tool_event.call_id, call_id);
assert_eq!(tool_event.path, abs_path);

let requests = server.received_requests().await.expect("recorded requests");
assert!(
requests.len() >= 2,
"expected at least two POST requests, got {}",
requests.len()
);
let request_bodies = requests
.iter()
.map(|req| req.body_json::<Value>().expect("request json"))
.collect::<Vec<_>>();
let body = mock.single_request().body_json();
let output_item = mock.single_request().function_call_output(call_id);

let body_with_tool_output = find_request_with_function_call_output(&request_bodies)
.expect("function_call_output item not found in requests");
let output_item = function_call_output(body_with_tool_output).expect("tool output item");
let output_text = extract_output_text(output_item).expect("output text present");
let output_text = extract_output_text(&output_item).expect("output text present");
assert_eq!(output_text, "attached local image path");

let image_message = find_image_message(body_with_tool_output)
.expect("pending input image message not included in request");
let image_message =
find_image_message(&body).expect("pending input image message not included in request");
let image_url = image_message
.get("content")
.and_then(Value::as_array)
@@ -210,7 +183,7 @@ async fn view_image_tool_errors_when_path_is_directory() -> anyhow::Result<()> {
ev_assistant_message("msg-1", "done"),
ev_completed("resp-2"),
]);
responses::mount_sse_once_match(&server, any(), second_response).await;
let mock = responses::mount_sse_once_match(&server, any(), second_response).await;

let session_model = session_configured.model.clone();

@@ -231,26 +204,14 @@ async fn view_image_tool_errors_when_path_is_directory() -> anyhow::Result<()> {

wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;

let requests = server.received_requests().await.expect("recorded requests");
assert!(
requests.len() >= 2,
"expected at least two POST requests, got {}",
requests.len()
);
let request_bodies = requests
.iter()
.map(|req| req.body_json::<Value>().expect("request json"))
.collect::<Vec<_>>();

let body_with_tool_output = find_request_with_function_call_output(&request_bodies)
.expect("function_call_output item not found in requests");
let output_item = function_call_output(body_with_tool_output).expect("tool output item");
let output_text = extract_output_text(output_item).expect("output text present");
let body_with_tool_output = mock.single_request().body_json();
let output_item = mock.single_request().function_call_output(call_id);
let output_text = extract_output_text(&output_item).expect("output text present");
let expected_message = format!("image path `{}` is not a file", abs_path.display());
assert_eq!(output_text, expected_message);

assert!(
find_image_message(body_with_tool_output).is_none(),
find_image_message(&body_with_tool_output).is_none(),
"directory path should not produce an input_image message"
);

@@ -287,7 +248,7 @@ async fn view_image_tool_errors_when_file_missing() -> anyhow::Result<()> {
ev_assistant_message("msg-1", "done"),
ev_completed("resp-2"),
]);
responses::mount_sse_once_match(&server, any(), second_response).await;
let mock = responses::mount_sse_once_match(&server, any(), second_response).await;

let session_model = session_configured.model.clone();

@@ -308,21 +269,9 @@ async fn view_image_tool_errors_when_file_missing() -> anyhow::Result<()> {

wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;

let requests = server.received_requests().await.expect("recorded requests");
assert!(
requests.len() >= 2,
"expected at least two POST requests, got {}",
requests.len()
);
let request_bodies = requests
.iter()
.map(|req| req.body_json::<Value>().expect("request json"))
.collect::<Vec<_>>();

let body_with_tool_output = find_request_with_function_call_output(&request_bodies)
.expect("function_call_output item not found in requests");
let output_item = function_call_output(body_with_tool_output).expect("tool output item");
let output_text = extract_output_text(output_item).expect("output text present");
let body_with_tool_output = mock.single_request().body_json();
let output_item = mock.single_request().function_call_output(call_id);
let output_text = extract_output_text(&output_item).expect("output text present");
let expected_prefix = format!("unable to locate image at `{}`:", abs_path.display());
assert!(
output_text.starts_with(&expected_prefix),
@@ -330,7 +279,7 @@ async fn view_image_tool_errors_when_file_missing() -> anyhow::Result<()> {
);

assert!(
find_image_message(body_with_tool_output).is_none(),
find_image_message(&body_with_tool_output).is_none(),
"missing file should not produce an input_image message"
);

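These view_image assertions all walk the same request-body shape: an `input` array whose items are typed messages, with image attachments carried as `input_image` content parts. A small, self-contained version of that JSON navigation, mirroring the file's `find_image_message` helper; the exact field names follow the assertions above and may differ in detail from the wire format:

```rust
use serde_json::{Value, json};

/// Find the first input message that carries an `input_image` content part.
fn find_image_message(body: &Value) -> Option<&Value> {
    body.get("input")?.as_array()?.iter().find(|item| {
        item.get("content")
            .and_then(Value::as_array)
            .is_some_and(|parts| {
                parts
                    .iter()
                    .any(|part| part.get("type").and_then(Value::as_str) == Some("input_image"))
            })
    })
}

fn main() {
    let body = json!({
        "input": [{
            "role": "user",
            "content": [{ "type": "input_image", "image_url": "data:image/png;base64,..." }]
        }]
    });
    let message = find_image_message(&body).expect("image message present");
    let url = message["content"][0]["image_url"].as_str().unwrap();
    assert!(url.starts_with("data:image/png"));
}
```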
@@ -1,6 +1,6 @@
# Codex MCP Interface [experimental]
# Codex MCP Server Interface [experimental]

This document describes Codex’s experimental MCP interface: a JSON‑RPC API that runs over the Model Context Protocol (MCP) transport to control a local Codex engine.
This document describes Codex’s experimental MCP server interface: a JSON‑RPC API that runs over the Model Context Protocol (MCP) transport to control a local Codex engine.

- Status: experimental and subject to change without notice
- Server binary: `codex mcp-server` (or `codex-mcp-server`)

@@ -77,7 +77,7 @@ pub struct Cli {

/// Initial instructions for the agent. If not provided as an argument (or
/// if `-` is used), instructions are read from stdin.
#[arg(value_name = "PROMPT")]
#[arg(value_name = "PROMPT", value_hint = clap::ValueHint::Other)]
pub prompt: Option<String>,
}

@@ -99,7 +99,7 @@ pub struct ResumeArgs {
pub last: bool,

/// Prompt to send after resuming the session. If `-` is used, read from stdin.
#[arg(value_name = "PROMPT")]
#[arg(value_name = "PROMPT", value_hint = clap::ValueHint::Other)]
pub prompt: Option<String>,
}

@@ -48,7 +48,7 @@ use codex_core::default_client::set_default_originator;
use codex_core::find_conversation_path_by_id_str;

pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
if let Err(err) = set_default_originator("codex_exec") {
if let Err(err) = set_default_originator("codex_exec".to_string()) {
tracing::warn!(?err, "Failed to set codex exec originator override {err:?}");
}

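`run_main` now passes an owned `String` to `set_default_originator`, and the new exec tests below exercise both the default `codex_exec` value and the `CODEX_INTERNAL_ORIGINATOR_OVERRIDE` escape hatch. A sketch of the resolution order implied by those tests; this function is illustrative only, since the real `set_default_originator` lives in `codex_core::default_client`:

```rust
/// Resolve the Originator header value: an internal override wins,
/// otherwise fall back to the caller-provided default.
fn resolve_originator(default: String) -> String {
    std::env::var("CODEX_INTERNAL_ORIGINATOR_OVERRIDE").unwrap_or(default)
}

fn main() {
    // With the env var unset this prints "codex_exec";
    // CODEX_INTERNAL_ORIGINATOR_OVERRIDE=custom would print "custom".
    println!("{}", resolve_originator("codex_exec".to_string()));
}
```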
@@ -1,6 +1,7 @@
// Aggregates all former standalone integration tests as modules.
mod apply_patch;
mod auth_env;
mod originator;
mod output_schema;
mod resume;
mod sandbox;

codex-rs/exec/tests/suite/originator.rs (new file, 52 lines)
@@ -0,0 +1,52 @@
#![cfg(not(target_os = "windows"))]
#![allow(clippy::expect_used, clippy::unwrap_used)]

use core_test_support::responses;
use core_test_support::test_codex_exec::test_codex_exec;
use wiremock::matchers::header;

/// Verify that `codex-exec` sends the default `codex_exec` Originator header
/// on its API requests.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn send_codex_exec_originator() -> anyhow::Result<()> {
let test = test_codex_exec();

let server = responses::start_mock_server().await;
let body = responses::sse(vec![
responses::ev_response_created("response_1"),
responses::ev_assistant_message("response_1", "Hello, world!"),
responses::ev_completed("response_1"),
]);
responses::mount_sse_once_match(&server, header("Originator", "codex_exec"), body).await;

test.cmd_with_server(&server)
.arg("--skip-git-repo-check")
.arg("tell me something")
.assert()
.code(0);

Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn supports_originator_override() -> anyhow::Result<()> {
let test = test_codex_exec();

let server = responses::start_mock_server().await;
let body = responses::sse(vec![
responses::ev_response_created("response_1"),
responses::ev_assistant_message("response_1", "Hello, world!"),
responses::ev_completed("response_1"),
]);
responses::mount_sse_once_match(&server, header("Originator", "codex_exec_override"), body)
.await;

test.cmd_with_server(&server)
.env("CODEX_INTERNAL_ORIGINATOR_OVERRIDE", "codex_exec_override")
.arg("--skip-git-repo-check")
.arg("tell me something")
.assert()
.code(0);

Ok(())
}
@@ -28,7 +28,7 @@ async fn exec_includes_output_schema_in_request() -> anyhow::Result<()> {
responses::ev_assistant_message("m1", "fixture hello"),
responses::ev_completed("resp1"),
]);
responses::mount_sse_once_match(&server, any(), body).await;
let response_mock = responses::mount_sse_once_match(&server, any(), body).await;

test.cmd_with_server(&server)
.arg("--skip-git-repo-check")
@@ -43,12 +43,8 @@ async fn exec_includes_output_schema_in_request() -> anyhow::Result<()> {
.assert()
.success();

let requests = server
.received_requests()
.await
.expect("failed to capture requests");
assert_eq!(requests.len(), 1, "expected exactly one request");
let payload: Value = serde_json::from_slice(&requests[0].body)?;
let request = response_mock.single_request();
let payload: Value = request.body_json();
let text = payload.get("text").expect("request missing text field");
let format = text
.get("format")

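After this refactor, the schema assertion reads one recorded body via `response_mock.single_request().body_json()` and digs into `text.format`. When chasing nested fields like this, serde_json's `pointer` method is a compact alternative to chained `get` calls; this is a general technique, not a claim about the test helper's internals:

```rust
use serde_json::json;

fn main() {
    let payload = json!({
        "text": { "format": { "type": "json_schema", "name": "out" } }
    });

    // "/text/format/type" walks the same path as payload["text"]["format"]["type"].
    let format_type = payload
        .pointer("/text/format/type")
        .and_then(|v| v.as_str())
        .expect("request missing text.format.type");
    assert_eq!(format_type, "json_schema");
}
```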
@@ -148,21 +148,15 @@ impl OtelEventManager {
response
}

pub async fn log_sse_event<Next, Fut, E>(
pub fn log_sse_event<E>(
&self,
next: Next,
) -> Result<Option<Result<StreamEvent, StreamError<E>>>, Elapsed>
where
Next: FnOnce() -> Fut,
Fut: Future<Output = Result<Option<Result<StreamEvent, StreamError<E>>>, Elapsed>>,
response: &Result<Option<Result<StreamEvent, StreamError<E>>>, Elapsed>,
duration: Duration,
) where
E: Display,
{
let start = std::time::Instant::now();
let response = next().await;
let duration = start.elapsed();

match response {
Ok(Some(Ok(ref sse))) => {
Ok(Some(Ok(sse))) => {
if sse.data.trim() == "[DONE]" {
self.sse_event(&sse.event, duration);
} else {
@@ -191,7 +185,7 @@ impl OtelEventManager {
}
}
}
Ok(Some(Err(ref error))) => {
Ok(Some(Err(error))) => {
self.sse_event_failed(None, duration, error);
}
Ok(None) => {}
@@ -199,8 +193,6 @@ impl OtelEventManager {
self.sse_event_failed(None, duration, &"idle timeout waiting for SSE");
}
}

response
}

fn sse_event(&self, kind: &str, duration: Duration) {

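`log_sse_event` used to await the next-event future itself; after this change it is a synchronous logger that receives the already-awaited result plus a measured duration, so the caller owns the timing. A sketch of the new call shape under that reading of the signature, with simplified stand-in types rather than the verbatim call site:

```rust
use std::time::Instant;

// Simplified stand-ins for the real stream types in this sketch.
type StreamEvent = String;
type IdleTimeout = ();

fn log_sse_event(
    response: &Result<Option<Result<StreamEvent, std::io::Error>>, IdleTimeout>,
    duration: std::time::Duration,
) {
    match response {
        Ok(Some(Ok(event))) => println!("sse event {event:?} after {duration:?}"),
        Ok(Some(Err(error))) => println!("sse failure {error} after {duration:?}"),
        Ok(None) => {}
        Err(()) => println!("idle timeout after {duration:?}"),
    }
}

fn main() {
    // The caller measures around its own await; here the "await" is faked.
    let start = Instant::now();
    let response: Result<Option<Result<StreamEvent, std::io::Error>>, IdleTimeout> =
        Ok(Some(Ok("[DONE]".to_string())));
    log_sse_event(&response, start.elapsed());
}
```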
@@ -548,10 +548,15 @@ pub struct TaskStartedEvent {

#[derive(Debug, Clone, Deserialize, Serialize, Default, TS)]
pub struct TokenUsage {
#[ts(type = "number")]
pub input_tokens: u64,
#[ts(type = "number")]
pub cached_input_tokens: u64,
#[ts(type = "number")]
pub output_tokens: u64,
#[ts(type = "number")]
pub reasoning_output_tokens: u64,
#[ts(type = "number")]
pub total_tokens: u64,
}

@@ -559,6 +564,7 @@ pub struct TokenUsage {
pub struct TokenUsageInfo {
pub total_token_usage: TokenUsage,
pub last_token_usage: TokenUsage,
#[ts(type = "number | null")]
pub model_context_window: Option<u64>,
}

@@ -634,8 +640,10 @@ pub struct RateLimitWindow {
/// Percentage (0-100) of the window that has been consumed.
pub used_percent: f64,
/// Rolling window duration, in minutes.
#[ts(type = "number | null")]
pub window_minutes: Option<u64>,
/// Seconds until the window resets.
#[ts(type = "number | null")]
pub resets_in_seconds: Option<u64>,
}

@@ -1195,6 +1203,11 @@ pub struct StreamErrorEvent {
pub message: String,
}

#[derive(Debug, Clone, Deserialize, Serialize, TS)]
pub struct StreamInfoEvent {
pub message: String,
}

#[derive(Debug, Clone, Deserialize, Serialize, TS)]
pub struct PatchApplyBeginEvent {
/// Identifier so this can be paired with the PatchApplyEnd event.
@@ -1235,6 +1248,30 @@ pub struct GetHistoryEntryResponseEvent {
pub struct McpListToolsResponseEvent {
/// Fully qualified tool name -> tool definition.
pub tools: std::collections::HashMap<String, McpTool>,
/// Authentication status for each configured MCP server.
pub auth_statuses: std::collections::HashMap<String, McpAuthStatus>,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case")]
#[ts(rename_all = "snake_case")]
pub enum McpAuthStatus {
Unsupported,
NotLoggedIn,
BearerToken,
OAuth,
}

impl fmt::Display for McpAuthStatus {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let text = match self {
McpAuthStatus::Unsupported => "Unsupported",
McpAuthStatus::NotLoggedIn => "Not logged in",
McpAuthStatus::BearerToken => "Bearer token",
McpAuthStatus::OAuth => "OAuth",
};
f.write_str(text)
}
}

/// Response payload for `Op::ListCustomPrompts`.

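`McpAuthStatus` derives snake_case serde names for the wire and a human-readable `Display` for UI. Under those derives, the round trip looks like this small check, written against the definitions above:

```rust
use serde::{Deserialize, Serialize};
use std::fmt;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
enum McpAuthStatus {
    Unsupported,
    NotLoggedIn,
    BearerToken,
    OAuth,
}

impl fmt::Display for McpAuthStatus {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            McpAuthStatus::Unsupported => "Unsupported",
            McpAuthStatus::NotLoggedIn => "Not logged in",
            McpAuthStatus::BearerToken => "Bearer token",
            McpAuthStatus::OAuth => "OAuth",
        })
    }
}

fn main() -> serde_json::Result<()> {
    // snake_case on the wire, friendly text for display.
    assert_eq!(
        serde_json::to_string(&McpAuthStatus::NotLoggedIn)?,
        "\"not_logged_in\""
    );
    assert_eq!(McpAuthStatus::BearerToken.to_string(), "Bearer token");
    Ok(())
}
```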
@@ -12,6 +12,7 @@ axum = { workspace = true, default-features = false, features = [
"http1",
"tokio",
] }
codex-protocol = { workspace = true }
keyring = { workspace = true, features = [
"apple-native",
"crypto-rust",

codex-rs/rmcp-client/src/auth_status.rs (new file, 125 lines)
@@ -0,0 +1,125 @@
use std::time::Duration;

use anyhow::Error;
use anyhow::Result;
use codex_protocol::protocol::McpAuthStatus;
use reqwest::Client;
use reqwest::StatusCode;
use reqwest::Url;
use serde::Deserialize;
use tracing::debug;

use crate::OAuthCredentialsStoreMode;
use crate::oauth::has_oauth_tokens;

const DISCOVERY_TIMEOUT: Duration = Duration::from_secs(5);
const OAUTH_DISCOVERY_HEADER: &str = "MCP-Protocol-Version";
const OAUTH_DISCOVERY_VERSION: &str = "2024-11-05";

/// Determine the authentication status for a streamable HTTP MCP server.
pub async fn determine_streamable_http_auth_status(
server_name: &str,
url: &str,
bearer_token_env_var: Option<&str>,
store_mode: OAuthCredentialsStoreMode,
) -> Result<McpAuthStatus> {
if bearer_token_env_var.is_some() {
return Ok(McpAuthStatus::BearerToken);
}

if has_oauth_tokens(server_name, url, store_mode)? {
return Ok(McpAuthStatus::OAuth);
}

match supports_oauth_login(url).await {
Ok(true) => Ok(McpAuthStatus::NotLoggedIn),
Ok(false) => Ok(McpAuthStatus::Unsupported),
Err(error) => {
debug!(
"failed to detect OAuth support for MCP server `{server_name}` at {url}: {error:?}"
);
Ok(McpAuthStatus::Unsupported)
}
}
}

/// Attempt to determine whether a streamable HTTP MCP server advertises OAuth login.
pub async fn supports_oauth_login(url: &str) -> Result<bool> {
let base_url = Url::parse(url)?;
let client = Client::builder().timeout(DISCOVERY_TIMEOUT).build()?;

let mut last_error: Option<Error> = None;
for candidate_path in discovery_paths(base_url.path()) {
let mut discovery_url = base_url.clone();
discovery_url.set_path(&candidate_path);

let response = match client
.get(discovery_url.clone())
.header(OAUTH_DISCOVERY_HEADER, OAUTH_DISCOVERY_VERSION)
.send()
.await
{
Ok(response) => response,
Err(err) => {
last_error = Some(err.into());
continue;
}
};

if response.status() != StatusCode::OK {
continue;
}

let metadata = match response.json::<OAuthDiscoveryMetadata>().await {
Ok(metadata) => metadata,
Err(err) => {
last_error = Some(err.into());
continue;
}
};

if metadata.authorization_endpoint.is_some() && metadata.token_endpoint.is_some() {
return Ok(true);
}
}

if let Some(err) = last_error {
debug!("OAuth discovery requests failed for {url}: {err:?}");
}

Ok(false)
}

#[derive(Debug, Deserialize)]
struct OAuthDiscoveryMetadata {
#[serde(default)]
authorization_endpoint: Option<String>,
#[serde(default)]
token_endpoint: Option<String>,
}

/// Implements RFC 8414 section 3.1 for discovering well-known oauth endpoints.
/// This is a requirement for MCP servers to support OAuth.
/// https://datatracker.ietf.org/doc/html/rfc8414#section-3.1
/// https://github.com/modelcontextprotocol/rust-sdk/blob/main/crates/rmcp/src/transport/auth.rs#L182
fn discovery_paths(base_path: &str) -> Vec<String> {
let trimmed = base_path.trim_start_matches('/').trim_end_matches('/');
let canonical = "/.well-known/oauth-authorization-server".to_string();

if trimmed.is_empty() {
return vec![canonical];
}

let mut candidates = Vec::new();
let mut push_unique = |candidate: String| {
if !candidates.contains(&candidate) {
candidates.push(candidate);
}
};

push_unique(format!("{canonical}/{trimmed}"));
push_unique(format!("/{trimmed}/.well-known/oauth-authorization-server"));
push_unique(canonical);

candidates
}
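`discovery_paths` implements the RFC 8414 path-insertion rule: for a server mounted under a path, the well-known suffix is tried with the path appended, then path-first, then the bare canonical location. A worked example of what the function above yields for a server at `/mcp`:

```rust
fn main() {
    // Expected output of discovery_paths("/mcp") per the definition above.
    let candidates = [
        "/.well-known/oauth-authorization-server/mcp",
        "/mcp/.well-known/oauth-authorization-server",
        "/.well-known/oauth-authorization-server",
    ];
    // Each candidate is probed with GET plus the MCP-Protocol-Version header
    // until one returns 200 with authorization and token endpoints.
    for path in candidates {
        println!("GET {path}");
    }
}
```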
@@ -1,3 +1,4 @@
mod auth_status;
mod find_codex_home;
mod logging_client_handler;
mod oauth;
@@ -5,6 +6,10 @@ mod perform_oauth_login;
mod rmcp_client;
mod utils;

pub use auth_status::determine_streamable_http_auth_status;
pub use auth_status::supports_oauth_login;
pub use codex_protocol::protocol::McpAuthStatus;
pub use oauth::OAuthCredentialsStoreMode;
pub use oauth::StoredOAuthTokens;
pub use oauth::WrappedOAuthTokenResponse;
pub use oauth::delete_oauth_tokens;

@@ -58,6 +58,21 @@ pub struct StoredOAuthTokens {
|
||||
pub token_response: WrappedOAuthTokenResponse,
|
||||
}
|
||||
|
||||
/// Determine where Codex should store and read MCP credentials.
|
||||
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum OAuthCredentialsStoreMode {
|
||||
/// `Keyring` when available; otherwise, `File`.
|
||||
/// Credentials stored in the keyring will only be readable by Codex unless the user explicitly grants access via OS-level keyring access.
|
||||
#[default]
|
||||
Auto,
|
||||
/// CODEX_HOME/.credentials.json
|
||||
/// This file will be readable to Codex and other applications running as the same user.
|
||||
File,
|
||||
/// Keyring when available, otherwise fail.
|
||||
Keyring,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct CredentialStoreError(anyhow::Error);
|
||||
|
||||
@@ -83,15 +98,15 @@ impl fmt::Display for CredentialStoreError {
|
||||
|
||||
impl std::error::Error for CredentialStoreError {}
|
||||
|
||||
trait CredentialStore {
|
||||
trait KeyringStore {
|
||||
fn load(&self, service: &str, account: &str) -> Result<Option<String>, CredentialStoreError>;
|
||||
fn save(&self, service: &str, account: &str, value: &str) -> Result<(), CredentialStoreError>;
|
||||
fn delete(&self, service: &str, account: &str) -> Result<bool, CredentialStoreError>;
|
||||
}
|
||||
|
||||
struct KeyringCredentialStore;
|
||||
struct DefaultKeyringStore;
|
||||
|
||||
impl CredentialStore for KeyringCredentialStore {
|
||||
impl KeyringStore for DefaultKeyringStore {
|
||||
fn load(&self, service: &str, account: &str) -> Result<Option<String>, CredentialStoreError> {
|
||||
let entry = Entry::new(service, account).map_err(CredentialStoreError::new)?;
|
||||
match entry.get_password() {
|
||||
@@ -129,47 +144,93 @@ impl PartialEq for WrappedOAuthTokenResponse {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn load_oauth_tokens(server_name: &str, url: &str) -> Result<Option<StoredOAuthTokens>> {
|
||||
let store = KeyringCredentialStore;
|
||||
load_oauth_tokens_with_store(&store, server_name, url)
|
||||
pub(crate) fn load_oauth_tokens(
|
||||
server_name: &str,
|
||||
url: &str,
|
||||
store_mode: OAuthCredentialsStoreMode,
|
||||
) -> Result<Option<StoredOAuthTokens>> {
|
||||
let keyring_store = DefaultKeyringStore;
|
||||
match store_mode {
|
||||
OAuthCredentialsStoreMode::Auto => {
|
||||
load_oauth_tokens_from_keyring_with_fallback_to_file(&keyring_store, server_name, url)
|
||||
}
|
||||
OAuthCredentialsStoreMode::File => load_oauth_tokens_from_file(server_name, url),
|
||||
OAuthCredentialsStoreMode::Keyring => {
|
||||
load_oauth_tokens_from_keyring(&keyring_store, server_name, url)
|
||||
.with_context(|| "failed to read OAuth tokens from keyring".to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load_oauth_tokens_with_store<C: CredentialStore>(
|
||||
store: &C,
|
||||
pub(crate) fn has_oauth_tokens(
|
||||
server_name: &str,
|
||||
url: &str,
|
||||
store_mode: OAuthCredentialsStoreMode,
|
||||
) -> Result<bool> {
|
||||
Ok(load_oauth_tokens(server_name, url, store_mode)?.is_some())
|
||||
}
|
||||
|
||||
fn load_oauth_tokens_from_keyring_with_fallback_to_file<K: KeyringStore>(
|
||||
keyring_store: &K,
|
||||
server_name: &str,
|
||||
url: &str,
|
||||
) -> Result<Option<StoredOAuthTokens>> {
|
||||
match load_oauth_tokens_from_keyring(keyring_store, server_name, url) {
|
||||
Ok(Some(tokens)) => Ok(Some(tokens)),
|
||||
Ok(None) => load_oauth_tokens_from_file(server_name, url),
|
||||
Err(error) => {
|
||||
warn!("failed to read OAuth tokens from keyring: {error}");
|
||||
load_oauth_tokens_from_file(server_name, url)
|
||||
.with_context(|| format!("failed to read OAuth tokens from keyring: {error}"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load_oauth_tokens_from_keyring<K: KeyringStore>(
|
||||
keyring_store: &K,
|
||||
server_name: &str,
|
||||
url: &str,
|
||||
) -> Result<Option<StoredOAuthTokens>> {
|
||||
let key = compute_store_key(server_name, url)?;
|
||||
match store.load(KEYRING_SERVICE, &key) {
|
||||
match keyring_store.load(KEYRING_SERVICE, &key) {
|
||||
Ok(Some(serialized)) => {
|
||||
let tokens: StoredOAuthTokens = serde_json::from_str(&serialized)
|
||||
.context("failed to deserialize OAuth tokens from keyring")?;
|
||||
Ok(Some(tokens))
|
||||
}
|
||||
Ok(None) => load_oauth_tokens_from_file(server_name, url),
|
||||
Err(error) => {
|
||||
let message = error.message();
|
||||
warn!("failed to read OAuth tokens from keyring: {message}");
|
||||
load_oauth_tokens_from_file(server_name, url)
|
||||
.with_context(|| format!("failed to read OAuth tokens from keyring: {message}"))
|
||||
Ok(None) => Ok(None),
|
||||
Err(error) => Err(error.into_error()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save_oauth_tokens(
|
||||
server_name: &str,
|
||||
tokens: &StoredOAuthTokens,
|
||||
store_mode: OAuthCredentialsStoreMode,
|
||||
) -> Result<()> {
|
||||
let keyring_store = DefaultKeyringStore;
|
||||
match store_mode {
|
||||
OAuthCredentialsStoreMode::Auto => save_oauth_tokens_with_keyring_with_fallback_to_file(
|
||||
&keyring_store,
|
||||
server_name,
|
||||
tokens,
|
||||
),
|
||||
OAuthCredentialsStoreMode::File => save_oauth_tokens_to_file(tokens),
|
||||
OAuthCredentialsStoreMode::Keyring => {
|
||||
save_oauth_tokens_with_keyring(&keyring_store, server_name, tokens)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save_oauth_tokens(server_name: &str, tokens: &StoredOAuthTokens) -> Result<()> {
|
||||
let store = KeyringCredentialStore;
|
||||
save_oauth_tokens_with_store(&store, server_name, tokens)
|
||||
}
|
||||
|
||||
fn save_oauth_tokens_with_store<C: CredentialStore>(
|
||||
store: &C,
|
||||
fn save_oauth_tokens_with_keyring<K: KeyringStore>(
|
||||
keyring_store: &K,
|
||||
server_name: &str,
|
||||
tokens: &StoredOAuthTokens,
|
||||
) -> Result<()> {
|
||||
let serialized = serde_json::to_string(tokens).context("failed to serialize OAuth tokens")?;
|
||||
|
||||
let key = compute_store_key(server_name, &tokens.url)?;
|
||||
match store.save(KEYRING_SERVICE, &key, &serialized) {
|
||||
match keyring_store.save(KEYRING_SERVICE, &key, &serialized) {
|
||||
Ok(()) => {
|
||||
if let Err(error) = delete_oauth_tokens_from_file(&key) {
|
||||
warn!("failed to remove OAuth tokens from fallback storage: {error:?}");
|
||||
@@ -177,31 +238,61 @@ fn save_oauth_tokens_with_store<C: CredentialStore>(
|
||||
Ok(())
|
||||
}
|
||||
Err(error) => {
|
||||
let message = error.message();
|
||||
warn!("failed to write OAuth tokens to keyring: {message}");
|
||||
let message = format!(
|
||||
"failed to write OAuth tokens to keyring: {}",
|
||||
error.message()
|
||||
);
|
||||
warn!("{message}");
|
||||
Err(error.into_error().context(message))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn save_oauth_tokens_with_keyring_with_fallback_to_file<K: KeyringStore>(
|
||||
keyring_store: &K,
|
||||
server_name: &str,
|
||||
tokens: &StoredOAuthTokens,
|
||||
) -> Result<()> {
|
||||
match save_oauth_tokens_with_keyring(keyring_store, server_name, tokens) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(error) => {
|
||||
let message = error.to_string();
|
||||
warn!("falling back to file storage for OAuth tokens: {message}");
|
||||
save_oauth_tokens_to_file(tokens)
|
||||
.with_context(|| format!("failed to write OAuth tokens to keyring: {message}"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn delete_oauth_tokens(server_name: &str, url: &str) -> Result<bool> {
|
||||
let store = KeyringCredentialStore;
|
||||
delete_oauth_tokens_with_store(&store, server_name, url)
|
||||
pub fn delete_oauth_tokens(
|
||||
server_name: &str,
|
||||
url: &str,
|
||||
store_mode: OAuthCredentialsStoreMode,
|
||||
) -> Result<bool> {
|
||||
let keyring_store = DefaultKeyringStore;
|
||||
delete_oauth_tokens_from_keyring_and_file(&keyring_store, store_mode, server_name, url)
|
||||
}
|
||||
|
||||
fn delete_oauth_tokens_with_store<C: CredentialStore>(
|
||||
store: &C,
|
||||
fn delete_oauth_tokens_from_keyring_and_file<K: KeyringStore>(
|
||||
keyring_store: &K,
|
||||
store_mode: OAuthCredentialsStoreMode,
|
||||
server_name: &str,
|
||||
url: &str,
|
||||
) -> Result<bool> {
|
||||
let key = compute_store_key(server_name, url)?;
|
||||
let keyring_removed = match store.delete(KEYRING_SERVICE, &key) {
|
||||
let keyring_result = keyring_store.delete(KEYRING_SERVICE, &key);
|
||||
let keyring_removed = match keyring_result {
|
||||
Ok(removed) => removed,
|
||||
Err(error) => {
|
||||
let message = error.message();
|
||||
warn!("failed to delete OAuth tokens from keyring: {message}");
|
||||
return Err(error.into_error()).context("failed to delete OAuth tokens from keyring");
|
||||
match store_mode {
|
||||
OAuthCredentialsStoreMode::Auto | OAuthCredentialsStoreMode::Keyring => {
|
||||
return Err(error.into_error())
|
||||
.context("failed to delete OAuth tokens from keyring");
|
||||
}
|
||||
OAuthCredentialsStoreMode::File => false,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -218,6 +309,7 @@ struct OAuthPersistorInner {
|
||||
server_name: String,
|
||||
url: String,
|
||||
authorization_manager: Arc<Mutex<AuthorizationManager>>,
|
||||
store_mode: OAuthCredentialsStoreMode,
|
||||
last_credentials: Mutex<Option<StoredOAuthTokens>>,
|
||||
}
|
||||
|
||||
@@ -225,14 +317,16 @@ impl OAuthPersistor {
|
||||
pub(crate) fn new(
|
||||
server_name: String,
|
||||
url: String,
|
||||
manager: Arc<Mutex<AuthorizationManager>>,
|
||||
authorization_manager: Arc<Mutex<AuthorizationManager>>,
|
||||
store_mode: OAuthCredentialsStoreMode,
|
||||
initial_credentials: Option<StoredOAuthTokens>,
|
||||
) -> Self {
|
||||
Self {
|
||||
inner: Arc::new(OAuthPersistorInner {
|
||||
server_name,
|
||||
url,
|
||||
authorization_manager: manager,
|
||||
authorization_manager,
|
||||
store_mode,
|
||||
last_credentials: Mutex::new(initial_credentials),
|
||||
}),
|
||||
}
|
||||
@@ -257,15 +351,18 @@ impl OAuthPersistor {
|
||||
};
|
||||
let mut last_credentials = self.inner.last_credentials.lock().await;
|
||||
if last_credentials.as_ref() != Some(&stored) {
|
||||
save_oauth_tokens(&self.inner.server_name, &stored)?;
|
||||
save_oauth_tokens(&self.inner.server_name, &stored, self.inner.store_mode)?;
|
||||
                    *last_credentials = Some(stored);
                }
            }
            None => {
                let mut last_serialized = self.inner.last_credentials.lock().await;
                if last_serialized.take().is_some()
                    && let Err(error) =
                        delete_oauth_tokens(&self.inner.server_name, &self.inner.url)
                    && let Err(error) = delete_oauth_tokens(
                        &self.inner.server_name,
                        &self.inner.url,
                        self.inner.store_mode,
                    )
                {
                    warn!(
                        "failed to remove OAuth tokens for server {}: {error}",
@@ -542,7 +639,7 @@ mod tests {
        }
    }

    impl CredentialStore for MockCredentialStore {
    impl KeyringStore for MockCredentialStore {
        fn load(
            &self,
            _service: &str,
@@ -643,7 +740,8 @@ mod tests {
        let key = super::compute_store_key(&tokens.server_name, &tokens.url)?;
        store.save(KEYRING_SERVICE, &key, &serialized)?;

        let loaded = super::load_oauth_tokens_with_store(&store, &tokens.server_name, &tokens.url)?;
        let loaded =
            super::load_oauth_tokens_from_keyring(&store, &tokens.server_name, &tokens.url)?;
        assert_eq!(loaded, Some(expected));
        Ok(())
    }
@@ -657,8 +755,12 @@ mod tests {

        super::save_oauth_tokens_to_file(&tokens)?;

        let loaded = super::load_oauth_tokens_with_store(&store, &tokens.server_name, &tokens.url)?
            .expect("tokens should load from fallback");
        let loaded = super::load_oauth_tokens_from_keyring_with_fallback_to_file(
            &store,
            &tokens.server_name,
            &tokens.url,
        )?
        .expect("tokens should load from fallback");
        assert_tokens_match_without_expiry(&loaded, &expected);
        Ok(())
    }
@@ -674,8 +776,12 @@ mod tests {

        super::save_oauth_tokens_to_file(&tokens)?;

        let loaded = super::load_oauth_tokens_with_store(&store, &tokens.server_name, &tokens.url)?
            .expect("tokens should load from fallback");
        let loaded = super::load_oauth_tokens_from_keyring_with_fallback_to_file(
            &store,
            &tokens.server_name,
            &tokens.url,
        )?
        .expect("tokens should load from fallback");
        assert_tokens_match_without_expiry(&loaded, &expected);
        Ok(())
    }
@@ -689,7 +795,11 @@ mod tests {

        super::save_oauth_tokens_to_file(&tokens)?;

        super::save_oauth_tokens_with_store(&store, &tokens.server_name, &tokens)?;
        super::save_oauth_tokens_with_keyring_with_fallback_to_file(
            &store,
            &tokens.server_name,
            &tokens,
        )?;

        let fallback_path = super::fallback_file_path()?;
        assert!(!fallback_path.exists(), "fallback file should be removed");
@@ -706,7 +816,11 @@ mod tests {
        let key = super::compute_store_key(&tokens.server_name, &tokens.url)?;
        store.set_error(&key, KeyringError::Invalid("error".into(), "save".into()));

        super::save_oauth_tokens_with_store(&store, &tokens.server_name, &tokens)?;
        super::save_oauth_tokens_with_keyring_with_fallback_to_file(
            &store,
            &tokens.server_name,
            &tokens,
        )?;

        let fallback_path = super::fallback_file_path()?;
        assert!(fallback_path.exists(), "fallback file should be created");
@@ -734,8 +848,34 @@ mod tests {
        store.save(KEYRING_SERVICE, &key, &serialized)?;
        super::save_oauth_tokens_to_file(&tokens)?;

        let removed =
            super::delete_oauth_tokens_with_store(&store, &tokens.server_name, &tokens.url)?;
        let removed = super::delete_oauth_tokens_from_keyring_and_file(
            &store,
            OAuthCredentialsStoreMode::Auto,
            &tokens.server_name,
            &tokens.url,
        )?;
        assert!(removed);
        assert!(!store.contains(&key));
        assert!(!super::fallback_file_path()?.exists());
        Ok(())
    }

    #[test]
    fn delete_oauth_tokens_file_mode_removes_keyring_only_entry() -> Result<()> {
        let _env = TempCodexHome::new();
        let store = MockCredentialStore::default();
        let tokens = sample_tokens();
        let serialized = serde_json::to_string(&tokens)?;
        let key = super::compute_store_key(&tokens.server_name, &tokens.url)?;
        store.save(KEYRING_SERVICE, &key, &serialized)?;
        assert!(store.contains(&key));

        let removed = super::delete_oauth_tokens_from_keyring_and_file(
            &store,
            OAuthCredentialsStoreMode::Auto,
            &tokens.server_name,
            &tokens.url,
        )?;
        assert!(removed);
        assert!(!store.contains(&key));
        assert!(!super::fallback_file_path()?.exists());
@@ -751,8 +891,12 @@ mod tests {
        store.set_error(&key, KeyringError::Invalid("error".into(), "delete".into()));
        super::save_oauth_tokens_to_file(&tokens).unwrap();

        let result =
            super::delete_oauth_tokens_with_store(&store, &tokens.server_name, &tokens.url);
        let result = super::delete_oauth_tokens_from_keyring_and_file(
            &store,
            OAuthCredentialsStoreMode::Auto,
            &tokens.server_name,
            &tokens.url,
        );
        assert!(result.is_err());
        assert!(super::fallback_file_path().unwrap().exists());
        Ok(())

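The renamed helpers above all orbit one policy: try the OS keyring first, then fall back to a token file when the keyring is unavailable or errors. A minimal sketch of the save path under that policy follows; `OAuthCredentialsStoreMode` and the `KeyringStore` trait appear in the diff, but the variant set, service name, error type, and file location below are illustrative assumptions rather than the crate's actual API.

use std::fs;
use std::path::PathBuf;

// Variant names beyond `Auto` are assumptions inferred from the tests.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum OAuthCredentialsStoreMode {
    Auto, // prefer the keyring, fall back to the file on error
    File, // skip the keyring entirely
}

// Simplified stand-in for the `KeyringStore` trait the tests mock.
trait KeyringStore {
    fn save(&self, service: &str, key: &str, value: &str) -> Result<(), String>;
}

fn fallback_file_path() -> PathBuf {
    // Illustrative location only; the real path lives under the Codex home.
    PathBuf::from("/tmp/codex_oauth_tokens.json")
}

// Mirrors the shape of `save_oauth_tokens_with_keyring_with_fallback_to_file`:
// a keyring failure is not fatal, the tokens land in the fallback file instead.
fn save_tokens(
    store: &dyn KeyringStore,
    mode: OAuthCredentialsStoreMode,
    key: &str,
    serialized: &str,
) -> std::io::Result<()> {
    if mode == OAuthCredentialsStoreMode::Auto
        && store.save("codex-mcp", key, serialized).is_ok()
    {
        return Ok(());
    }
    fs::write(fallback_file_path(), serialized)
}
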
@@ -12,6 +12,7 @@ use tokio::sync::oneshot;
use tokio::time::timeout;
use urlencoding::decode;

use crate::OAuthCredentialsStoreMode;
use crate::StoredOAuthTokens;
use crate::WrappedOAuthTokenResponse;
use crate::save_oauth_tokens;
@@ -26,7 +27,11 @@ impl Drop for CallbackServerGuard {
    }
}

pub async fn perform_oauth_login(server_name: &str, server_url: &str) -> Result<()> {
pub async fn perform_oauth_login(
    server_name: &str,
    server_url: &str,
    store_mode: OAuthCredentialsStoreMode,
) -> Result<()> {
    let server = Arc::new(Server::http("127.0.0.1:0").map_err(|err| anyhow!(err))?);
    let guard = CallbackServerGuard {
        server: Arc::clone(&server),
@@ -81,7 +86,7 @@ pub async fn perform_oauth_login(server_name: &str, server_url: &str) -> Result<
        client_id,
        token_response: WrappedOAuthTokenResponse(credentials),
    };
    save_oauth_tokens(server_name, &stored)?;
    save_oauth_tokens(server_name, &stored, store_mode)?;

    drop(guard);
    Ok(())

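`perform_oauth_login` now threads the store mode through to `save_oauth_tokens`; around it, `CallbackServerGuard` is a plain Drop guard over the loopback callback listener. A sketch of that guard pattern, assuming the `tiny_http` crate that the diff's `Server::http("127.0.0.1:0")` call suggests; the field layout and `main` scaffolding are illustrative.

use std::sync::Arc;

// Drop guard over the one-shot OAuth callback listener: however the login
// flow exits, `unblock()` wakes any thread parked in `recv()` so the server
// can shut down instead of hanging forever.
struct CallbackServerGuard {
    server: Arc<tiny_http::Server>,
}

impl Drop for CallbackServerGuard {
    fn drop(&mut self) {
        self.server.unblock();
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Port 0 lets the OS pick a free port for the redirect URI.
    let server = Arc::new(tiny_http::Server::http("127.0.0.1:0")?);
    let _guard = CallbackServerGuard {
        server: Arc::clone(&server),
    };
    // ... wait for the OAuth redirect here; dropping `_guard` unblocks `recv()`.
    Ok(())
}
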
@@ -35,6 +35,7 @@ use tracing::warn;

use crate::load_oauth_tokens;
use crate::logging_client_handler::LoggingClientHandler;
use crate::oauth::OAuthCredentialsStoreMode;
use crate::oauth::OAuthPersistor;
use crate::oauth::StoredOAuthTokens;
use crate::utils::convert_call_tool_result;
@@ -119,17 +120,22 @@ impl RmcpClient {
        server_name: &str,
        url: &str,
        bearer_token: Option<String>,
        store_mode: OAuthCredentialsStoreMode,
    ) -> Result<Self> {
        let initial_tokens = match load_oauth_tokens(server_name, url) {
            Ok(tokens) => tokens,
            Err(err) => {
                warn!("failed to read tokens for server `{server_name}`: {err}");
                None
            }
        let initial_oauth_tokens = match bearer_token {
            Some(_) => None,
            None => match load_oauth_tokens(server_name, url, store_mode) {
                Ok(tokens) => tokens,
                Err(err) => {
                    warn!("failed to read tokens for server `{server_name}`: {err}");
                    None
                }
            },
        };
        let transport = if let Some(initial_tokens) = initial_tokens.clone() {
        let transport = if let Some(initial_tokens) = initial_oauth_tokens.clone() {
            let (transport, oauth_persistor) =
                create_oauth_transport_and_runtime(server_name, url, initial_tokens).await?;
                create_oauth_transport_and_runtime(server_name, url, initial_tokens, store_mode)
                    .await?;
            PendingTransport::StreamableHttpWithOAuth {
                transport,
                oauth_persistor,
@@ -137,7 +143,7 @@ impl RmcpClient {
        } else {
            let mut http_config = StreamableHttpClientTransportConfig::with_uri(url.to_string());
            if let Some(bearer_token) = bearer_token {
                http_config = http_config.auth_header(format!("Bearer {bearer_token}"));
                http_config = http_config.auth_header(bearer_token);
            }

            let transport = StreamableHttpClientTransport::from_config(http_config);
@@ -283,6 +289,7 @@ async fn create_oauth_transport_and_runtime(
    server_name: &str,
    url: &str,
    initial_tokens: StoredOAuthTokens,
    credentials_store: OAuthCredentialsStoreMode,
) -> Result<(
    StreamableHttpClientTransport<AuthClient<reqwest::Client>>,
    OAuthPersistor,
@@ -317,6 +324,7 @@ async fn create_oauth_transport_and_runtime(
        server_name.to_string(),
        url.to_string(),
        auth_manager,
        credentials_store,
        Some(initial_tokens),
    );

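The client hunk above makes an explicit `bearer_token` short-circuit stored OAuth tokens, and passes the raw header value to `auth_header` instead of prefixing `Bearer ` itself. The selection rule, reduced to a standalone sketch (the closure stands in for the crate's `load_oauth_tokens`):

// Precedence rule from the diff: a configured bearer token wins, and stored
// OAuth tokens are not even loaded; otherwise a failed load degrades to None.
fn select_initial_tokens<T>(
    bearer_token: Option<&str>,
    load_oauth_tokens: impl FnOnce() -> Result<Option<T>, String>,
) -> Option<T> {
    match bearer_token {
        Some(_) => None,
        None => load_oauth_tokens().unwrap_or_else(|err| {
            eprintln!("failed to read tokens: {err}");
            None
        }),
    }
}

fn main() {
    // With a bearer token, the loader must not matter.
    let tokens = select_initial_tokens(Some("secret"), || Ok(Some("stored".to_string())));
    assert_eq!(tokens, None);

    // Without one, stored tokens are used when the load succeeds.
    let tokens = select_initial_tokens(None, || Ok(Some("stored".to_string())));
    assert_eq!(tokens.as_deref(), Some("stored"));
}
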
@@ -68,6 +68,8 @@ strum_macros = { workspace = true }
supports-color = { workspace = true }
tempfile = { workspace = true }
textwrap = { workspace = true }
tree-sitter-highlight = { workspace = true }
tree-sitter-bash = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
    "macros",

@@ -134,8 +134,9 @@ impl App {
    /// Useful when switching sessions to ensure prior history remains visible.
    pub(crate) fn render_transcript_once(&mut self, tui: &mut tui::Tui) {
        if !self.transcript_cells.is_empty() {
            let width = tui.terminal.last_known_screen_size.width;
            for cell in &self.transcript_cells {
                tui.insert_history_lines(cell.transcript_lines());
                tui.insert_history_lines(cell.display_lines(width));
            }
        }
    }

@@ -16,7 +16,6 @@ use crate::key_hint::KeyBinding;
use crate::render::highlight::highlight_bash_to_lines;
use crate::render::renderable::ColumnRenderable;
use crate::render::renderable::Renderable;
use crate::text_formatting::truncate_text;
use codex_core::protocol::FileChange;
use codex_core::protocol::Op;
use codex_core::protocol::ReviewDecision;
@@ -105,9 +104,9 @@ impl ApprovalOverlay {
            ),
        };

        let header = Box::new(ColumnRenderable::new([
            Box::new(Line::from(title.bold())),
            Box::new(Line::from("")),
        let header = Box::new(ColumnRenderable::with([
            Line::from(title.bold()).into(),
            Line::from("").into(),
            header,
        ]));

@@ -160,11 +159,8 @@ impl ApprovalOverlay {
    }

    fn handle_exec_decision(&self, id: &str, command: &[String], decision: ReviewDecision) {
        if let Some(lines) = build_exec_history_lines(command.to_vec(), decision) {
            self.app_event_tx.send(AppEvent::InsertHistoryCell(Box::new(
                history_cell::new_user_approval_decision(lines),
            )));
        }
        let cell = history_cell::new_approval_decision_cell(command.to_vec(), decision);
        self.app_event_tx.send(AppEvent::InsertHistoryCell(cell));
        self.app_event_tx.send(AppEvent::CodexOp(Op::ExecApproval {
            id: id.to_string(),
            decision,
@@ -315,18 +311,19 @@ impl From<ApprovalRequest> for ApprovalRequestState {
                changes,
            } => {
                let mut header: Vec<Box<dyn Renderable>> = Vec::new();
                header.push(DiffSummary::new(changes, cwd).into());
                if let Some(reason) = reason
                    && !reason.is_empty()
                {
                    header.push(Box::new(Line::from("")));
                    header.push(Box::new(
                        Paragraph::new(reason.italic()).wrap(Wrap { trim: false }),
                        Paragraph::new(Line::from_iter(["Reason: ".into(), reason.italic()]))
                            .wrap(Wrap { trim: false }),
                    ));
                    header.push(Box::new(Line::from("")));
                }
                header.push(DiffSummary::new(changes, cwd).into());
                Self {
                    variant: ApprovalVariant::ApplyPatch { id },
                    header: Box::new(ColumnRenderable::new(header)),
                    header: Box::new(ColumnRenderable::with(header)),
                }
            }
        }
@@ -395,91 +392,11 @@ fn patch_options() -> Vec<ApprovalOption> {
    ]
}

fn build_exec_history_lines(
    command: Vec<String>,
    decision: ReviewDecision,
) -> Option<Vec<Line<'static>>> {
    use ReviewDecision::*;

    let (symbol, summary): (Span<'static>, Vec<Span<'static>>) = match decision {
        Approved => {
            let snippet = Span::from(exec_snippet(&command)).dim();
            (
                "✔ ".green(),
                vec![
                    "You ".into(),
                    "approved".bold(),
                    " codex to run ".into(),
                    snippet,
                    " this time".bold(),
                ],
            )
        }
        ApprovedForSession => {
            let snippet = Span::from(exec_snippet(&command)).dim();
            (
                "✔ ".green(),
                vec![
                    "You ".into(),
                    "approved".bold(),
                    " codex to run ".into(),
                    snippet,
                    " every time this session".bold(),
                ],
            )
        }
        Denied => {
            let snippet = Span::from(exec_snippet(&command)).dim();
            (
                "✗ ".red(),
                vec![
                    "You ".into(),
                    "did not approve".bold(),
                    " codex to run ".into(),
                    snippet,
                ],
            )
        }
        Abort => {
            let snippet = Span::from(exec_snippet(&command)).dim();
            (
                "✗ ".red(),
                vec![
                    "You ".into(),
                    "canceled".bold(),
                    " the request to run ".into(),
                    snippet,
                ],
            )
        }
    };

    let mut lines = Vec::new();
    let mut spans = Vec::new();
    spans.push(symbol);
    spans.extend(summary);
    lines.push(Line::from(spans));
    Some(lines)
}

fn truncate_exec_snippet(full_cmd: &str) -> String {
    let mut snippet = match full_cmd.split_once('\n') {
        Some((first, _)) => format!("{first} ..."),
        None => full_cmd.to_string(),
    };
    snippet = truncate_text(&snippet, 80);
    snippet
}

fn exec_snippet(command: &[String]) -> String {
    let full_cmd = strip_bash_lc_and_escape(command);
    truncate_exec_snippet(&full_cmd)
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::app_event::AppEvent;
    use pretty_assertions::assert_eq;
    use tokio::sync::mpsc::unbounded_channel;

    fn make_exec_request() -> ApprovalRequest {
@@ -549,6 +466,34 @@ mod tests {
        );
    }

    #[test]
    fn exec_history_cell_wraps_with_two_space_indent() {
        let command = vec![
            "/bin/zsh".into(),
            "-lc".into(),
            "git add tui/src/render/mod.rs tui/src/render/renderable.rs".into(),
        ];
        let cell = history_cell::new_approval_decision_cell(command, ReviewDecision::Approved);
        let lines = cell.display_lines(28);
        let rendered: Vec<String> = lines
            .iter()
            .map(|line| {
                line.spans
                    .iter()
                    .map(|span| span.content.as_ref())
                    .collect::<String>()
            })
            .collect();
        let expected = vec![
            "✔ You approved codex to".to_string(),
            "  run /bin/zsh -lc 'git add".to_string(),
            "  tui/src/render/mod.rs tui/".to_string(),
            "  src/render/renderable.rs'".to_string(),
            "  this time".to_string(),
        ];
        assert_eq!(rendered, expected);
    }

    #[test]
    fn enter_sets_last_selected_index_without_dismissing() {
        let (tx_raw, mut rx) = unbounded_channel::<AppEvent>();

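The removed `build_exec_history_lines` boilerplate collapses into `history_cell::new_approval_decision_cell`, but the snippet helpers keep their job: only the first line of a multi-line command survives, capped at 80 columns. A self-contained sketch of that truncation, with a simplified stand-in for the tui's `truncate_text`:

// Simplified stand-in for the tui's `truncate_text`: hard cap by char count.
fn truncate_text(text: &str, max_chars: usize) -> String {
    if text.chars().count() <= max_chars {
        text.to_string()
    } else {
        text.chars().take(max_chars).collect()
    }
}

// Matches the shape of the diff's `truncate_exec_snippet`: keep the first
// line of a multi-line command, mark the elision, then cap the width.
fn truncate_exec_snippet(full_cmd: &str) -> String {
    let snippet = match full_cmd.split_once('\n') {
        Some((first, _)) => format!("{first} ..."),
        None => full_cmd.to_string(),
    };
    truncate_text(&snippet, 80)
}

fn main() {
    assert_eq!(truncate_exec_snippet("echo one\necho two"), "echo one ...");
    assert_eq!(truncate_exec_snippet(&"x".repeat(100)).chars().count(), 80);
}
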
@@ -38,7 +38,6 @@ use crate::bottom_pane::prompt_args::prompt_has_numeric_placeholders;
use crate::slash_command::SlashCommand;
use crate::slash_command::built_in_slash_commands;
use crate::style::user_message_style;
use crate::terminal_palette;
use codex_protocol::custom_prompts::CustomPrompt;
use codex_protocol::custom_prompts::PROMPTS_CMD_PREFIX;

@@ -150,7 +149,7 @@ impl ChatComposer {
            paste_burst: PasteBurst::default(),
            disable_paste_burst: false,
            custom_prompts: Vec::new(),
            footer_mode: FooterMode::ShortcutPrompt,
            footer_mode: FooterMode::ShortcutSummary,
            footer_hint_override: None,
            context_window_percent: None,
        };
@@ -166,8 +165,9 @@ impl ChatComposer {
            .unwrap_or_else(|| footer_height(footer_props));
        let footer_spacing = Self::footer_spacing(footer_hint_height);
        let footer_total_height = footer_hint_height + footer_spacing;
        const COLS_WITH_MARGIN: u16 = LIVE_PREFIX_COLS + 1;
        self.textarea
            .desired_height(width.saturating_sub(LIVE_PREFIX_COLS))
            .desired_height(width.saturating_sub(COLS_WITH_MARGIN))
            + 2
            + match &self.active_popup {
                ActivePopup::None => footer_total_height,
@@ -198,7 +198,9 @@ impl ChatComposer {
        let [composer_rect, popup_rect] =
            Layout::vertical([Constraint::Min(1), popup_constraint]).areas(area);
        let mut textarea_rect = composer_rect;
        textarea_rect.width = textarea_rect.width.saturating_sub(LIVE_PREFIX_COLS);
        textarea_rect.width = textarea_rect.width.saturating_sub(
            LIVE_PREFIX_COLS + 1, /* keep a one-column right margin for wrapping */
        );
        textarea_rect.x = textarea_rect.x.saturating_add(LIVE_PREFIX_COLS);
        [composer_rect, textarea_rect, popup_rect]
    }
@@ -320,8 +322,12 @@ impl ChatComposer {
    }

    /// Attempt to start a burst by retro-capturing recent chars before the cursor.
    pub fn attach_image(&mut self, path: PathBuf, width: u32, height: u32, format_label: &str) {
        let placeholder = format!("[image {width}x{height} {format_label}]");
    pub fn attach_image(&mut self, path: PathBuf, width: u32, height: u32, _format_label: &str) {
        let file_label = path
            .file_name()
            .map(|name| name.to_string_lossy().into_owned())
            .unwrap_or_else(|| "image".to_string());
        let placeholder = format!("[{file_label} {width}x{height}]");
        // Insert as an element to match large paste placeholder behavior:
        // styled distinctly and treated atomically for cursor/mutations.
        self.textarea.insert_element(&placeholder);
@@ -959,6 +965,7 @@ impl ChatComposer {
        }
        let mut text = self.textarea.text().to_string();
        let original_input = text.clone();
        let input_starts_with_space = original_input.starts_with(' ');
        self.textarea.set_text("");

        // Replace all pending pastes in the text
@@ -972,6 +979,35 @@ impl ChatComposer {
        // If there is neither text nor attachments, suppress submission entirely.
        let has_attachments = !self.attached_images.is_empty();
        text = text.trim().to_string();
        if let Some((name, _rest)) = parse_slash_name(&text) {
            let treat_as_plain_text = input_starts_with_space || name.contains('/');
            if !treat_as_plain_text {
                let is_builtin = built_in_slash_commands()
                    .into_iter()
                    .any(|(command_name, _)| command_name == name);
                let prompt_prefix = format!("{PROMPTS_CMD_PREFIX}:");
                let is_known_prompt = name
                    .strip_prefix(&prompt_prefix)
                    .map(|prompt_name| {
                        self.custom_prompts
                            .iter()
                            .any(|prompt| prompt.name == prompt_name)
                    })
                    .unwrap_or(false);
                if !is_builtin && !is_known_prompt {
                    let message = format!(
                        r#"Unrecognized command '/{name}'. Type "/" for a list of supported commands."#
                    );
                    self.app_event_tx.send(AppEvent::InsertHistoryCell(Box::new(
                        history_cell::new_info_event(message, None),
                    )));
                    self.textarea.set_text(&original_input);
                    self.textarea.set_cursor(original_input.len());
                    return (InputResult::None, true);
                }
            }
        }

        let expanded_prompt = match expand_custom_prompt(&text, &self.custom_prompts) {
            Ok(expanded) => expanded,
            Err(err) => {
@@ -1346,8 +1382,8 @@ impl ChatComposer {
            FooterMode::EscHint => FooterMode::EscHint,
            FooterMode::ShortcutOverlay => FooterMode::ShortcutOverlay,
            FooterMode::CtrlCReminder => FooterMode::CtrlCReminder,
            FooterMode::ShortcutPrompt if self.ctrl_c_quit_hint => FooterMode::CtrlCReminder,
            FooterMode::ShortcutPrompt if !self.is_empty() => FooterMode::Empty,
            FooterMode::ShortcutSummary if self.ctrl_c_quit_hint => FooterMode::CtrlCReminder,
            FooterMode::ShortcutSummary if !self.is_empty() => FooterMode::ContextOnly,
            other => other,
        }
    }
@@ -1533,7 +1569,7 @@ impl WidgetRef for ChatComposer {
                }
            }
        }
        let style = user_message_style(terminal_palette::default_bg());
        let style = user_message_style();
        let mut block_rect = composer_rect;
        block_rect.y = composer_rect.y.saturating_sub(1);
        block_rect.height = composer_rect.height.saturating_add(1);
@@ -1780,11 +1816,11 @@ mod tests {

        // Toggle back to prompt mode so subsequent typing captures characters.
        let _ = composer.handle_key_event(KeyEvent::new(KeyCode::Char('?'), KeyModifiers::NONE));
        assert_eq!(composer.footer_mode, FooterMode::ShortcutPrompt);
        assert_eq!(composer.footer_mode, FooterMode::ShortcutSummary);

        type_chars_humanlike(&mut composer, &['h']);
        assert_eq!(composer.textarea.text(), "h");
        assert_eq!(composer.footer_mode(), FooterMode::Empty);
        assert_eq!(composer.footer_mode(), FooterMode::ContextOnly);

        let (result, needs_redraw) =
            composer.handle_key_event(KeyEvent::new(KeyCode::Char('?'), KeyModifiers::NONE));
@@ -1793,8 +1829,8 @@ mod tests {
        std::thread::sleep(ChatComposer::recommended_paste_flush_delay());
        let _ = composer.flush_paste_burst_if_due();
        assert_eq!(composer.textarea.text(), "h?");
        assert_eq!(composer.footer_mode, FooterMode::ShortcutPrompt);
        assert_eq!(composer.footer_mode(), FooterMode::Empty);
        assert_eq!(composer.footer_mode, FooterMode::ShortcutSummary);
        assert_eq!(composer.footer_mode(), FooterMode::ContextOnly);
    }

    #[test]
@@ -2582,7 +2618,7 @@ mod tests {
        let (result, _) =
            composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
        match result {
            InputResult::Submitted(text) => assert_eq!(text, "[image 32x16 PNG] hi"),
            InputResult::Submitted(text) => assert_eq!(text, "[image1.png 32x16] hi"),
            _ => panic!("expected Submitted"),
        }
        let imgs = composer.take_recent_submission_images();
@@ -2605,7 +2641,7 @@ mod tests {
        let (result, _) =
            composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
        match result {
            InputResult::Submitted(text) => assert_eq!(text, "[image 10x5 PNG]"),
            InputResult::Submitted(text) => assert_eq!(text, "[image2.png 10x5]"),
            _ => panic!("expected Submitted"),
        }
        let imgs = composer.take_recent_submission_images();
@@ -2678,7 +2714,12 @@ mod tests {
        composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));

        assert_eq!(composer.attached_images.len(), 1);
        assert!(composer.textarea.text().starts_with("[image 10x5 PNG]"));
        assert!(
            composer
                .textarea
                .text()
                .starts_with("[image_multibyte.png 10x5]")
        );
    }

    #[test]
@@ -2701,21 +2742,31 @@ mod tests {
        composer.handle_paste(" ".into());
        composer.attach_image(path2.clone(), 10, 5, "PNG");

        let ph = composer.attached_images[0].placeholder.clone();
        let placeholder1 = composer.attached_images[0].placeholder.clone();
        let placeholder2 = composer.attached_images[1].placeholder.clone();
        let text = composer.textarea.text().to_string();
        let start1 = text.find(&ph).expect("first placeholder present");
        let end1 = start1 + ph.len();
        let start1 = text.find(&placeholder1).expect("first placeholder present");
        let end1 = start1 + placeholder1.len();
        composer.textarea.set_cursor(end1);

        // Backspace should delete the first placeholder and its mapping.
        composer.handle_key_event(KeyEvent::new(KeyCode::Backspace, KeyModifiers::NONE));

        let new_text = composer.textarea.text().to_string();
        assert_eq!(1, new_text.matches(&ph).count(), "one placeholder remains");
        assert_eq!(
            0,
            new_text.matches(&placeholder1).count(),
            "first placeholder removed"
        );
        assert_eq!(
            1,
            new_text.matches(&placeholder2).count(),
            "second placeholder remains"
        );
        assert_eq!(
            vec![AttachedImage {
                path: path2,
                placeholder: "[image 10x5 PNG]".to_string()
                placeholder: "[image_dup2.png 10x5]".to_string()
            }],
            composer.attached_images,
            "one image mapping remains"
@@ -2742,7 +2793,12 @@ mod tests {

        let needs_redraw = composer.handle_paste(tmp_path.to_string_lossy().to_string());
        assert!(needs_redraw);
        assert!(composer.textarea.text().starts_with("[image 3x2 PNG] "));
        assert!(
            composer
                .textarea
                .text()
                .starts_with("[codex_tui_test_paste_image.png 3x2] ")
        );

        let imgs = composer.take_recent_submission_images();
        assert_eq!(imgs, vec![tmp_path]);
@@ -2854,6 +2910,76 @@ mod tests {
        assert!(composer.textarea.is_empty());
    }

    #[test]
    fn slash_path_input_submits_without_command_error() {
        use crossterm::event::KeyCode;
        use crossterm::event::KeyEvent;
        use crossterm::event::KeyModifiers;

        let (tx, mut rx) = unbounded_channel::<AppEvent>();
        let sender = AppEventSender::new(tx);
        let mut composer = ChatComposer::new(
            true,
            sender,
            false,
            "Ask Codex to do anything".to_string(),
            false,
        );

        composer
            .textarea
            .set_text("/Users/example/project/src/main.rs");

        let (result, _needs_redraw) =
            composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));

        if let InputResult::Submitted(text) = result {
            assert_eq!(text, "/Users/example/project/src/main.rs");
        } else {
            panic!("expected Submitted");
        }
        assert!(composer.textarea.is_empty());
        match rx.try_recv() {
            Ok(event) => panic!("unexpected event: {event:?}"),
            Err(tokio::sync::mpsc::error::TryRecvError::Empty) => {}
            Err(err) => panic!("unexpected channel state: {err:?}"),
        }
    }

    #[test]
    fn slash_with_leading_space_submits_as_text() {
        use crossterm::event::KeyCode;
        use crossterm::event::KeyEvent;
        use crossterm::event::KeyModifiers;

        let (tx, mut rx) = unbounded_channel::<AppEvent>();
        let sender = AppEventSender::new(tx);
        let mut composer = ChatComposer::new(
            true,
            sender,
            false,
            "Ask Codex to do anything".to_string(),
            false,
        );

        composer.textarea.set_text(" /this-looks-like-a-command");

        let (result, _needs_redraw) =
            composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));

        if let InputResult::Submitted(text) = result {
            assert_eq!(text, "/this-looks-like-a-command");
        } else {
            panic!("expected Submitted");
        }
        assert!(composer.textarea.is_empty());
        match rx.try_recv() {
            Ok(event) => panic!("unexpected event: {event:?}"),
            Err(tokio::sync::mpsc::error::TryRecvError::Empty) => {}
            Err(err) => panic!("unexpected channel state: {err:?}"),
        }
    }

    #[test]
    fn custom_prompt_invalid_args_reports_error() {
        let (tx, mut rx) = unbounded_channel::<AppEvent>();

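The submit path above distinguishes slash commands from text that merely starts with `/`: a leading space or a `/` inside the first token (an absolute path, say) bypasses the command check, and only then are built-ins and `prompts:`-prefixed custom prompts consulted. The heuristic as a standalone sketch; `parse_slash_name` and the literal `"prompts:"` prefix below are simplified stand-ins for the crate's helpers:

// Stand-in for the crate's `parse_slash_name`: token after '/', then the rest.
fn parse_slash_name(text: &str) -> Option<(&str, &str)> {
    let rest = text.strip_prefix('/')?;
    let mut parts = rest.splitn(2, char::is_whitespace);
    Some((parts.next().unwrap_or(""), parts.next().unwrap_or("")))
}

/// True when the input should be rejected as an unrecognized command.
fn is_unrecognized_command(raw_input: &str, builtins: &[&str], prompts: &[&str]) -> bool {
    let input_starts_with_space = raw_input.starts_with(' ');
    let text = raw_input.trim();
    let Some((name, _rest)) = parse_slash_name(text) else {
        return false; // not slash-shaped at all
    };
    // Leading space or '/' inside the name (e.g. /Users/...) means plain text.
    if input_starts_with_space || name.contains('/') {
        return false;
    }
    let is_builtin = builtins.contains(&name);
    let is_known_prompt = name
        .strip_prefix("prompts:")
        .is_some_and(|prompt_name| prompts.contains(&prompt_name));
    !is_builtin && !is_known_prompt
}

fn main() {
    let builtins = ["diff", "model"];
    let prompts = ["review"];
    assert!(!is_unrecognized_command("/Users/example/project/src/main.rs", &builtins, &prompts));
    assert!(!is_unrecognized_command(" /this-looks-like-a-command", &builtins, &prompts));
    assert!(!is_unrecognized_command("/prompts:review", &builtins, &prompts));
    assert!(is_unrecognized_command("/definitely-not-a-command", &builtins, &prompts));
}
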
@@ -59,14 +59,15 @@ impl ChatComposerHistory {
            return;
        }

        self.history_cursor = None;
        self.last_history_text = None;

        // Avoid inserting a duplicate if identical to the previous entry.
        if self.local_history.last().is_some_and(|prev| prev == text) {
            return;
        }

        self.local_history.push(text.to_string());
        self.history_cursor = None;
        self.last_history_text = None;
    }

    /// Should Up/Down key presses be interpreted as history navigation given

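The hunk reorders the history push so navigation state is cleared before the duplicate check; previously an identical resubmission returned early and could leave a stale cursor. The control flow in isolation (the struct shape is a minimal stand-in):

#[derive(Default)]
struct ComposerHistory {
    local_history: Vec<String>,
    history_cursor: Option<usize>,
    last_history_text: Option<String>,
}

impl ComposerHistory {
    fn record_local_submission(&mut self, text: &str) {
        // Reset navigation state first so even a skipped duplicate leaves
        // Up/Down browsing positioned at the end of history.
        self.history_cursor = None;
        self.last_history_text = None;

        // Avoid inserting a duplicate if identical to the previous entry.
        if self.local_history.last().is_some_and(|prev| prev == text) {
            return;
        }
        self.local_history.push(text.to_string());
    }
}

fn main() {
    let mut history = ComposerHistory::default();
    history.history_cursor = Some(0);
    history.record_local_submission("ls");
    history.record_local_submission("ls"); // deduplicated
    assert_eq!(history.local_history, vec!["ls".to_string()]);
    assert_eq!(history.history_cursor, None);
}
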
@@ -23,10 +23,10 @@ pub(crate) struct FooterProps {
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) enum FooterMode {
    CtrlCReminder,
    ShortcutPrompt,
    ShortcutSummary,
    ShortcutOverlay,
    EscHint,
    Empty,
    ContextOnly,
}

pub(crate) fn toggle_shortcut_mode(current: FooterMode, ctrl_c_hint: bool) -> FooterMode {
@@ -35,7 +35,7 @@ pub(crate) fn toggle_shortcut_mode(current: FooterMode, ctrl_c_hint: bool) -> Fo
    }

    match current {
        FooterMode::ShortcutOverlay | FooterMode::CtrlCReminder => FooterMode::ShortcutPrompt,
        FooterMode::ShortcutOverlay | FooterMode::CtrlCReminder => FooterMode::ShortcutSummary,
        _ => FooterMode::ShortcutOverlay,
    }
}
@@ -53,7 +53,7 @@ pub(crate) fn reset_mode_after_activity(current: FooterMode) -> FooterMode {
        FooterMode::EscHint
        | FooterMode::ShortcutOverlay
        | FooterMode::CtrlCReminder
        | FooterMode::Empty => FooterMode::ShortcutPrompt,
        | FooterMode::ContextOnly => FooterMode::ShortcutSummary,
        other => other,
    }
}
@@ -72,26 +72,29 @@ pub(crate) fn render_footer(area: Rect, buf: &mut Buffer, props: FooterProps) {
}

fn footer_lines(props: FooterProps) -> Vec<Line<'static>> {
    // Show the context indicator on the left, appended after the primary hint
    // (e.g., "? for shortcuts"). Keep it visible even when typing (i.e., when
    // the shortcut hint is hidden). Hide it only for the multi-line
    // ShortcutOverlay.
    match props.mode {
        FooterMode::CtrlCReminder => vec![ctrl_c_reminder_line(CtrlCReminderState {
            is_task_running: props.is_task_running,
        })],
        FooterMode::ShortcutPrompt => {
            if props.is_task_running {
                vec![context_window_line(props.context_window_percent)]
            } else {
                vec![Line::from(vec![
                    key_hint::plain(KeyCode::Char('?')).into(),
                    " for shortcuts".dim(),
                ])]
            }
        FooterMode::ShortcutSummary => {
            let mut line = context_window_line(props.context_window_percent);
            line.push_span(" · ".dim());
            line.extend(vec![
                key_hint::plain(KeyCode::Char('?')).into(),
                " for shortcuts".dim(),
            ]);
            vec![line]
        }
        FooterMode::ShortcutOverlay => shortcut_overlay_lines(ShortcutsState {
            use_shift_enter_hint: props.use_shift_enter_hint,
            esc_backtrack_hint: props.esc_backtrack_hint,
        }),
        FooterMode::EscHint => vec![esc_hint_line(props.esc_backtrack_hint)],
        FooterMode::Empty => Vec::new(),
        FooterMode::ContextOnly => vec![context_window_line(props.context_window_percent)],
    }
}

@@ -219,18 +222,8 @@ fn build_columns(entries: Vec<Line<'static>>) -> Vec<Line<'static>> {
}

fn context_window_line(percent: Option<u8>) -> Line<'static> {
    let mut spans: Vec<Span<'static>> = Vec::new();
    match percent {
        Some(percent) => {
            spans.push(format!("{percent}%").dim());
            spans.push(" context left".dim());
        }
        None => {
            spans.push(key_hint::plain(KeyCode::Char('?')).into());
            spans.push(" for shortcuts".dim());
        }
    }
    Line::from(spans)
    let percent = percent.unwrap_or(100);
    Line::from(vec![Span::from(format!("{percent}% context left")).dim()])
}

#[derive(Clone, Copy, Debug, Eq, PartialEq)]
@@ -402,7 +395,7 @@ mod tests {
        snapshot_footer(
            "footer_shortcuts_default",
            FooterProps {
                mode: FooterMode::ShortcutPrompt,
                mode: FooterMode::ShortcutSummary,
                esc_backtrack_hint: false,
                use_shift_enter_hint: false,
                is_task_running: false,
@@ -468,7 +461,7 @@ mod tests {
        snapshot_footer(
            "footer_shortcuts_context_running",
            FooterProps {
                mode: FooterMode::ShortcutPrompt,
                mode: FooterMode::ShortcutSummary,
                esc_backtrack_hint: false,
                use_shift_enter_hint: false,
                is_task_running: true,

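Taken together, the renames make the footer a small state machine: `ShortcutSummary` (context percentage plus the "? for shortcuts" hint) is the rest state, `ContextOnly` replaces `Empty` while the user is typing, and overlay or reminder states toggle back to the summary. A condensed sketch of those transitions; the `ctrl_c_hint` guard ahead of the match is inferred from the function signature and is not shown in the hunk:

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum FooterMode {
    CtrlCReminder,
    ShortcutSummary,
    ShortcutOverlay,
    EscHint,
    ContextOnly,
}

// '?' toggles between the one-line summary and the full shortcut overlay.
fn toggle_shortcut_mode(current: FooterMode, ctrl_c_hint: bool) -> FooterMode {
    if ctrl_c_hint {
        return FooterMode::CtrlCReminder; // assumed guard, mirroring the reset path
    }
    match current {
        FooterMode::ShortcutOverlay | FooterMode::CtrlCReminder => FooterMode::ShortcutSummary,
        _ => FooterMode::ShortcutOverlay,
    }
}

// Any activity collapses transient states back to the summary line.
fn reset_mode_after_activity(current: FooterMode) -> FooterMode {
    match current {
        FooterMode::EscHint
        | FooterMode::ShortcutOverlay
        | FooterMode::CtrlCReminder
        | FooterMode::ContextOnly => FooterMode::ShortcutSummary,
        other => other,
    }
}

fn main() {
    let mode = toggle_shortcut_mode(FooterMode::ShortcutSummary, false);
    assert_eq!(mode, FooterMode::ShortcutOverlay);
    assert_eq!(reset_mode_after_activity(mode), FooterMode::ShortcutSummary);
}
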
@@ -20,7 +20,6 @@ use crate::render::RectExt as _;
use crate::render::renderable::ColumnRenderable;
use crate::render::renderable::Renderable;
use crate::style::user_message_style;
use crate::terminal_palette;

use super::CancellationEvent;
use super::bottom_pane_view::BottomPaneView;
@@ -88,7 +87,7 @@ impl ListSelectionView {
        if params.title.is_some() || params.subtitle.is_some() {
            let title = params.title.map(|title| Line::from(title.bold()));
            let subtitle = params.subtitle.map(|subtitle| Line::from(subtitle.dim()));
            header = Box::new(ColumnRenderable::new([
            header = Box::new(ColumnRenderable::with([
                header,
                Box::new(title),
                Box::new(subtitle),
@@ -350,7 +349,7 @@ impl Renderable for ListSelectionView {
            .areas(area);

        Block::default()
            .style(user_message_style(terminal_palette::default_bg()))
            .style(user_message_style())
            .render(content_area, buf);

        let header_height = self

@@ -81,7 +81,7 @@ pub(crate) struct BottomPaneParams {
}

impl BottomPane {
    const BOTTOM_PAD_LINES: u16 = 1;
    const BOTTOM_PAD_LINES: u16 = 0;
    pub fn new(params: BottomPaneParams) -> Self {
        let enhanced_keys_supported = params.enhanced_keys_supported;
        Self {
@@ -522,10 +522,29 @@ impl WidgetRef for &BottomPane {
mod tests {
    use super::*;
    use crate::app_event::AppEvent;
    use insta::assert_snapshot;
    use ratatui::buffer::Buffer;
    use ratatui::layout::Rect;
    use tokio::sync::mpsc::unbounded_channel;

    fn snapshot_buffer(buf: &Buffer) -> String {
        let mut lines = Vec::new();
        for y in 0..buf.area().height {
            let mut row = String::new();
            for x in 0..buf.area().width {
                row.push(buf[(x, y)].symbol().chars().next().unwrap_or(' '));
            }
            lines.push(row);
        }
        lines.join("\n")
    }

    fn render_snapshot(pane: &BottomPane, area: Rect) -> String {
        let mut buf = Buffer::empty(area);
        (&pane).render_ref(area, &mut buf);
        snapshot_buffer(&buf)
    }

    fn exec_request() -> ApprovalRequest {
        ApprovalRequest::Exec {
            id: "1".to_string(),
@@ -685,7 +704,7 @@ mod tests {
    }

    #[test]
    fn bottom_padding_present_with_status_above_composer() {
    fn status_and_composer_fill_height_without_bottom_padding() {
        let (tx_raw, _rx) = unbounded_channel::<AppEvent>();
        let tx = AppEventSender::new(tx_raw);
        let mut pane = BottomPane::new(BottomPaneParams {
@@ -700,43 +719,21 @@ mod tests {
        // Activate spinner (status view replaces composer) with no live ring.
        pane.set_task_running(true);

        // Use height == desired_height; expect 1 status row at top and 2 bottom padding rows.
        // Use height == desired_height; expect spacer + status + composer rows without trailing padding.
        let height = pane.desired_height(30);
        assert!(
            height >= 3,
            "expected at least 3 rows with bottom padding; got {height}"
            "expected at least 3 rows to render spacer, status, and composer; got {height}"
        );
        let area = Rect::new(0, 0, 30, height);
        let mut buf = Buffer::empty(area);
        (&pane).render_ref(area, &mut buf);

        // Row 1 contains the status header (row 0 is the spacer)
        let mut top = String::new();
        for x in 0..area.width {
            top.push(buf[(x, 1)].symbol().chars().next().unwrap_or(' '));
        }
        assert!(
            top.trim_start().starts_with("• Working"),
            "expected top row to start with '• Working': {top:?}"
        );
        assert!(
            top.contains("Working"),
            "expected Working header on top row: {top:?}"
        );

        // Last row should be blank padding; the row above should generally contain composer content.
        let mut r_last = String::new();
        for x in 0..area.width {
            r_last.push(buf[(x, height - 1)].symbol().chars().next().unwrap_or(' '));
        }
        assert!(
            r_last.trim().is_empty(),
            "expected last row blank: {r_last:?}"
        assert_snapshot!(
            "status_and_composer_fill_height_without_bottom_padding",
            render_snapshot(&pane, area)
        );
    }

    #[test]
    fn bottom_padding_shrinks_when_tiny() {
    fn status_hidden_when_height_too_small() {
        let (tx_raw, _rx) = unbounded_channel::<AppEvent>();
        let tx = AppEventSender::new(tx_raw);
        let mut pane = BottomPane::new(BottomPaneParams {
@@ -750,37 +747,18 @@ mod tests {

        pane.set_task_running(true);

        // Height=2 → status on one row, composer on the other.
        // Height=2 → composer takes the full space; status collapses when there is no room.
        let area2 = Rect::new(0, 0, 20, 2);
        let mut buf2 = Buffer::empty(area2);
        (&pane).render_ref(area2, &mut buf2);
        let mut row0 = String::new();
        let mut row1 = String::new();
        for x in 0..area2.width {
            row0.push(buf2[(x, 0)].symbol().chars().next().unwrap_or(' '));
            row1.push(buf2[(x, 1)].symbol().chars().next().unwrap_or(' '));
        }
        let has_composer = row0.contains("Ask Codex") || row1.contains("Ask Codex");
        assert!(
            has_composer,
            "expected composer to be visible on one of the rows: row0={row0:?}, row1={row1:?}"
        );
        assert!(
            row0.contains("Working") || row1.contains("Working"),
            "expected status header to be visible at height=2: row0={row0:?}, row1={row1:?}"
        assert_snapshot!(
            "status_hidden_when_height_too_small_height_2",
            render_snapshot(&pane, area2)
        );

        // Height=1 → no padding; single row is the composer (status hidden).
        let area1 = Rect::new(0, 0, 20, 1);
        let mut buf1 = Buffer::empty(area1);
        (&pane).render_ref(area1, &mut buf1);
        let mut only = String::new();
        for x in 0..area1.width {
            only.push(buf1[(x, 0)].symbol().chars().next().unwrap_or(' '));
        }
        assert!(
            only.contains("Ask Codex"),
            "expected composer with no padding: {only:?}"
        assert_snapshot!(
            "status_hidden_when_height_too_small_height_1",
            render_snapshot(&pane, area1)
        );
    }
}

@@ -11,4 +11,4 @@ expression: terminal.backend()
" "
" "
" "
" "
" 100% context left "
Some files were not shown because too many files have changed in this diff.