Mirror of https://github.com/openai/codex.git — synced 2026-02-03 23:43:39 +00:00

Compare commits: `easong/clo` ... `robin` — 55 commits

(In the diff blocks below, `-`/`+` markers are restored only where the old/new pairing is unambiguous; the original view's gutter did not survive, so unmarked lines may be context or changed lines.)
| Author | SHA1 | Date |
|---|---|---|
| | `b3f6608e6b` | |
| | `0e051644a9` | |
| | `40d14c0756` | |
| | `af65666561` | |
| | `2ae1f81d84` | |
| | `d363a0968e` | |
| | `bce030ddb5` | |
| | `f4af6e389e` | |
| | `b315b22f7b` | |
| | `c9e149fd5c` | |
| | `bacdc004be` | |
| | `ab5972d447` | |
| | `767b66f407` | |
| | `830ab4ce20` | |
| | `3f73e2c892` | |
| | `1822ffe870` | |
| | `7e2165f394` | |
| | `8e5f38c0f0` | |
| | `1388e99674` | |
| | `f56d1dc8fc` | |
| | `9be310041b` | |
| | `0fbcdd77c8` | |
| | `9bce050385` | |
| | `3f92ad4190` | |
| | `54ee302a06` | |
| | `44fa06ae36` | |
| | `856f97f449` | |
| | `fe7a3f0c2b` | |
| | `c30ca0d5b6` | |
| | `a8a6cbdd1c` | |
| | `e4257f432e` | |
| | `2c793083f4` | |
| | `e150798baf` | |
| | `33a6cc66ab` | |
| | `52d0ec4cd8` | |
| | `397279d46e` | |
| | `30ca89424c` | |
| | `d909048a85` | |
| | `888c6dd9e7` | |
| | `b5dd189067` | |
| | `54e6e4ac32` | |
| | `e8af41de8a` | |
| | `d6c30ed25e` | |
| | `fb9849e1e3` | |
| | `72a1453ac5` | |
| | `6d67b8b283` | |
| | `74a75679d9` | |
| | `92e3046733` | |
| | `65c13f1ae7` | |
| | `b00a7cf40d` | |
| | `13d378f2ce` | |
| | `a6597a9958` | |
| | `692989c277` | |
| | `2fde03b4a0` | |
| | `056c8f8279` | |

(Author and date columns were not captured in this mirror view.)
**`.github/workflows/issue-deduplicator.yml`** (vendored) — 1 line changed

```diff
@@ -46,7 +46,6 @@ jobs:
         with:
           openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
           allow-users: "*"
           model: gpt-5.1
           prompt: |
             You are an assistant that triages new GitHub issues by identifying potential duplicates.
```
**`.github/workflows/rust-release.yml`** (vendored) — 22 lines changed

```diff
@@ -371,8 +371,20 @@ jobs:
           path: |
             codex-rs/dist/${{ matrix.target }}/*

+  shell-tool-mcp:
+    name: shell-tool-mcp
+    needs: tag-check
+    uses: ./.github/workflows/shell-tool-mcp.yml
+    with:
+      release-tag: ${{ github.ref_name }}
+      # We are not ready to publish yet.
+      publish: false
+    secrets: inherit
+
   release:
-    needs: build
+    needs:
+      - build
+      - shell-tool-mcp
     name: release
     runs-on: ubuntu-latest
     permissions:
@@ -395,6 +407,14 @@
       - name: List
         run: ls -R dist/

+      # This is a temporary fix: we should modify shell-tool-mcp.yml so these
+      # files do not end up in dist/ in the first place.
+      - name: Delete entries from dist/ that should not go in the release
+        run: |
+          rm -rf dist/shell-tool-mcp*
+
+          ls -R dist/
+
       - name: Define release name
         id: release_name
         run: |
```
**`.github/workflows/shell-tool-mcp-ci.yml`** (vendored, new file, 48 lines)

```yaml
name: shell-tool-mcp CI

on:
  push:
    paths:
      - "shell-tool-mcp/**"
      - ".github/workflows/shell-tool-mcp-ci.yml"
      - "pnpm-lock.yaml"
      - "pnpm-workspace.yaml"
  pull_request:
    paths:
      - "shell-tool-mcp/**"
      - ".github/workflows/shell-tool-mcp-ci.yml"
      - "pnpm-lock.yaml"
      - "pnpm-workspace.yaml"

env:
  NODE_VERSION: 22

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v5
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: "pnpm"

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Format check
        run: pnpm --filter @openai/codex-shell-tool-mcp run format

      - name: Run tests
        run: pnpm --filter @openai/codex-shell-tool-mcp test

      - name: Build
        run: pnpm --filter @openai/codex-shell-tool-mcp run build
```
**`.github/workflows/shell-tool-mcp.yml`** (vendored, new file, 412 lines)

```yaml
name: shell-tool-mcp

on:
  workflow_call:
    inputs:
      release-version:
        description: Version to publish (x.y.z or x.y.z-alpha.N). Defaults to GITHUB_REF_NAME when it starts with rust-v.
        required: false
        type: string
      release-tag:
        description: Tag name to use when downloading release artifacts (defaults to rust-v<version>).
        required: false
        type: string
      publish:
        description: Whether to publish to npm when the version is releasable.
        required: false
        default: true
        type: boolean

env:
  NODE_VERSION: 22

jobs:
  metadata:
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.compute.outputs.version }}
      release_tag: ${{ steps.compute.outputs.release_tag }}
      should_publish: ${{ steps.compute.outputs.should_publish }}
      npm_tag: ${{ steps.compute.outputs.npm_tag }}
    steps:
      - name: Compute version and tags
        id: compute
        run: |
          set -euo pipefail

          version="${{ inputs.release-version }}"
          release_tag="${{ inputs.release-tag }}"

          if [[ -z "$version" ]]; then
            if [[ -n "$release_tag" && "$release_tag" =~ ^rust-v.+ ]]; then
              version="${release_tag#rust-v}"
            elif [[ "${GITHUB_REF_NAME:-}" =~ ^rust-v.+ ]]; then
              version="${GITHUB_REF_NAME#rust-v}"
              release_tag="${GITHUB_REF_NAME}"
            else
              echo "release-version is required when GITHUB_REF_NAME is not a rust-v tag."
              exit 1
            fi
          fi

          if [[ -z "$release_tag" ]]; then
            release_tag="rust-v${version}"
          fi

          npm_tag=""
          should_publish="false"
          if [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            should_publish="true"
          elif [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
            should_publish="true"
            npm_tag="alpha"
          fi

          echo "version=${version}" >> "$GITHUB_OUTPUT"
          echo "release_tag=${release_tag}" >> "$GITHUB_OUTPUT"
          echo "npm_tag=${npm_tag}" >> "$GITHUB_OUTPUT"
          echo "should_publish=${should_publish}" >> "$GITHUB_OUTPUT"

  rust-binaries:
    name: Build Rust - ${{ matrix.target }}
    needs: metadata
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    defaults:
      run:
        working-directory: codex-rs
    strategy:
      fail-fast: false
      matrix:
        include:
          - runner: macos-15-xlarge
            target: aarch64-apple-darwin
          - runner: macos-15-xlarge
            target: x86_64-apple-darwin
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            install_musl: true
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            install_musl: true
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5

      - uses: dtolnay/rust-toolchain@1.90
        with:
          targets: ${{ matrix.target }}

      - if: ${{ matrix.install_musl }}
        name: Install musl build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y musl-tools pkg-config

      - name: Build exec server binaries
        run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper

      - name: Stage exec server binaries
        run: |
          dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}"
          mkdir -p "$dest"
          cp "target/${{ matrix.target }}/release/codex-exec-mcp-server" "$dest/"
          cp "target/${{ matrix.target }}/release/codex-execve-wrapper" "$dest/"

      - uses: actions/upload-artifact@v4
        with:
          name: shell-tool-mcp-rust-${{ matrix.target }}
          path: artifacts/**
          if-no-files-found: error

  bash-linux:
    name: Build Bash (Linux) - ${{ matrix.variant }} - ${{ matrix.target }}
    needs: metadata
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    container:
      image: ${{ matrix.image }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: ubuntu-24.04
            image: ubuntu:24.04
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: ubuntu-22.04
            image: ubuntu:22.04
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: ubuntu-20.04
            image: ubuntu:20.04
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: debian-12
            image: debian:12
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: debian-11
            image: debian:11
          - runner: ubuntu-24.04
            target: x86_64-unknown-linux-musl
            variant: centos-9
            image: quay.io/centos/centos:stream9
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: ubuntu-24.04
            image: arm64v8/ubuntu:24.04
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: ubuntu-22.04
            image: arm64v8/ubuntu:22.04
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: ubuntu-20.04
            image: arm64v8/ubuntu:20.04
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: debian-12
            image: arm64v8/debian:12
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: debian-11
            image: arm64v8/debian:11
          - runner: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
            variant: centos-9
            image: quay.io/centos/centos:stream9
    steps:
      - name: Install build prerequisites
        shell: bash
        run: |
          set -euo pipefail
          if command -v apt-get >/dev/null 2>&1; then
            apt-get update
            DEBIAN_FRONTEND=noninteractive apt-get install -y git build-essential bison autoconf gettext
          elif command -v dnf >/dev/null 2>&1; then
            dnf install -y git gcc gcc-c++ make bison autoconf gettext
          elif command -v yum >/dev/null 2>&1; then
            yum install -y git gcc gcc-c++ make bison autoconf gettext
          else
            echo "Unsupported package manager in container"
            exit 1
          fi

      - name: Checkout repository
        uses: actions/checkout@v5

      - name: Build patched Bash
        shell: bash
        run: |
          set -euo pipefail
          git clone --depth 1 https://github.com/bminor/bash /tmp/bash
          cd /tmp/bash
          git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
          git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
          git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
          ./configure --without-bash-malloc
          cores="$(command -v nproc >/dev/null 2>&1 && nproc || getconf _NPROCESSORS_ONLN)"
          make -j"${cores}"

          dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
          mkdir -p "$dest"
          cp bash "$dest/bash"

      - uses: actions/upload-artifact@v4
        with:
          name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
          path: artifacts/**
          if-no-files-found: error

  bash-darwin:
    name: Build Bash (macOS) - ${{ matrix.variant }} - ${{ matrix.target }}
    needs: metadata
    runs-on: ${{ matrix.runner }}
    timeout-minutes: 30
    strategy:
      fail-fast: false
      matrix:
        include:
          - runner: macos-15-xlarge
            target: aarch64-apple-darwin
            variant: macos-15
          - runner: macos-14
            target: aarch64-apple-darwin
            variant: macos-14
          - runner: macos-13
            target: x86_64-apple-darwin
            variant: macos-13
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5

      - name: Build patched Bash
        shell: bash
        run: |
          set -euo pipefail
          git clone --depth 1 https://github.com/bminor/bash /tmp/bash
          cd /tmp/bash
          git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
          git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
          git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
          ./configure --without-bash-malloc
          cores="$(getconf _NPROCESSORS_ONLN)"
          make -j"${cores}"

          dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
          mkdir -p "$dest"
          cp bash "$dest/bash"

      - uses: actions/upload-artifact@v4
        with:
          name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
          path: artifacts/**
          if-no-files-found: error

  package:
    name: Package npm module
    needs:
      - metadata
      - rust-binaries
      - bash-linux
      - bash-darwin
    runs-on: ubuntu-latest
    env:
      PACKAGE_VERSION: ${{ needs.metadata.outputs.version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10.8.1
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v5
        with:
          node-version: ${{ env.NODE_VERSION }}

      - name: Install JavaScript dependencies
        run: pnpm install --frozen-lockfile

      - name: Build (shell-tool-mcp)
        run: pnpm --filter @openai/codex-shell-tool-mcp run build

      - name: Download build artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts

      - name: Assemble staging directory
        id: staging
        shell: bash
        run: |
          set -euo pipefail
          staging="${STAGING_DIR}"
          mkdir -p "$staging" "$staging/vendor"
          cp shell-tool-mcp/README.md "$staging/"
          cp shell-tool-mcp/package.json "$staging/"
          cp -R shell-tool-mcp/bin "$staging/"

          found_vendor="false"
          shopt -s nullglob
          for vendor_dir in artifacts/*/vendor; do
            rsync -av "$vendor_dir/" "$staging/vendor/"
            found_vendor="true"
          done
          if [[ "$found_vendor" == "false" ]]; then
            echo "No vendor payloads were downloaded."
            exit 1
          fi

          node - <<'NODE'
          import fs from "node:fs";
          import path from "node:path";

          const stagingDir = process.env.STAGING_DIR;
          const version = process.env.PACKAGE_VERSION;
          const pkgPath = path.join(stagingDir, "package.json");
          const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
          pkg.version = version;
          fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n");
          NODE

          echo "dir=$staging" >> "$GITHUB_OUTPUT"
        env:
          STAGING_DIR: ${{ runner.temp }}/shell-tool-mcp

      - name: Ensure binaries are executable
        run: |
          set -euo pipefail
          staging="${{ steps.staging.outputs.dir }}"
          chmod +x \
            "$staging"/vendor/*/codex-exec-mcp-server \
            "$staging"/vendor/*/codex-execve-wrapper \
            "$staging"/vendor/*/bash/*/bash

      - name: Create npm tarball
        shell: bash
        run: |
          set -euo pipefail
          mkdir -p dist/npm
          staging="${{ steps.staging.outputs.dir }}"
          pack_info=$(cd "$staging" && npm pack --ignore-scripts --json --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
          filename=$(PACK_INFO="$pack_info" node -e 'const data = JSON.parse(process.env.PACK_INFO); console.log(data[0].filename);')
          mv "dist/npm/${filename}" "dist/npm/codex-shell-tool-mcp-npm-${PACKAGE_VERSION}.tgz"

      - uses: actions/upload-artifact@v4
        with:
          name: codex-shell-tool-mcp-npm
          path: dist/npm/codex-shell-tool-mcp-npm-${{ env.PACKAGE_VERSION }}.tgz
          if-no-files-found: error

  publish:
    name: Publish npm package
    needs:
      - metadata
      - package
    if: ${{ inputs.publish && needs.metadata.outputs.should_publish == 'true' }}
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: read
    steps:
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10.8.1
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v5
        with:
          node-version: ${{ env.NODE_VERSION }}
          registry-url: https://registry.npmjs.org
          scope: "@openai"

      - name: Update npm
        run: npm install -g npm@latest

      - name: Download npm tarball
        uses: actions/download-artifact@v4
        with:
          name: codex-shell-tool-mcp-npm
          path: dist/npm

      - name: Publish to npm
        env:
          NPM_TAG: ${{ needs.metadata.outputs.npm_tag }}
          VERSION: ${{ needs.metadata.outputs.version }}
        shell: bash
        run: |
          set -euo pipefail
          tag_args=()
          if [[ -n "${NPM_TAG}" ]]; then
            tag_args+=(--tag "${NPM_TAG}")
          fi
          npm publish "dist/npm/codex-shell-tool-mcp-npm-${VERSION}.tgz" "${tag_args[@]}"
```
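The `metadata` job encodes the npm publishing policy in bash regexes: plain `x.y.z` versions publish under the default dist-tag, `x.y.z-alpha.N` versions publish under the `alpha` tag, and anything else is not published. A hedged Rust sketch of the same classification, purely as a cross-check (the `regex` crate and the `classify` helper are illustrative assumptions, not part of the workflow):

```rust
use regex::Regex;

/// Mirrors the version classification in the metadata job's bash step
/// (a sketch, not the workflow itself). Returns (should_publish, npm_tag).
fn classify(version: &str) -> (bool, Option<&'static str>) {
    let stable = Regex::new(r"^[0-9]+\.[0-9]+\.[0-9]+$").unwrap();
    let alpha = Regex::new(r"^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$").unwrap();
    if stable.is_match(version) {
        (true, None) // publish to the default dist-tag
    } else if alpha.is_match(version) {
        (true, Some("alpha")) // publish with `npm publish --tag alpha`
    } else {
        (false, None) // e.g. an un-stripped "rust-v..." ref: do not publish
    }
}

fn main() {
    assert_eq!(classify("0.50.0"), (true, None));
    assert_eq!(classify("0.50.0-alpha.3"), (true, Some("alpha")));
    assert_eq!(classify("rust-v0.50.0"), (false, None));
}
```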
**`README.md`** — 33 lines changed

Hunk `@@ -69,7 +69,38 @@` adds an Execpolicy Quickstart section:

Codex CLI supports a rich set of configuration options, with preferences stored in `~/.codex/config.toml`. For full configuration options, see [Configuration](./docs/config.md).

---

### Execpolicy Quickstart

Codex can enforce your own rules-based execution policy before it runs shell commands.

1. Create a policy directory: `mkdir -p ~/.codex/policy`.
2. Create one or more `.codexpolicy` files in that folder. Codex automatically loads every `.codexpolicy` file in there on startup.
3. Write `prefix_rule` entries to describe the commands you want to allow, prompt, or block:

```starlark
prefix_rule(
    pattern = ["git", ["push", "fetch"]],
    decision = "prompt",  # allow | prompt | forbidden
    match = [["git", "push", "origin", "main"]],  # examples that must match
    not_match = [["git", "status"]],  # examples that must not match
)
```

- `pattern` is a list of shell tokens, evaluated from left to right; wrap tokens in a nested list to express alternatives (e.g., match both `push` and `fetch`).
- `decision` sets the severity; Codex picks the strictest decision when multiple rules match (forbidden > prompt > allow) — a small sketch of this combination rule follows below.
- `match` and `not_match` act as (optional) unit tests. Codex validates them when it loads your policy, so you get feedback if an example has unexpected behavior.

In this example rule, if Codex wants to run commands with the prefix `git push` or `git fetch`, it will first ask for user approval.
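To make the strictest-decision rule concrete, here is a minimal Rust sketch of the combination logic described above. The `Decision` enum, its ordering, and the fallback when no rule matches are illustrative assumptions; the real evaluator lives in `codex-rs/execpolicy`:

```rust
/// Ordered from most permissive to strictest, so `max` picks the strictest.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Decision {
    Allow,
    Prompt,
    Forbidden,
}

/// Combine the decisions of every matching rule: the strictest one wins
/// (forbidden > prompt > allow). Falling back to Allow when nothing matches
/// is an assumption for this sketch.
fn combine(matches: impl IntoIterator<Item = Decision>) -> Decision {
    matches.into_iter().max().unwrap_or(Decision::Allow)
}

fn main() {
    // One rule says allow, another says prompt: the user gets prompted.
    assert_eq!(combine([Decision::Allow, Decision::Prompt]), Decision::Prompt);
    // Any forbidden rule overrides everything else.
    assert_eq!(
        combine([Decision::Allow, Decision::Forbidden, Decision::Prompt]),
        Decision::Forbidden
    );
}
```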
Use the `codex execpolicy check` subcommand to preview decisions before you save a rule (see the [`codex-execpolicy` README](./codex-rs/execpolicy/README.md) for syntax details):

```shell
codex execpolicy check --policy ~/.codex/policy/default.codexpolicy git push origin main
```

Pass multiple `--policy` flags to test how several files combine, and use `--pretty` for formatted JSON output. See the [`codex-rs/execpolicy` README](./codex-rs/execpolicy/README.md) for a more detailed walkthrough of the available syntax.

Note: `execpolicy` commands are still in preview. The API may have breaking changes in the future.

### Docs & FAQ
**Test-runner configuration** (nextest overrides; the file name was not captured in this view)

```diff
@@ -7,3 +7,7 @@ slow-timeout = { period = "15s", terminate-after = 2 }
 # Do not add new tests here
 filter = 'test(rmcp_client) | test(humanlike_typing_1000_chars_appears_live_no_placeholder)'
 slow-timeout = { period = "1m", terminate-after = 4 }
+
+[[profile.default.overrides]]
+filter = 'test(approval_matrix_covers_all_modes)'
+slow-timeout = { period = "30s", terminate-after = 2 }
```
**`codex-rs/Cargo.lock`** (generated) — 107 lines changed

```diff
@@ -187,8 +187,10 @@ dependencies = [
 "codex-app-server-protocol",
 "codex-core",
 "codex-protocol",
 "core_test_support",
 "serde",
 "serde_json",
 "shlex",
 "tokio",
 "uuid",
 "wiremock",
@@ -260,7 +262,7 @@ dependencies = [
 "memchr",
 "proc-macro2",
 "quote",
-"rustc-hash 2.1.1",
+"rustc-hash",
 "serde",
 "serde_derive",
 "syn 2.0.104",
@@ -726,6 +728,17 @@ version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"

+[[package]]
+name = "chardetng"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14b8f0b65b7b08ae3c8187e8d77174de20cb6777864c6b832d8ad365999cf1ea"
+dependencies = [
+ "cfg-if",
+ "encoding_rs",
+ "memchr",
+]
+
 [[package]]
 name = "chrono"
 version = "0.4.42"
@@ -849,7 +862,6 @@ dependencies = [
 "codex-login",
 "codex-protocol",
 "codex-utils-json-to-toml",
-"codex-windows-sandbox",
 "core_test_support",
 "mcp-types",
 "opentelemetry-appender-tracing",
@@ -858,6 +870,7 @@ dependencies = [
 "serde",
 "serde_json",
 "serial_test",
+"shlex",
 "tempfile",
 "tokio",
 "toml",
@@ -880,6 +893,7 @@ dependencies = [
 "serde",
 "serde_json",
 "strum_macros 0.27.2",
+"thiserror 2.0.17",
 "ts-rs",
 "uuid",
 ]
@@ -990,6 +1004,7 @@ dependencies = [
 "codex-common",
 "codex-core",
 "codex-exec",
+"codex-execpolicy",
 "codex-login",
 "codex-mcp-server",
 "codex-process-hardening",
@@ -1081,11 +1096,13 @@ dependencies = [
 "async-trait",
 "base64",
 "bytes",
+"chardetng",
 "chrono",
 "codex-app-server-protocol",
 "codex-apply-patch",
 "codex-arg0",
 "codex-async-utils",
+"codex-execpolicy",
 "codex-file-search",
 "codex-git",
 "codex-keyring-store",
@@ -1095,13 +1112,13 @@ dependencies = [
 "codex-utils-pty",
 "codex-utils-readiness",
 "codex-utils-string",
-"codex-utils-tokenizer",
 "codex-windows-sandbox",
 "core-foundation 0.9.4",
 "core_test_support",
 "ctor 0.5.0",
 "dirs",
 "dunce",
+"encoding_rs",
 "env-flags",
 "escargot",
 "eventsource-stream",
@@ -1188,6 +1205,7 @@ name = "codex-exec-server"
 version = "0.0.0"
 dependencies = [
 "anyhow",
 "async-trait",
 "clap",
 "codex-core",
 "libc",
@@ -1196,9 +1214,11 @@ dependencies = [
 "rmcp",
 "serde",
 "serde_json",
 "shlex",
 "socket2 0.6.0",
 "tempfile",
 "tokio",
 "tokio-util",
 "tracing",
 "tracing-subscriber",
 ]
@@ -1206,6 +1226,21 @@ dependencies = [
+[[package]]
+name = "codex-execpolicy"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "multimap",
+ "pretty_assertions",
+ "serde",
+ "serde_json",
+ "shlex",
+ "starlark",
+ "thiserror 2.0.17",
+]
+
 [[package]]
 name = "codex-execpolicy-legacy"
 version = "0.0.0"
 dependencies = [
 "allocative",
 "anyhow",
@@ -1223,21 +1258,6 @@ dependencies = [
 "tempfile",
 ]

-[[package]]
-name = "codex-execpolicy2"
-version = "0.0.0"
-dependencies = [
- "anyhow",
- "clap",
- "multimap",
- "pretty_assertions",
- "serde",
- "serde_json",
- "shlex",
- "starlark",
- "thiserror 2.0.17",
-]
-
 [[package]]
 name = "codex-feedback"
 version = "0.0.0"
@@ -1612,18 +1632,6 @@ dependencies = [
 name = "codex-utils-string"
 version = "0.0.0"

-[[package]]
-name = "codex-utils-tokenizer"
-version = "0.0.0"
-dependencies = [
- "anyhow",
- "codex-utils-cache",
- "pretty_assertions",
- "thiserror 2.0.17",
- "tiktoken-rs",
- "tokio",
-]
-
 [[package]]
 name = "codex-windows-sandbox"
 version = "0.1.0"
@@ -1771,6 +1779,7 @@ dependencies = [
 "notify",
 "regex-lite",
 "serde_json",
 "shlex",
 "tempfile",
 "tokio",
 "walkdir",
@@ -2445,17 +2454,6 @@ dependencies = [
 "once_cell",
 ]

-[[package]]
-name = "fancy-regex"
-version = "0.13.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2"
-dependencies = [
- "bit-set",
- "regex-automata",
- "regex-syntax 0.8.5",
-]
-
 [[package]]
 name = "fastrand"
 version = "2.3.0"
@@ -3743,11 +3741,13 @@ dependencies = [
 "assert_cmd",
 "codex-core",
 "codex-mcp-server",
 "core_test_support",
 "mcp-types",
 "os_info",
 "pretty_assertions",
 "serde",
 "serde_json",
 "shlex",
 "tokio",
 "wiremock",
 ]
@@ -4780,7 +4780,7 @@ dependencies = [
 "pin-project-lite",
 "quinn-proto",
 "quinn-udp",
-"rustc-hash 2.1.1",
+"rustc-hash",
 "rustls",
 "socket2 0.6.0",
 "thiserror 2.0.17",
@@ -4800,7 +4800,7 @@ dependencies = [
 "lru-slab",
 "rand 0.9.2",
 "ring",
-"rustc-hash 2.1.1",
+"rustc-hash",
 "rustls",
 "rustls-pki-types",
 "slab",
@@ -5145,12 +5145,6 @@ version = "0.1.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f"

-[[package]]
-name = "rustc-hash"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
-
 [[package]]
 name = "rustc-hash"
 version = "2.1.1"
@@ -6371,21 +6365,6 @@ dependencies = [
 "zune-jpeg",
 ]

-[[package]]
-name = "tiktoken-rs"
-version = "0.9.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a19830747d9034cd9da43a60eaa8e552dfda7712424aebf187b7a60126bae0d"
-dependencies = [
- "anyhow",
- "base64",
- "bstr",
- "fancy-regex",
- "lazy_static",
- "regex",
- "rustc-hash 1.1.0",
-]
-
 [[package]]
 name = "time"
 version = "0.3.44"
```
**Workspace manifest** (file name not captured in this view; from its content, the `codex-rs` workspace `Cargo.toml`)

```diff
@@ -18,7 +18,7 @@ members = [
     "exec",
     "exec-server",
     "execpolicy",
-    "execpolicy2",
+    "execpolicy-legacy",
     "keyring-store",
     "file-search",
     "linux-sandbox",
@@ -41,7 +41,6 @@ members = [
     "utils/pty",
     "utils/readiness",
     "utils/string",
-    "utils/tokenizer",
 ]
 resolver = "2"

@@ -67,6 +66,7 @@ codex-chatgpt = { path = "chatgpt" }
 codex-common = { path = "common" }
 codex-core = { path = "core" }
 codex-exec = { path = "exec" }
+codex-execpolicy = { path = "execpolicy" }
 codex-feedback = { path = "feedback" }
 codex-file-search = { path = "file-search" }
 codex-git = { path = "utils/git" }
@@ -89,7 +89,6 @@ codex-utils-json-to-toml = { path = "utils/json-to-toml" }
 codex-utils-pty = { path = "utils/pty" }
 codex-utils-readiness = { path = "utils/readiness" }
 codex-utils-string = { path = "utils/string" }
-codex-utils-tokenizer = { path = "utils/tokenizer" }
 codex-windows-sandbox = { path = "windows-sandbox-rs" }
 core_test_support = { path = "core/tests/common" }
 mcp-types = { path = "mcp-types" }
@@ -110,6 +109,7 @@ axum = { version = "0.8", default-features = false }
 base64 = "0.22.1"
 bytes = "1.10.1"
 chrono = "0.4.42"
+chardetng = "0.1.17"
 clap = "4"
 clap_complete = "4"
 color-eyre = "0.6.3"
@@ -122,6 +122,7 @@ dotenvy = "0.15.7"
 dunce = "1.0.4"
 env-flags = "0.1.1"
 env_logger = "0.11.5"
+encoding_rs = "0.8.35"
 escargot = "0.5"
 eventsource-stream = "0.2.3"
 futures = { version = "0.3", default-features = false }
@@ -168,7 +169,6 @@ reqwest = "0.12"
 rmcp = { version = "0.8.5", default-features = false }
 schemars = "0.8.22"
 seccompiler = "0.5.0"
 sentry = "0.34.0"
 serde = "1"
 serde_json = "1"
 serde_with = "3.14"
@@ -187,7 +187,6 @@ tempfile = "3.23.0"
 test-log = "0.2.18"
 textwrap = "0.16.2"
 thiserror = "2.0.17"
-tiktoken-rs = "0.9"
 time = "0.3"
 tiny_http = "0.12"
 tokio = "1"
@@ -265,7 +264,6 @@ ignored = [
 "icu_provider",
 "openssl-sys",
 "codex-utils-readiness",
-"codex-utils-tokenizer",
 ]

 [profile.release]
```
**Crate manifest** (file name not captured; from context, the app-server protocol crate)

```diff
@@ -19,6 +19,7 @@ schemars = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true }
 strum_macros = { workspace = true }
+thiserror = { workspace = true }
 ts-rs = { workspace = true }
 uuid = { workspace = true, features = ["serde", "v7"] }
```
**Rust source** (file name not captured; defines the app-server protocol's server requests and notifications)

```diff
@@ -378,7 +378,7 @@ macro_rules! server_notification_definitions {
 impl TryFrom<JSONRPCNotification> for ServerNotification {
     type Error = serde_json::Error;

-    fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
+    fn try_from(value: JSONRPCNotification) -> Result<Self, serde_json::Error> {
         serde_json::from_value(serde_json::to_value(value)?)
     }
 }
@@ -438,6 +438,13 @@ server_request_definitions! {
         response: v2::CommandExecutionRequestApprovalResponse,
     },

+    /// Sent when approval is requested for a specific file change.
+    /// This request is used for Turns started via turn/start.
+    FileChangeRequestApproval => "item/fileChange/requestApproval" {
+        params: v2::FileChangeRequestApprovalParams,
+        response: v2::FileChangeRequestApprovalResponse,
+    },
+
     /// DEPRECATED APIs below
     /// Request to approve a patch.
     /// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
@@ -480,6 +487,7 @@ pub struct FuzzyFileSearchResponse {

 server_notification_definitions! {
     /// NEW NOTIFICATIONS
+    Error => "error" (v2::ErrorNotification),
     ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
     TurnStarted => "turn/started" (v2::TurnStartedNotification),
     TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
```
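The `try_from` above converts between two structurally-compatible types by serializing one and deserializing the other (the return-type change presumably sidesteps ambiguity now that the notification enum gains an `Error` entry alongside the `type Error` associated type). A standalone sketch of that serde round-trip pattern, with hypothetical `Wire`/`Typed` types:

```rust
use serde::{Deserialize, Serialize};

// Hypothetical stand-ins: two types that share the same JSON shape.
#[derive(Serialize)]
struct Wire {
    method: String,
}

#[derive(Deserialize, Debug)]
struct Typed {
    method: String,
}

/// Convert via JSON: serialize the source, deserialize into the target.
/// This trades some runtime cost for not having to write field-by-field
/// conversions, and fails cleanly when the shapes diverge.
fn convert(wire: Wire) -> Result<Typed, serde_json::Error> {
    serde_json::from_value(serde_json::to_value(wire)?)
}

fn main() {
    let typed = convert(Wire { method: "thread/started".into() }).unwrap();
    println!("{typed:?}");
}
```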
**Rust source** (file name not captured; the app-server protocol v2 types)

```diff
@@ -11,6 +11,7 @@ use codex_protocol::items::AgentMessageContent as CoreAgentMessageContent;
 use codex_protocol::items::TurnItem as CoreTurnItem;
 use codex_protocol::models::ResponseItem;
 use codex_protocol::parse_command::ParsedCommand as CoreParsedCommand;
+use codex_protocol::protocol::CodexErrorInfo as CoreCodexErrorInfo;
 use codex_protocol::protocol::CreditsSnapshot as CoreCreditsSnapshot;
 use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
 use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
@@ -20,6 +21,7 @@ use schemars::JsonSchema;
 use serde::Deserialize;
 use serde::Serialize;
 use serde_json::Value as JsonValue;
+use thiserror::Error;
 use ts_rs::TS;

 // Macro to declare a camelCased API v2 enum mirroring a core enum which
@@ -47,6 +49,72 @@ macro_rules! v2_enum_from_core {
     };
 }

+/// This translation layer make sure that we expose codex error code in camel case.
+///
+/// When an upstream HTTP status is available (for example, from the Responses API or a provider),
+/// it is forwarded in `httpStatusCode` on the relevant `codexErrorInfo` variant.
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub enum CodexErrorInfo {
+    ContextWindowExceeded,
+    UsageLimitExceeded,
+    HttpConnectionFailed {
+        #[serde(rename = "httpStatusCode")]
+        #[ts(rename = "httpStatusCode")]
+        http_status_code: Option<u16>,
+    },
+    /// Failed to connect to the response SSE stream.
+    ResponseStreamConnectionFailed {
+        #[serde(rename = "httpStatusCode")]
+        #[ts(rename = "httpStatusCode")]
+        http_status_code: Option<u16>,
+    },
+    InternalServerError,
+    Unauthorized,
+    BadRequest,
+    SandboxError,
+    /// The response SSE stream disconnected in the middle of a turn before completion.
+    ResponseStreamDisconnected {
+        #[serde(rename = "httpStatusCode")]
+        #[ts(rename = "httpStatusCode")]
+        http_status_code: Option<u16>,
+    },
+    /// Reached the retry limit for responses.
+    ResponseTooManyFailedAttempts {
+        #[serde(rename = "httpStatusCode")]
+        #[ts(rename = "httpStatusCode")]
+        http_status_code: Option<u16>,
+    },
+    Other,
+}
+
+impl From<CoreCodexErrorInfo> for CodexErrorInfo {
+    fn from(value: CoreCodexErrorInfo) -> Self {
+        match value {
+            CoreCodexErrorInfo::ContextWindowExceeded => CodexErrorInfo::ContextWindowExceeded,
+            CoreCodexErrorInfo::UsageLimitExceeded => CodexErrorInfo::UsageLimitExceeded,
+            CoreCodexErrorInfo::HttpConnectionFailed { http_status_code } => {
+                CodexErrorInfo::HttpConnectionFailed { http_status_code }
+            }
+            CoreCodexErrorInfo::ResponseStreamConnectionFailed { http_status_code } => {
+                CodexErrorInfo::ResponseStreamConnectionFailed { http_status_code }
+            }
+            CoreCodexErrorInfo::InternalServerError => CodexErrorInfo::InternalServerError,
+            CoreCodexErrorInfo::Unauthorized => CodexErrorInfo::Unauthorized,
+            CoreCodexErrorInfo::BadRequest => CodexErrorInfo::BadRequest,
+            CoreCodexErrorInfo::SandboxError => CodexErrorInfo::SandboxError,
+            CoreCodexErrorInfo::ResponseStreamDisconnected { http_status_code } => {
+                CodexErrorInfo::ResponseStreamDisconnected { http_status_code }
+            }
+            CoreCodexErrorInfo::ResponseTooManyFailedAttempts { http_status_code } => {
+                CodexErrorInfo::ResponseTooManyFailedAttempts { http_status_code }
+            }
+            CoreCodexErrorInfo::Other => CodexErrorInfo::Other,
+        }
+    }
+}
+
 v2_enum_from_core!(
     pub enum AskForApproval from codex_protocol::protocol::AskForApproval {
         UnlessTrusted, OnFailure, OnRequest, Never
@@ -544,11 +612,20 @@ pub struct Turn {
     pub status: TurnStatus,
 }

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, Error)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+#[error("{message}")]
+pub struct TurnError {
+    pub message: String,
+    pub codex_error_info: Option<CodexErrorInfo>,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
-pub struct TurnError {
-    pub message: String,
+pub struct ErrorNotification {
+    pub error: TurnError,
 }

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -782,6 +859,7 @@ pub enum CommandExecutionStatus {
     InProgress,
     Completed,
     Failed,
+    Declined,
 }

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -794,20 +872,23 @@ pub struct FileUpdateChange {
 }

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
-#[serde(rename_all = "camelCase")]
+#[serde(tag = "type", rename_all = "camelCase")]
+#[ts(tag = "type")]
 #[ts(export_to = "v2/")]
 pub enum PatchChangeKind {
     Add,
     Delete,
-    Update,
+    Update { move_path: Option<PathBuf> },
 }

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
 pub enum PatchApplyStatus {
     InProgress,
     Completed,
     Failed,
+    Declined,
 }

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
```
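The `PatchChangeKind` change switches the enum to internal tagging so the `Update` variant can carry a payload. A minimal sketch of what that does to the JSON shape, using a local mirror of the enum (the field-name casing of `move_path` here follows plain serde defaults; the real type may rename it):

```rust
use serde::Serialize;
use std::path::PathBuf;

// Local mirror of the shape in the diff, for illustration only.
#[derive(Serialize)]
#[serde(tag = "type", rename_all = "camelCase")]
enum PatchChangeKind {
    Add,
    Delete,
    Update { move_path: Option<PathBuf> },
}

fn main() {
    // Unit variants carry only the tag:
    println!("{}", serde_json::to_string(&PatchChangeKind::Add).unwrap());
    // -> {"type":"add"}

    // The struct variant now carries its payload alongside the tag,
    // which externally tagged unit variants could not express:
    let update = PatchChangeKind::Update {
        move_path: Some(PathBuf::from("src/new_name.rs")),
    };
    println!("{}", serde_json::to_string(&update).unwrap());
    // -> {"type":"update","move_path":"src/new_name.rs"}
}
```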
```diff
@@ -982,6 +1063,26 @@ pub struct CommandExecutionRequestApprovalResponse {
     pub accept_settings: Option<CommandExecutionRequestAcceptSettings>,
 }

+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export_to = "v2/")]
+pub struct FileChangeRequestApprovalParams {
+    pub thread_id: String,
+    pub turn_id: String,
+    pub item_id: String,
+    /// Optional explanatory reason (e.g. request for extra write access).
+    pub reason: Option<String>,
+    /// [UNSTABLE] When set, the agent is asking the user to allow writes under this root
+    /// for the remainder of the session (unclear if this is honored today).
+    pub grant_root: Option<PathBuf>,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
+#[ts(export_to = "v2/")]
+pub struct FileChangeRequestApprovalResponse {
+    pub decision: ApprovalDecision,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
@@ -1068,6 +1169,7 @@ mod tests {
     use codex_protocol::items::WebSearchItem;
     use codex_protocol::user_input::UserInput as CoreUserInput;
     use pretty_assertions::assert_eq;
+    use serde_json::json;
     use std::path::PathBuf;

     #[test]
@@ -1153,4 +1255,20 @@ mod tests {
         }
     );
 }

+    #[test]
+    fn codex_error_info_serializes_http_status_code_in_camel_case() {
+        let value = CodexErrorInfo::ResponseTooManyFailedAttempts {
+            http_status_code: Some(401),
+        };
+
+        assert_eq!(
+            serde_json::to_value(value).unwrap(),
+            json!({
+                "responseTooManyFailedAttempts": {
+                    "httpStatusCode": 401
+                }
+            })
+        );
+    }
 }
```
**Rust source** (file name not captured; from context, a client harness for the app-server)

```diff
@@ -24,6 +24,8 @@ use codex_app_server_protocol::ClientRequest;
 use codex_app_server_protocol::CommandExecutionRequestAcceptSettings;
 use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
 use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
+use codex_app_server_protocol::FileChangeRequestApprovalParams;
+use codex_app_server_protocol::FileChangeRequestApprovalResponse;
 use codex_app_server_protocol::GetAccountRateLimitsResponse;
 use codex_app_server_protocol::InitializeParams;
 use codex_app_server_protocol::InitializeResponse;
@@ -677,6 +679,9 @@ impl CodexClient {
             ServerRequest::CommandExecutionRequestApproval { request_id, params } => {
                 self.handle_command_execution_request_approval(request_id, params)?;
             }
+            ServerRequest::FileChangeRequestApproval { request_id, params } => {
+                self.approve_file_change_request(request_id, params)?;
+            }
             other => {
                 bail!("received unsupported server request: {other:?}");
             }
@@ -717,6 +722,37 @@ impl CodexClient {
         Ok(())
     }

+    fn approve_file_change_request(
+        &mut self,
+        request_id: RequestId,
+        params: FileChangeRequestApprovalParams,
+    ) -> Result<()> {
+        let FileChangeRequestApprovalParams {
+            thread_id,
+            turn_id,
+            item_id,
+            reason,
+            grant_root,
+        } = params;
+
+        println!(
+            "\n< fileChange approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
+        );
+        if let Some(reason) = reason.as_deref() {
+            println!("< reason: {reason}");
+        }
+        if let Some(grant_root) = grant_root.as_deref() {
+            println!("< grant root: {}", grant_root.display());
+        }
+
+        let response = FileChangeRequestApprovalResponse {
+            decision: ApprovalDecision::Accept,
+        };
+        self.send_server_request_response(request_id, &response)?;
+        println!("< approved fileChange request for item {item_id}");
+        Ok(())
+    }
+
     fn send_server_request_response<T>(&mut self, request_id: RequestId, response: &T) -> Result<()>
     where
         T: Serialize,
```
**Crate manifest** (file name not captured; from context, the app-server crate)

```diff
@@ -40,7 +40,6 @@ tracing = { workspace = true, features = ["log"] }
 tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
 opentelemetry-appender-tracing = { workspace = true }
 uuid = { workspace = true, features = ["serde", "v7"] }
-codex-windows-sandbox.workspace = true

 [dev-dependencies]
 app_test_support = { workspace = true }
@@ -54,3 +53,4 @@ serial_test = { workspace = true }
 tempfile = { workspace = true }
 toml = { workspace = true }
 wiremock = { workspace = true }
+shlex = { workspace = true }
```
**Documentation** (file name not captured; app-server protocol docs) — hunk `@@ -339,6 +339,29 @@`, in the section beginning "Event notifications are the server-initiated event stream for thread lifecycles":

The app-server streams JSON-RPC notifications while a turn is running. Each turn starts with `turn/started` (initial `turn`) and ends with `turn/completed` (final `turn` plus token `usage`); clients subscribe to the events they care about, rendering each item incrementally as updates arrive. The per-item lifecycle is always: `item/started` → zero or more item-specific deltas → `item/completed`.

- `turn/started` — `{ turn }` with the turn id, empty `items`, and `status: "inProgress"`.
- `turn/completed` — `{ turn }` where `turn.status` is `completed`, `interrupted`, or `failed`; failures carry `{ error: { message, codexErrorInfo? } }`.

Today both notifications carry an empty `items` array even when item events were streamed; rely on `item/*` notifications for the canonical item list until this is fixed (a reducer sketch follows below).
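Since `turn/started` and `turn/completed` currently carry empty `items`, a client has to fold the `item/*` stream itself. A hypothetical reducer sketch, with simplified stand-in types rather than the crate's real notification structs:

```rust
use std::collections::HashMap;

// Simplified stand-ins for illustration; real items are a tagged union.
#[derive(Clone, Debug)]
struct Item {
    id: String,
    body: String,
}

enum ItemEvent {
    Started(Item),
    Delta { id: String, chunk: String },
    Completed(Item),
}

/// Fold one notification into the per-turn item map, following the documented
/// lifecycle: `item/started` inserts, deltas append in place, and
/// `item/completed` overwrites with the canonical final payload.
fn apply(items: &mut HashMap<String, Item>, event: ItemEvent) {
    match event {
        ItemEvent::Started(item) => {
            items.insert(item.id.clone(), item);
        }
        ItemEvent::Delta { id, chunk } => {
            if let Some(item) = items.get_mut(&id) {
                item.body.push_str(&chunk);
            }
        }
        ItemEvent::Completed(item) => {
            items.insert(item.id.clone(), item);
        }
    }
}

fn main() {
    let mut items = HashMap::new();
    apply(&mut items, ItemEvent::Started(Item { id: "i1".into(), body: String::new() }));
    apply(&mut items, ItemEvent::Delta { id: "i1".into(), chunk: "hello".into() });
    apply(&mut items, ItemEvent::Completed(Item { id: "i1".into(), body: "hello".into() }));
    println!("{items:?}");
}
```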
#### Errors

An `error` event is emitted whenever the server hits an error mid-turn (for example, upstream model errors or quota limits). It carries the same `{ error: { message, codexErrorInfo? } }` payload as `turn.status: "failed"` and may precede that terminal notification.

`codexErrorInfo` maps to the `CodexErrorInfo` enum. Common values:

- `ContextWindowExceeded`
- `UsageLimitExceeded`
- `HttpConnectionFailed { httpStatusCode? }`: upstream HTTP failures, including 4xx/5xx
- `ResponseStreamConnectionFailed { httpStatusCode? }`: failure to connect to the response SSE stream
- `ResponseStreamDisconnected { httpStatusCode? }`: the response SSE stream disconnected mid-turn, before completion
- `ResponseTooManyFailedAttempts { httpStatusCode? }`
- `BadRequest`
- `Unauthorized`
- `SandboxError`
- `InternalServerError`
- `Other`: all unclassified errors

When an upstream HTTP status is available (for example, from the Responses API or a provider), it is forwarded in `httpStatusCode` on the relevant `codexErrorInfo` variant.

#### Thread items

`ThreadItem` is the tagged union carried in turn responses and `item/*` notifications. Currently we support events for the following items:
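A client-side sketch of consuming `codexErrorInfo`, mirroring the variants listed above in a local enum; the retry policy itself is an assumption for illustration, not documented behavior:

```rust
// Local mirror of the documented variants (not the crate's type).
enum CodexErrorInfo {
    ContextWindowExceeded,
    UsageLimitExceeded,
    HttpConnectionFailed { http_status_code: Option<u16> },
    ResponseStreamConnectionFailed { http_status_code: Option<u16> },
    ResponseStreamDisconnected { http_status_code: Option<u16> },
    ResponseTooManyFailedAttempts { http_status_code: Option<u16> },
    BadRequest,
    Unauthorized,
    SandboxError,
    InternalServerError,
    Other,
}

/// One plausible policy: retry transient transport/server failures, give up
/// on everything the server itself classifies as terminal or client-caused.
fn is_retryable(info: &CodexErrorInfo) -> bool {
    use CodexErrorInfo::*;
    match info {
        HttpConnectionFailed { .. }
        | ResponseStreamConnectionFailed { .. }
        | ResponseStreamDisconnected { .. }
        | InternalServerError => true,
        // The server already exhausted its own retry budget.
        ResponseTooManyFailedAttempts { .. } => false,
        ContextWindowExceeded | UsageLimitExceeded | BadRequest | Unauthorized
        | SandboxError | Other => false,
    }
}

fn main() {
    let err = CodexErrorInfo::ResponseStreamDisconnected { http_status_code: Some(502) };
    println!("retryable: {}", is_retryable(&err));
}
```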
**Rust source** (file name not captured; the app-server's bespoke event handling)

```diff
@@ -8,19 +8,26 @@ use codex_app_server_protocol::AgentMessageDeltaNotification;
 use codex_app_server_protocol::ApplyPatchApprovalParams;
 use codex_app_server_protocol::ApplyPatchApprovalResponse;
 use codex_app_server_protocol::ApprovalDecision;
 use codex_app_server_protocol::CodexErrorInfo as V2CodexErrorInfo;
 use codex_app_server_protocol::CommandAction as V2ParsedCommand;
 use codex_app_server_protocol::CommandExecutionOutputDeltaNotification;
 use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
 use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
 use codex_app_server_protocol::CommandExecutionStatus;
 use codex_app_server_protocol::ErrorNotification;
 use codex_app_server_protocol::ExecCommandApprovalParams;
 use codex_app_server_protocol::ExecCommandApprovalResponse;
 use codex_app_server_protocol::FileChangeRequestApprovalParams;
 use codex_app_server_protocol::FileChangeRequestApprovalResponse;
 use codex_app_server_protocol::FileUpdateChange;
 use codex_app_server_protocol::InterruptConversationResponse;
 use codex_app_server_protocol::ItemCompletedNotification;
 use codex_app_server_protocol::ItemStartedNotification;
 use codex_app_server_protocol::McpToolCallError;
 use codex_app_server_protocol::McpToolCallResult;
 use codex_app_server_protocol::McpToolCallStatus;
 use codex_app_server_protocol::PatchApplyStatus;
 use codex_app_server_protocol::PatchChangeKind as V2PatchChangeKind;
 use codex_app_server_protocol::ReasoningSummaryPartAddedNotification;
 use codex_app_server_protocol::ReasoningSummaryTextDeltaNotification;
 use codex_app_server_protocol::ReasoningTextDeltaNotification;
@@ -40,6 +47,7 @@ use codex_core::protocol::Event;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::ExecApprovalRequestEvent;
 use codex_core::protocol::ExecCommandEndEvent;
+use codex_core::protocol::FileChange as CoreFileChange;
 use codex_core::protocol::McpToolCallBeginEvent;
 use codex_core::protocol::McpToolCallEndEvent;
 use codex_core::protocol::Op;
@@ -47,7 +55,9 @@ use codex_core::protocol::ReviewDecision;
 use codex_core::review_format::format_review_findings_block;
 use codex_protocol::ConversationId;
 use codex_protocol::protocol::ReviewOutputEvent;
+use std::collections::HashMap;
 use std::convert::TryFrom;
+use std::path::PathBuf;
 use std::sync::Arc;
 use tokio::sync::oneshot;
 use tracing::error;
@@ -70,24 +80,74 @@ pub(crate) async fn apply_bespoke_event_handling(
         }
         EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
             call_id,
+            turn_id,
             changes,
             reason,
             grant_root,
-        }) => {
-            let params = ApplyPatchApprovalParams {
-                conversation_id,
-                call_id,
-                file_changes: changes,
-                reason,
-                grant_root,
-            };
-            let rx = outgoing
-                .send_request(ServerRequestPayload::ApplyPatchApproval(params))
-                .await;
-            tokio::spawn(async move {
-                on_patch_approval_response(event_id, rx, conversation).await;
-            });
-        }
+        }) => match api_version {
+            ApiVersion::V1 => {
+                let params = ApplyPatchApprovalParams {
+                    conversation_id,
+                    call_id,
+                    file_changes: changes.clone(),
+                    reason,
+                    grant_root,
+                };
+                let rx = outgoing
+                    .send_request(ServerRequestPayload::ApplyPatchApproval(params))
+                    .await;
+                tokio::spawn(async move {
+                    on_patch_approval_response(event_id, rx, conversation).await;
+                });
+            }
+            ApiVersion::V2 => {
+                // Until we migrate the core to be aware of a first class FileChangeItem
+                // and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
+                let item_id = call_id.clone();
+                let patch_changes = convert_patch_changes(&changes);
+
+                let first_start = {
+                    let mut map = turn_summary_store.lock().await;
+                    let summary = map.entry(conversation_id).or_default();
+                    summary.file_change_started.insert(item_id.clone())
+                };
+                if first_start {
+                    let item = ThreadItem::FileChange {
+                        id: item_id.clone(),
+                        changes: patch_changes.clone(),
+                        status: PatchApplyStatus::InProgress,
+                    };
+                    let notification = ItemStartedNotification { item };
+                    outgoing
+                        .send_server_notification(ServerNotification::ItemStarted(notification))
+                        .await;
+                }
+
+                let params = FileChangeRequestApprovalParams {
+                    thread_id: conversation_id.to_string(),
+                    turn_id: turn_id.clone(),
+                    item_id: item_id.clone(),
+                    reason,
+                    grant_root,
+                };
+                let rx = outgoing
+                    .send_request(ServerRequestPayload::FileChangeRequestApproval(params))
+                    .await;
+                tokio::spawn(async move {
+                    on_file_change_request_approval_response(
+                        event_id,
+                        conversation_id,
+                        item_id,
+                        patch_changes,
+                        rx,
+                        conversation,
+                        outgoing,
+                        turn_summary_store,
+                    )
+                    .await;
+                });
+            }
+        },
         EventMsg::ExecApprovalRequest(ExecApprovalRequestEvent {
             call_id,
             turn_id,
@@ -115,12 +175,20 @@ pub(crate) async fn apply_bespoke_event_handling(
                 });
             }
             ApiVersion::V2 => {
+                let item_id = call_id.clone();
+                let command_actions = parsed_cmd
+                    .iter()
+                    .cloned()
+                    .map(V2ParsedCommand::from)
+                    .collect::<Vec<_>>();
+                let command_string = shlex_join(&command);
+
                 let params = CommandExecutionRequestApprovalParams {
                     thread_id: conversation_id.to_string(),
                     turn_id: turn_id.clone(),
                     // Until we migrate the core to be aware of a first class CommandExecutionItem
                     // and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
-                    item_id: call_id.clone(),
+                    item_id: item_id.clone(),
                     reason,
                     risk: risk.map(V2SandboxCommandAssessment::from),
                 };
@@ -130,8 +198,17 @@ pub(crate) async fn apply_bespoke_event_handling(
                     ))
                     .await;
                 tokio::spawn(async move {
-                    on_command_execution_request_approval_response(event_id, rx, conversation)
-                        .await;
+                    on_command_execution_request_approval_response(
+                        event_id,
+                        item_id,
+                        command_string,
+                        cwd,
+                        command_actions,
+                        rx,
+                        conversation,
+                        outgoing,
+                    )
+                    .await;
                 });
             }
         },
@@ -202,7 +279,29 @@ pub(crate) async fn apply_bespoke_event_handling(
             }
         }
         EventMsg::Error(ev) => {
-            handle_error(conversation_id, ev.message, &turn_summary_store).await;
+            let turn_error = TurnError {
+                message: ev.message,
+                codex_error_info: ev.codex_error_info.map(V2CodexErrorInfo::from),
+            };
+            handle_error(conversation_id, turn_error.clone(), &turn_summary_store).await;
+            outgoing
+                .send_server_notification(ServerNotification::Error(ErrorNotification {
+                    error: turn_error,
+                }))
+                .await;
         }
+        EventMsg::StreamError(ev) => {
+            // We don't need to update the turn summary store for stream errors as they are intermediate error states for retries,
+            // but we notify the client.
+            let turn_error = TurnError {
+                message: ev.message,
+                codex_error_info: ev.codex_error_info.map(V2CodexErrorInfo::from),
+            };
+            outgoing
+                .send_server_notification(ServerNotification::Error(ErrorNotification {
+                    error: turn_error,
+                }))
+                .await;
+        }
         EventMsg::EnteredReviewMode(review_request) => {
             let notification = ItemStartedNotification {
@@ -244,17 +343,65 @@ pub(crate) async fn apply_bespoke_event_handling(
                 .send_server_notification(ServerNotification::ItemCompleted(notification))
                 .await;
         }
+        EventMsg::PatchApplyBegin(patch_begin_event) => {
+            // Until we migrate the core to be aware of a first class FileChangeItem
+            // and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
+            let item_id = patch_begin_event.call_id.clone();
+
+            let first_start = {
+                let mut map = turn_summary_store.lock().await;
+                let summary = map.entry(conversation_id).or_default();
+                summary.file_change_started.insert(item_id.clone())
+            };
+            if first_start {
+                let item = ThreadItem::FileChange {
+                    id: item_id.clone(),
+                    changes: convert_patch_changes(&patch_begin_event.changes),
+                    status: PatchApplyStatus::InProgress,
+                };
+                let notification = ItemStartedNotification { item };
+                outgoing
+                    .send_server_notification(ServerNotification::ItemStarted(notification))
+                    .await;
+            }
+        }
+        EventMsg::PatchApplyEnd(patch_end_event) => {
+            // Until we migrate the core to be aware of a first class FileChangeItem
+            // and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
+            let item_id = patch_end_event.call_id.clone();
+
+            let status = if patch_end_event.success {
+                PatchApplyStatus::Completed
+            } else {
+                PatchApplyStatus::Failed
+            };
+            let changes = convert_patch_changes(&patch_end_event.changes);
+            complete_file_change_item(
+                conversation_id,
+                item_id,
+                changes,
+                status,
+                outgoing.as_ref(),
+                &turn_summary_store,
+            )
+            .await;
+        }
         EventMsg::ExecCommandBegin(exec_command_begin_event) => {
+            let item_id = exec_command_begin_event.call_id.clone();
+            let command_actions = exec_command_begin_event
+                .parsed_cmd
+                .into_iter()
+                .map(V2ParsedCommand::from)
+                .collect::<Vec<_>>();
+            let command = shlex_join(&exec_command_begin_event.command);
+            let cwd = exec_command_begin_event.cwd;
+
             let item = ThreadItem::CommandExecution {
-                id: exec_command_begin_event.call_id.clone(),
-                command: shlex_join(&exec_command_begin_event.command),
-                cwd: exec_command_begin_event.cwd,
+                id: item_id,
+                command,
+                cwd,
                 status: CommandExecutionStatus::InProgress,
-                command_actions: exec_command_begin_event
-                    .parsed_cmd
-                    .into_iter()
-                    .map(V2ParsedCommand::from)
-                    .collect(),
+                command_actions,
                 aggregated_output: None,
                 exit_code: None,
                 duration_ms: None,
@@ -292,6 +439,10 @@ pub(crate) async fn apply_bespoke_event_handling(
             } else {
                 CommandExecutionStatus::Failed
             };
+            let command_actions = parsed_cmd
+                .into_iter()
+                .map(V2ParsedCommand::from)
+                .collect::<Vec<_>>();

             let aggregated_output = if aggregated_output.is_empty() {
                 None
@@ -306,7 +457,7 @@ pub(crate) async fn apply_bespoke_event_handling(
                 command: shlex_join(&command),
                 cwd,
                 status,
-                command_actions: parsed_cmd.into_iter().map(V2ParsedCommand::from).collect(),
+                command_actions,
                 aggregated_output,
                 exit_code: Some(exit_code),
                 duration_ms: Some(duration_ms),
@@ -365,6 +516,56 @@ async fn emit_turn_completed_with_status(
         .await;
 }

+async fn complete_file_change_item(
+    conversation_id: ConversationId,
+    item_id: String,
+    changes: Vec<FileUpdateChange>,
+    status: PatchApplyStatus,
+    outgoing: &OutgoingMessageSender,
+    turn_summary_store: &TurnSummaryStore,
+) {
+    {
+        let mut map = turn_summary_store.lock().await;
+        if let Some(summary) = map.get_mut(&conversation_id) {
+            summary.file_change_started.remove(&item_id);
+        }
+    }
+
+    let item = ThreadItem::FileChange {
+        id: item_id,
+        changes,
+        status,
+    };
+    let notification = ItemCompletedNotification { item };
+    outgoing
+        .send_server_notification(ServerNotification::ItemCompleted(notification))
+        .await;
+}
+
+async fn complete_command_execution_item(
+    item_id: String,
+    command: String,
+    cwd: PathBuf,
+    command_actions: Vec<V2ParsedCommand>,
+    status: CommandExecutionStatus,
+    outgoing: &OutgoingMessageSender,
+) {
+    let item = ThreadItem::CommandExecution {
+        id: item_id,
+        command,
+        cwd,
+        status,
+        command_actions,
+        aggregated_output: None,
+        exit_code: None,
+        duration_ms: None,
+    };
+    let notification = ItemCompletedNotification { item };
+    outgoing
+        .send_server_notification(ServerNotification::ItemCompleted(notification))
+        .await;
+}
```
async fn find_and_remove_turn_summary(
|
||||
conversation_id: ConversationId,
|
||||
turn_summary_store: &TurnSummaryStore,
|
||||
@@ -381,10 +582,8 @@ async fn handle_turn_complete(
|
||||
) {
|
||||
let turn_summary = find_and_remove_turn_summary(conversation_id, turn_summary_store).await;
|
||||
|
||||
let status = if let Some(message) = turn_summary.last_error_message {
|
||||
TurnStatus::Failed {
|
||||
error: TurnError { message },
|
||||
}
|
||||
let status = if let Some(error) = turn_summary.last_error {
|
||||
TurnStatus::Failed { error }
|
||||
} else {
|
||||
TurnStatus::Completed
|
||||
};
|
||||
@@ -405,11 +604,11 @@ async fn handle_turn_interrupted(
|
||||
|
||||
async fn handle_error(
|
||||
conversation_id: ConversationId,
|
||||
message: String,
|
||||
error: TurnError,
|
||||
turn_summary_store: &TurnSummaryStore,
|
||||
) {
|
||||
let mut map = turn_summary_store.lock().await;
|
||||
map.entry(conversation_id).or_default().last_error_message = Some(message);
|
||||
map.entry(conversation_id).or_default().last_error = Some(error);
|
||||
}
|
||||
|
||||
async fn on_patch_approval_response(
|
||||
@@ -512,42 +711,172 @@ fn render_review_output_text(output: &ReviewOutputEvent) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
async fn on_command_execution_request_approval_response(
|
||||
fn convert_patch_changes(changes: &HashMap<PathBuf, CoreFileChange>) -> Vec<FileUpdateChange> {
|
||||
let mut converted: Vec<FileUpdateChange> = changes
|
||||
.iter()
|
||||
.map(|(path, change)| FileUpdateChange {
|
||||
path: path.to_string_lossy().into_owned(),
|
||||
kind: map_patch_change_kind(change),
|
||||
diff: format_file_change_diff(change),
|
||||
})
|
||||
.collect();
|
||||
converted.sort_by(|a, b| a.path.cmp(&b.path));
|
||||
converted
|
||||
}
|
||||
|
||||
fn map_patch_change_kind(change: &CoreFileChange) -> V2PatchChangeKind {
|
||||
match change {
|
||||
CoreFileChange::Add { .. } => V2PatchChangeKind::Add,
|
||||
CoreFileChange::Delete { .. } => V2PatchChangeKind::Delete,
|
||||
CoreFileChange::Update { move_path, .. } => V2PatchChangeKind::Update {
|
||||
move_path: move_path.clone(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn format_file_change_diff(change: &CoreFileChange) -> String {
|
||||
match change {
|
||||
CoreFileChange::Add { content } => content.clone(),
|
||||
CoreFileChange::Delete { content } => content.clone(),
|
||||
CoreFileChange::Update {
|
||||
unified_diff,
|
||||
move_path,
|
||||
} => {
|
||||
if let Some(path) = move_path {
|
||||
format!("{unified_diff}\n\nMoved to: {}", path.display())
|
||||
} else {
|
||||
unified_diff.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn on_file_change_request_approval_response(
|
||||
event_id: String,
|
||||
conversation_id: ConversationId,
|
||||
item_id: String,
|
||||
changes: Vec<FileUpdateChange>,
|
||||
receiver: oneshot::Receiver<JsonValue>,
|
||||
conversation: Arc<CodexConversation>,
|
||||
codex: Arc<CodexConversation>,
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
turn_summary_store: TurnSummaryStore,
|
||||
) {
|
||||
let response = receiver.await;
|
||||
let value = match response {
|
||||
Ok(value) => value,
|
||||
let (decision, completion_status) = match response {
|
||||
Ok(value) => {
|
||||
let response = serde_json::from_value::<FileChangeRequestApprovalResponse>(value)
|
||||
.unwrap_or_else(|err| {
|
||||
error!("failed to deserialize FileChangeRequestApprovalResponse: {err}");
|
||||
FileChangeRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Decline,
|
||||
}
|
||||
});
|
||||
|
||||
let (decision, completion_status) = match response.decision {
|
||||
ApprovalDecision::Accept => (ReviewDecision::Approved, None),
|
||||
ApprovalDecision::Decline => {
|
||||
(ReviewDecision::Denied, Some(PatchApplyStatus::Declined))
|
||||
}
|
||||
ApprovalDecision::Cancel => {
|
||||
(ReviewDecision::Abort, Some(PatchApplyStatus::Declined))
|
||||
}
|
||||
};
|
||||
// Allow EventMsg::PatchApplyEnd to emit ItemCompleted for accepted patches.
|
||||
// Only short-circuit on declines/cancels/failures.
|
||||
(decision, completion_status)
|
||||
}
|
||||
Err(err) => {
|
||||
error!("request failed: {err:?}");
|
||||
return;
|
||||
(ReviewDecision::Denied, Some(PatchApplyStatus::Failed))
|
||||
}
|
||||
};
|
||||
|
||||
let response = serde_json::from_value::<CommandExecutionRequestApprovalResponse>(value)
|
||||
.unwrap_or_else(|err| {
|
||||
error!("failed to deserialize CommandExecutionRequestApprovalResponse: {err}");
|
||||
CommandExecutionRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Decline,
|
||||
accept_settings: None,
|
||||
}
|
||||
});
|
||||
if let Some(status) = completion_status {
|
||||
complete_file_change_item(
|
||||
conversation_id,
|
||||
item_id,
|
||||
changes,
|
||||
status,
|
||||
outgoing.as_ref(),
|
||||
&turn_summary_store,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
let CommandExecutionRequestApprovalResponse {
|
||||
decision,
|
||||
accept_settings,
|
||||
} = response;
|
||||
if let Err(err) = codex
|
||||
.submit(Op::PatchApproval {
|
||||
id: event_id,
|
||||
decision,
|
||||
})
|
||||
.await
|
||||
{
|
||||
error!("failed to submit PatchApproval: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
let decision = match (decision, accept_settings) {
|
||||
(ApprovalDecision::Accept, Some(settings)) if settings.for_session => {
|
||||
ReviewDecision::ApprovedForSession
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn on_command_execution_request_approval_response(
|
||||
event_id: String,
|
||||
item_id: String,
|
||||
command: String,
|
||||
cwd: PathBuf,
|
||||
command_actions: Vec<V2ParsedCommand>,
|
||||
receiver: oneshot::Receiver<JsonValue>,
|
||||
conversation: Arc<CodexConversation>,
|
||||
outgoing: Arc<OutgoingMessageSender>,
|
||||
) {
|
||||
let response = receiver.await;
|
||||
let (decision, completion_status) = match response {
|
||||
Ok(value) => {
|
||||
let response = serde_json::from_value::<CommandExecutionRequestApprovalResponse>(value)
|
||||
.unwrap_or_else(|err| {
|
||||
error!("failed to deserialize CommandExecutionRequestApprovalResponse: {err}");
|
||||
CommandExecutionRequestApprovalResponse {
|
||||
decision: ApprovalDecision::Decline,
|
||||
accept_settings: None,
|
||||
}
|
||||
});
|
||||
|
||||
let CommandExecutionRequestApprovalResponse {
|
||||
decision,
|
||||
accept_settings,
|
||||
} = response;
|
||||
|
||||
let (decision, completion_status) = match (decision, accept_settings) {
|
||||
(ApprovalDecision::Accept, Some(settings)) if settings.for_session => {
|
||||
(ReviewDecision::ApprovedForSession, None)
|
||||
}
|
||||
(ApprovalDecision::Accept, _) => (ReviewDecision::Approved, None),
|
||||
(ApprovalDecision::Decline, _) => (
|
||||
ReviewDecision::Denied,
|
||||
Some(CommandExecutionStatus::Declined),
|
||||
),
|
||||
(ApprovalDecision::Cancel, _) => (
|
||||
ReviewDecision::Abort,
|
||||
Some(CommandExecutionStatus::Declined),
|
||||
),
|
||||
};
|
||||
(decision, completion_status)
|
||||
}
|
||||
Err(err) => {
|
||||
error!("request failed: {err:?}");
|
||||
(ReviewDecision::Denied, Some(CommandExecutionStatus::Failed))
|
||||
}
|
||||
(ApprovalDecision::Accept, _) => ReviewDecision::Approved,
|
||||
(ApprovalDecision::Decline, _) => ReviewDecision::Denied,
|
||||
(ApprovalDecision::Cancel, _) => ReviewDecision::Abort,
|
||||
};
|
||||
|
||||
if let Some(status) = completion_status {
|
||||
complete_command_execution_item(
|
||||
item_id.clone(),
|
||||
command.clone(),
|
||||
cwd.clone(),
|
||||
command_actions.clone(),
|
||||
status,
|
||||
outgoing.as_ref(),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
if let Err(err) = conversation
|
||||
.submit(Op::ExecApproval {
|
||||
id: event_id,
|
||||
@@ -642,10 +971,24 @@ mod tests {
        let conversation_id = ConversationId::new();
        let turn_summary_store = new_turn_summary_store();

        handle_error(conversation_id, "boom".to_string(), &turn_summary_store).await;
        handle_error(
            conversation_id,
            TurnError {
                message: "boom".to_string(),
                codex_error_info: Some(V2CodexErrorInfo::InternalServerError),
            },
            &turn_summary_store,
        )
        .await;

        let turn_summary = find_and_remove_turn_summary(conversation_id, &turn_summary_store).await;
        assert_eq!(turn_summary.last_error_message, Some("boom".to_string()));
        assert_eq!(
            turn_summary.last_error,
            Some(TurnError {
                message: "boom".to_string(),
                codex_error_info: Some(V2CodexErrorInfo::InternalServerError),
            })
        );
        Ok(())
    }

@@ -685,7 +1028,15 @@ mod tests {
        let conversation_id = ConversationId::new();
        let event_id = "interrupt1".to_string();
        let turn_summary_store = new_turn_summary_store();
        handle_error(conversation_id, "oops".to_string(), &turn_summary_store).await;
        handle_error(
            conversation_id,
            TurnError {
                message: "oops".to_string(),
                codex_error_info: None,
            },
            &turn_summary_store,
        )
        .await;
        let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
        let outgoing = Arc::new(OutgoingMessageSender::new(tx));

@@ -717,7 +1068,15 @@ mod tests {
        let conversation_id = ConversationId::new();
        let event_id = "complete_err1".to_string();
        let turn_summary_store = new_turn_summary_store();
        handle_error(conversation_id, "bad".to_string(), &turn_summary_store).await;
        handle_error(
            conversation_id,
            TurnError {
                message: "bad".to_string(),
                codex_error_info: Some(V2CodexErrorInfo::Other),
            },
            &turn_summary_store,
        )
        .await;
        let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
        let outgoing = Arc::new(OutgoingMessageSender::new(tx));

@@ -741,6 +1100,7 @@ mod tests {
            TurnStatus::Failed {
                error: TurnError {
                    message: "bad".to_string(),
                    codex_error_info: Some(V2CodexErrorInfo::Other),
                }
            }
        );
@@ -791,7 +1151,15 @@ mod tests {

        // Turn 1 on conversation A
        let a_turn1 = "a_turn1".to_string();
        handle_error(conversation_a, "a1".to_string(), &turn_summary_store).await;
        handle_error(
            conversation_a,
            TurnError {
                message: "a1".to_string(),
                codex_error_info: Some(V2CodexErrorInfo::BadRequest),
            },
            &turn_summary_store,
        )
        .await;
        handle_turn_complete(
            conversation_a,
            a_turn1.clone(),
@@ -802,7 +1170,15 @@ mod tests {

        // Turn 1 on conversation B
        let b_turn1 = "b_turn1".to_string();
        handle_error(conversation_b, "b1".to_string(), &turn_summary_store).await;
        handle_error(
            conversation_b,
            TurnError {
                message: "b1".to_string(),
                codex_error_info: None,
            },
            &turn_summary_store,
        )
        .await;
        handle_turn_complete(
            conversation_b,
            b_turn1.clone(),
@@ -834,6 +1210,7 @@ mod tests {
            TurnStatus::Failed {
                error: TurnError {
                    message: "a1".to_string(),
                    codex_error_info: Some(V2CodexErrorInfo::BadRequest),
                }
            }
        );
@@ -854,6 +1231,7 @@ mod tests {
            TurnStatus::Failed {
                error: TurnError {
                    message: "b1".to_string(),
                    codex_error_info: None,
                }
            }
        );

@@ -83,6 +83,7 @@ use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStartedNotification;
use codex_app_server_protocol::Turn;
use codex_app_server_protocol::TurnError;
use codex_app_server_protocol::TurnInterruptParams;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
@@ -91,7 +92,6 @@ use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInfoResponse;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_app_server_protocol::UserSavedConfig;
use codex_app_server_protocol::WindowsWorldWritableWarningNotification;
use codex_app_server_protocol::build_turns_from_event_msgs;
use codex_backend_client::Client as BackendClient;
use codex_core::AuthManager;
@@ -139,6 +139,7 @@ use codex_protocol::protocol::USER_MESSAGE_BEGIN;
use codex_protocol::user_input::UserInput as CoreInputItem;
use codex_utils_json_to_toml::json_to_toml;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ffi::OsStr;
use std::io::Error as IoError;
use std::path::Path;
@@ -161,7 +162,8 @@ pub(crate) type PendingInterrupts = Arc<Mutex<HashMap<ConversationId, PendingInt
/// Per-conversation accumulation of the latest states e.g. error message while a turn runs.
#[derive(Default, Clone)]
pub(crate) struct TurnSummary {
    pub(crate) last_error_message: Option<String>,
    pub(crate) file_change_started: HashSet<String>,
    pub(crate) last_error: Option<TurnError>,
}

pub(crate) type TurnSummaryStore = Arc<Mutex<HashMap<ConversationId, TurnSummary>>>;
@@ -1168,7 +1170,7 @@ impl CodexMessageProcessor {
        let exec_params = ExecParams {
            command: params.command,
            cwd,
            timeout_ms,
            expiration: timeout_ms.into(),
            env,
            with_escalated_permissions: None,
            justification: None,
@@ -1274,10 +1276,6 @@ impl CodexMessageProcessor {
                return;
            }
        };
        if cfg!(windows) && config.features.enabled(Feature::WindowsSandbox) {
            self.handle_windows_world_writable_warning(config.cwd.clone())
                .await;
        }

        match self.conversation_manager.new_conversation(config).await {
            Ok(conversation_id) => {
@@ -1997,10 +1995,6 @@ impl CodexMessageProcessor {
                return;
            }
        };
        if cfg!(windows) && config.features.enabled(Feature::WindowsSandbox) {
            self.handle_windows_world_writable_warning(config.cwd.clone())
                .await;
        }

        let conversation_history = if let Some(path) = path {
            match RolloutRecorder::get_rollout_history(&path).await {
@@ -2859,53 +2853,6 @@ impl CodexMessageProcessor {
            Err(_) => None,
        }
    }

    /// On Windows, when using the experimental sandbox, we need to warn the user about world-writable directories.
    async fn handle_windows_world_writable_warning(&self, cwd: PathBuf) {
        if !cfg!(windows) {
            return;
        }

        if !self.config.features.enabled(Feature::WindowsSandbox) {
            return;
        }

        if !matches!(
            self.config.sandbox_policy,
            codex_protocol::protocol::SandboxPolicy::WorkspaceWrite { .. }
                | codex_protocol::protocol::SandboxPolicy::ReadOnly
        ) {
            return;
        }

        if self
            .config
            .notices
            .hide_world_writable_warning
            .unwrap_or(false)
        {
            return;
        }

        // This function is stubbed out to return None on non-Windows platforms
        if let Some((sample_paths, extra_count, failed_scan)) =
            codex_windows_sandbox::world_writable_warning_details(
                self.config.codex_home.as_path(),
                cwd,
            )
        {
            tracing::warn!("world writable warning: {sample_paths:?} {extra_count} {failed_scan}");
            self.outgoing
                .send_server_notification(ServerNotification::WindowsWorldWritableWarning(
                    WindowsWorldWritableWarningNotification {
                        sample_paths,
                        extra_count,
                        failed_scan,
                    },
                ))
                .await;
        }
    }
}

async fn derive_config_from_params(

@@ -24,3 +24,5 @@ tokio = { workspace = true, features = [
] }
uuid = { workspace = true }
wiremock = { workspace = true }
core_test_support = { path = "../../../core/tests/common" }
shlex = { workspace = true }

@@ -9,12 +9,14 @@ pub use auth_fixtures::ChatGptIdTokenClaims;
pub use auth_fixtures::encode_id_token;
pub use auth_fixtures::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
pub use core_test_support::format_with_current_shell;
pub use core_test_support::format_with_current_shell_display;
pub use mcp_process::McpProcess;
pub use mock_model_server::create_mock_chat_completions_server;
pub use mock_model_server::create_mock_chat_completions_server_unchecked;
pub use responses::create_apply_patch_sse_response;
pub use responses::create_final_assistant_message_sse_response;
pub use responses::create_shell_sse_response;
pub use responses::create_shell_command_sse_response;
pub use rollout::create_fake_rollout;
use serde::de::DeserializeOwned;

@@ -1,17 +1,18 @@
use serde_json::json;
use std::path::Path;

pub fn create_shell_sse_response(
pub fn create_shell_command_sse_response(
    command: Vec<String>,
    workdir: Option<&Path>,
    timeout_ms: Option<u64>,
    call_id: &str,
) -> anyhow::Result<String> {
    // The `arguments`` for the `shell` tool is a serialized JSON object.
    // The `arguments` for the `shell_command` tool is a serialized JSON object.
    let command_str = shlex::try_join(command.iter().map(String::as_str))?;
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": command,
        "command": command_str,
        "workdir": workdir.map(|w| w.to_string_lossy()),
        "timeout": timeout_ms
        "timeout_ms": timeout_ms
    }))?;
    let tool_call = json!({
        "choices": [
@@ -21,7 +22,7 @@ pub fn create_shell_sse_response(
                {
                    "id": call_id,
                    "function": {
                        "name": "shell",
                        "name": "shell_command",
                        "arguments": tool_call_arguments
                    }
                }
@@ -62,10 +63,10 @@ pub fn create_apply_patch_sse_response(
    patch_content: &str,
    call_id: &str,
) -> anyhow::Result<String> {
    // Use shell command to call apply_patch with heredoc format
    let shell_command = format!("apply_patch <<'EOF'\n{patch_content}\nEOF");
    // Use shell_command to call apply_patch with heredoc format
    let command = format!("apply_patch <<'EOF'\n{patch_content}\nEOF");
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": ["bash", "-lc", shell_command]
        "command": command
    }))?;

    let tool_call = json!({
@@ -76,7 +77,7 @@ pub fn create_apply_patch_sse_response(
                {
                    "id": call_id,
                    "function": {
                        "name": "shell",
                        "name": "shell_command",
                        "arguments": tool_call_arguments
                    }
                }

@@ -2,7 +2,8 @@ use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_shell_sse_response;
use app_test_support::create_shell_command_sse_response;
use app_test_support::format_with_current_shell;
use app_test_support::to_response;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AddConversationSubscriptionResponse;
@@ -56,7 +57,7 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
    // Create a mock model server that immediately ends each turn.
    // Two turns are expected: initial session configure + one user message.
    let responses = vec![
        create_shell_sse_response(
        create_shell_command_sse_response(
            vec!["ls".to_string()],
            Some(&working_directory),
            Some(5000),
@@ -175,7 +176,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {

    // Mock server will request a python shell call for the first and second turn, then finish.
    let responses = vec![
        create_shell_sse_response(
        create_shell_command_sse_response(
            vec![
                "python3".to_string(),
                "-c".to_string(),
@@ -186,7 +187,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
            "call1",
        )?,
        create_final_assistant_message_sse_response("done 1")?,
        create_shell_sse_response(
        create_shell_command_sse_response(
            vec![
                "python3".to_string(),
                "-c".to_string(),
@@ -267,11 +268,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
        ExecCommandApprovalParams {
            conversation_id,
            call_id: "call1".to_string(),
            command: vec![
                "python3".to_string(),
                "-c".to_string(),
                "print(42)".to_string(),
            ],
            command: format_with_current_shell("python3 -c 'print(42)'"),
            cwd: working_directory.clone(),
            reason: None,
            risk: None,
@@ -353,23 +350,15 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
    std::fs::create_dir(&second_cwd)?;

    let responses = vec![
        create_shell_sse_response(
            vec![
                "bash".to_string(),
                "-lc".to_string(),
                "echo first turn".to_string(),
            ],
        create_shell_command_sse_response(
            vec!["echo".to_string(), "first".to_string(), "turn".to_string()],
            None,
            Some(5000),
            "call-first",
        )?,
        create_final_assistant_message_sse_response("done first")?,
        create_shell_sse_response(
            vec![
                "bash".to_string(),
                "-lc".to_string(),
                "echo second turn".to_string(),
            ],
        create_shell_command_sse_response(
            vec!["echo".to_string(), "second".to_string(), "turn".to_string()],
            None,
            Some(5000),
            "call-second",
@@ -481,13 +470,9 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
        exec_begin.cwd, second_cwd,
        "exec turn should run from updated cwd"
    );
    let expected_command = format_with_current_shell("echo second turn");
    assert_eq!(
        exec_begin.command,
        vec![
            "bash".to_string(),
            "-lc".to_string(),
            "echo second turn".to_string()
        ],
        exec_begin.command, expected_command,
        "exec turn should run expected command"
    );

@@ -19,7 +19,7 @@ use tokio::time::timeout;

use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_shell_sse_response;
use app_test_support::create_shell_command_sse_response;
use app_test_support::to_response;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
@@ -56,7 +56,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
    std::fs::create_dir(&working_directory)?;

    // Create mock server with a single SSE response: the long sleep command
    let server = create_mock_chat_completions_server(vec![create_shell_sse_response(
    let server = create_mock_chat_completions_server(vec![create_shell_command_sse_response(
        shell_command.clone(),
        Some(&working_directory),
        Some(10_000), // 10 seconds timeout in ms

@@ -3,7 +3,7 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_shell_sse_response;
use app_test_support::create_shell_command_sse_response;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
@@ -41,7 +41,7 @@ async fn turn_interrupt_aborts_running_turn() -> Result<()> {
    std::fs::create_dir(&working_directory)?;

    // Mock server: long-running shell command then (after abort) nothing else needed.
    let server = create_mock_chat_completions_server(vec![create_shell_sse_response(
    let server = create_mock_chat_completions_server(vec![create_shell_command_sse_response(
        shell_command.clone(),
        Some(&working_directory),
        Some(10_000),

@@ -1,14 +1,22 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_apply_patch_sse_response;
use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_mock_chat_completions_server_unchecked;
use app_test_support::create_shell_sse_response;
use app_test_support::create_shell_command_sse_response;
use app_test_support::format_with_current_shell_display;
use app_test_support::to_response;
use codex_app_server_protocol::ApprovalDecision;
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
use codex_app_server_protocol::CommandExecutionStatus;
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
use codex_app_server_protocol::ItemCompletedNotification;
use codex_app_server_protocol::ItemStartedNotification;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::PatchApplyStatus;
use codex_app_server_protocol::PatchChangeKind;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ThreadItem;
@@ -197,7 +205,7 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
    // Mock server: first turn requests a shell call (elicitation), then completes.
    // Second turn same, but we'll set approval_policy=never to avoid elicitation.
    let responses = vec![
        create_shell_sse_response(
        create_shell_command_sse_response(
            vec![
                "python3".to_string(),
                "-c".to_string(),
@@ -208,7 +216,7 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
            "call1",
        )?,
        create_final_assistant_message_sse_response("done 1")?,
        create_shell_sse_response(
        create_shell_command_sse_response(
            vec![
                "python3".to_string(),
                "-c".to_string(),
@@ -322,6 +330,145 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn turn_start_exec_approval_decline_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let tmp = TempDir::new()?;
    let codex_home = tmp.path().to_path_buf();
    let workspace = tmp.path().join("workspace");
    std::fs::create_dir(&workspace)?;

    let responses = vec![
        create_shell_command_sse_response(
            vec![
                "python3".to_string(),
                "-c".to_string(),
                "print(42)".to_string(),
            ],
            None,
            Some(5000),
            "call-decline",
        )?,
        create_final_assistant_message_sse_response("done")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    create_config_toml(codex_home.as_path(), &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(codex_home.as_path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run python".to_string(),
            }],
            cwd: Some(workspace.clone()),
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;

    let started_command_execution = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let started_notif = mcp
                .read_stream_until_notification_message("item/started")
                .await?;
            let started: ItemStartedNotification =
                serde_json::from_value(started_notif.params.clone().expect("item/started params"))?;
            if let ThreadItem::CommandExecution { .. } = started.item {
                return Ok::<ThreadItem, anyhow::Error>(started.item);
            }
        }
    })
    .await??;
    let ThreadItem::CommandExecution { id, status, .. } = started_command_execution else {
        unreachable!("loop ensures we break on command execution items");
    };
    assert_eq!(id, "call-decline");
    assert_eq!(status, CommandExecutionStatus::InProgress);

    let server_req = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::CommandExecutionRequestApproval { request_id, params } = server_req else {
        panic!("expected CommandExecutionRequestApproval request")
    };
    assert_eq!(params.item_id, "call-decline");
    assert_eq!(params.thread_id, thread.id);
    assert_eq!(params.turn_id, turn.id);

    mcp.send_response(
        request_id,
        serde_json::to_value(CommandExecutionRequestApprovalResponse {
            decision: ApprovalDecision::Decline,
            accept_settings: None,
        })?,
    )
    .await?;

    let completed_command_execution = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let completed_notif = mcp
                .read_stream_until_notification_message("item/completed")
                .await?;
            let completed: ItemCompletedNotification = serde_json::from_value(
                completed_notif
                    .params
                    .clone()
                    .expect("item/completed params"),
            )?;
            if let ThreadItem::CommandExecution { .. } = completed.item {
                return Ok::<ThreadItem, anyhow::Error>(completed.item);
            }
        }
    })
    .await??;
    let ThreadItem::CommandExecution {
        id,
        status,
        exit_code,
        aggregated_output,
        ..
    } = completed_command_execution
    else {
        unreachable!("loop ensures we break on command execution items");
    };
    assert_eq!(id, "call-decline");
    assert_eq!(status, CommandExecutionStatus::Declined);
    assert!(exit_code.is_none());
    assert!(aggregated_output.is_none());

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    Ok(())
}

#[tokio::test]
async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));
@@ -337,23 +484,15 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
    std::fs::create_dir(&second_cwd)?;

    let responses = vec![
        create_shell_sse_response(
            vec![
                "bash".to_string(),
                "-lc".to_string(),
                "echo first turn".to_string(),
            ],
        create_shell_command_sse_response(
            vec!["echo".to_string(), "first".to_string(), "turn".to_string()],
            None,
            Some(5000),
            "call-first",
        )?,
        create_final_assistant_message_sse_response("done first")?,
        create_shell_sse_response(
            vec![
                "bash".to_string(),
                "-lc".to_string(),
                "echo second turn".to_string(),
            ],
        create_shell_command_sse_response(
            vec!["echo".to_string(), "second".to_string(), "turn".to_string()],
            None,
            Some(5000),
            "call-second",
@@ -459,7 +598,8 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
        unreachable!("loop ensures we break on command execution items");
    };
    assert_eq!(cwd, second_cwd);
    assert_eq!(command, "bash -lc 'echo second turn'");
    let expected_command = format_with_current_shell_display("echo second turn");
    assert_eq!(command, expected_command);
    assert_eq!(status, CommandExecutionStatus::InProgress);

    timeout(
@@ -471,6 +611,308 @@ async fn turn_start_updates_sandbox_and_cwd_between_turns_v2() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn turn_start_file_change_approval_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));
    if cfg!(windows) {
        // TODO apply_patch approvals are not parsed from powershell commands yet
        return Ok(());
    }

    let tmp = TempDir::new()?;
    let codex_home = tmp.path().join("codex_home");
    std::fs::create_dir(&codex_home)?;
    let workspace = tmp.path().join("workspace");
    std::fs::create_dir(&workspace)?;

    let patch = r#"*** Begin Patch
*** Add File: README.md
+new line
*** End Patch
"#;
    let responses = vec![
        create_apply_patch_sse_response(patch, "patch-call")?,
        create_final_assistant_message_sse_response("patch applied")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let start_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            cwd: Some(workspace.to_string_lossy().into_owned()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "apply patch".into(),
            }],
            cwd: Some(workspace.clone()),
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;

    let started_file_change = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let started_notif = mcp
                .read_stream_until_notification_message("item/started")
                .await?;
            let started: ItemStartedNotification =
                serde_json::from_value(started_notif.params.clone().expect("item/started params"))?;
            if let ThreadItem::FileChange { .. } = started.item {
                return Ok::<ThreadItem, anyhow::Error>(started.item);
            }
        }
    })
    .await??;
    let ThreadItem::FileChange {
        ref id,
        status,
        ref changes,
    } = started_file_change
    else {
        unreachable!("loop ensures we break on file change items");
    };
    assert_eq!(id, "patch-call");
    assert_eq!(status, PatchApplyStatus::InProgress);
    let started_changes = changes.clone();

    let server_req = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::FileChangeRequestApproval { request_id, params } = server_req else {
        panic!("expected FileChangeRequestApproval request")
    };
    assert_eq!(params.item_id, "patch-call");
    assert_eq!(params.thread_id, thread.id);
    assert_eq!(params.turn_id, turn.id);
    let expected_readme_path = workspace.join("README.md");
    let expected_readme_path = expected_readme_path.to_string_lossy().into_owned();
    pretty_assertions::assert_eq!(
        started_changes,
        vec![codex_app_server_protocol::FileUpdateChange {
            path: expected_readme_path.clone(),
            kind: PatchChangeKind::Add,
            diff: "new line\n".to_string(),
        }]
    );

    mcp.send_response(
        request_id,
        serde_json::to_value(FileChangeRequestApprovalResponse {
            decision: ApprovalDecision::Accept,
        })?,
    )
    .await?;

    let completed_file_change = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let completed_notif = mcp
                .read_stream_until_notification_message("item/completed")
                .await?;
            let completed: ItemCompletedNotification = serde_json::from_value(
                completed_notif
                    .params
                    .clone()
                    .expect("item/completed params"),
            )?;
            if let ThreadItem::FileChange { .. } = completed.item {
                return Ok::<ThreadItem, anyhow::Error>(completed.item);
            }
        }
    })
    .await??;
    let ThreadItem::FileChange { ref id, status, .. } = completed_file_change else {
        unreachable!("loop ensures we break on file change items");
    };
    assert_eq!(id, "patch-call");
    assert_eq!(status, PatchApplyStatus::Completed);

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    let readme_contents = std::fs::read_to_string(expected_readme_path)?;
    assert_eq!(readme_contents, "new line\n");

    Ok(())
}

#[tokio::test]
async fn turn_start_file_change_approval_decline_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));
    if cfg!(windows) {
        // TODO apply_patch approvals are not parsed from powershell commands yet
        return Ok(());
    }

    let tmp = TempDir::new()?;
    let codex_home = tmp.path().join("codex_home");
    std::fs::create_dir(&codex_home)?;
    let workspace = tmp.path().join("workspace");
    std::fs::create_dir(&workspace)?;

    let patch = r#"*** Begin Patch
*** Add File: README.md
+new line
*** End Patch
"#;
    let responses = vec![
        create_apply_patch_sse_response(patch, "patch-call")?,
        create_final_assistant_message_sse_response("patch declined")?,
    ];
    let server = create_mock_chat_completions_server(responses).await;
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;

    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let start_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            cwd: Some(workspace.to_string_lossy().into_owned()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "apply patch".into(),
            }],
            cwd: Some(workspace.clone()),
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;

    let started_file_change = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let started_notif = mcp
                .read_stream_until_notification_message("item/started")
                .await?;
            let started: ItemStartedNotification =
                serde_json::from_value(started_notif.params.clone().expect("item/started params"))?;
            if let ThreadItem::FileChange { .. } = started.item {
                return Ok::<ThreadItem, anyhow::Error>(started.item);
            }
        }
    })
    .await??;
    let ThreadItem::FileChange {
        ref id,
        status,
        ref changes,
    } = started_file_change
    else {
        unreachable!("loop ensures we break on file change items");
    };
    assert_eq!(id, "patch-call");
    assert_eq!(status, PatchApplyStatus::InProgress);
    let started_changes = changes.clone();

    let server_req = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::FileChangeRequestApproval { request_id, params } = server_req else {
        panic!("expected FileChangeRequestApproval request")
    };
    assert_eq!(params.item_id, "patch-call");
    assert_eq!(params.thread_id, thread.id);
    assert_eq!(params.turn_id, turn.id);
    let expected_readme_path = workspace.join("README.md");
    let expected_readme_path_str = expected_readme_path.to_string_lossy().into_owned();
    pretty_assertions::assert_eq!(
        started_changes,
        vec![codex_app_server_protocol::FileUpdateChange {
            path: expected_readme_path_str.clone(),
            kind: PatchChangeKind::Add,
            diff: "new line\n".to_string(),
        }]
    );

    mcp.send_response(
        request_id,
        serde_json::to_value(FileChangeRequestApprovalResponse {
            decision: ApprovalDecision::Decline,
        })?,
    )
    .await?;

    let completed_file_change = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let completed_notif = mcp
                .read_stream_until_notification_message("item/completed")
                .await?;
            let completed: ItemCompletedNotification = serde_json::from_value(
                completed_notif
                    .params
                    .clone()
                    .expect("item/completed params"),
            )?;
            if let ThreadItem::FileChange { .. } = completed.item {
                return Ok::<ThreadItem, anyhow::Error>(completed.item);
            }
        }
    })
    .await??;
    let ThreadItem::FileChange { ref id, status, .. } = completed_file_change else {
        unreachable!("loop ensures we break on file change items");
    };
    assert_eq!(id, "patch-call");
    assert_eq!(status, PatchApplyStatus::Declined);

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),
    )
    .await??;

    assert!(
        !expected_readme_path.exists(),
        "declined patch should not be applied"
    );

    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(
    codex_home: &Path,

@@ -30,6 +30,7 @@ pub use standalone_executable::main;
pub const APPLY_PATCH_TOOL_INSTRUCTIONS: &str = include_str!("../apply_patch_tool_instructions.md");

const APPLY_PATCH_COMMANDS: [&str; 2] = ["apply_patch", "applypatch"];
const APPLY_PATCH_SHELLS: [&str; 3] = ["bash", "zsh", "sh"];

#[derive(Debug, Error, PartialEq)]
pub enum ApplyPatchError {
@@ -96,6 +97,13 @@ pub struct ApplyPatchArgs {
    pub workdir: Option<String>,
}

fn shell_supports_apply_patch(shell: &str) -> bool {
    std::path::Path::new(shell)
        .file_name()
        .and_then(|name| name.to_str())
        .is_some_and(|name| APPLY_PATCH_SHELLS.contains(&name))
}

pub fn maybe_parse_apply_patch(argv: &[String]) -> MaybeApplyPatch {
    match argv {
        // Direct invocation: apply_patch <patch>
@@ -104,7 +112,7 @@ pub fn maybe_parse_apply_patch(argv: &[String]) -> MaybeApplyPatch {
            Err(e) => MaybeApplyPatch::PatchParseError(e),
        },
        // Bash heredoc form: (optional `cd <path> &&`) apply_patch <<'EOF' ...
        [bash, flag, script] if bash == "bash" && flag == "-lc" => {
        [shell, flag, script] if shell_supports_apply_patch(shell) && flag == "-lc" => {
            match extract_apply_patch_from_bash(script) {
                Ok((body, workdir)) => match parse_patch(&body) {
                    Ok(mut source) => {
@@ -224,12 +232,12 @@ pub fn maybe_parse_apply_patch_verified(argv: &[String], cwd: &Path) -> MaybeApp
                );
            }
        }
        [bash, flag, script] if bash == "bash" && flag == "-lc" => {
            if parse_patch(script).is_ok() {
                return MaybeApplyPatchVerified::CorrectnessError(
                    ApplyPatchError::ImplicitInvocation,
                );
            }
        [shell, flag, script]
            if shell_supports_apply_patch(shell)
                && flag == "-lc"
                && parse_patch(script).is_ok() =>
        {
            return MaybeApplyPatchVerified::CorrectnessError(ApplyPatchError::ImplicitInvocation);
        }
        _ => {}
    }

@@ -26,6 +26,7 @@ codex-cloud-tasks = { path = "../cloud-tasks" }
codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-exec = { workspace = true }
codex-execpolicy = { workspace = true }
codex-login = { workspace = true }
codex-mcp-server = { workspace = true }
codex-process-hardening = { workspace = true }

@@ -18,6 +18,7 @@ use codex_cli::login::run_logout;
use codex_cloud_tasks::Cli as CloudTasksCli;
use codex_common::CliConfigOverrides;
use codex_exec::Cli as ExecCli;
use codex_execpolicy::ExecPolicyCheckCommand;
use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
use codex_tui::AppExitInfo;
use codex_tui::Cli as TuiCli;
@@ -93,6 +94,10 @@ enum Subcommand {
    #[clap(visible_alias = "debug")]
    Sandbox(SandboxArgs),

    /// Execpolicy tooling.
    #[clap(hide = true)]
    Execpolicy(ExecpolicyCommand),

    /// Apply the latest diff produced by Codex agent as a `git apply` to your local working tree.
    #[clap(visible_alias = "a")]
    Apply(ApplyCommand),
@@ -162,6 +167,19 @@ enum SandboxCommand {
    Windows(WindowsCommand),
}

#[derive(Debug, Parser)]
struct ExecpolicyCommand {
    #[command(subcommand)]
    sub: ExecpolicySubcommand,
}

#[derive(Debug, clap::Subcommand)]
enum ExecpolicySubcommand {
    /// Check execpolicy files against a command.
    #[clap(name = "check")]
    Check(ExecPolicyCheckCommand),
}

#[derive(Debug, Parser)]
struct LoginCommand {
    #[clap(skip)]
@@ -327,6 +345,10 @@ fn run_update_action(action: UpdateAction) -> anyhow::Result<()> {
    Ok(())
}

fn run_execpolicycheck(cmd: ExecPolicyCheckCommand) -> anyhow::Result<()> {
    cmd.run()
}

#[derive(Debug, Default, Parser, Clone)]
struct FeatureToggles {
    /// Enable a feature (repeatable). Equivalent to `-c features.<name>=true`.
@@ -549,6 +571,9 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
                .await?;
            }
        },
        Some(Subcommand::Execpolicy(ExecpolicyCommand { sub })) => match sub {
            ExecpolicySubcommand::Check(cmd) => run_execpolicycheck(cmd)?,
        },
        Some(Subcommand::Apply(mut apply_cli)) => {
            prepend_config_flags(
                &mut apply_cli.config_overrides,

@@ -79,6 +79,7 @@ pub struct GetArgs {
}

#[derive(Debug, clap::Parser)]
#[command(override_usage = "codex mcp add [OPTIONS] <NAME> (--url <URL> | -- <COMMAND>...)")]
pub struct AddArgs {
    /// Name for the MCP server configuration.
    pub name: String,

58
codex-rs/cli/tests/execpolicy.rs
Normal file
@@ -0,0 +1,58 @@
use std::fs;

use assert_cmd::Command;
use pretty_assertions::assert_eq;
use serde_json::json;
use tempfile::TempDir;

#[test]
fn execpolicy_check_matches_expected_json() -> Result<(), Box<dyn std::error::Error>> {
    let codex_home = TempDir::new()?;
    let policy_path = codex_home.path().join("policy.codexpolicy");
    fs::write(
        &policy_path,
        r#"
prefix_rule(
    pattern = ["git", "push"],
    decision = "forbidden",
)
"#,
    )?;

    let output = Command::cargo_bin("codex")?
        .env("CODEX_HOME", codex_home.path())
        .args([
            "execpolicy",
            "check",
            "--policy",
            policy_path
                .to_str()
                .expect("policy path should be valid UTF-8"),
            "git",
            "push",
            "origin",
            "main",
        ])
        .output()?;

    assert!(output.status.success());
    let result: serde_json::Value = serde_json::from_slice(&output.stdout)?;
    assert_eq!(
        result,
        json!({
            "match": {
                "decision": "forbidden",
                "matchedRules": [
                    {
                        "prefixRuleMatch": {
                            "matchedPrefix": ["git", "push"],
                            "decision": "forbidden"
                        }
                    }
                ]
            }
        })
    );

    Ok(())
}
@@ -15,13 +15,12 @@ pub fn create_config_summary_entries(config: &Config) -> Vec<(&'static str, Stri
    if config.model_provider.wire_api == WireApi::Responses
        && config.model_family.supports_reasoning_summaries
    {
        entries.push((
            "reasoning effort",
            config
                .model_reasoning_effort
                .map(|effort| effort.to_string())
                .unwrap_or_else(|| "none".to_string()),
        ));
        let reasoning_effort = config
            .model_reasoning_effort
            .or(config.model_family.default_reasoning_effort)
            .map(|effort| effort.to_string())
            .unwrap_or_else(|| "none".to_string());
        entries.push(("reasoning effort", reasoning_effort));
        entries.push((
            "reasoning summaries",
            config.model_reasoning_summary.to_string(),

@@ -19,9 +19,11 @@ async-trait = { workspace = true }
base64 = { workspace = true }
bytes = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
chardetng = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-apply-patch = { workspace = true }
codex-async-utils = { workspace = true }
codex-execpolicy = { workspace = true }
codex-file-search = { workspace = true }
codex-git = { workspace = true }
codex-keyring-store = { workspace = true }
@@ -31,11 +33,11 @@ codex-rmcp-client = { workspace = true }
codex-utils-pty = { workspace = true }
codex-utils-readiness = { workspace = true }
codex-utils-string = { workspace = true }
codex-utils-tokenizer = { workspace = true }
codex-windows-sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
dirs = { workspace = true }
dunce = { workspace = true }
env-flags = { workspace = true }
encoding_rs = { workspace = true }
eventsource-stream = { workspace = true }
futures = { workspace = true }
http = { workspace = true }

@@ -100,7 +100,7 @@ pub fn extract_bash_command(command: &[String]) -> Option<(&str, &str)> {
    if !matches!(flag.as_str(), "-lc" | "-c")
        || !matches!(
            detect_shell_type(&PathBuf::from(shell)),
            Some(ShellType::Zsh) | Some(ShellType::Bash)
            Some(ShellType::Zsh) | Some(ShellType::Bash) | Some(ShellType::Sh)
        )
    {
        return None;

@@ -66,7 +66,6 @@ use crate::context_manager::ContextManager;
|
||||
use crate::environment_context::EnvironmentContext;
|
||||
use crate::error::CodexErr;
|
||||
use crate::error::Result as CodexResult;
|
||||
use crate::error::http_status_code_value;
|
||||
#[cfg(test)]
|
||||
use crate::exec::StreamOutput;
|
||||
use crate::mcp::auth::compute_auth_statuses;
|
||||
@@ -121,6 +120,7 @@ use crate::user_instructions::UserInstructions;
|
||||
use crate::user_notification::UserNotification;
|
||||
use crate::util::backoff;
|
||||
use codex_async_utils::OrCancelExt;
|
||||
use codex_execpolicy::Policy as ExecPolicy;
|
||||
use codex_otel::otel_event_manager::OtelEventManager;
|
||||
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
|
||||
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
|
||||
@@ -128,12 +128,11 @@ use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::FunctionCallOutputPayload;
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::CodexErrorInfo;
|
||||
use codex_protocol::protocol::InitialHistory;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use codex_utils_readiness::Readiness;
|
||||
use codex_utils_readiness::ReadinessFlag;
|
||||
use codex_utils_tokenizer::warm_model_cache;
|
||||
use reqwest::StatusCode;
|
||||
|
||||
/// The high-level interface to the Codex system.
|
||||
/// It operates as a queue pair where you send submissions and receive events.
|
||||
@@ -167,6 +166,10 @@ impl Codex {
|
||||
|
||||
let user_instructions = get_user_instructions(&config).await;
|
||||
|
||||
let exec_policy = crate::exec_policy::exec_policy_for(&config.features, &config.codex_home)
|
||||
.await
|
||||
.map_err(|err| CodexErr::Fatal(format!("failed to load execpolicy: {err}")))?;
|
||||
|
||||
let config = Arc::new(config);
|
||||
|
||||
let session_configuration = SessionConfiguration {
|
||||
@@ -183,6 +186,7 @@ impl Codex {
|
||||
cwd: config.cwd.clone(),
|
||||
original_config_do_not_use: Arc::clone(&config),
|
||||
features: config.features.clone(),
|
||||
exec_policy,
|
||||
session_source,
|
||||
};
|
||||
|
||||
@@ -280,6 +284,7 @@ pub(crate) struct TurnContext {
|
||||
pub(crate) final_output_json_schema: Option<Value>,
|
||||
pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
|
||||
pub(crate) tool_call_gate: Arc<ReadinessFlag>,
|
||||
pub(crate) exec_policy: Arc<ExecPolicy>,
|
||||
pub(crate) truncation_policy: TruncationPolicy,
|
||||
}
|
||||
|
||||
@@ -336,6 +341,8 @@ pub(crate) struct SessionConfiguration {
|
||||
|
||||
/// Set of feature flags for this session
|
||||
features: Features,
|
||||
/// Execpolicy policy, applied only when enabled by feature flag.
|
||||
exec_policy: Arc<ExecPolicy>,
|
||||
|
||||
// TODO(pakrym): Remove config from here
|
||||
original_config_do_not_use: Arc<Config>,
|
||||
@@ -436,6 +443,7 @@ impl Session {
|
||||
final_output_json_schema: None,
|
||||
codex_linux_sandbox_exe: config.codex_linux_sandbox_exe.clone(),
|
||||
tool_call_gate: Arc::new(ReadinessFlag::new()),
|
||||
exec_policy: session_configuration.exec_policy.clone(),
|
||||
truncation_policy: TruncationPolicy::new(&per_turn_config),
|
||||
}
|
||||
}
|
||||
@@ -484,7 +492,7 @@ impl Session {
        // - load history metadata
        let rollout_fut = RolloutRecorder::new(&config, rollout_params);

        let default_shell_fut = shell::default_user_shell();
        let default_shell = shell::default_user_shell();
        let history_meta_fut = crate::message_history::history_metadata(&config);
        let auth_statuses_fut = compute_auth_statuses(
            config.mcp_servers.iter(),

@@ -492,12 +500,8 @@ impl Session {
        );

        // Join all independent futures.
        let (rollout_recorder, default_shell, (history_log_id, history_entry_count), auth_statuses) = tokio::join!(
            rollout_fut,
            default_shell_fut,
            history_meta_fut,
            auth_statuses_fut
        );
        let (rollout_recorder, (history_log_id, history_entry_count), auth_statuses) =
            tokio::join!(rollout_fut, history_meta_fut, auth_statuses_fut);

        let rollout_recorder = rollout_recorder.map_err(|e| {
            error!("failed to initialize rollout recorder: {e:#}");

@@ -539,7 +543,6 @@ impl Session {
            config.model_reasoning_effort,
            config.model_reasoning_summary,
            config.model_context_window,
            config.model_max_output_tokens,
            config.model_auto_compact_token_limit,
            config.approval_policy,
            config.sandbox_policy.clone(),

@@ -550,9 +553,6 @@ impl Session {
        // Create the mutable state for the Session.
        let state = SessionState::new(session_configuration.clone());

        // Warm the tokenizer cache for the session model without blocking startup.
        warm_model_cache(&session_configuration.model);

        let services = SessionServices {
            mcp_connection_manager: Arc::new(RwLock::new(McpConnectionManager::default())),
            mcp_startup_cancellation_token: CancellationToken::new(),
@@ -910,6 +910,7 @@ impl Session {

        let event = EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
            call_id,
            turn_id: turn_context.sub_id.clone(),
            changes,
            reason,
            grant_root,

@@ -1047,7 +1048,7 @@ impl Session {
            Some(turn_context.cwd.clone()),
            Some(turn_context.approval_policy),
            Some(turn_context.sandbox_policy.clone()),
            Some(self.user_shell().clone()),
            self.user_shell().clone(),
        )));
        items
    }

@@ -1187,11 +1188,14 @@ impl Session {
        &self,
        turn_context: &TurnContext,
        message: impl Into<String>,
        http_status_code: Option<StatusCode>,
        codex_error: CodexErr,
    ) {
        let codex_error_info = CodexErrorInfo::ResponseStreamDisconnected {
            http_status_code: codex_error.http_status_code_value(),
        };
        let event = EventMsg::StreamError(StreamErrorEvent {
            message: message.into(),
            http_status_code: http_status_code_value(http_status_code),
            codex_error_info: Some(codex_error_info),
        });
        self.send_event(turn_context, event).await;
    }

@@ -1437,6 +1441,7 @@ mod handlers {
    use crate::tasks::UndoTask;
    use crate::tasks::UserShellCommandTask;
    use codex_protocol::custom_prompts::CustomPrompt;
    use codex_protocol::protocol::CodexErrorInfo;
    use codex_protocol::protocol::ErrorEvent;
    use codex_protocol::protocol::Event;
    use codex_protocol::protocol::EventMsg;

@@ -1683,7 +1688,7 @@ mod handlers {
            id: sub_id.clone(),
            msg: EventMsg::Error(ErrorEvent {
                message: "Failed to shutdown rollout recorder".to_string(),
                http_status_code: None,
                codex_error_info: Some(CodexErrorInfo::Other),
            }),
        };
        sess.send_event_raw(event).await;
@@ -1789,6 +1794,7 @@ async fn spawn_review_thread(
        final_output_json_schema: None,
        codex_linux_sandbox_exe: parent_turn_context.codex_linux_sandbox_exe.clone(),
        tool_call_gate: Arc::new(ReadinessFlag::new()),
        exec_policy: parent_turn_context.exec_policy.clone(),
        truncation_policy: TruncationPolicy::new(&per_turn_config),
    };

@@ -1937,8 +1943,8 @@ pub(crate) async fn run_task(
            }
            Err(e) => {
                info!("Turn error: {e:#}");
                sess.send_event(&turn_context, EventMsg::Error(e.to_error_event(None)))
                    .await;
                let event = EventMsg::Error(e.to_error_event(None));
                sess.send_event(&turn_context, event).await;
                // let the user continue the conversation
                break;
            }

@@ -2062,7 +2068,7 @@ async fn run_turn(
            sess.notify_stream_error(
                &turn_context,
                format!("Reconnecting... {retries}/{max_retries}"),
                e.http_status_code(),
                e,
            )
            .await;
@@ -2381,6 +2387,7 @@ mod tests {
    use crate::config::ConfigOverrides;
    use crate::config::ConfigToml;
    use crate::exec::ExecToolCallOutput;
    use crate::shell::default_user_shell;
    use crate::tools::format_exec_output_str;

    use crate::protocol::CompactedItem;

@@ -2608,6 +2615,7 @@ mod tests {
            cwd: config.cwd.clone(),
            original_config_do_not_use: Arc::clone(&config),
            features: Features::default(),
            exec_policy: Arc::new(ExecPolicy::empty()),
            session_source: SessionSource::Exec,
        };

@@ -2619,7 +2627,7 @@ mod tests {
            unified_exec_manager: UnifiedExecSessionManager::default(),
            notifier: UserNotifier::new(None),
            rollout: Mutex::new(None),
            user_shell: shell::Shell::Unknown,
            user_shell: default_user_shell(),
            show_raw_agent_reasoning: config.show_raw_agent_reasoning,
            auth_manager: Arc::clone(&auth_manager),
            otel_event_manager: otel_event_manager.clone(),

@@ -2685,6 +2693,7 @@ mod tests {
            cwd: config.cwd.clone(),
            original_config_do_not_use: Arc::clone(&config),
            features: Features::default(),
            exec_policy: Arc::new(ExecPolicy::empty()),
            session_source: SessionSource::Exec,
        };

@@ -2696,7 +2705,7 @@ mod tests {
            unified_exec_manager: UnifiedExecSessionManager::default(),
            notifier: UserNotifier::new(None),
            rollout: Mutex::new(None),
            user_shell: shell::Shell::Unknown,
            user_shell: default_user_shell(),
            show_raw_agent_reasoning: config.show_raw_agent_reasoning,
            auth_manager: Arc::clone(&auth_manager),
            otel_event_manager: otel_event_manager.clone(),

@@ -3041,6 +3050,7 @@ mod tests {
        let session = Arc::new(session);
        let mut turn_context = Arc::new(turn_context_raw);

        let timeout_ms = 1000;
        let params = ExecParams {
            command: if cfg!(windows) {
                vec![

@@ -3056,7 +3066,7 @@ mod tests {
                ]
            },
            cwd: turn_context.cwd.clone(),
            timeout_ms: Some(1000),
            expiration: timeout_ms.into(),
            env: HashMap::new(),
            with_escalated_permissions: Some(true),
            justification: Some("test".to_string()),

@@ -3065,7 +3075,12 @@ mod tests {

        let params2 = ExecParams {
            with_escalated_permissions: Some(false),
            ..params.clone()
            command: params.command.clone(),
            cwd: params.cwd.clone(),
            expiration: timeout_ms.into(),
            env: HashMap::new(),
            justification: params.justification.clone(),
            arg0: None,
        };

        let turn_diff_tracker = Arc::new(tokio::sync::Mutex::new(TurnDiffTracker::new()));

@@ -3085,7 +3100,7 @@ mod tests {
            arguments: serde_json::json!({
                "command": params.command.clone(),
                "workdir": Some(turn_context.cwd.to_string_lossy().to_string()),
                "timeout_ms": params.timeout_ms,
                "timeout_ms": params.expiration.timeout_ms(),
                "with_escalated_permissions": params.with_escalated_permissions,
                "justification": params.justification.clone(),
            })

@@ -3122,7 +3137,7 @@ mod tests {
            arguments: serde_json::json!({
                "command": params2.command.clone(),
                "workdir": Some(turn_context.cwd.to_string_lossy().to_string()),
                "timeout_ms": params2.timeout_ms,
                "timeout_ms": params2.expiration.timeout_ms(),
                "with_escalated_permissions": params2.with_escalated_permissions,
                "justification": params2.justification.clone(),
            })
@@ -1,6 +1,8 @@
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;

use crate::sandboxing::SandboxPermissions;

use crate::bash::parse_shell_lc_plain_commands;
use crate::is_safe_command::is_known_safe_command;

@@ -8,7 +10,7 @@ pub fn requires_initial_appoval(
    policy: AskForApproval,
    sandbox_policy: &SandboxPolicy,
    command: &[String],
    with_escalated_permissions: bool,
    sandbox_permissions: SandboxPermissions,
) -> bool {
    if is_known_safe_command(command) {
        return false;

@@ -24,8 +26,7 @@ pub fn requires_initial_appoval(
    // In restricted sandboxes (ReadOnly/WorkspaceWrite), do not prompt for
    // non‑escalated, non‑dangerous commands — let the sandbox enforce
    // restrictions (e.g., block network/write) without a user prompt.
    let wants_escalation: bool = with_escalated_permissions;
    if wants_escalation {
    if sandbox_permissions.requires_escalated_permissions() {
        return true;
    }
    command_might_be_dangerous(command)
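The hunk above replaces the bare `with_escalated_permissions` flag with a `SandboxPermissions` value. A minimal sketch of driving the new signature (the types come from this diff; the concrete command and policies are illustrative only):

    // Sketch, not the real call site: SandboxPermissions::UseDefault is the
    // variant exercised by the tests later in this diff.
    let command = vec!["python".to_string()];
    let needs_prompt = requires_initial_appoval(
        AskForApproval::OnRequest,
        &SandboxPolicy::ReadOnly,
        &command,
        SandboxPermissions::UseDefault,
    );
    if needs_prompt {
        // Surface an approval request before executing the command.
    }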
@@ -267,6 +267,20 @@ mod tests {
        }
    }

    #[test]
    fn windows_powershell_full_path_is_safe() {
        if !cfg!(windows) {
            // Windows only because on Linux path splitting doesn't handle `/` separators properly
            return;
        }

        assert!(is_known_safe_command(&vec_str(&[
            r"C:\Program Files\PowerShell\7\pwsh.exe",
            "-Command",
            "Get-Location",
        ])));
    }

    #[test]
    fn bash_lc_safe_examples() {
        assert!(is_known_safe_command(&vec_str(&["bash", "-lc", "ls"])));
@@ -1,4 +1,5 @@
use shlex::split as shlex_split;
use std::path::Path;

/// On Windows, we conservatively allow only clearly read-only PowerShell invocations
/// that match a small safelist. Anything else (including direct CMD commands) is unsafe.

@@ -131,8 +132,14 @@ fn split_into_commands(tokens: Vec<String>) -> Option<Vec<Vec<String>>> {

/// Returns true when the executable name is one of the supported PowerShell binaries.
fn is_powershell_executable(exe: &str) -> bool {
    let executable_name = Path::new(exe)
        .file_name()
        .and_then(|osstr| osstr.to_str())
        .unwrap_or(exe)
        .to_ascii_lowercase();

    matches!(
        exe.to_ascii_lowercase().as_str(),
        executable_name.as_str(),
        "powershell" | "powershell.exe" | "pwsh" | "pwsh.exe"
    )
}

@@ -313,6 +320,27 @@ mod tests {
        ])));
    }

    #[test]
    fn accepts_full_path_powershell_invocations() {
        if !cfg!(windows) {
            // Windows only because on Linux path splitting doesn't handle `/` separators properly
            return;
        }

        assert!(is_safe_command_windows(&vec_str(&[
            r"C:\Program Files\PowerShell\7\pwsh.exe",
            "-NoProfile",
            "-Command",
            "Get-ChildItem -Path .",
        ])));

        assert!(is_safe_command_windows(&vec_str(&[
            r"C:\Windows\System32\WindowsPowerShell\v1.0\powershell.exe",
            "-Command",
            "Get-Content Cargo.toml",
        ])));
    }

    #[test]
    fn allows_read_only_pipelines_and_git_usage() {
        assert!(is_safe_command_windows(&vec_str(&[
@@ -127,8 +127,8 @@ async fn run_compact_task_inner(
                continue;
            }
            sess.set_total_tokens_full(turn_context.as_ref()).await;
            sess.send_event(&turn_context, EventMsg::Error(e.to_error_event(None)))
                .await;
            let event = EventMsg::Error(e.to_error_event(None));
            sess.send_event(&turn_context, event).await;
            return;
        }
        Err(e) => {

@@ -138,14 +138,14 @@ async fn run_compact_task_inner(
                sess.notify_stream_error(
                    turn_context.as_ref(),
                    format!("Reconnecting... {retries}/{max_retries}"),
                    e.http_status_code(),
                    e,
                )
                .await;
                tokio::time::sleep(delay).await;
                continue;
            } else {
                sess.send_event(&turn_context, EventMsg::Error(e.to_error_event(None)))
                    .await;
                let event = EventMsg::Error(e.to_error_event(None));
                sess.send_event(&turn_context, event).await;
                return;
            }
        }
@@ -29,8 +29,10 @@ pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Ar

async fn run_remote_compact_task_inner(sess: &Arc<Session>, turn_context: &Arc<TurnContext>) {
    if let Err(err) = run_remote_compact_task_inner_impl(sess, turn_context).await {
        let event = err.to_error_event(Some("Error running remote compact task".to_string()));
        sess.send_event(turn_context, EventMsg::Error(event)).await;
        let event = EventMsg::Error(
            err.to_error_event(Some("Error running remote compact task".to_string())),
        );
        sess.send_event(turn_context, event).await;
    }
}
@@ -4,7 +4,6 @@ use crate::config::types::Notice;
use anyhow::Context;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::TrustLevel;
use codex_utils_tokenizer::warm_model_cache;
use std::collections::BTreeMap;
use std::path::Path;
use std::path::PathBuf;

@@ -231,9 +230,6 @@ impl ConfigDocument {
    fn apply(&mut self, edit: &ConfigEdit) -> anyhow::Result<bool> {
        match edit {
            ConfigEdit::SetModel { model, effort } => Ok({
                if let Some(model) = &model {
                    warm_model_cache(model)
                }
                let mut mutated = false;
                mutated |= self.write_profile_value(
                    &["model"],
@@ -86,9 +86,6 @@ pub struct Config {
    /// Size of the context window for the model, in tokens.
    pub model_context_window: Option<i64>,

    /// Maximum number of output tokens.
    pub model_max_output_tokens: Option<i64>,

    /// Token usage threshold triggering auto-compaction of conversation history.
    pub model_auto_compact_token_limit: Option<i64>,

@@ -160,6 +157,9 @@ pub struct Config {
    /// and turn completions when not focused.
    pub tui_notifications: Notifications,

    /// Enable ASCII animations and shimmer effects in the TUI.
    pub animations: bool,

    /// The directory that should be treated as the current working directory
    /// for the session. All relative paths inside the business-logic layer are
    /// resolved against this path.

@@ -567,9 +567,6 @@ pub struct ConfigToml {
    /// Size of the context window for the model, in tokens.
    pub model_context_window: Option<i64>,

    /// Maximum number of output tokens.
    pub model_max_output_tokens: Option<i64>,

    /// Token usage threshold triggering auto-compaction of conversation history.
    pub model_auto_compact_token_limit: Option<i64>,

@@ -1119,11 +1116,6 @@ impl Config {
        let model_context_window = cfg
            .model_context_window
            .or_else(|| openai_model_info.as_ref().map(|info| info.context_window));
        let model_max_output_tokens = cfg.model_max_output_tokens.or_else(|| {
            openai_model_info
                .as_ref()
                .map(|info| info.max_output_tokens)
        });
        let model_auto_compact_token_limit = cfg.model_auto_compact_token_limit.or_else(|| {
            openai_model_info
                .as_ref()
|
||||
review_model,
|
||||
model_family,
|
||||
model_context_window,
|
||||
model_max_output_tokens,
|
||||
model_auto_compact_token_limit,
|
||||
model_provider_id,
|
||||
model_provider,
|
||||
@@ -1253,6 +1244,7 @@ impl Config {
|
||||
.as_ref()
|
||||
.map(|t| t.notifications.clone())
|
||||
.unwrap_or_default(),
|
||||
animations: cfg.tui.as_ref().map(|t| t.animations).unwrap_or(true),
|
||||
otel: {
|
||||
let t: OtelConfigToml = cfg.otel.unwrap_or_default();
|
||||
let log_user_prompt = t.log_user_prompt.unwrap_or(false);
|
||||
@@ -2957,7 +2949,6 @@ model_verbosity = "high"
|
||||
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
|
||||
model_family: find_family_for_model("o3").expect("known model slug"),
|
||||
model_context_window: Some(200_000),
|
||||
model_max_output_tokens: Some(100_000),
|
||||
model_auto_compact_token_limit: Some(180_000),
|
||||
model_provider_id: "openai".to_string(),
|
||||
model_provider: fixture.openai_provider.clone(),
|
||||
@@ -3003,6 +2994,7 @@ model_verbosity = "high"
|
||||
notices: Default::default(),
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
animations: true,
|
||||
otel: OtelConfig::default(),
|
||||
},
|
||||
o3_profile_config
|
||||
@@ -3029,7 +3021,6 @@ model_verbosity = "high"
|
||||
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
|
||||
model_family: find_family_for_model("gpt-3.5-turbo").expect("known model slug"),
|
||||
model_context_window: Some(16_385),
|
||||
model_max_output_tokens: Some(4_096),
|
||||
model_auto_compact_token_limit: Some(14_746),
|
||||
model_provider_id: "openai-chat-completions".to_string(),
|
||||
model_provider: fixture.openai_chat_completions_provider.clone(),
|
||||
@@ -3075,6 +3066,7 @@ model_verbosity = "high"
|
||||
notices: Default::default(),
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
animations: true,
|
||||
otel: OtelConfig::default(),
|
||||
};
|
||||
|
||||
@@ -3116,7 +3108,6 @@ model_verbosity = "high"
|
||||
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
|
||||
model_family: find_family_for_model("o3").expect("known model slug"),
|
||||
model_context_window: Some(200_000),
|
||||
model_max_output_tokens: Some(100_000),
|
||||
model_auto_compact_token_limit: Some(180_000),
|
||||
model_provider_id: "openai".to_string(),
|
||||
model_provider: fixture.openai_provider.clone(),
|
||||
@@ -3162,6 +3153,7 @@ model_verbosity = "high"
|
||||
notices: Default::default(),
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
animations: true,
|
||||
otel: OtelConfig::default(),
|
||||
};
|
||||
|
||||
@@ -3189,7 +3181,6 @@ model_verbosity = "high"
|
||||
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
|
||||
model_family: find_family_for_model("gpt-5.1").expect("known model slug"),
|
||||
model_context_window: Some(272_000),
|
||||
model_max_output_tokens: Some(128_000),
|
||||
model_auto_compact_token_limit: Some(244_800),
|
||||
model_provider_id: "openai".to_string(),
|
||||
model_provider: fixture.openai_provider.clone(),
|
||||
@@ -3235,6 +3226,7 @@ model_verbosity = "high"
|
||||
notices: Default::default(),
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
animations: true,
|
||||
otel: OtelConfig::default(),
|
||||
};
|
||||
|
||||
|
||||
@@ -363,6 +363,15 @@ pub struct Tui {
    /// Defaults to `true`.
    #[serde(default)]
    pub notifications: Notifications,

    /// Enable animations (welcome screen, shimmer effects, spinners).
    /// Defaults to `true`.
    #[serde(default = "default_true")]
    pub animations: bool,
}

const fn default_true() -> bool {
    true
}

/// Settings for notices we display to users via the tui and app-server clients

@@ -379,6 +388,7 @@ pub struct Notice {
    /// Tracks whether the user has seen the model migration prompt
    pub hide_gpt5_1_migration_prompt: Option<bool>,
    /// Tracks whether the user has seen the gpt-5.1-codex-max migration prompt
    #[serde(rename = "hide_gpt-5.1-codex-max_migration_prompt")]
    pub hide_gpt_5_1_codex_max_migration_prompt: Option<bool>,
}
@@ -1,13 +1,13 @@
use crate::codex::TurnContext;
use crate::context_manager::normalize;
use crate::truncate::TruncationPolicy;
use crate::truncate::approx_token_count;
use crate::truncate::truncate_function_output_items_with_policy;
use crate::truncate::truncate_text;
use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::TokenUsage;
use codex_protocol::protocol::TokenUsageInfo;
use codex_utils_tokenizer::Tokenizer;
use std::ops::Deref;

/// Transcript of conversation history

@@ -74,26 +74,21 @@ impl ContextManager {
        history
    }

    // Estimate the number of tokens in the history. Return None if no tokenizer
    // is available. This does not consider the reasoning traces.
    // /!\ The value is a lower bound estimate and does not represent the exact
    // context length.
    // Estimate token usage using byte-based heuristics from the truncation helpers.
    // This is a coarse lower bound, not a tokenizer-accurate count.
    pub(crate) fn estimate_token_count(&self, turn_context: &TurnContext) -> Option<i64> {
        let model = turn_context.client.get_model();
        let tokenizer = Tokenizer::for_model(model.as_str()).ok()?;
        let model_family = turn_context.client.get_model_family();
        let base_tokens =
            i64::try_from(approx_token_count(model_family.base_instructions.as_str()))
                .unwrap_or(i64::MAX);

        Some(
            self.items
                .iter()
                .map(|item| {
                    serde_json::to_string(&item)
                        .map(|item| tokenizer.count(&item))
                        .unwrap_or_default()
                })
                .sum::<i64>()
                + tokenizer.count(model_family.base_instructions.as_str()),
        )
        let items_tokens = self.items.iter().fold(0i64, |acc, item| {
            let serialized = serde_json::to_string(item).unwrap_or_default();
            let item_tokens = i64::try_from(approx_token_count(&serialized)).unwrap_or(i64::MAX);
            acc.saturating_add(item_tokens)
        });

        Some(base_tokens.saturating_add(items_tokens))
    }

    pub(crate) fn remove_first_item(&mut self) {
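The rewritten `estimate_token_count` above swaps per-item tokenizer counts for `approx_token_count`, a byte-based heuristic from the truncation helpers. A rough sketch of the shape of such a heuristic (the four-bytes-per-token ratio here is illustrative only, not the actual implementation in `crate::truncate`):

    // Illustrative stand-in for crate::truncate::approx_token_count; the
    // real helper may weight bytes differently.
    fn approx_token_count(text: &str) -> usize {
        // A common rule of thumb: roughly four bytes of UTF-8 per token.
        text.len().div_ceil(4)
    }

The fold with `saturating_add` then keeps the summed estimate from overflowing even for pathological histories.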
@@ -6,6 +6,7 @@ use crate::codex::TurnContext;
use crate::protocol::AskForApproval;
use crate::protocol::SandboxPolicy;
use crate::shell::Shell;
use crate::shell::default_user_shell;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;

@@ -28,7 +29,7 @@ pub(crate) struct EnvironmentContext {
    pub sandbox_mode: Option<SandboxMode>,
    pub network_access: Option<NetworkAccess>,
    pub writable_roots: Option<Vec<PathBuf>>,
    pub shell: Option<Shell>,
    pub shell: Shell,
}

impl EnvironmentContext {

@@ -36,7 +37,7 @@ impl EnvironmentContext {
        cwd: Option<PathBuf>,
        approval_policy: Option<AskForApproval>,
        sandbox_policy: Option<SandboxPolicy>,
        shell: Option<Shell>,
        shell: Shell,
    ) -> Self {
        Self {
            cwd,

@@ -110,7 +111,7 @@ impl EnvironmentContext {
        } else {
            None
        };
        EnvironmentContext::new(cwd, approval_policy, sandbox_policy, None)
        EnvironmentContext::new(cwd, approval_policy, sandbox_policy, default_user_shell())
    }
}

@@ -121,7 +122,7 @@ impl From<&TurnContext> for EnvironmentContext {
            Some(turn_context.approval_policy),
            Some(turn_context.sandbox_policy.clone()),
            // Shell is not configurable from turn to turn
            None,
            default_user_shell(),
        )
    }
}

@@ -169,11 +170,9 @@ impl EnvironmentContext {
            }
            lines.push(" </writable_roots>".to_string());
        }
        if let Some(shell) = self.shell
            && let Some(shell_name) = shell.name()
        {
            lines.push(format!(" <shell>{shell_name}</shell>"));
        }

        let shell_name = self.shell.name();
        lines.push(format!(" <shell>{shell_name}</shell>"));
        lines.push(ENVIRONMENT_CONTEXT_CLOSE_TAG.to_string());
        lines.join("\n")
    }
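With `shell` now a required field rather than an `Option`, every construction site supplies a concrete value. A minimal sketch under the new signature (the paths and policies are illustrative):

    // Sketch: the fourth argument is a Shell, no longer Option<Shell>.
    let context = EnvironmentContext::new(
        Some(PathBuf::from("/repo")),
        Some(AskForApproval::OnRequest),
        Some(SandboxPolicy::ReadOnly),
        default_user_shell(),
    );
    // serialize_to_xml() now always emits a <shell> element.
    println!("{}", context.serialize_to_xml());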
@@ -193,12 +192,18 @@ impl From<EnvironmentContext> for ResponseItem {

#[cfg(test)]
mod tests {
    use crate::shell::BashShell;
    use crate::shell::ZshShell;
    use crate::shell::ShellType;

    use super::*;
    use pretty_assertions::assert_eq;

    fn fake_shell() -> Shell {
        Shell {
            shell_type: ShellType::Bash,
            shell_path: PathBuf::from("/bin/bash"),
        }
    }

    fn workspace_write_policy(writable_roots: Vec<&str>, network_access: bool) -> SandboxPolicy {
        SandboxPolicy::WorkspaceWrite {
            writable_roots: writable_roots.into_iter().map(PathBuf::from).collect(),

@@ -214,7 +219,7 @@ mod tests {
            Some(PathBuf::from("/repo")),
            Some(AskForApproval::OnRequest),
            Some(workspace_write_policy(vec!["/repo", "/tmp"], false)),
            None,
            fake_shell(),
        );

        let expected = r#"<environment_context>

@@ -226,6 +231,7 @@ mod tests {
<root>/repo</root>
<root>/tmp</root>
</writable_roots>
<shell>bash</shell>
</environment_context>"#;

        assert_eq!(context.serialize_to_xml(), expected);

@@ -237,13 +243,14 @@ mod tests {
            None,
            Some(AskForApproval::Never),
            Some(SandboxPolicy::ReadOnly),
            None,
            fake_shell(),
        );

        let expected = r#"<environment_context>
<approval_policy>never</approval_policy>
<sandbox_mode>read-only</sandbox_mode>
<network_access>restricted</network_access>
<shell>bash</shell>
</environment_context>"#;

        assert_eq!(context.serialize_to_xml(), expected);

@@ -255,13 +262,14 @@ mod tests {
            None,
            Some(AskForApproval::OnFailure),
            Some(SandboxPolicy::DangerFullAccess),
            None,
            fake_shell(),
        );

        let expected = r#"<environment_context>
<approval_policy>on-failure</approval_policy>
<sandbox_mode>danger-full-access</sandbox_mode>
<network_access>enabled</network_access>
<shell>bash</shell>
</environment_context>"#;

        assert_eq!(context.serialize_to_xml(), expected);
@@ -274,13 +282,13 @@ mod tests {
            Some(PathBuf::from("/repo")),
            Some(AskForApproval::OnRequest),
            Some(workspace_write_policy(vec!["/repo"], false)),
            None,
            fake_shell(),
        );
        let context2 = EnvironmentContext::new(
            Some(PathBuf::from("/repo")),
            Some(AskForApproval::Never),
            Some(workspace_write_policy(vec!["/repo"], true)),
            None,
            fake_shell(),
        );
        assert!(!context1.equals_except_shell(&context2));
    }

@@ -291,13 +299,13 @@ mod tests {
            Some(PathBuf::from("/repo")),
            Some(AskForApproval::OnRequest),
            Some(SandboxPolicy::new_read_only_policy()),
            None,
            fake_shell(),
        );
        let context2 = EnvironmentContext::new(
            Some(PathBuf::from("/repo")),
            Some(AskForApproval::OnRequest),
            Some(SandboxPolicy::new_workspace_write_policy()),
            None,
            fake_shell(),
        );

        assert!(!context1.equals_except_shell(&context2));

@@ -309,13 +317,13 @@ mod tests {
            Some(PathBuf::from("/repo")),
            Some(AskForApproval::OnRequest),
            Some(workspace_write_policy(vec!["/repo", "/tmp", "/var"], false)),
            None,
            fake_shell(),
        );
        let context2 = EnvironmentContext::new(
            Some(PathBuf::from("/repo")),
            Some(AskForApproval::OnRequest),
            Some(workspace_write_policy(vec!["/repo", "/tmp"], true)),
            None,
            fake_shell(),
        );

        assert!(!context1.equals_except_shell(&context2));

@@ -327,17 +335,19 @@ mod tests {
            Some(PathBuf::from("/repo")),
            Some(AskForApproval::OnRequest),
            Some(workspace_write_policy(vec!["/repo"], false)),
            Some(Shell::Bash(BashShell {
            Shell {
                shell_type: ShellType::Bash,
                shell_path: "/bin/bash".into(),
            })),
            },
        );
        let context2 = EnvironmentContext::new(
            Some(PathBuf::from("/repo")),
            Some(AskForApproval::OnRequest),
            Some(workspace_write_policy(vec!["/repo"], false)),
            Some(Shell::Zsh(ZshShell {
            Shell {
                shell_type: ShellType::Zsh,
                shell_path: "/bin/zsh".into(),
            })),
            },
        );

        assert!(context1.equals_except_shell(&context2));
@@ -10,6 +10,7 @@ use chrono::Local;
use chrono::Utc;
use codex_async_utils::CancelErr;
use codex_protocol::ConversationId;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::ErrorEvent;
use codex_protocol::protocol::RateLimitSnapshot;
use reqwest::StatusCode;

@@ -432,17 +433,31 @@ impl CodexErr {
        (self as &dyn std::any::Any).downcast_ref::<T>()
    }

    pub fn http_status_code(&self) -> Option<StatusCode> {
    /// Translate core error to client-facing protocol error.
    pub fn to_codex_protocol_error(&self) -> CodexErrorInfo {
        match self {
            CodexErr::UnexpectedStatus(err) => Some(err.status),
            CodexErr::RetryLimit(err) => Some(err.status),
            CodexErr::UsageLimitReached(_) | CodexErr::UsageNotIncluded => {
                Some(StatusCode::TOO_MANY_REQUESTS)
            CodexErr::ContextWindowExceeded => CodexErrorInfo::ContextWindowExceeded,
            CodexErr::UsageLimitReached(_)
            | CodexErr::QuotaExceeded
            | CodexErr::UsageNotIncluded => CodexErrorInfo::UsageLimitExceeded,
            CodexErr::RetryLimit(_) => CodexErrorInfo::ResponseTooManyFailedAttempts {
                http_status_code: self.http_status_code_value(),
            },
            CodexErr::ConnectionFailed(_) => CodexErrorInfo::HttpConnectionFailed {
                http_status_code: self.http_status_code_value(),
            },
            CodexErr::ResponseStreamFailed(_) => CodexErrorInfo::ResponseStreamConnectionFailed {
                http_status_code: self.http_status_code_value(),
            },
            CodexErr::RefreshTokenFailed(_) => CodexErrorInfo::Unauthorized,
            CodexErr::SessionConfiguredNotFirstEvent
            | CodexErr::InternalServerError
            | CodexErr::InternalAgentDied => CodexErrorInfo::InternalServerError,
            CodexErr::UnsupportedOperation(_) | CodexErr::ConversationNotFound(_) => {
                CodexErrorInfo::BadRequest
            }
            CodexErr::InternalServerError => Some(StatusCode::INTERNAL_SERVER_ERROR),
            CodexErr::ResponseStreamFailed(err) => err.source.status(),
            CodexErr::ConnectionFailed(err) => err.source.status(),
            _ => None,
            CodexErr::Sandbox(_) => CodexErrorInfo::SandboxError,
            _ => CodexErrorInfo::Other,
        }
    }

@@ -452,16 +467,22 @@ impl CodexErr {
            Some(prefix) => format!("{prefix}: {error_message}"),
            None => error_message,
        };

        ErrorEvent {
            message,
            http_status_code: http_status_code_value(self.http_status_code()),
            codex_error_info: Some(self.to_codex_protocol_error()),
        }
    }
}

pub fn http_status_code_value(http_status_code: Option<StatusCode>) -> Option<u16> {
    http_status_code.as_ref().map(StatusCode::as_u16)
    pub fn http_status_code_value(&self) -> Option<u16> {
        let http_status_code = match self {
            CodexErr::RetryLimit(err) => Some(err.status),
            CodexErr::UnexpectedStatus(err) => Some(err.status),
            CodexErr::ConnectionFailed(err) => err.source.status(),
            CodexErr::ResponseStreamFailed(err) => err.source.status(),
            _ => None,
        };
        http_status_code.as_ref().map(StatusCode::as_u16)
    }
}
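Together, `to_codex_protocol_error` and the new `http_status_code_value` method let `to_error_event` populate both fields in one pass. A small sketch of the resulting call pattern (the prefix string is illustrative; the expected message shape follows the tests in this diff):

    // Sketch: any CodexErr can be turned into a client-facing ErrorEvent.
    let err = CodexErr::Fatal("boom".to_string());
    let event = err.to_error_event(Some("startup".to_string()));
    // event.message          -> "startup: Fatal error: boom"
    // event.codex_error_info -> Some(CodexErrorInfo::Other)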
pub fn get_error_message_ui(e: &CodexErr) -> String {

@@ -510,6 +531,10 @@ mod tests {
    use chrono::Utc;
    use codex_protocol::protocol::RateLimitWindow;
    use pretty_assertions::assert_eq;
    use reqwest::Response;
    use reqwest::ResponseBuilderExt;
    use reqwest::StatusCode;
    use reqwest::Url;

    fn rate_limit_snapshot() -> RateLimitSnapshot {
        let primary_reset_at = Utc

@@ -605,6 +630,33 @@ mod tests {
        assert_eq!(get_error_message_ui(&err), "stdout only");
    }

    #[test]
    fn to_error_event_handles_response_stream_failed() {
        let response = http::Response::builder()
            .status(StatusCode::TOO_MANY_REQUESTS)
            .url(Url::parse("http://example.com").unwrap())
            .body("")
            .unwrap();
        let source = Response::from(response).error_for_status_ref().unwrap_err();
        let err = CodexErr::ResponseStreamFailed(ResponseStreamFailed {
            source,
            request_id: Some("req-123".to_string()),
        });

        let event = err.to_error_event(Some("prefix".to_string()));

        assert_eq!(
            event.message,
            "prefix: Error while reading the server response: HTTP status client error (429 Too Many Requests) for url (http://example.com/), request id: req-123"
        );
        assert_eq!(
            event.codex_error_info,
            Some(CodexErrorInfo::ResponseStreamConnectionFailed {
                http_status_code: Some(429)
            })
        );
    }

    #[test]
    fn sandbox_denied_reports_exit_code_when_no_output_available() {
        let output = ExecToolCallOutput {

@@ -807,43 +859,4 @@ mod tests {
            assert_eq!(err.to_string(), expected);
        });
    }

    #[test]
    fn error_event_includes_http_status_code_when_available() {
        let err = CodexErr::UnexpectedStatus(UnexpectedResponseError {
            status: StatusCode::BAD_REQUEST,
            body: "oops".to_string(),
            request_id: Some("req-1".to_string()),
        });
        let event = err.to_error_event(None);

        assert_eq!(
            event.message,
            "unexpected status 400 Bad Request: oops, request id: req-1"
        );
        assert_eq!(
            event.http_status_code,
            Some(StatusCode::BAD_REQUEST.as_u16())
        );
    }

    #[test]
    fn error_event_omits_http_status_code_when_unknown() {
        let event = CodexErr::Fatal("boom".to_string()).to_error_event(None);

        assert_eq!(event.message, "Fatal error: boom");
        assert_eq!(event.http_status_code, None);
    }

    #[test]
    fn error_event_applies_message_wrapper() {
        let event = CodexErr::Fatal("boom".to_string())
            .to_error_event(Some("Error running remote compact task".to_string()));

        assert_eq!(
            event.message,
            "Error running remote compact task: Fatal error: boom"
        );
        assert_eq!(event.http_status_code, None);
    }
}
@@ -117,7 +117,7 @@ pub fn parse_turn_item(item: &ResponseItem) -> Option<TurnItem> {
            ..
        } => Some(TurnItem::WebSearch(WebSearchItem {
            id: id.clone().unwrap_or_default(),
            query: query.clone(),
            query: query.clone().unwrap_or_default(),
        })),
        _ => None,
    }

@@ -306,7 +306,7 @@ mod tests {
            id: Some("ws_1".to_string()),
            status: Some("completed".to_string()),
            action: WebSearchAction::Search {
                query: "weather".to_string(),
                query: Some("weather".to_string()),
            },
        };
@@ -14,6 +14,7 @@ use tokio::io::AsyncRead;
use tokio::io::AsyncReadExt;
use tokio::io::BufReader;
use tokio::process::Child;
use tokio_util::sync::CancellationToken;

use crate::error::CodexErr;
use crate::error::Result;

@@ -28,8 +29,9 @@ use crate::sandboxing::ExecEnv;
use crate::sandboxing::SandboxManager;
use crate::spawn::StdioPolicy;
use crate::spawn::spawn_child_async;
use crate::text_encoding::bytes_to_string_smart;

const DEFAULT_TIMEOUT_MS: u64 = 10_000;
pub const DEFAULT_EXEC_COMMAND_TIMEOUT_MS: u64 = 10_000;

// Hardcode these since it does not seem worth including the libc crate just
// for these.

@@ -46,20 +48,59 @@ const AGGREGATE_BUFFER_INITIAL_CAPACITY: usize = 8 * 1024; // 8 KiB
/// Aggregation still collects full output; only the live event stream is capped.
pub(crate) const MAX_EXEC_OUTPUT_DELTAS_PER_CALL: usize = 10_000;

#[derive(Clone, Debug)]
#[derive(Debug)]
pub struct ExecParams {
    pub command: Vec<String>,
    pub cwd: PathBuf,
    pub timeout_ms: Option<u64>,
    pub expiration: ExecExpiration,
    pub env: HashMap<String, String>,
    pub with_escalated_permissions: Option<bool>,
    pub justification: Option<String>,
    pub arg0: Option<String>,
}

impl ExecParams {
    pub fn timeout_duration(&self) -> Duration {
        Duration::from_millis(self.timeout_ms.unwrap_or(DEFAULT_TIMEOUT_MS))
/// Mechanism to terminate an exec invocation before it finishes naturally.
#[derive(Debug)]
pub enum ExecExpiration {
    Timeout(Duration),
    DefaultTimeout,
    Cancellation(CancellationToken),
}

impl From<Option<u64>> for ExecExpiration {
    fn from(timeout_ms: Option<u64>) -> Self {
        timeout_ms.map_or(ExecExpiration::DefaultTimeout, |timeout_ms| {
            ExecExpiration::Timeout(Duration::from_millis(timeout_ms))
        })
    }
}

impl From<u64> for ExecExpiration {
    fn from(timeout_ms: u64) -> Self {
        ExecExpiration::Timeout(Duration::from_millis(timeout_ms))
    }
}

impl ExecExpiration {
    async fn wait(self) {
        match self {
            ExecExpiration::Timeout(duration) => tokio::time::sleep(duration).await,
            ExecExpiration::DefaultTimeout => {
                tokio::time::sleep(Duration::from_millis(DEFAULT_EXEC_COMMAND_TIMEOUT_MS)).await
            }
            ExecExpiration::Cancellation(cancel) => {
                cancel.cancelled().await;
            }
        }
    }

    /// If ExecExpiration is a timeout, returns the timeout in milliseconds.
    pub(crate) fn timeout_ms(&self) -> Option<u64> {
        match self {
            ExecExpiration::Timeout(duration) => Some(duration.as_millis() as u64),
            ExecExpiration::DefaultTimeout => Some(DEFAULT_EXEC_COMMAND_TIMEOUT_MS),
            ExecExpiration::Cancellation(_) => None,
        }
    }
}
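`ExecExpiration` folds the old timeout plumbing and the new cancellation path into one type. A quick sketch of building each variant through the conversions defined above (values illustrative):

    // Sketch: the three ways an expiration can be constructed.
    let explicit: ExecExpiration = 500u64.into();              // Timeout(500 ms)
    let fallback: ExecExpiration = Option::<u64>::None.into(); // DefaultTimeout (10 s)
    let token = CancellationToken::new();
    let on_cancel = ExecExpiration::Cancellation(token.clone());
    // Calling token.cancel() later ends the exec instead of a timer firing.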
@@ -95,7 +136,7 @@ pub async fn process_exec_tool_call(
    let ExecParams {
        command,
        cwd,
        timeout_ms,
        expiration,
        env,
        with_escalated_permissions,
        justification,

@@ -114,7 +155,7 @@ pub async fn process_exec_tool_call(
        args: args.to_vec(),
        cwd,
        env,
        timeout_ms,
        expiration,
        with_escalated_permissions,
        justification,
    };

@@ -122,7 +163,7 @@ pub async fn process_exec_tool_call(
    let manager = SandboxManager::new();
    let exec_env = manager
        .transform(
            &spec,
            spec,
            sandbox_policy,
            sandbox_type,
            sandbox_cwd,

@@ -131,7 +172,7 @@ pub async fn process_exec_tool_call(
        .map_err(CodexErr::from)?;

    // Route through the sandboxing module for a single, unified execution path.
    crate::sandboxing::execute_env(&exec_env, sandbox_policy, stdout_stream).await
    crate::sandboxing::execute_env(exec_env, sandbox_policy, stdout_stream).await
}

pub(crate) async fn execute_exec_env(

@@ -143,7 +184,7 @@ pub(crate) async fn execute_exec_env(
        command,
        cwd,
        env,
        timeout_ms,
        expiration,
        sandbox,
        with_escalated_permissions,
        justification,

@@ -153,7 +194,7 @@ pub(crate) async fn execute_exec_env(
    let params = ExecParams {
        command,
        cwd,
        timeout_ms,
        expiration,
        env,
        with_escalated_permissions,
        justification,

@@ -178,9 +219,12 @@ async fn exec_windows_sandbox(
        command,
        cwd,
        env,
        timeout_ms,
        expiration,
        ..
    } = params;
    // TODO(iceweasel-oai): run_windows_sandbox_capture should support all
    // variants of ExecExpiration, not just timeout.
    let timeout_ms = expiration.timeout_ms();

    let policy_str = serde_json::to_string(sandbox_policy).map_err(|err| {
        CodexErr::Io(io::Error::other(format!(

@@ -414,7 +458,7 @@ impl StreamOutput<String> {
impl StreamOutput<Vec<u8>> {
    pub fn from_utf8_lossy(&self) -> StreamOutput<String> {
        StreamOutput {
            text: String::from_utf8_lossy(&self.text).to_string(),
            text: bytes_to_string_smart(&self.text),
            truncated_after_lines: self.truncated_after_lines,
        }
    }

@@ -448,12 +492,12 @@ async fn exec(
    {
        return exec_windows_sandbox(params, sandbox_policy).await;
    }
    let timeout = params.timeout_duration();
    let ExecParams {
        command,
        cwd,
        env,
        arg0,
        expiration,
        ..
    } = params;

@@ -474,14 +518,14 @@ async fn exec(
        env,
    )
    .await?;
    consume_truncated_output(child, timeout, stdout_stream).await
    consume_truncated_output(child, expiration, stdout_stream).await
}

/// Consumes the output of a child process, truncating it so it is suitable for
/// use as the output of a `shell` tool call. Also enforces specified timeout.
async fn consume_truncated_output(
    mut child: Child,
    timeout: Duration,
    expiration: ExecExpiration,
    stdout_stream: Option<StdoutStream>,
) -> Result<RawExecToolCallOutput> {
    // Both stdout and stderr were configured with `Stdio::piped()`

@@ -515,20 +559,14 @@ async fn consume_truncated_output(
    ));

    let (exit_status, timed_out) = tokio::select! {
        result = tokio::time::timeout(timeout, child.wait()) => {
            match result {
                Ok(status_result) => {
                    let exit_status = status_result?;
                    (exit_status, false)
                }
                Err(_) => {
                    // timeout
                    kill_child_process_group(&mut child)?;
                    child.start_kill()?;
                    // Debatable whether `child.wait().await` should be called here.
                    (synthetic_exit_status(EXIT_CODE_SIGNAL_BASE + TIMEOUT_CODE), true)
                }
            }
        status_result = child.wait() => {
            let exit_status = status_result?;
            (exit_status, false)
        }
        _ = expiration.wait() => {
            kill_child_process_group(&mut child)?;
            child.start_kill()?;
            (synthetic_exit_status(EXIT_CODE_SIGNAL_BASE + TIMEOUT_CODE), true)
        }
        _ = tokio::signal::ctrl_c() => {
            kill_child_process_group(&mut child)?;
@@ -780,6 +818,15 @@ mod tests {
    #[cfg(unix)]
    #[tokio::test]
    async fn kill_child_process_group_kills_grandchildren_on_timeout() -> Result<()> {
        // On Linux/macOS, /bin/bash is typically present; on FreeBSD/OpenBSD,
        // prefer /bin/sh to avoid NotFound errors.
        #[cfg(any(target_os = "freebsd", target_os = "openbsd"))]
        let command = vec![
            "/bin/sh".to_string(),
            "-c".to_string(),
            "sleep 60 & echo $!; sleep 60".to_string(),
        ];
        #[cfg(all(unix, not(any(target_os = "freebsd", target_os = "openbsd"))))]
        let command = vec![
            "/bin/bash".to_string(),
            "-c".to_string(),

@@ -789,7 +836,7 @@ mod tests {
        let params = ExecParams {
            command,
            cwd: std::env::current_dir()?,
            timeout_ms: Some(500),
            expiration: 500.into(),
            env,
            with_escalated_permissions: None,
            justification: None,

@@ -823,4 +870,62 @@ mod tests {
        assert!(killed, "grandchild process with pid {pid} is still alive");
        Ok(())
    }

    #[tokio::test]
    async fn process_exec_tool_call_respects_cancellation_token() -> Result<()> {
        let command = long_running_command();
        let cwd = std::env::current_dir()?;
        let env: HashMap<String, String> = std::env::vars().collect();
        let cancel_token = CancellationToken::new();
        let cancel_tx = cancel_token.clone();
        let params = ExecParams {
            command,
            cwd: cwd.clone(),
            expiration: ExecExpiration::Cancellation(cancel_token),
            env,
            with_escalated_permissions: None,
            justification: None,
            arg0: None,
        };
        tokio::spawn(async move {
            tokio::time::sleep(Duration::from_millis(1_000)).await;
            cancel_tx.cancel();
        });
        let result = process_exec_tool_call(
            params,
            SandboxType::None,
            &SandboxPolicy::DangerFullAccess,
            cwd.as_path(),
            &None,
            None,
        )
        .await;
        let output = match result {
            Err(CodexErr::Sandbox(SandboxErr::Timeout { output })) => output,
            other => panic!("expected timeout error, got {other:?}"),
        };
        assert!(output.timed_out);
        assert_eq!(output.exit_code, EXEC_TIMEOUT_EXIT_CODE);
        Ok(())
    }

    #[cfg(unix)]
    fn long_running_command() -> Vec<String> {
        vec![
            "/bin/sh".to_string(),
            "-c".to_string(),
            "sleep 30".to_string(),
        ]
    }

    #[cfg(windows)]
    fn long_running_command() -> Vec<String> {
        vec![
            "powershell.exe".to_string(),
            "-NonInteractive".to_string(),
            "-NoLogo".to_string(),
            "-Command".to_string(),
            "Start-Sleep -Seconds 30".to_string(),
        ]
    }
}
codex-rs/core/src/exec_policy.rs (new file, 365 lines)
@@ -0,0 +1,365 @@
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use crate::command_safety::is_dangerous_command::requires_initial_appoval;
use codex_execpolicy::Decision;
use codex_execpolicy::Evaluation;
use codex_execpolicy::Policy;
use codex_execpolicy::PolicyParser;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::SandboxPolicy;
use thiserror::Error;
use tokio::fs;

use crate::bash::parse_shell_lc_plain_commands;
use crate::features::Feature;
use crate::features::Features;
use crate::sandboxing::SandboxPermissions;
use crate::tools::sandboxing::ApprovalRequirement;

const FORBIDDEN_REASON: &str = "execpolicy forbids this command";
const PROMPT_REASON: &str = "execpolicy requires approval for this command";
const POLICY_DIR_NAME: &str = "policy";
const POLICY_EXTENSION: &str = "codexpolicy";

#[derive(Debug, Error)]
pub enum ExecPolicyError {
    #[error("failed to read execpolicy files from {dir}: {source}")]
    ReadDir {
        dir: PathBuf,
        source: std::io::Error,
    },

    #[error("failed to read execpolicy file {path}: {source}")]
    ReadFile {
        path: PathBuf,
        source: std::io::Error,
    },

    #[error("failed to parse execpolicy file {path}: {source}")]
    ParsePolicy {
        path: String,
        source: codex_execpolicy::Error,
    },
}

pub(crate) async fn exec_policy_for(
    features: &Features,
    codex_home: &Path,
) -> Result<Arc<Policy>, ExecPolicyError> {
    if !features.enabled(Feature::ExecPolicy) {
        return Ok(Arc::new(Policy::empty()));
    }

    let policy_dir = codex_home.join(POLICY_DIR_NAME);
    let policy_paths = collect_policy_files(&policy_dir).await?;

    let mut parser = PolicyParser::new();
    for policy_path in &policy_paths {
        let contents =
            fs::read_to_string(policy_path)
                .await
                .map_err(|source| ExecPolicyError::ReadFile {
                    path: policy_path.clone(),
                    source,
                })?;
        let identifier = policy_path.to_string_lossy().to_string();
        parser
            .parse(&identifier, &contents)
            .map_err(|source| ExecPolicyError::ParsePolicy {
                path: identifier,
                source,
            })?;
    }

    let policy = Arc::new(parser.build());
    tracing::debug!(
        "loaded execpolicy from {} files in {}",
        policy_paths.len(),
        policy_dir.display()
    );

    Ok(policy)
}

fn evaluate_with_policy(
    policy: &Policy,
    command: &[String],
    approval_policy: AskForApproval,
) -> Option<ApprovalRequirement> {
    let commands = parse_shell_lc_plain_commands(command).unwrap_or_else(|| vec![command.to_vec()]);
    let evaluation = policy.check_multiple(commands.iter());

    match evaluation {
        Evaluation::Match { decision, .. } => match decision {
            Decision::Forbidden => Some(ApprovalRequirement::Forbidden {
                reason: FORBIDDEN_REASON.to_string(),
            }),
            Decision::Prompt => {
                let reason = PROMPT_REASON.to_string();
                if matches!(approval_policy, AskForApproval::Never) {
                    Some(ApprovalRequirement::Forbidden { reason })
                } else {
                    Some(ApprovalRequirement::NeedsApproval {
                        reason: Some(reason),
                    })
                }
            }
            Decision::Allow => Some(ApprovalRequirement::Skip),
        },
        Evaluation::NoMatch { .. } => None,
    }
}

pub(crate) fn create_approval_requirement_for_command(
    policy: &Policy,
    command: &[String],
    approval_policy: AskForApproval,
    sandbox_policy: &SandboxPolicy,
    sandbox_permissions: SandboxPermissions,
) -> ApprovalRequirement {
    if let Some(requirement) = evaluate_with_policy(policy, command, approval_policy) {
        return requirement;
    }

    if requires_initial_appoval(
        approval_policy,
        sandbox_policy,
        command,
        sandbox_permissions,
    ) {
        ApprovalRequirement::NeedsApproval { reason: None }
    } else {
        ApprovalRequirement::Skip
    }
}

async fn collect_policy_files(dir: &Path) -> Result<Vec<PathBuf>, ExecPolicyError> {
    let mut read_dir = match fs::read_dir(dir).await {
        Ok(read_dir) => read_dir,
        Err(err) if err.kind() == ErrorKind::NotFound => return Ok(Vec::new()),
        Err(source) => {
            return Err(ExecPolicyError::ReadDir {
                dir: dir.to_path_buf(),
                source,
            });
        }
    };

    let mut policy_paths = Vec::new();
    while let Some(entry) =
        read_dir
            .next_entry()
            .await
            .map_err(|source| ExecPolicyError::ReadDir {
                dir: dir.to_path_buf(),
                source,
            })?
    {
        let path = entry.path();
        let file_type = entry
            .file_type()
            .await
            .map_err(|source| ExecPolicyError::ReadDir {
                dir: dir.to_path_buf(),
                source,
            })?;

        if path
            .extension()
            .and_then(|ext| ext.to_str())
            .is_some_and(|ext| ext == POLICY_EXTENSION)
            && file_type.is_file()
        {
            policy_paths.push(path);
        }
    }

    policy_paths.sort();

    Ok(policy_paths)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::features::Feature;
    use crate::features::Features;
    use codex_protocol::protocol::AskForApproval;
    use codex_protocol::protocol::SandboxPolicy;
    use pretty_assertions::assert_eq;
    use std::fs;
    use tempfile::tempdir;

    #[tokio::test]
    async fn returns_empty_policy_when_feature_disabled() {
        let mut features = Features::with_defaults();
        features.disable(Feature::ExecPolicy);
        let temp_dir = tempdir().expect("create temp dir");

        let policy = exec_policy_for(&features, temp_dir.path())
            .await
            .expect("policy result");

        let commands = [vec!["rm".to_string()]];
        assert!(matches!(
            policy.check_multiple(commands.iter()),
            Evaluation::NoMatch { .. }
        ));
        assert!(!temp_dir.path().join(POLICY_DIR_NAME).exists());
    }

    #[tokio::test]
    async fn collect_policy_files_returns_empty_when_dir_missing() {
        let temp_dir = tempdir().expect("create temp dir");

        let policy_dir = temp_dir.path().join(POLICY_DIR_NAME);
        let files = collect_policy_files(&policy_dir)
            .await
            .expect("collect policy files");

        assert!(files.is_empty());
    }

    #[tokio::test]
    async fn loads_policies_from_policy_subdirectory() {
        let temp_dir = tempdir().expect("create temp dir");
        let policy_dir = temp_dir.path().join(POLICY_DIR_NAME);
        fs::create_dir_all(&policy_dir).expect("create policy dir");
        fs::write(
            policy_dir.join("deny.codexpolicy"),
            r#"prefix_rule(pattern=["rm"], decision="forbidden")"#,
        )
        .expect("write policy file");

        let policy = exec_policy_for(&Features::with_defaults(), temp_dir.path())
            .await
            .expect("policy result");
        let command = [vec!["rm".to_string()]];
        assert!(matches!(
            policy.check_multiple(command.iter()),
            Evaluation::Match { .. }
        ));
    }

    #[tokio::test]
    async fn ignores_policies_outside_policy_dir() {
        let temp_dir = tempdir().expect("create temp dir");
        fs::write(
            temp_dir.path().join("root.codexpolicy"),
            r#"prefix_rule(pattern=["ls"], decision="prompt")"#,
        )
        .expect("write policy file");

        let policy = exec_policy_for(&Features::with_defaults(), temp_dir.path())
            .await
            .expect("policy result");
        let command = [vec!["ls".to_string()]];
        assert!(matches!(
            policy.check_multiple(command.iter()),
            Evaluation::NoMatch { .. }
        ));
    }

    #[test]
    fn evaluates_bash_lc_inner_commands() {
        let policy_src = r#"
prefix_rule(pattern=["rm"], decision="forbidden")
"#;
        let mut parser = PolicyParser::new();
        parser
            .parse("test.codexpolicy", policy_src)
            .expect("parse policy");
        let policy = parser.build();

        let forbidden_script = vec![
            "bash".to_string(),
            "-lc".to_string(),
            "rm -rf /tmp".to_string(),
        ];

        let requirement =
            evaluate_with_policy(&policy, &forbidden_script, AskForApproval::OnRequest)
                .expect("expected match for forbidden command");

        assert_eq!(
            requirement,
            ApprovalRequirement::Forbidden {
                reason: FORBIDDEN_REASON.to_string()
            }
        );
    }

    #[test]
    fn approval_requirement_prefers_execpolicy_match() {
        let policy_src = r#"prefix_rule(pattern=["rm"], decision="prompt")"#;
        let mut parser = PolicyParser::new();
        parser
            .parse("test.codexpolicy", policy_src)
            .expect("parse policy");
        let policy = parser.build();
        let command = vec!["rm".to_string()];

        let requirement = create_approval_requirement_for_command(
            &policy,
            &command,
            AskForApproval::OnRequest,
            &SandboxPolicy::DangerFullAccess,
            SandboxPermissions::UseDefault,
        );

        assert_eq!(
            requirement,
            ApprovalRequirement::NeedsApproval {
                reason: Some(PROMPT_REASON.to_string())
            }
        );
    }

    #[test]
    fn approval_requirement_respects_approval_policy() {
        let policy_src = r#"prefix_rule(pattern=["rm"], decision="prompt")"#;
        let mut parser = PolicyParser::new();
        parser
            .parse("test.codexpolicy", policy_src)
            .expect("parse policy");
        let policy = parser.build();
        let command = vec!["rm".to_string()];

        let requirement = create_approval_requirement_for_command(
            &policy,
            &command,
            AskForApproval::Never,
            &SandboxPolicy::DangerFullAccess,
            SandboxPermissions::UseDefault,
        );

        assert_eq!(
            requirement,
            ApprovalRequirement::Forbidden {
                reason: PROMPT_REASON.to_string()
            }
        );
    }

    #[test]
    fn approval_requirement_falls_back_to_heuristics() {
        let command = vec!["python".to_string()];

        let empty_policy = Policy::empty();
        let requirement = create_approval_requirement_for_command(
            &empty_policy,
            &command,
            AskForApproval::UnlessTrusted,
            &SandboxPolicy::ReadOnly,
            SandboxPermissions::UseDefault,
        );

        assert_eq!(
            requirement,
            ApprovalRequirement::NeedsApproval { reason: None }
        );
    }
}
@@ -31,9 +31,6 @@ pub enum Feature {
    GhostCommit,
    /// Use the single unified PTY-backed exec tool.
    UnifiedExec,
    /// Use the shell command tool that takes `command` as a single string of
    /// shell instead of an array of args passed to `execvp(3)`.
    ShellCommandTool,
    /// Enable experimental RMCP features such as OAuth login.
    RmcpClient,
    /// Include the freeform apply_patch tool.
@@ -42,6 +39,8 @@ pub enum Feature {
    ViewImageTool,
    /// Allow the model to request web searches.
    WebSearchRequest,
    /// Gate the execpolicy enforcement for shell/unified exec.
    ExecPolicy,
    /// Enable the model-based risk assessments for sandboxed commands.
    SandboxCommandAssessment,
    /// Enable Windows sandbox (restricted token) on Windows.
@@ -273,12 +272,6 @@ pub const FEATURES: &[FeatureSpec] = &[
        stage: Stage::Experimental,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ShellCommandTool,
        key: "shell_command_tool",
        stage: Stage::Experimental,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::RmcpClient,
        key: "rmcp_client",
@@ -297,6 +290,12 @@ pub const FEATURES: &[FeatureSpec] = &[
        stage: Stage::Stable,
        default_enabled: false,
    },
    FeatureSpec {
        id: Feature::ExecPolicy,
        key: "exec_policy",
        stage: Stage::Experimental,
        default_enabled: true,
    },
    FeatureSpec {
        id: Feature::SandboxCommandAssessment,
        key: "experimental_sandbox_command_assessment",
@@ -825,11 +825,21 @@ mod tests {
        .await
        .expect("Should collect git info from repo");

        let remote_url_output = Command::new("git")
            .args(["remote", "get-url", "origin"])
            .current_dir(&repo_path)
            .output()
            .await
            .expect("Failed to read remote url");
        // Some dev environments rewrite remotes (e.g., force SSH), so compare against
        // whatever URL Git reports instead of a fixed placeholder.
        let expected_remote = String::from_utf8(remote_url_output.stdout)
            .unwrap()
            .trim()
            .to_string();

        // Should have repository URL
        assert_eq!(
            git_info.repository_url,
            Some("https://github.com/example/repo.git".to_string())
        );
        assert_eq!(git_info.repository_url, Some(expected_remote));
    }

    #[tokio::test]
@@ -25,6 +25,7 @@ mod environment_context;
pub mod error;
pub mod exec;
pub mod exec_env;
mod exec_policy;
pub mod features;
mod flags;
pub mod git_info;
@@ -38,6 +39,7 @@ pub mod parse_command;
pub mod powershell;
mod response_processing;
pub mod sandboxing;
mod text_encoding;
pub mod token_data;
mod truncate;
mod unified_exec;
@@ -137,6 +137,19 @@ pub fn find_family_for_model(slug: &str) -> Option<ModelFamily> {
        model_family!(slug, "gpt-4o", needs_special_apply_patch_instructions: true)
    } else if slug.starts_with("gpt-3.5") {
        model_family!(slug, "gpt-3.5", needs_special_apply_patch_instructions: true)
    } else if slug.starts_with("robin") {
        model_family!(
            slug, "gpt-5.1",
            supports_reasoning_summaries: true,
            apply_patch_tool_type: Some(ApplyPatchToolType::Freeform),
            support_verbosity: true,
            default_verbosity: Some(Verbosity::Low),
            base_instructions: GPT_5_1_INSTRUCTIONS.to_string(),
            default_reasoning_effort: Some(ReasoningEffort::Medium),
            truncation_policy: TruncationPolicy::Bytes(10_000),
            shell_type: ConfigShellToolType::ShellCommand,
            supports_parallel_tool_calls: true,
        )
    } else if slug.starts_with("test-gpt-5") {
        model_family!(
            slug, slug,
@@ -2,7 +2,6 @@ use crate::model_family::ModelFamily;

// Shared constants for commonly used window/token sizes.
pub(crate) const CONTEXT_WINDOW_272K: i64 = 272_000;
pub(crate) const MAX_OUTPUT_TOKENS_128K: i64 = 128_000;

/// Metadata about a model, particularly OpenAI models.
/// We may want to consider including details like the pricing for
@@ -14,19 +13,15 @@ pub(crate) struct ModelInfo {
    /// Size of the context window in tokens. This is the maximum size of the input context.
    pub(crate) context_window: i64,

    /// Maximum number of output tokens that can be generated for the model.
    pub(crate) max_output_tokens: i64,

    /// Token threshold where we should automatically compact conversation history. This considers
    /// input tokens + output tokens of this turn.
    pub(crate) auto_compact_token_limit: Option<i64>,
}

impl ModelInfo {
    const fn new(context_window: i64, max_output_tokens: i64) -> Self {
    const fn new(context_window: i64) -> Self {
        Self {
            context_window,
            max_output_tokens,
            auto_compact_token_limit: Some(Self::default_auto_compact_limit(context_window)),
        }
    }
@@ -42,48 +37,44 @@ pub(crate) fn get_model_info(model_family: &ModelFamily) -> Option<ModelInfo> {
        // OSS models have a 128k shared token pool.
        // Arbitrarily splitting it: 3/4 input context, 1/4 output.
        // https://openai.com/index/gpt-oss-model-card/
        "gpt-oss-20b" => Some(ModelInfo::new(96_000, 32_000)),
        "gpt-oss-120b" => Some(ModelInfo::new(96_000, 32_000)),
        "gpt-oss-20b" => Some(ModelInfo::new(96_000)),
        "gpt-oss-120b" => Some(ModelInfo::new(96_000)),
        // https://platform.openai.com/docs/models/o3
        "o3" => Some(ModelInfo::new(200_000, 100_000)),
        "o3" => Some(ModelInfo::new(200_000)),

        // https://platform.openai.com/docs/models/o4-mini
        "o4-mini" => Some(ModelInfo::new(200_000, 100_000)),
        "o4-mini" => Some(ModelInfo::new(200_000)),

        // https://platform.openai.com/docs/models/codex-mini-latest
        "codex-mini-latest" => Some(ModelInfo::new(200_000, 100_000)),
        "codex-mini-latest" => Some(ModelInfo::new(200_000)),

        // As of Jun 25, 2025, gpt-4.1 defaults to gpt-4.1-2025-04-14.
        // https://platform.openai.com/docs/models/gpt-4.1
        "gpt-4.1" | "gpt-4.1-2025-04-14" => Some(ModelInfo::new(1_047_576, 32_768)),
        "gpt-4.1" | "gpt-4.1-2025-04-14" => Some(ModelInfo::new(1_047_576)),

        // As of Jun 25, 2025, gpt-4o defaults to gpt-4o-2024-08-06.
        // https://platform.openai.com/docs/models/gpt-4o
        "gpt-4o" | "gpt-4o-2024-08-06" => Some(ModelInfo::new(128_000, 16_384)),
        "gpt-4o" | "gpt-4o-2024-08-06" => Some(ModelInfo::new(128_000)),

        // https://platform.openai.com/docs/models/gpt-4o?snapshot=gpt-4o-2024-05-13
        "gpt-4o-2024-05-13" => Some(ModelInfo::new(128_000, 4_096)),
        "gpt-4o-2024-05-13" => Some(ModelInfo::new(128_000)),

        // https://platform.openai.com/docs/models/gpt-4o?snapshot=gpt-4o-2024-11-20
        "gpt-4o-2024-11-20" => Some(ModelInfo::new(128_000, 16_384)),
        "gpt-4o-2024-11-20" => Some(ModelInfo::new(128_000)),

        // https://platform.openai.com/docs/models/gpt-3.5-turbo
        "gpt-3.5-turbo" => Some(ModelInfo::new(16_385, 4_096)),
        "gpt-3.5-turbo" => Some(ModelInfo::new(16_385)),

        _ if slug.starts_with("gpt-5-codex")
            || slug.starts_with("gpt-5.1-codex")
            || slug.starts_with("gpt-5.1-codex-max") =>
        {
            Some(ModelInfo::new(CONTEXT_WINDOW_272K, MAX_OUTPUT_TOKENS_128K))
            Some(ModelInfo::new(CONTEXT_WINDOW_272K))
        }

        _ if slug.starts_with("gpt-5") => {
            Some(ModelInfo::new(CONTEXT_WINDOW_272K, MAX_OUTPUT_TOKENS_128K))
        }
        _ if slug.starts_with("gpt-5") => Some(ModelInfo::new(CONTEXT_WINDOW_272K)),

        _ if slug.starts_with("codex-") => {
            Some(ModelInfo::new(CONTEXT_WINDOW_272K, MAX_OUTPUT_TOKENS_128K))
        }
        _ if slug.starts_with("codex-") => Some(ModelInfo::new(CONTEXT_WINDOW_272K)),

        _ => None,
    }
@@ -8,6 +8,7 @@ ready‑to‑spawn environment.

pub mod assessment;

use crate::exec::ExecExpiration;
use crate::exec::ExecToolCallOutput;
use crate::exec::SandboxType;
use crate::exec::StdoutStream;
@@ -26,23 +27,45 @@ use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;

#[derive(Clone, Debug)]
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum SandboxPermissions {
    UseDefault,
    RequireEscalated,
}

impl SandboxPermissions {
    pub fn requires_escalated_permissions(self) -> bool {
        matches!(self, SandboxPermissions::RequireEscalated)
    }
}

impl From<bool> for SandboxPermissions {
    fn from(with_escalated_permissions: bool) -> Self {
        if with_escalated_permissions {
            SandboxPermissions::RequireEscalated
        } else {
            SandboxPermissions::UseDefault
        }
    }
}

#[derive(Debug)]
pub struct CommandSpec {
    pub program: String,
    pub args: Vec<String>,
    pub cwd: PathBuf,
    pub env: HashMap<String, String>,
    pub timeout_ms: Option<u64>,
    pub expiration: ExecExpiration,
    pub with_escalated_permissions: Option<bool>,
    pub justification: Option<String>,
}

#[derive(Clone, Debug)]
#[derive(Debug)]
pub struct ExecEnv {
    pub command: Vec<String>,
    pub cwd: PathBuf,
    pub env: HashMap<String, String>,
    pub timeout_ms: Option<u64>,
    pub expiration: ExecExpiration,
    pub sandbox: SandboxType,
    pub with_escalated_permissions: Option<bool>,
    pub justification: Option<String>,
@@ -93,13 +116,13 @@ impl SandboxManager {

    pub(crate) fn transform(
        &self,
        spec: &CommandSpec,
        mut spec: CommandSpec,
        policy: &SandboxPolicy,
        sandbox: SandboxType,
        sandbox_policy_cwd: &Path,
        codex_linux_sandbox_exe: Option<&PathBuf>,
    ) -> Result<ExecEnv, SandboxTransformError> {
        let mut env = spec.env.clone();
        let mut env = spec.env;
        if !policy.has_full_network_access() {
            env.insert(
                CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR.to_string(),
@@ -108,8 +131,8 @@ impl SandboxManager {
        }

        let mut command = Vec::with_capacity(1 + spec.args.len());
        command.push(spec.program.clone());
        command.extend(spec.args.iter().cloned());
        command.push(spec.program);
        command.append(&mut spec.args);

        let (command, sandbox_env, arg0_override) = match sandbox {
            SandboxType::None => (command, HashMap::new(), None),
@@ -154,12 +177,12 @@ impl SandboxManager {

        Ok(ExecEnv {
            command,
            cwd: spec.cwd.clone(),
            cwd: spec.cwd,
            env,
            timeout_ms: spec.timeout_ms,
            expiration: spec.expiration,
            sandbox,
            with_escalated_permissions: spec.with_escalated_permissions,
            justification: spec.justification.clone(),
            justification: spec.justification,
            arg0: arg0_override,
        })
    }
@@ -170,9 +193,9 @@ impl SandboxManager {
}

pub async fn execute_env(
    env: &ExecEnv,
    env: ExecEnv,
    policy: &SandboxPolicy,
    stdout_stream: Option<StdoutStream>,
) -> crate::error::Result<ExecToolCallOutput> {
    execute_exec_env(env.clone(), policy, stdout_stream).await
    execute_exec_env(env, policy, stdout_stream).await
}
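For orientation, a minimal sketch (not part of the diff) of the new SandboxPermissions API, using only the derives and impls shown in the hunk above:

// Hypothetical usage; SandboxPermissions is Copy and PartialEq per the derive above.
let escalated = SandboxPermissions::from(true);
assert!(escalated.requires_escalated_permissions());
assert_eq!(SandboxPermissions::from(false), SandboxPermissions::UseDefault);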
@@ -7,61 +7,41 @@ pub enum ShellType {
    Zsh,
    Bash,
    PowerShell,
    Sh,
    Cmd,
}

#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct ZshShell {
pub struct Shell {
    pub(crate) shell_type: ShellType,
    pub(crate) shell_path: PathBuf,
}

#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct BashShell {
    pub(crate) shell_path: PathBuf,
}

#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct PowerShellConfig {
    pub(crate) shell_path: PathBuf, // Executable name or path, e.g. "pwsh" or "powershell.exe".
}

#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Shell {
    Zsh(ZshShell),
    Bash(BashShell),
    PowerShell(PowerShellConfig),
    Unknown,
}

impl Shell {
    pub fn name(&self) -> Option<String> {
        match self {
            Shell::Zsh(ZshShell { shell_path, .. }) | Shell::Bash(BashShell { shell_path, .. }) => {
                std::path::Path::new(shell_path)
                    .file_name()
                    .map(|s| s.to_string_lossy().to_string())
            }
            Shell::PowerShell(ps) => ps
                .shell_path
                .file_stem()
                .map(|s| s.to_string_lossy().to_string()),
            Shell::Unknown => None,
    pub fn name(&self) -> &'static str {
        match self.shell_type {
            ShellType::Zsh => "zsh",
            ShellType::Bash => "bash",
            ShellType::PowerShell => "powershell",
            ShellType::Sh => "sh",
            ShellType::Cmd => "cmd",
        }
    }

    /// Takes a string of shell and returns the full list of command args to
    /// use with `exec()` to run the shell command.
    pub fn derive_exec_args(&self, command: &str, use_login_shell: bool) -> Vec<String> {
        match self {
            Shell::Zsh(ZshShell { shell_path, .. }) | Shell::Bash(BashShell { shell_path, .. }) => {
        match self.shell_type {
            ShellType::Zsh | ShellType::Bash | ShellType::Sh => {
                let arg = if use_login_shell { "-lc" } else { "-c" };
                vec![
                    shell_path.to_string_lossy().to_string(),
                    self.shell_path.to_string_lossy().to_string(),
                    arg.to_string(),
                    command.to_string(),
                ]
            }
            Shell::PowerShell(ps) => {
                let mut args = vec![ps.shell_path.to_string_lossy().to_string()];
            ShellType::PowerShell => {
                let mut args = vec![self.shell_path.to_string_lossy().to_string()];
                if !use_login_shell {
                    args.push("-NoProfile".to_string());
                }
@@ -70,7 +50,12 @@ impl Shell {
                args.push(command.to_string());
                args
            }
            Shell::Unknown => shlex::split(command).unwrap_or_else(|| vec![command.to_string()]),
            ShellType::Cmd => {
                let mut args = vec![self.shell_path.to_string_lossy().to_string()];
                args.push("/c".to_string());
                args.push(command.to_string());
                args
            }
        }
    }
}
@@ -143,19 +128,34 @@ fn get_shell_path(
    None
}

fn get_zsh_shell(path: Option<&PathBuf>) -> Option<ZshShell> {
fn get_zsh_shell(path: Option<&PathBuf>) -> Option<Shell> {
    let shell_path = get_shell_path(ShellType::Zsh, path, "zsh", vec!["/bin/zsh"]);

    shell_path.map(|shell_path| ZshShell { shell_path })
    shell_path.map(|shell_path| Shell {
        shell_type: ShellType::Zsh,
        shell_path,
    })
}

fn get_bash_shell(path: Option<&PathBuf>) -> Option<BashShell> {
fn get_bash_shell(path: Option<&PathBuf>) -> Option<Shell> {
    let shell_path = get_shell_path(ShellType::Bash, path, "bash", vec!["/bin/bash"]);

    shell_path.map(|shell_path| BashShell { shell_path })
    shell_path.map(|shell_path| Shell {
        shell_type: ShellType::Bash,
        shell_path,
    })
}

fn get_powershell_shell(path: Option<&PathBuf>) -> Option<PowerShellConfig> {
fn get_sh_shell(path: Option<&PathBuf>) -> Option<Shell> {
    let shell_path = get_shell_path(ShellType::Sh, path, "sh", vec!["/bin/sh"]);

    shell_path.map(|shell_path| Shell {
        shell_type: ShellType::Sh,
        shell_path,
    })
}

fn get_powershell_shell(path: Option<&PathBuf>) -> Option<Shell> {
    let shell_path = get_shell_path(
        ShellType::PowerShell,
        path,
@@ -164,26 +164,56 @@ fn get_powershell_shell(path: Option<&PathBuf>) -> Option<PowerShellConfig> {
    )
    .or_else(|| get_shell_path(ShellType::PowerShell, path, "powershell", vec![]));

    shell_path.map(|shell_path| PowerShellConfig { shell_path })
    shell_path.map(|shell_path| Shell {
        shell_type: ShellType::PowerShell,
        shell_path,
    })
}

fn get_cmd_shell(path: Option<&PathBuf>) -> Option<Shell> {
    let shell_path = get_shell_path(ShellType::Cmd, path, "cmd", vec![]);

    shell_path.map(|shell_path| Shell {
        shell_type: ShellType::Cmd,
        shell_path,
    })
}

fn ultimate_fallback_shell() -> Shell {
    if cfg!(windows) {
        Shell {
            shell_type: ShellType::Cmd,
            shell_path: PathBuf::from("cmd.exe"),
        }
    } else {
        Shell {
            shell_type: ShellType::Sh,
            shell_path: PathBuf::from("/bin/sh"),
        }
    }
}

pub fn get_shell_by_model_provided_path(shell_path: &PathBuf) -> Shell {
    detect_shell_type(shell_path)
        .and_then(|shell_type| get_shell(shell_type, Some(shell_path)))
        .unwrap_or(Shell::Unknown)
        .unwrap_or(ultimate_fallback_shell())
}

pub fn get_shell(shell_type: ShellType, path: Option<&PathBuf>) -> Option<Shell> {
    match shell_type {
        ShellType::Zsh => get_zsh_shell(path).map(Shell::Zsh),
        ShellType::Bash => get_bash_shell(path).map(Shell::Bash),
        ShellType::PowerShell => get_powershell_shell(path).map(Shell::PowerShell),
        ShellType::Zsh => get_zsh_shell(path),
        ShellType::Bash => get_bash_shell(path),
        ShellType::PowerShell => get_powershell_shell(path),
        ShellType::Sh => get_sh_shell(path),
        ShellType::Cmd => get_cmd_shell(path),
    }
}

pub fn detect_shell_type(shell_path: &PathBuf) -> Option<ShellType> {
    match shell_path.as_os_str().to_str() {
        Some("zsh") => Some(ShellType::Zsh),
        Some("sh") => Some(ShellType::Sh),
        Some("cmd") => Some(ShellType::Cmd),
        Some("bash") => Some(ShellType::Bash),
        Some("pwsh") => Some(ShellType::PowerShell),
        Some("powershell") => Some(ShellType::PowerShell),
@@ -200,14 +230,29 @@ pub fn detect_shell_type(shell_path: &PathBuf) -> Option<ShellType> {
    }
}

pub async fn default_user_shell() -> Shell {
pub fn default_user_shell() -> Shell {
    default_user_shell_from_path(get_user_shell_path())
}

fn default_user_shell_from_path(user_shell_path: Option<PathBuf>) -> Shell {
    if cfg!(windows) {
        get_shell(ShellType::PowerShell, None).unwrap_or(Shell::Unknown)
        get_shell(ShellType::PowerShell, None).unwrap_or(ultimate_fallback_shell())
    } else {
        get_user_shell_path()
        let user_default_shell = user_shell_path
            .and_then(|shell| detect_shell_type(&shell))
            .and_then(|shell_type| get_shell(shell_type, None))
            .unwrap_or(Shell::Unknown)
            .and_then(|shell_type| get_shell(shell_type, None));

        let shell_with_fallback = if cfg!(target_os = "macos") {
            user_default_shell
                .or_else(|| get_shell(ShellType::Zsh, None))
                .or_else(|| get_shell(ShellType::Bash, None))
        } else {
            user_default_shell
                .or_else(|| get_shell(ShellType::Bash, None))
                .or_else(|| get_shell(ShellType::Zsh, None))
        };

        shell_with_fallback.unwrap_or(ultimate_fallback_shell())
    }
}

@@ -263,6 +308,19 @@ mod detect_shell_type_tests {
            detect_shell_type(&PathBuf::from("/usr/local/bin/pwsh")),
            Some(ShellType::PowerShell)
        );
        assert_eq!(
            detect_shell_type(&PathBuf::from("/bin/sh")),
            Some(ShellType::Sh)
        );
        assert_eq!(detect_shell_type(&PathBuf::from("sh")), Some(ShellType::Sh));
        assert_eq!(
            detect_shell_type(&PathBuf::from("cmd")),
            Some(ShellType::Cmd)
        );
        assert_eq!(
            detect_shell_type(&PathBuf::from("cmd.exe")),
            Some(ShellType::Cmd)
        );
    }
}

@@ -278,10 +336,17 @@ mod tests {
    fn detects_zsh() {
        let zsh_shell = get_shell(ShellType::Zsh, None).unwrap();

        let ZshShell { shell_path } = match zsh_shell {
            Shell::Zsh(zsh_shell) => zsh_shell,
            _ => panic!("expected zsh shell"),
        };
        let shell_path = zsh_shell.shell_path;

        assert_eq!(shell_path, PathBuf::from("/bin/zsh"));
    }

    #[test]
    #[cfg(target_os = "macos")]
    fn fish_fallback_to_zsh() {
        let zsh_shell = default_user_shell_from_path(Some(PathBuf::from("/bin/fish")));

        let shell_path = zsh_shell.shell_path;

        assert_eq!(shell_path, PathBuf::from("/bin/zsh"));
    }
@@ -289,18 +354,60 @@ mod tests {
    #[test]
    fn detects_bash() {
        let bash_shell = get_shell(ShellType::Bash, None).unwrap();
        let BashShell { shell_path } = match bash_shell {
            Shell::Bash(bash_shell) => bash_shell,
            _ => panic!("expected bash shell"),
        };
        let shell_path = bash_shell.shell_path;

        assert!(
            shell_path == PathBuf::from("/bin/bash")
                || shell_path == PathBuf::from("/usr/bin/bash"),
                || shell_path == PathBuf::from("/usr/bin/bash")
                || shell_path == PathBuf::from("/usr/local/bin/bash"),
            "shell path: {shell_path:?}",
        );
    }

    #[test]
    fn detects_sh() {
        let sh_shell = get_shell(ShellType::Sh, None).unwrap();
        let shell_path = sh_shell.shell_path;
        assert!(
            shell_path == PathBuf::from("/bin/sh") || shell_path == PathBuf::from("/usr/bin/sh"),
            "shell path: {shell_path:?}",
        );
    }

    #[test]
    fn can_run_on_shell_test() {
        let cmd = "echo \"Works\"";
        if cfg!(windows) {
            assert!(shell_works(
                get_shell(ShellType::PowerShell, None),
                "Out-String 'Works'",
                true,
            ));
            assert!(shell_works(get_shell(ShellType::Cmd, None), cmd, true,));
            assert!(shell_works(Some(ultimate_fallback_shell()), cmd, true));
        } else {
            assert!(shell_works(Some(ultimate_fallback_shell()), cmd, true));
            assert!(shell_works(get_shell(ShellType::Zsh, None), cmd, false));
            assert!(shell_works(get_shell(ShellType::Bash, None), cmd, true));
            assert!(shell_works(get_shell(ShellType::Sh, None), cmd, true));
        }
    }

    fn shell_works(shell: Option<Shell>, command: &str, required: bool) -> bool {
        if let Some(shell) = shell {
            let args = shell.derive_exec_args(command, false);
            let output = Command::new(args[0].clone())
                .args(&args[1..])
                .output()
                .unwrap();
            assert!(output.status.success());
            assert!(String::from_utf8_lossy(&output.stdout).contains("Works"));
            true
        } else {
            !required
        }
    }

    #[tokio::test]
    async fn test_current_shell_detects_zsh() {
        let shell = Command::new("sh")
@@ -312,10 +419,11 @@ mod tests {
        let shell_path = String::from_utf8_lossy(&shell.stdout).trim().to_string();
        if shell_path.ends_with("/zsh") {
            assert_eq!(
                default_user_shell().await,
                Shell::Zsh(ZshShell {
                default_user_shell(),
                Shell {
                    shell_type: ShellType::Zsh,
                    shell_path: PathBuf::from(shell_path),
                })
                }
            );
        }
    }
@@ -326,11 +434,8 @@ mod tests {
            return;
        }

        let powershell_shell = default_user_shell().await;
        let PowerShellConfig { shell_path } = match powershell_shell {
            Shell::PowerShell(powershell_shell) => powershell_shell,
            _ => panic!("expected powershell shell"),
        };
        let powershell_shell = default_user_shell();
        let shell_path = powershell_shell.shell_path;

        assert!(shell_path.ends_with("pwsh.exe") || shell_path.ends_with("powershell.exe"));
    }
@@ -342,10 +447,7 @@ mod tests {
        }

        let powershell_shell = get_shell(ShellType::PowerShell, None).unwrap();
        let PowerShellConfig { shell_path } = match powershell_shell {
            Shell::PowerShell(powershell_shell) => powershell_shell,
            _ => panic!("expected powershell shell"),
        };
        let shell_path = powershell_shell.shell_path;

        assert!(shell_path.ends_with("pwsh.exe") || shell_path.ends_with("powershell.exe"));
    }
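For orientation, a minimal sketch (not part of the diff) of how the unified Shell struct behaves after this refactor, based only on derive_exec_args and name as defined above:

// Hypothetical usage of the refactored Shell type.
let bash = Shell {
    shell_type: ShellType::Bash,
    shell_path: PathBuf::from("/bin/bash"),
};
assert_eq!(bash.name(), "bash");
// Login shells get "-lc"; non-login shells get "-c".
assert_eq!(
    bash.derive_exec_args("ls -la", true),
    vec!["/bin/bash".to_string(), "-lc".to_string(), "ls -la".to_string()],
);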
@@ -31,6 +31,8 @@ use crate::user_shell_command::user_shell_command_record_item;
use super::SessionTask;
use super::SessionTaskContext;

const USER_SHELL_TIMEOUT_MS: u64 = 60 * 60 * 1000; // 1 hour

#[derive(Clone)]
pub(crate) struct UserShellCommandTask {
    command: String,
@@ -93,7 +95,9 @@ impl SessionTask for UserShellCommandTask {
            command: command.clone(),
            cwd: cwd.clone(),
            env: create_env(&turn_context.shell_environment_policy),
            timeout_ms: None,
            // TODO(zhao-oai): Now that we have ExecExpiration::Cancellation, we
            // should use that instead of an "arbitrarily large" timeout here.
            expiration: USER_SHELL_TIMEOUT_MS.into(),
            sandbox: SandboxType::None,
            with_escalated_permissions: None,
            justification: None,
461
codex-rs/core/src/text_encoding.rs
Normal file
@@ -0,0 +1,461 @@
//! Text encoding detection and conversion utilities for shell output.
//!
//! Windows users frequently run into code pages such as CP1251 or CP866 when invoking commands
//! through VS Code. Those bytes show up as invalid UTF-8 and used to be replaced with the standard
//! Unicode replacement character. We now lean on `chardetng` and `encoding_rs` so we can
//! automatically detect and decode the vast majority of legacy encodings before falling back to
//! lossy UTF-8 decoding.

use chardetng::EncodingDetector;
use encoding_rs::Encoding;
use encoding_rs::IBM866;
use encoding_rs::WINDOWS_1252;

/// Attempts to convert arbitrary bytes to UTF-8 with best-effort encoding detection.
pub fn bytes_to_string_smart(bytes: &[u8]) -> String {
    if bytes.is_empty() {
        return String::new();
    }

    if let Ok(utf8_str) = std::str::from_utf8(bytes) {
        return utf8_str.to_owned();
    }

    let encoding = detect_encoding(bytes);
    decode_bytes(bytes, encoding)
}

// Windows-1252 reassigns a handful of 0x80-0x9F slots to smart punctuation (curly quotes, dashes,
// ™). CP866 uses those *same byte values* for uppercase Cyrillic letters. When chardetng sees shell
// snippets that mix these bytes with ASCII it sometimes guesses IBM866, so “smart quotes” render as
// Cyrillic garbage (“УФЦ”) in VS Code. However, CP866 uppercase tokens are perfectly valid output
// (e.g., `ПРИ test`) so we cannot flip every 0x80-0x9F byte to Windows-1252 either. The compromise
// is to only coerce IBM866 to Windows-1252 when (a) the high bytes are exclusively the punctuation
// values listed below and (b) we spot adjacent ASCII. This targets the real failure case without
// clobbering legitimate Cyrillic text. If another code page has a similar collision, introduce a
// dedicated allowlist (like this one) plus unit tests that capture the actual shell output we want
// to preserve. Windows-1252 byte values for smart punctuation.
const WINDOWS_1252_PUNCT_BYTES: [u8; 8] = [
    0x91, // ‘ (left single quotation mark)
    0x92, // ’ (right single quotation mark)
    0x93, // “ (left double quotation mark)
    0x94, // ” (right double quotation mark)
    0x95, // • (bullet)
    0x96, // – (en dash)
    0x97, // — (em dash)
    0x99, // ™ (trade mark sign)
];

fn detect_encoding(bytes: &[u8]) -> &'static Encoding {
    let mut detector = EncodingDetector::new();
    detector.feed(bytes, true);
    let (encoding, _is_confident) = detector.guess_assess(None, true);

    // chardetng occasionally reports IBM866 for short strings that only contain Windows-1252 “smart
    // punctuation” bytes (0x80-0x9F) because that range maps to Cyrillic letters in IBM866. When
    // those bytes show up alongside an ASCII word (typical shell output: `"“`test), we know the
    // intent was likely CP1252 quotes/dashes. Prefer WINDOWS_1252 in that specific situation so we
    // render the characters users expect instead of Cyrillic junk. References:
    // - Windows-1252 reserving 0x80-0x9F for curly quotes/dashes:
    //   https://en.wikipedia.org/wiki/Windows-1252
    // - CP866 mapping 0x93/0x94/0x96 to Cyrillic letters, so the same bytes show up as “УФЦ” when
    //   mis-decoded: https://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/PC/CP866.TXT
    if encoding == IBM866 && looks_like_windows_1252_punctuation(bytes) {
        return WINDOWS_1252;
    }

    encoding
}

fn decode_bytes(bytes: &[u8], encoding: &'static Encoding) -> String {
    let (decoded, _, had_errors) = encoding.decode(bytes);

    if had_errors {
        return String::from_utf8_lossy(bytes).into_owned();
    }

    decoded.into_owned()
}

/// Detect whether the byte stream looks like Windows-1252 “smart punctuation” wrapped around
/// otherwise-ASCII text.
///
/// Context: IBM866 and Windows-1252 share the 0x80-0x9F slot range. In IBM866 these bytes decode to
/// Cyrillic letters, whereas Windows-1252 maps them to curly quotes and dashes. chardetng can guess
/// IBM866 for short snippets that only contain those bytes, which turns shell output such as
/// `“test”` into unreadable Cyrillic. To avoid that, we treat inputs comprising a handful of bytes
/// from the problematic range plus ASCII letters as CP1252 punctuation. We deliberately do *not*
/// cap how many of those punctuation bytes we accept: VS Code frequently prints several quoted
/// phrases (e.g., `"foo" – "bar"`), and truncating the count would once again mis-decode those as
/// Cyrillic. If we discover additional encodings with overlapping byte ranges, prefer adding
/// encoding-specific byte allowlists like `WINDOWS_1252_PUNCT` and tests that exercise real-world
/// shell snippets.
fn looks_like_windows_1252_punctuation(bytes: &[u8]) -> bool {
    let mut saw_extended_punctuation = false;
    let mut saw_ascii_word = false;

    for &byte in bytes {
        if byte >= 0xA0 {
            return false;
        }
        if (0x80..=0x9F).contains(&byte) {
            if !is_windows_1252_punct(byte) {
                return false;
            }
            saw_extended_punctuation = true;
        }
        if byte.is_ascii_alphabetic() {
            saw_ascii_word = true;
        }
    }

    saw_extended_punctuation && saw_ascii_word
}

fn is_windows_1252_punct(byte: u8) -> bool {
    WINDOWS_1252_PUNCT_BYTES.contains(&byte)
}

#[cfg(test)]
mod tests {
    use super::*;
    use encoding_rs::BIG5;
    use encoding_rs::EUC_KR;
    use encoding_rs::GBK;
    use encoding_rs::ISO_8859_2;
    use encoding_rs::ISO_8859_3;
    use encoding_rs::ISO_8859_4;
    use encoding_rs::ISO_8859_5;
    use encoding_rs::ISO_8859_6;
    use encoding_rs::ISO_8859_7;
    use encoding_rs::ISO_8859_8;
    use encoding_rs::ISO_8859_10;
    use encoding_rs::ISO_8859_13;
    use encoding_rs::SHIFT_JIS;
    use encoding_rs::WINDOWS_874;
    use encoding_rs::WINDOWS_1250;
    use encoding_rs::WINDOWS_1251;
    use encoding_rs::WINDOWS_1253;
    use encoding_rs::WINDOWS_1254;
    use encoding_rs::WINDOWS_1255;
    use encoding_rs::WINDOWS_1256;
    use encoding_rs::WINDOWS_1257;
    use encoding_rs::WINDOWS_1258;
    use pretty_assertions::assert_eq;

    #[test]
    fn test_utf8_passthrough() {
        // Fast path: when UTF-8 is valid we should avoid copies and return as-is.
        let utf8_text = "Hello, мир! 世界";
        let bytes = utf8_text.as_bytes();
        assert_eq!(bytes_to_string_smart(bytes), utf8_text);
    }

    #[test]
    fn test_cp1251_russian_text() {
        // Cyrillic text emitted by PowerShell/WSL in CP1251 should decode cleanly.
        let bytes = b"\xEF\xF0\xE8\xEC\xE5\xF0"; // "пример" encoded with Windows-1251
        assert_eq!(bytes_to_string_smart(bytes), "пример");
    }

    #[test]
    fn test_cp1251_privet_word() {
        // Regression: CP1251 words like "Привет" must not be mis-identified as Windows-1252.
        let bytes = b"\xCF\xF0\xE8\xE2\xE5\xF2"; // "Привет" encoded with Windows-1251
        assert_eq!(bytes_to_string_smart(bytes), "Привет");
    }

    #[test]
    fn test_koi8_r_privet_word() {
        // KOI8-R output should decode to the original Cyrillic as well.
        let bytes = b"\xF0\xD2\xC9\xD7\xC5\xD4"; // "Привет" encoded with KOI8-R
        assert_eq!(bytes_to_string_smart(bytes), "Привет");
    }

    #[test]
    fn test_cp866_russian_text() {
        // Legacy consoles (cmd.exe) commonly emit CP866 bytes for Cyrillic content.
        let bytes = b"\xAF\xE0\xA8\xAC\xA5\xE0"; // "пример" encoded with CP866
        assert_eq!(bytes_to_string_smart(bytes), "пример");
    }

    #[test]
    fn test_cp866_uppercase_text() {
        // Ensure the IBM866 heuristic still returns IBM866 for uppercase-only words.
        let bytes = b"\x8F\x90\x88"; // "ПРИ" encoded with CP866 uppercase letters
        assert_eq!(bytes_to_string_smart(bytes), "ПРИ");
    }

    #[test]
    fn test_cp866_uppercase_followed_by_ascii() {
        // Regression test: uppercase CP866 tokens next to ASCII text should not be treated as
        // CP1252.
        let bytes = b"\x8F\x90\x88 test"; // "ПРИ test" encoded with CP866 uppercase letters followed by ASCII
        assert_eq!(bytes_to_string_smart(bytes), "ПРИ test");
    }

    #[test]
    fn test_windows_1252_quotes() {
        // Smart detection should map Windows-1252 punctuation into proper Unicode.
        let bytes = b"\x93\x94test";
        assert_eq!(bytes_to_string_smart(bytes), "\u{201C}\u{201D}test");
    }

    #[test]
    fn test_windows_1252_multiple_quotes() {
        // Longer snippets of punctuation (e.g., “foo” – “bar”) should still flip to CP1252.
        let bytes = b"\x93foo\x94 \x96 \x93bar\x94";
        assert_eq!(
            bytes_to_string_smart(bytes),
            "\u{201C}foo\u{201D} \u{2013} \u{201C}bar\u{201D}"
        );
    }

    #[test]
    fn test_windows_1252_privet_gibberish_is_preserved() {
        // Windows-1252 cannot encode Cyrillic; if the input literally contains "ПÑ..." we should not "fix" it.
        let bytes = "Привет".as_bytes();
        assert_eq!(bytes_to_string_smart(bytes), "Привет");
    }

    #[test]
    fn test_iso8859_1_latin_text() {
        // ISO-8859-1 (code page 28591) is the Latin segment used by LatArCyrHeb.
        // encoding_rs unifies ISO-8859-1 with Windows-1252, so reuse that constant here.
        let (encoded, _, had_errors) = WINDOWS_1252.encode("Hello");
        assert!(!had_errors, "failed to encode Latin sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "Hello");
    }

    #[test]
    fn test_iso8859_2_central_european_text() {
        // ISO-8859-2 (code page 28592) covers additional Central European glyphs.
        let (encoded, _, had_errors) = ISO_8859_2.encode("Příliš žluťoučký kůň");
        assert!(!had_errors, "failed to encode ISO-8859-2 sample");
        assert_eq!(
            bytes_to_string_smart(encoded.as_ref()),
            "Příliš žluťoučký kůň"
        );
    }

    #[test]
    fn test_iso8859_3_south_europe_text() {
        // ISO-8859-3 (code page 28593) adds support for Maltese/Esperanto letters.
        // chardetng rarely distinguishes ISO-8859-3 from neighboring Latin code pages, so we rely on
        // an ASCII-only sample to ensure round-tripping still succeeds.
        let (encoded, _, had_errors) = ISO_8859_3.encode("Esperanto and Maltese");
        assert!(!had_errors, "failed to encode ISO-8859-3 sample");
        assert_eq!(
            bytes_to_string_smart(encoded.as_ref()),
            "Esperanto and Maltese"
        );
    }

    #[test]
    fn test_iso8859_4_baltic_text() {
        // ISO-8859-4 (code page 28594) targets the Baltic/Nordic repertoire.
        let sample = "Šis ir rakstzīmju kodēšanas tests. Dažās valodās, kurās tiek \
            izmantotas latīņu valodas burti, lēmuma pieņemšanai mums ir nepieciešams \
            vairāk ieguldījuma.";
        let (encoded, _, had_errors) = ISO_8859_4.encode(sample);
        assert!(!had_errors, "failed to encode ISO-8859-4 sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), sample);
    }

    #[test]
    fn test_iso8859_5_cyrillic_text() {
        // ISO-8859-5 (code page 28595) covers the Cyrillic portion.
        let (encoded, _, had_errors) = ISO_8859_5.encode("Привет");
        assert!(!had_errors, "failed to encode Cyrillic sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "Привет");
    }

    #[test]
    fn test_iso8859_6_arabic_text() {
        // ISO-8859-6 (code page 28596) covers the Arabic glyphs.
        let (encoded, _, had_errors) = ISO_8859_6.encode("مرحبا");
        assert!(!had_errors, "failed to encode Arabic sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "مرحبا");
    }

    #[test]
    fn test_iso8859_7_greek_text() {
        // ISO-8859-7 (code page 28597) is used for Greek locales.
        let (encoded, _, had_errors) = ISO_8859_7.encode("Καλημέρα");
        assert!(!had_errors, "failed to encode ISO-8859-7 sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "Καλημέρα");
    }

    #[test]
    fn test_iso8859_8_hebrew_text() {
        // ISO-8859-8 (code page 28598) covers the Hebrew glyphs.
        let (encoded, _, had_errors) = ISO_8859_8.encode("שלום");
        assert!(!had_errors, "failed to encode Hebrew sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "שלום");
    }

    #[test]
    fn test_iso8859_9_turkish_text() {
        // ISO-8859-9 (code page 28599) mirrors Latin-1 but inserts Turkish letters.
        // encoding_rs exposes the equivalent Windows-1254 mapping.
        let (encoded, _, had_errors) = WINDOWS_1254.encode("İstanbul");
        assert!(!had_errors, "failed to encode ISO-8859-9 sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "İstanbul");
    }

    #[test]
    fn test_iso8859_10_nordic_text() {
        // ISO-8859-10 (code page 28600) adds additional Nordic letters.
        let sample = "Þetta er prófun fyrir Ægir og Øystein.";
        let (encoded, _, had_errors) = ISO_8859_10.encode(sample);
        assert!(!had_errors, "failed to encode ISO-8859-10 sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), sample);
    }

    #[test]
    fn test_iso8859_11_thai_text() {
        // ISO-8859-11 (code page 28601) mirrors TIS-620 / Windows-874 for Thai.
        let sample = "ภาษาไทยสำหรับการทดสอบ ISO-8859-11";
        // encoding_rs exposes the equivalent Windows-874 encoding, so use that constant.
        let (encoded, _, had_errors) = WINDOWS_874.encode(sample);
        assert!(!had_errors, "failed to encode ISO-8859-11 sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), sample);
    }

    // ISO-8859-12 was never standardized, and encodings 14–16 cannot be distinguished reliably
    // without the heuristics we removed (chardetng generally reports neighboring Latin pages), so
    // we intentionally omit coverage for those slots until the detector can identify them.

    #[test]
    fn test_iso8859_13_baltic_text() {
        // ISO-8859-13 (code page 28603) is common across Baltic languages.
        let (encoded, _, had_errors) = ISO_8859_13.encode("Sveiki");
        assert!(!had_errors, "failed to encode ISO-8859-13 sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "Sveiki");
    }

    #[test]
    fn test_windows_1250_central_european_text() {
        let (encoded, _, had_errors) = WINDOWS_1250.encode("Příliš žluťoučký kůň");
        assert!(!had_errors, "failed to encode Central European sample");
        assert_eq!(
            bytes_to_string_smart(encoded.as_ref()),
            "Příliš žluťoučký kůň"
        );
    }

    #[test]
    fn test_windows_1251_encoded_text() {
        let (encoded, _, had_errors) = WINDOWS_1251.encode("Привет из Windows-1251");
        assert!(!had_errors, "failed to encode Windows-1251 sample");
        assert_eq!(
            bytes_to_string_smart(encoded.as_ref()),
            "Привет из Windows-1251"
        );
    }

    #[test]
    fn test_windows_1253_greek_text() {
        let (encoded, _, had_errors) = WINDOWS_1253.encode("Γειά σου");
        assert!(!had_errors, "failed to encode Greek sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "Γειά σου");
    }

    #[test]
    fn test_windows_1254_turkish_text() {
        let (encoded, _, had_errors) = WINDOWS_1254.encode("İstanbul");
        assert!(!had_errors, "failed to encode Turkish sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "İstanbul");
    }

    #[test]
    fn test_windows_1255_hebrew_text() {
        let (encoded, _, had_errors) = WINDOWS_1255.encode("שלום");
        assert!(!had_errors, "failed to encode Windows-1255 Hebrew sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "שלום");
    }

    #[test]
    fn test_windows_1256_arabic_text() {
        let (encoded, _, had_errors) = WINDOWS_1256.encode("مرحبا");
        assert!(!had_errors, "failed to encode Windows-1256 Arabic sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "مرحبا");
    }

    #[test]
    fn test_windows_1257_baltic_text() {
        let (encoded, _, had_errors) = WINDOWS_1257.encode("Pērkons");
        assert!(!had_errors, "failed to encode Baltic sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "Pērkons");
    }

    #[test]
    fn test_windows_1258_vietnamese_text() {
        let (encoded, _, had_errors) = WINDOWS_1258.encode("Xin chào");
        assert!(!had_errors, "failed to encode Vietnamese sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "Xin chào");
    }

    #[test]
    fn test_windows_874_thai_text() {
        let (encoded, _, had_errors) = WINDOWS_874.encode("สวัสดีครับ นี่คือการทดสอบภาษาไทย");
        assert!(!had_errors, "failed to encode Thai sample");
        assert_eq!(
            bytes_to_string_smart(encoded.as_ref()),
            "สวัสดีครับ นี่คือการทดสอบภาษาไทย"
        );
    }

    #[test]
    fn test_windows_932_shift_jis_text() {
        let (encoded, _, had_errors) = SHIFT_JIS.encode("こんにちは");
        assert!(!had_errors, "failed to encode Shift-JIS sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "こんにちは");
    }

    #[test]
    fn test_windows_936_gbk_text() {
        let (encoded, _, had_errors) = GBK.encode("你好,世界,这是一个测试");
        assert!(!had_errors, "failed to encode GBK sample");
        assert_eq!(
            bytes_to_string_smart(encoded.as_ref()),
            "你好,世界,这是一个测试"
        );
    }

    #[test]
    fn test_windows_949_korean_text() {
        let (encoded, _, had_errors) = EUC_KR.encode("안녕하세요");
        assert!(!had_errors, "failed to encode Korean sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "안녕하세요");
    }

    #[test]
    fn test_windows_950_big5_text() {
        let (encoded, _, had_errors) = BIG5.encode("繁體");
        assert!(!had_errors, "failed to encode Big5 sample");
        assert_eq!(bytes_to_string_smart(encoded.as_ref()), "繁體");
    }

    #[test]
    fn test_latin1_cafe() {
        // Latin-1 bytes remain common in Western-European locales; decode them directly.
        let bytes = b"caf\xE9"; // codespell:ignore caf
        assert_eq!(bytes_to_string_smart(bytes), "café");
    }

    #[test]
    fn test_preserves_ansi_sequences() {
        // ANSI escape sequences should survive regardless of the detected encoding.
        let bytes = b"\x1b[31mred\x1b[0m";
        assert_eq!(bytes_to_string_smart(bytes), "\x1b[31mred\x1b[0m");
    }

    #[test]
    fn test_fallback_to_lossy() {
        // Completely invalid sequences fall back to the old lossy behavior.
        let invalid_bytes = [0xFF, 0xFE, 0xFD];
        let result = bytes_to_string_smart(&invalid_bytes);
        assert_eq!(result, String::from_utf8_lossy(&invalid_bytes));
    }
}
@@ -179,15 +179,17 @@ impl ToolEmitter {
                        ctx.turn,
                        EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
                            call_id: ctx.call_id.to_string(),
                            turn_id: ctx.turn.sub_id.clone(),
                            auto_approved: *auto_approved,
                            changes: changes.clone(),
                        }),
                    )
                    .await;
            }
            (Self::ApplyPatch { .. }, ToolEventStage::Success(output)) => {
            (Self::ApplyPatch { changes, .. }, ToolEventStage::Success(output)) => {
                emit_patch_end(
                    ctx,
                    changes.clone(),
                    output.stdout.text.clone(),
                    output.stderr.text.clone(),
                    output.exit_code == 0,
@@ -195,11 +197,12 @@ impl ToolEmitter {
                .await;
            }
            (
                Self::ApplyPatch { .. },
                Self::ApplyPatch { changes, .. },
                ToolEventStage::Failure(ToolEventFailure::Output(output)),
            ) => {
                emit_patch_end(
                    ctx,
                    changes.clone(),
                    output.stdout.text.clone(),
                    output.stderr.text.clone(),
                    output.exit_code == 0,
@@ -207,10 +210,17 @@ impl ToolEmitter {
                .await;
            }
            (
                Self::ApplyPatch { .. },
                Self::ApplyPatch { changes, .. },
                ToolEventStage::Failure(ToolEventFailure::Message(message)),
            ) => {
                emit_patch_end(ctx, String::new(), (*message).to_string(), false).await;
                emit_patch_end(
                    ctx,
                    changes.clone(),
                    String::new(),
                    (*message).to_string(),
                    false,
                )
                .await;
            }
            (
                Self::UnifiedExec {
@@ -409,15 +419,23 @@ async fn emit_exec_end(
    .await;
}

async fn emit_patch_end(ctx: ToolEventCtx<'_>, stdout: String, stderr: String, success: bool) {
async fn emit_patch_end(
    ctx: ToolEventCtx<'_>,
    changes: HashMap<PathBuf, FileChange>,
    stdout: String,
    stderr: String,
    success: bool,
) {
    ctx.session
        .send_event(
            ctx.turn,
            EventMsg::PatchApplyEnd(PatchApplyEndEvent {
                call_id: ctx.call_id.to_string(),
                turn_id: ctx.turn.sub_id.clone(),
                stdout,
                stderr,
                success,
                changes,
            }),
        )
        .await;
@@ -9,9 +9,11 @@ use crate::apply_patch::convert_apply_patch_to_protocol;
use crate::codex::TurnContext;
use crate::exec::ExecParams;
use crate::exec_env::create_env;
use crate::exec_policy::create_approval_requirement_for_command;
use crate::function_tool::FunctionCallError;
use crate::is_safe_command::is_known_safe_command;
use crate::protocol::ExecCommandSource;
use crate::sandboxing::SandboxPermissions;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolOutput;
use crate::tools::context::ToolPayload;
@@ -35,7 +37,7 @@ impl ShellHandler {
        ExecParams {
            command: params.command,
            cwd: turn_context.resolve_path(params.workdir.clone()),
            timeout_ms: params.timeout_ms,
            expiration: params.timeout_ms.into(),
            env: create_env(&turn_context.shell_environment_policy),
            with_escalated_permissions: params.with_escalated_permissions,
            justification: params.justification,
@@ -57,7 +59,7 @@ impl ShellCommandHandler {
        ExecParams {
            command,
            cwd: turn_context.resolve_path(params.workdir.clone()),
            timeout_ms: params.timeout_ms,
            expiration: params.timeout_ms.into(),
            env: create_env(&turn_context.shell_environment_policy),
            with_escalated_permissions: params.with_escalated_permissions,
            justification: params.justification,
@@ -241,7 +243,7 @@ impl ShellHandler {
        let req = ApplyPatchRequest {
            patch: apply.action.patch.clone(),
            cwd: apply.action.cwd.clone(),
            timeout_ms: exec_params.timeout_ms,
            timeout_ms: exec_params.expiration.timeout_ms(),
            user_explicitly_approved: apply.user_explicitly_approved_this_action,
            codex_exe: turn.codex_linux_sandbox_exe.clone(),
        };
@@ -298,10 +300,17 @@ impl ShellHandler {
        let req = ShellRequest {
            command: exec_params.command.clone(),
            cwd: exec_params.cwd.clone(),
            timeout_ms: exec_params.timeout_ms,
            timeout_ms: exec_params.expiration.timeout_ms(),
            env: exec_params.env.clone(),
            with_escalated_permissions: exec_params.with_escalated_permissions,
            justification: exec_params.justification.clone(),
            approval_requirement: create_approval_requirement_for_command(
                &turn.exec_policy,
                &exec_params.command,
                turn.approval_policy,
                &turn.sandbox_policy,
                SandboxPermissions::from(exec_params.with_escalated_permissions.unwrap_or(false)),
            ),
        };
        let mut orchestrator = ToolOrchestrator::new();
        let mut runtime = ShellRuntime::new();
@@ -329,29 +338,30 @@ mod tests {
    use std::path::PathBuf;

    use crate::is_safe_command::is_known_safe_command;
    use crate::shell::BashShell;
    use crate::shell::PowerShellConfig;
    use crate::shell::Shell;
    use crate::shell::ZshShell;
    use crate::shell::ShellType;

    /// The logic for is_known_safe_command() has heuristics for known shells,
    /// so we must ensure the commands generated by [ShellCommandHandler] can be
    /// recognized as safe if the `command` is safe.
    #[test]
    fn commands_generated_by_shell_command_handler_can_be_matched_by_is_known_safe_command() {
        let bash_shell = Shell::Bash(BashShell {
        let bash_shell = Shell {
            shell_type: ShellType::Bash,
            shell_path: PathBuf::from("/bin/bash"),
        });
        };
        assert_safe(&bash_shell, "ls -la");

        let zsh_shell = Shell::Zsh(ZshShell {
        let zsh_shell = Shell {
            shell_type: ShellType::Zsh,
            shell_path: PathBuf::from("/bin/zsh"),
        });
        };
        assert_safe(&zsh_shell, "ls -la");

        let powershell = Shell::PowerShell(PowerShellConfig {
        let powershell = Shell {
            shell_type: ShellType::PowerShell,
            shell_path: PathBuf::from("pwsh.exe"),
        });
        };
        assert_safe(&powershell, "ls -Name");
    }
@@ -11,11 +11,13 @@ use crate::error::get_error_message_ui;
use crate::exec::ExecToolCallOutput;
use crate::sandboxing::SandboxManager;
use crate::tools::sandboxing::ApprovalCtx;
use crate::tools::sandboxing::ApprovalRequirement;
use crate::tools::sandboxing::ProvidesSandboxRetryData;
use crate::tools::sandboxing::SandboxAttempt;
use crate::tools::sandboxing::ToolCtx;
use crate::tools::sandboxing::ToolError;
use crate::tools::sandboxing::ToolRuntime;
use crate::tools::sandboxing::default_approval_requirement;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::ReviewDecision;

@@ -49,40 +51,52 @@ impl ToolOrchestrator {
        let otel_cfg = codex_otel::otel_event_manager::ToolDecisionSource::Config;

        // 1) Approval
        let needs_initial_approval =
            tool.wants_initial_approval(req, approval_policy, &turn_ctx.sandbox_policy);
        let mut already_approved = false;

        if needs_initial_approval {
            let mut risk = None;

            if let Some(metadata) = req.sandbox_retry_data() {
                risk = tool_ctx
                    .session
                    .assess_sandbox_command(turn_ctx, &tool_ctx.call_id, &metadata.command, None)
                    .await;
        let requirement = tool.approval_requirement(req).unwrap_or_else(|| {
            default_approval_requirement(approval_policy, &turn_ctx.sandbox_policy)
        });
        match requirement {
            ApprovalRequirement::Skip => {
                otel.tool_decision(otel_tn, otel_ci, ReviewDecision::Approved, otel_cfg);
            }
            ApprovalRequirement::Forbidden { reason } => {
                return Err(ToolError::Rejected(reason));
            }
            ApprovalRequirement::NeedsApproval { reason } => {
                let mut risk = None;

            let approval_ctx = ApprovalCtx {
                session: tool_ctx.session,
                turn: turn_ctx,
                call_id: &tool_ctx.call_id,
                retry_reason: None,
                risk,
            };
            let decision = tool.start_approval_async(req, approval_ctx).await;

            otel.tool_decision(otel_tn, otel_ci, decision, otel_user.clone());

            match decision {
                ReviewDecision::Denied | ReviewDecision::Abort => {
                    return Err(ToolError::Rejected("rejected by user".to_string()));
                if let Some(metadata) = req.sandbox_retry_data() {
                    risk = tool_ctx
                        .session
                        .assess_sandbox_command(
                            turn_ctx,
                            &tool_ctx.call_id,
                            &metadata.command,
                            None,
                        )
                        .await;
                }
                ReviewDecision::Approved | ReviewDecision::ApprovedForSession => {}

                let approval_ctx = ApprovalCtx {
                    session: tool_ctx.session,
                    turn: turn_ctx,
                    call_id: &tool_ctx.call_id,
                    retry_reason: reason,
                    risk,
                };
                let decision = tool.start_approval_async(req, approval_ctx).await;

                otel.tool_decision(otel_tn, otel_ci, decision, otel_user.clone());

                match decision {
                    ReviewDecision::Denied | ReviewDecision::Abort => {
                        return Err(ToolError::Rejected("rejected by user".to_string()));
                    }
                    ReviewDecision::Approved | ReviewDecision::ApprovedForSession => {}
                }
                already_approved = true;
            }
            already_approved = true;
        } else {
            otel.tool_decision(otel_tn, otel_ci, ReviewDecision::Approved, otel_cfg);
        }

        // 2) First attempt under the selected sandbox.
@@ -67,7 +67,7 @@ impl ApplyPatchRuntime {
            program,
            args: vec![CODEX_APPLY_PATCH_ARG1.to_string(), req.patch.clone()],
            cwd: req.cwd.clone(),
            timeout_ms: req.timeout_ms,
            expiration: req.timeout_ms.into(),
            // Run apply_patch with a minimal environment for determinism and to avoid leaks.
            env: HashMap::new(),
            with_escalated_permissions: None,
@@ -153,9 +153,9 @@ impl ToolRuntime<ApplyPatchRequest, ExecToolCallOutput> for ApplyPatchRuntime {
    ) -> Result<ExecToolCallOutput, ToolError> {
        let spec = Self::build_command_spec(req)?;
        let env = attempt
            .env_for(&spec)
            .env_for(spec)
            .map_err(|err| ToolError::Codex(err.into()))?;
        let out = execute_env(&env, attempt.policy, Self::stdout_stream(ctx))
        let out = execute_env(env, attempt.policy, Self::stdout_stream(ctx))
            .await
            .map_err(ToolError::Codex)?;
        Ok(out)
@@ -4,6 +4,7 @@ Module: runtimes
 Concrete ToolRuntime implementations for specific tools. Each runtime stays
 small and focused and reuses the orchestrator for approvals + sandbox + retry.
 */
+use crate::exec::ExecExpiration;
 use crate::sandboxing::CommandSpec;
 use crate::tools::sandboxing::ToolError;
 use std::collections::HashMap;
@@ -19,7 +20,7 @@ pub(crate) fn build_command_spec(
     command: &[String],
     cwd: &Path,
     env: &HashMap<String, String>,
-    timeout_ms: Option<u64>,
+    expiration: ExecExpiration,
     with_escalated_permissions: Option<bool>,
     justification: Option<String>,
 ) -> Result<CommandSpec, ToolError> {
@@ -31,7 +32,7 @@ pub(crate) fn build_command_spec(
         args: args.to_vec(),
         cwd: cwd.to_path_buf(),
         env: env.clone(),
-        timeout_ms,
+        expiration,
         with_escalated_permissions,
         justification,
     })

@@ -4,13 +4,12 @@ Runtime: shell
 Executes shell requests under the orchestrator: asks for approval when needed,
 builds a CommandSpec, and runs it under the current SandboxAttempt.
 */
-use crate::command_safety::is_dangerous_command::requires_initial_appoval;
 use crate::exec::ExecToolCallOutput;
-use crate::protocol::SandboxPolicy;
 use crate::sandboxing::execute_env;
 use crate::tools::runtimes::build_command_spec;
 use crate::tools::sandboxing::Approvable;
 use crate::tools::sandboxing::ApprovalCtx;
+use crate::tools::sandboxing::ApprovalRequirement;
 use crate::tools::sandboxing::ProvidesSandboxRetryData;
 use crate::tools::sandboxing::SandboxAttempt;
 use crate::tools::sandboxing::SandboxRetryData;
@@ -20,7 +19,6 @@ use crate::tools::sandboxing::ToolCtx;
 use crate::tools::sandboxing::ToolError;
 use crate::tools::sandboxing::ToolRuntime;
 use crate::tools::sandboxing::with_cached_approval;
-use codex_protocol::protocol::AskForApproval;
 use codex_protocol::protocol::ReviewDecision;
 use futures::future::BoxFuture;
 use std::path::PathBuf;
@@ -33,6 +31,7 @@ pub struct ShellRequest {
     pub env: std::collections::HashMap<String, String>,
     pub with_escalated_permissions: Option<bool>,
     pub justification: Option<String>,
+    pub approval_requirement: ApprovalRequirement,
 }

 impl ProvidesSandboxRetryData for ShellRequest {
@@ -114,18 +113,8 @@ impl Approvable<ShellRequest> for ShellRuntime {
         })
     }

-    fn wants_initial_approval(
-        &self,
-        req: &ShellRequest,
-        policy: AskForApproval,
-        sandbox_policy: &SandboxPolicy,
-    ) -> bool {
-        requires_initial_appoval(
-            policy,
-            sandbox_policy,
-            &req.command,
-            req.with_escalated_permissions.unwrap_or(false),
-        )
+    fn approval_requirement(&self, req: &ShellRequest) -> Option<ApprovalRequirement> {
+        Some(req.approval_requirement.clone())
     }

     fn wants_escalated_first_attempt(&self, req: &ShellRequest) -> bool {
@@ -144,14 +133,14 @@ impl ToolRuntime<ShellRequest, ExecToolCallOutput> for ShellRuntime {
             &req.command,
             &req.cwd,
             &req.env,
-            req.timeout_ms,
+            req.timeout_ms.into(),
             req.with_escalated_permissions,
             req.justification.clone(),
         )?;
         let env = attempt
-            .env_for(&spec)
+            .env_for(spec)
             .map_err(|err| ToolError::Codex(err.into()))?;
-        let out = execute_env(&env, attempt.policy, Self::stdout_stream(ctx))
+        let out = execute_env(env, attempt.policy, Self::stdout_stream(ctx))
             .await
             .map_err(ToolError::Codex)?;
         Ok(out)

@@ -1,4 +1,3 @@
-use crate::command_safety::is_dangerous_command::requires_initial_appoval;
 /*
 Runtime: unified exec

@@ -7,9 +6,11 @@ the session manager to spawn PTYs once an ExecEnv is prepared.
 */
 use crate::error::CodexErr;
 use crate::error::SandboxErr;
+use crate::exec::ExecExpiration;
 use crate::tools::runtimes::build_command_spec;
 use crate::tools::sandboxing::Approvable;
 use crate::tools::sandboxing::ApprovalCtx;
+use crate::tools::sandboxing::ApprovalRequirement;
 use crate::tools::sandboxing::ProvidesSandboxRetryData;
 use crate::tools::sandboxing::SandboxAttempt;
 use crate::tools::sandboxing::SandboxRetryData;
@@ -22,9 +23,7 @@ use crate::tools::sandboxing::with_cached_approval;
 use crate::unified_exec::UnifiedExecError;
 use crate::unified_exec::UnifiedExecSession;
 use crate::unified_exec::UnifiedExecSessionManager;
-use codex_protocol::protocol::AskForApproval;
 use codex_protocol::protocol::ReviewDecision;
-use codex_protocol::protocol::SandboxPolicy;
 use futures::future::BoxFuture;
 use std::collections::HashMap;
 use std::path::PathBuf;
@@ -36,6 +35,7 @@ pub struct UnifiedExecRequest {
     pub env: HashMap<String, String>,
     pub with_escalated_permissions: Option<bool>,
     pub justification: Option<String>,
+    pub approval_requirement: ApprovalRequirement,
 }

 impl ProvidesSandboxRetryData for UnifiedExecRequest {
@@ -65,6 +65,7 @@ impl UnifiedExecRequest {
         env: HashMap<String, String>,
         with_escalated_permissions: Option<bool>,
         justification: Option<String>,
+        approval_requirement: ApprovalRequirement,
     ) -> Self {
         Self {
             command,
@@ -72,6 +73,7 @@ impl UnifiedExecRequest {
             env,
             with_escalated_permissions,
             justification,
+            approval_requirement,
         }
     }
 }
@@ -129,18 +131,8 @@ impl Approvable<UnifiedExecRequest> for UnifiedExecRuntime<'_> {
         })
     }

-    fn wants_initial_approval(
-        &self,
-        req: &UnifiedExecRequest,
-        policy: AskForApproval,
-        sandbox_policy: &SandboxPolicy,
-    ) -> bool {
-        requires_initial_appoval(
-            policy,
-            sandbox_policy,
-            &req.command,
-            req.with_escalated_permissions.unwrap_or(false),
-        )
+    fn approval_requirement(&self, req: &UnifiedExecRequest) -> Option<ApprovalRequirement> {
+        Some(req.approval_requirement.clone())
     }

     fn wants_escalated_first_attempt(&self, req: &UnifiedExecRequest) -> bool {
@@ -159,13 +151,13 @@ impl<'a> ToolRuntime<UnifiedExecRequest, UnifiedExecSession> for UnifiedExecRunt
             &req.command,
             &req.cwd,
             &req.env,
-            None,
+            ExecExpiration::DefaultTimeout,
             req.with_escalated_permissions,
             req.justification.clone(),
         )
         .map_err(|_| ToolError::Rejected("missing command line for PTY".to_string()))?;
         let exec_env = attempt
-            .env_for(&spec)
+            .env_for(spec)
             .map_err(|err| ToolError::Codex(err.into()))?;
         self.manager
             .open_session_with_exec_env(&exec_env)

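Illustrative only, not part of the diff: after this change, callers decide the approval story up front and attach it to the request. A minimal construction sketch — the leading parameters (command, cwd) and their order are assumed from the fields shown above, not confirmed by this hunk:

    let req = UnifiedExecRequest::new(
        vec!["bash".to_string(), "-lc".to_string(), "ls".to_string()], // command
        PathBuf::from("/workspace"),                                   // cwd (assumed parameter)
        HashMap::new(),                                                // env for the PTY
        None,                                                          // with_escalated_permissions
        None,                                                          // justification
        ApprovalRequirement::NeedsApproval { reason: None },           // decided by the caller
    );
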
@@ -86,6 +86,37 @@ pub(crate) struct ApprovalCtx<'a> {
     pub risk: Option<SandboxCommandAssessment>,
 }

+// Specifies what tool orchestrator should do with a given tool call.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum ApprovalRequirement {
+    /// No approval required for this tool call
+    Skip,
+    /// Approval required for this tool call
+    NeedsApproval { reason: Option<String> },
+    /// Execution forbidden for this tool call
+    Forbidden { reason: String },
+}
+
+/// - Never, OnFailure: do not ask
+/// - OnRequest: ask unless sandbox policy is DangerFullAccess
+/// - UnlessTrusted: always ask
+pub(crate) fn default_approval_requirement(
+    policy: AskForApproval,
+    sandbox_policy: &SandboxPolicy,
+) -> ApprovalRequirement {
+    let needs_approval = match policy {
+        AskForApproval::Never | AskForApproval::OnFailure => false,
+        AskForApproval::OnRequest => !matches!(sandbox_policy, SandboxPolicy::DangerFullAccess),
+        AskForApproval::UnlessTrusted => true,
+    };
+
+    if needs_approval {
+        ApprovalRequirement::NeedsApproval { reason: None }
+    } else {
+        ApprovalRequirement::Skip
+    }
+}
+
 pub(crate) trait Approvable<Req> {
     type ApprovalKey: Hash + Eq + Clone + Debug + Serialize;

@@ -106,22 +137,11 @@ pub(crate) trait Approvable<Req> {
         matches!(policy, AskForApproval::Never)
     }

-    /// Decide whether an initial user approval should be requested before the
-    /// first attempt. Defaults to the orchestrator's behavior (pre‑refactor):
-    /// - Never, OnFailure: do not ask
-    /// - OnRequest: ask unless sandbox policy is DangerFullAccess
-    /// - UnlessTrusted: always ask
-    fn wants_initial_approval(
-        &self,
-        _req: &Req,
-        policy: AskForApproval,
-        sandbox_policy: &SandboxPolicy,
-    ) -> bool {
-        match policy {
-            AskForApproval::Never | AskForApproval::OnFailure => false,
-            AskForApproval::OnRequest => !matches!(sandbox_policy, SandboxPolicy::DangerFullAccess),
-            AskForApproval::UnlessTrusted => true,
-        }
+    /// Override the default approval requirement. Return `Some(_)` to specify
+    /// a custom requirement, or `None` to fall back to
+    /// policy-based default.
+    fn approval_requirement(&self, _req: &Req) -> Option<ApprovalRequirement> {
+        None
     }

     /// Decide we can request an approval for no-sandbox execution.
@@ -196,7 +216,7 @@ pub(crate) struct SandboxAttempt<'a> {
 impl<'a> SandboxAttempt<'a> {
     pub fn env_for(
         &self,
-        spec: &CommandSpec,
+        spec: CommandSpec,
     ) -> Result<crate::sandboxing::ExecEnv, SandboxTransformError> {
         self.manager.transform(
             spec,

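Taken together with the orchestrator change above, the resolution order is: per-call override first, policy default second. A condensed sketch of that fallback (names as in this diff; `runtime`, `req`, and the two policies are assumed bindings):

    let requirement = runtime
        .approval_requirement(&req)
        .unwrap_or_else(|| default_approval_requirement(approval_policy, &sandbox_policy));
    match requirement {
        // Run immediately; the decision is still recorded for telemetry.
        ApprovalRequirement::Skip => {}
        // Surface an approval prompt, optionally with a reason string.
        ApprovalRequirement::NeedsApproval { reason: _ } => {}
        // Refuse outright, e.g. when an execpolicy rule says "forbidden".
        ApprovalRequirement::Forbidden { reason: _ } => {}
    }
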
@@ -57,8 +57,6 @@ impl ToolsConfig {
             ConfigShellToolType::Disabled
         } else if features.enabled(Feature::UnifiedExec) {
             ConfigShellToolType::UnifiedExec
-        } else if features.enabled(Feature::ShellCommandTool) {
-            ConfigShellToolType::ShellCommand
         } else {
             model_family.shell_type.clone()
         };
@@ -1468,22 +1466,6 @@ mod tests {
         assert_contains_tool_names(&tools, &subset);
     }

-    #[test]
-    fn test_build_specs_shell_command_present() {
-        assert_model_tools(
-            "codex-mini-latest",
-            Features::with_defaults().enable(Feature::ShellCommandTool),
-            &[
-                "shell_command",
-                "list_mcp_resources",
-                "list_mcp_resource_templates",
-                "read_mcp_resource",
-                "update_plan",
-                "view_image",
-            ],
-        );
-    }
-
     #[test]
     #[ignore]
     fn test_parallel_support_flags() {

@@ -185,6 +185,7 @@ fn truncate_with_byte_estimate(s: &str, policy: TruncationPolicy) -> String {
     if s.is_empty() {
         return String::new();
     }

+    let total_chars = s.chars().count();
     let max_bytes = policy.byte_budget();

@@ -204,24 +205,55 @@ fn truncate_with_byte_estimate(s: &str, policy: TruncationPolicy) -> String {
     let total_bytes = s.len();

     let (left_budget, right_budget) = split_budget(max_bytes);
-    let prefix_end = pick_prefix_end(s, left_budget);
-    let mut suffix_start = pick_suffix_start(s, right_budget);
-    if suffix_start < prefix_end {
-        suffix_start = prefix_end;
-    }
-
-    let left_chars = s[..prefix_end].chars().count();
-    let right_chars = s[suffix_start..].chars().count();
-    let removed_chars = total_chars
-        .saturating_sub(left_chars)
-        .saturating_sub(right_chars);
+    let (removed_chars, left, right) = split_string(s, left_budget, right_budget);

     let marker = format_truncation_marker(
         policy,
         removed_units_for_source(policy, total_bytes.saturating_sub(max_bytes), removed_chars),
     );

-    assemble_truncated_output(&s[..prefix_end], &s[suffix_start..], &marker)
+    assemble_truncated_output(left, right, &marker)
 }

+fn split_string(s: &str, beginning_bytes: usize, end_bytes: usize) -> (usize, &str, &str) {
+    if s.is_empty() {
+        return (0, "", "");
+    }
+
+    let len = s.len();
+    let tail_start_target = len.saturating_sub(end_bytes);
+    let mut prefix_end = 0usize;
+    let mut suffix_start = len;
+    let mut removed_chars = 0usize;
+    let mut suffix_started = false;
+
+    for (idx, ch) in s.char_indices() {
+        let char_end = idx + ch.len_utf8();
+        if char_end <= beginning_bytes {
+            prefix_end = char_end;
+            continue;
+        }
+
+        if idx >= tail_start_target {
+            if !suffix_started {
+                suffix_start = idx;
+                suffix_started = true;
+            }
+            continue;
+        }
+
+        removed_chars = removed_chars.saturating_add(1);
+    }
+
+    if suffix_start < prefix_end {
+        suffix_start = prefix_end;
+    }
+
+    let before = &s[..prefix_end];
+    let after = &s[suffix_start..];
+
+    (removed_chars, before, after)
+}
+
 fn format_truncation_marker(policy: TruncationPolicy, removed_count: u64) -> String {
@@ -270,42 +302,54 @@ fn approx_tokens_from_byte_count(bytes: usize) -> u64 {
         / (APPROX_BYTES_PER_TOKEN as u64)
 }

-fn truncate_on_boundary(input: &str, max_len: usize) -> &str {
-    if input.len() <= max_len {
-        return input;
-    }
-    let mut end = max_len;
-    while end > 0 && !input.is_char_boundary(end) {
-        end -= 1;
-    }
-    &input[..end]
-}
-
-fn pick_prefix_end(s: &str, left_budget: usize) -> usize {
-    truncate_on_boundary(s, left_budget).len()
-}
-
-fn pick_suffix_start(s: &str, right_budget: usize) -> usize {
-    let start_tail = s.len().saturating_sub(right_budget);
-    let mut idx = start_tail.min(s.len());
-    while idx < s.len() && !s.is_char_boundary(idx) {
-        idx += 1;
-    }
-    idx
-}
-
 #[cfg(test)]
 mod tests {

     use super::TruncationPolicy;
     use super::approx_token_count;
     use super::formatted_truncate_text;
+    use super::split_string;
     use super::truncate_function_output_items_with_policy;
     use super::truncate_text;
     use super::truncate_with_token_budget;
     use codex_protocol::models::FunctionCallOutputContentItem;
     use pretty_assertions::assert_eq;

+    #[test]
+    fn split_string_works() {
+        assert_eq!(split_string("hello world", 5, 5), (1, "hello", "world"));
+        assert_eq!(split_string("abc", 0, 0), (3, "", ""));
+    }
+
+    #[test]
+    fn split_string_handles_empty_string() {
+        assert_eq!(split_string("", 4, 4), (0, "", ""));
+    }
+
+    #[test]
+    fn split_string_only_keeps_prefix_when_tail_budget_is_zero() {
+        assert_eq!(split_string("abcdef", 3, 0), (3, "abc", ""));
+    }
+
+    #[test]
+    fn split_string_only_keeps_suffix_when_prefix_budget_is_zero() {
+        assert_eq!(split_string("abcdef", 0, 3), (3, "", "def"));
+    }
+
+    #[test]
+    fn split_string_handles_overlapping_budgets_without_removal() {
+        assert_eq!(split_string("abcdef", 4, 4), (0, "abcd", "ef"));
+    }
+
+    #[test]
+    fn split_string_respects_utf8_boundaries() {
+        assert_eq!(split_string("😀abc😀", 5, 5), (1, "😀a", "c😀"));
+
+        assert_eq!(split_string("😀😀😀😀😀", 1, 1), (5, "", ""));
+        assert_eq!(split_string("😀😀😀😀😀", 7, 7), (3, "😀", "😀"));
+        assert_eq!(split_string("😀😀😀😀😀", 8, 8), (1, "😀😀", "😀😀"));
+    }
+
     #[test]
     fn truncate_bytes_less_than_placeholder_returns_placeholder() {
         let content = "example output";

|
||||
|
||||
use std::collections::VecDeque;
|
||||
use std::sync::Arc;
|
||||
|
||||
use tokio::sync::Mutex;
|
||||
use tokio::sync::Notify;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::oneshot::error::TryRecvError;
|
||||
use tokio::task::JoinHandle;
|
||||
use tokio::time::Duration;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
use crate::exec::ExecToolCallOutput;
|
||||
use crate::exec::SandboxType;
|
||||
@@ -67,13 +67,18 @@ impl OutputBufferState {
|
||||
}
|
||||
|
||||
pub(crate) type OutputBuffer = Arc<Mutex<OutputBufferState>>;
|
||||
pub(crate) type OutputHandles = (OutputBuffer, Arc<Notify>);
|
||||
pub(crate) struct OutputHandles {
|
||||
pub(crate) output_buffer: OutputBuffer,
|
||||
pub(crate) output_notify: Arc<Notify>,
|
||||
pub(crate) cancellation_token: CancellationToken,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct UnifiedExecSession {
|
||||
session: ExecCommandSession,
|
||||
output_buffer: OutputBuffer,
|
||||
output_notify: Arc<Notify>,
|
||||
cancellation_token: CancellationToken,
|
||||
output_task: JoinHandle<()>,
|
||||
sandbox_type: SandboxType,
|
||||
}
|
||||
@@ -86,9 +91,11 @@ impl UnifiedExecSession {
|
||||
) -> Self {
|
||||
let output_buffer = Arc::new(Mutex::new(OutputBufferState::default()));
|
||||
let output_notify = Arc::new(Notify::new());
|
||||
let cancellation_token = CancellationToken::new();
|
||||
let mut receiver = initial_output_rx;
|
||||
let buffer_clone = Arc::clone(&output_buffer);
|
||||
let notify_clone = Arc::clone(&output_notify);
|
||||
let cancellation_token_clone = cancellation_token.clone();
|
||||
let output_task = tokio::spawn(async move {
|
||||
loop {
|
||||
match receiver.recv().await {
|
||||
@@ -99,7 +106,10 @@ impl UnifiedExecSession {
|
||||
notify_clone.notify_waiters();
|
||||
}
|
||||
Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => continue,
|
||||
Err(tokio::sync::broadcast::error::RecvError::Closed) => break,
|
||||
Err(tokio::sync::broadcast::error::RecvError::Closed) => {
|
||||
cancellation_token_clone.cancel();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -108,6 +118,7 @@ impl UnifiedExecSession {
|
||||
session,
|
||||
output_buffer,
|
||||
output_notify,
|
||||
cancellation_token,
|
||||
output_task,
|
||||
sandbox_type,
|
||||
}
|
||||
@@ -118,10 +129,11 @@ impl UnifiedExecSession {
|
||||
}
|
||||
|
||||
pub(super) fn output_handles(&self) -> OutputHandles {
|
||||
(
|
||||
Arc::clone(&self.output_buffer),
|
||||
Arc::clone(&self.output_notify),
|
||||
)
|
||||
OutputHandles {
|
||||
output_buffer: Arc::clone(&self.output_buffer),
|
||||
output_notify: Arc::clone(&self.output_notify),
|
||||
cancellation_token: self.cancellation_token.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn has_exited(&self) -> bool {
|
||||
@@ -199,20 +211,34 @@ impl UnifiedExecSession {
|
||||
};
|
||||
|
||||
if exit_ready {
|
||||
managed.signal_exit();
|
||||
managed.check_for_sandbox_denial().await?;
|
||||
return Ok(managed);
|
||||
}
|
||||
|
||||
tokio::pin!(exit_rx);
|
||||
if tokio::time::timeout(Duration::from_millis(50), &mut exit_rx)
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
managed.signal_exit();
|
||||
managed.check_for_sandbox_denial().await?;
|
||||
return Ok(managed);
|
||||
}
|
||||
|
||||
tokio::spawn({
|
||||
let cancellation_token = managed.cancellation_token.clone();
|
||||
async move {
|
||||
let _ = exit_rx.await;
|
||||
cancellation_token.cancel();
|
||||
}
|
||||
});
|
||||
|
||||
Ok(managed)
|
||||
}
|
||||
|
||||
fn signal_exit(&self) {
|
||||
self.cancellation_token.cancel();
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for UnifiedExecSession {
|
||||
|
||||
@@ -5,16 +5,19 @@ use tokio::sync::Notify;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::time::Duration;
|
||||
use tokio::time::Instant;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
use crate::codex::Session;
|
||||
use crate::codex::TurnContext;
|
||||
use crate::exec::ExecToolCallOutput;
|
||||
use crate::exec::StreamOutput;
|
||||
use crate::exec_env::create_env;
|
||||
use crate::exec_policy::create_approval_requirement_for_command;
|
||||
use crate::protocol::BackgroundEventEvent;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::ExecCommandSource;
|
||||
use crate::sandboxing::ExecEnv;
|
||||
use crate::sandboxing::SandboxPermissions;
|
||||
use crate::tools::events::ToolEmitter;
|
||||
use crate::tools::events::ToolEventCtx;
|
||||
use crate::tools::events::ToolEventFailure;
|
||||
@@ -38,8 +41,20 @@ use super::clamp_yield_time;
|
||||
use super::generate_chunk_id;
|
||||
use super::resolve_max_tokens;
|
||||
use super::session::OutputBuffer;
|
||||
use super::session::OutputHandles;
|
||||
use super::session::UnifiedExecSession;
|
||||
|
||||
struct PreparedSessionHandles {
|
||||
writer_tx: mpsc::Sender<Vec<u8>>,
|
||||
output_buffer: OutputBuffer,
|
||||
output_notify: Arc<Notify>,
|
||||
cancellation_token: CancellationToken,
|
||||
session_ref: Arc<Session>,
|
||||
turn_ref: Arc<TurnContext>,
|
||||
command: Vec<String>,
|
||||
cwd: PathBuf,
|
||||
}
|
||||
|
||||
impl UnifiedExecSessionManager {
|
||||
pub(crate) async fn exec_command(
|
||||
&self,
|
||||
@@ -65,10 +80,19 @@ impl UnifiedExecSessionManager {
|
||||
let yield_time_ms = clamp_yield_time(request.yield_time_ms);
|
||||
|
||||
let start = Instant::now();
|
||||
let (output_buffer, output_notify) = session.output_handles();
|
||||
let OutputHandles {
|
||||
output_buffer,
|
||||
output_notify,
|
||||
cancellation_token,
|
||||
} = session.output_handles();
|
||||
let deadline = start + Duration::from_millis(yield_time_ms);
|
||||
let collected =
|
||||
Self::collect_output_until_deadline(&output_buffer, &output_notify, deadline).await;
|
||||
let collected = Self::collect_output_until_deadline(
|
||||
&output_buffer,
|
||||
&output_notify,
|
||||
&cancellation_token,
|
||||
deadline,
|
||||
)
|
||||
.await;
|
||||
let wall_time = Instant::now().saturating_duration_since(start);
|
||||
|
||||
let text = String::from_utf8_lossy(&collected).to_string();
|
||||
@@ -127,15 +151,16 @@ impl UnifiedExecSessionManager {
|
||||
) -> Result<UnifiedExecResponse, UnifiedExecError> {
|
||||
let session_id = request.session_id;
|
||||
|
||||
let (
|
||||
let PreparedSessionHandles {
|
||||
writer_tx,
|
||||
output_buffer,
|
||||
output_notify,
|
||||
cancellation_token,
|
||||
session_ref,
|
||||
turn_ref,
|
||||
session_command,
|
||||
session_cwd,
|
||||
) = self.prepare_session_handles(session_id).await?;
|
||||
command: session_command,
|
||||
cwd: session_cwd,
|
||||
} = self.prepare_session_handles(session_id).await?;
|
||||
|
||||
let interaction_emitter = ToolEmitter::unified_exec(
|
||||
&session_command,
|
||||
@@ -174,8 +199,13 @@ impl UnifiedExecSessionManager {
|
||||
let yield_time_ms = clamp_yield_time(request.yield_time_ms);
|
||||
let start = Instant::now();
|
||||
let deadline = start + Duration::from_millis(yield_time_ms);
|
||||
let collected =
|
||||
Self::collect_output_until_deadline(&output_buffer, &output_notify, deadline).await;
|
||||
let collected = Self::collect_output_until_deadline(
|
||||
&output_buffer,
|
||||
&output_notify,
|
||||
&cancellation_token,
|
||||
deadline,
|
||||
)
|
||||
.await;
|
||||
let wall_time = Instant::now().saturating_duration_since(start);
|
||||
|
||||
let text = String::from_utf8_lossy(&collected).to_string();
|
||||
@@ -263,44 +293,27 @@ impl UnifiedExecSessionManager {
|
||||
async fn prepare_session_handles(
|
||||
&self,
|
||||
session_id: i32,
|
||||
) -> Result<
|
||||
(
|
||||
mpsc::Sender<Vec<u8>>,
|
||||
OutputBuffer,
|
||||
Arc<Notify>,
|
||||
Arc<Session>,
|
||||
Arc<TurnContext>,
|
||||
Vec<String>,
|
||||
PathBuf,
|
||||
),
|
||||
UnifiedExecError,
|
||||
> {
|
||||
) -> Result<PreparedSessionHandles, UnifiedExecError> {
|
||||
let sessions = self.sessions.lock().await;
|
||||
let (output_buffer, output_notify, writer_tx, session, turn, command, cwd) =
|
||||
if let Some(entry) = sessions.get(&session_id) {
|
||||
let (buffer, notify) = entry.session.output_handles();
|
||||
(
|
||||
buffer,
|
||||
notify,
|
||||
entry.session.writer_sender(),
|
||||
Arc::clone(&entry.session_ref),
|
||||
Arc::clone(&entry.turn_ref),
|
||||
entry.command.clone(),
|
||||
entry.cwd.clone(),
|
||||
)
|
||||
} else {
|
||||
return Err(UnifiedExecError::UnknownSessionId { session_id });
|
||||
};
|
||||
|
||||
Ok((
|
||||
writer_tx,
|
||||
let entry = sessions
|
||||
.get(&session_id)
|
||||
.ok_or(UnifiedExecError::UnknownSessionId { session_id })?;
|
||||
let OutputHandles {
|
||||
output_buffer,
|
||||
output_notify,
|
||||
session,
|
||||
turn,
|
||||
command,
|
||||
cwd,
|
||||
))
|
||||
cancellation_token,
|
||||
} = entry.session.output_handles();
|
||||
|
||||
Ok(PreparedSessionHandles {
|
||||
writer_tx: entry.session.writer_sender(),
|
||||
output_buffer,
|
||||
output_notify,
|
||||
cancellation_token,
|
||||
session_ref: Arc::clone(&entry.session_ref),
|
||||
turn_ref: Arc::clone(&entry.turn_ref),
|
||||
command: entry.command.clone(),
|
||||
cwd: entry.cwd.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
async fn send_input(
|
||||
@@ -449,6 +462,13 @@ impl UnifiedExecSessionManager {
|
||||
create_env(&context.turn.shell_environment_policy),
|
||||
with_escalated_permissions,
|
||||
justification,
|
||||
create_approval_requirement_for_command(
|
||||
&context.turn.exec_policy,
|
||||
command,
|
||||
context.turn.approval_policy,
|
||||
&context.turn.sandbox_policy,
|
||||
SandboxPermissions::from(with_escalated_permissions.unwrap_or(false)),
|
||||
),
|
||||
);
|
||||
let tool_ctx = ToolCtx {
|
||||
session: context.session.as_ref(),
|
||||
@@ -471,9 +491,13 @@ impl UnifiedExecSessionManager {
|
||||
pub(super) async fn collect_output_until_deadline(
|
||||
output_buffer: &OutputBuffer,
|
||||
output_notify: &Arc<Notify>,
|
||||
cancellation_token: &CancellationToken,
|
||||
deadline: Instant,
|
||||
) -> Vec<u8> {
|
||||
const POST_EXIT_OUTPUT_GRACE: Duration = Duration::from_millis(25);
|
||||
|
||||
let mut collected: Vec<u8> = Vec::with_capacity(4096);
|
||||
let mut exit_signal_received = cancellation_token.is_cancelled();
|
||||
loop {
|
||||
let drained_chunks;
|
||||
let mut wait_for_output = None;
|
||||
@@ -486,15 +510,27 @@ impl UnifiedExecSessionManager {
|
||||
}
|
||||
|
||||
if drained_chunks.is_empty() {
|
||||
exit_signal_received |= cancellation_token.is_cancelled();
|
||||
let remaining = deadline.saturating_duration_since(Instant::now());
|
||||
if remaining == Duration::ZERO {
|
||||
break;
|
||||
}
|
||||
|
||||
let notified = wait_for_output.unwrap_or_else(|| output_notify.notified());
|
||||
if exit_signal_received {
|
||||
let grace = remaining.min(POST_EXIT_OUTPUT_GRACE);
|
||||
if tokio::time::timeout(grace, notified).await.is_err() {
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
tokio::pin!(notified);
|
||||
let exit_notified = cancellation_token.cancelled();
|
||||
tokio::pin!(exit_notified);
|
||||
tokio::select! {
|
||||
_ = &mut notified => {}
|
||||
_ = &mut exit_notified => exit_signal_received = true,
|
||||
_ = tokio::time::sleep(remaining) => break,
|
||||
}
|
||||
continue;
|
||||
@@ -504,6 +540,7 @@ impl UnifiedExecSessionManager {
|
||||
collected.extend_from_slice(&chunk);
|
||||
}
|
||||
|
||||
exit_signal_received |= cancellation_token.is_cancelled();
|
||||
if Instant::now() >= deadline {
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -18,3 +18,4 @@ tempfile = { workspace = true }
|
||||
tokio = { workspace = true, features = ["time"] }
|
||||
walkdir = { workspace = true }
|
||||
wiremock = { workspace = true }
|
||||
shlex = { workspace = true }
|
||||
|
||||
@@ -172,6 +172,15 @@ pub fn sandbox_network_env_var() -> &'static str {
|
||||
codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR
|
||||
}
|
||||
|
||||
pub fn format_with_current_shell(command: &str) -> Vec<String> {
|
||||
codex_core::shell::default_user_shell().derive_exec_args(command, true)
|
||||
}
|
||||
|
||||
pub fn format_with_current_shell_display(command: &str) -> String {
|
||||
let args = format_with_current_shell(command);
|
||||
shlex::try_join(args.iter().map(String::as_str)).expect("serialize current shell command")
|
||||
}
|
||||
|
||||
pub mod fs_wait {
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
|
||||
@@ -462,8 +462,11 @@ pub fn ev_apply_patch_function_call(call_id: &str, patch: &str) -> Value {
|
||||
|
||||
pub fn ev_shell_command_call(call_id: &str, command: &str) -> Value {
|
||||
let args = serde_json::json!({ "command": command });
|
||||
let arguments = serde_json::to_string(&args).expect("serialize shell arguments");
|
||||
ev_shell_command_call_with_args(call_id, &args)
|
||||
}
|
||||
|
||||
pub fn ev_shell_command_call_with_args(call_id: &str, args: &serde_json::Value) -> Value {
|
||||
let arguments = serde_json::to_string(args).expect("serialize shell command arguments");
|
||||
ev_function_call(call_id, "shell_command", &arguments)
|
||||
}
|
||||
|
||||
|
||||
@@ -17,15 +17,11 @@ use core_test_support::wait_for_event;
|
||||
use regex_lite::Regex;
|
||||
use serde_json::json;
|
||||
|
||||
/// Integration test: spawn a long‑running shell tool via a mocked Responses SSE
|
||||
/// Integration test: spawn a long‑running shell_command tool via a mocked Responses SSE
|
||||
/// function call, then interrupt the session and expect TurnAborted.
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn interrupt_long_running_tool_emits_turn_aborted() {
|
||||
let command = vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"sleep 60".to_string(),
|
||||
];
|
||||
let command = "sleep 60";
|
||||
|
||||
let args = json!({
|
||||
"command": command,
|
||||
@@ -33,14 +29,19 @@ async fn interrupt_long_running_tool_emits_turn_aborted() {
|
||||
})
|
||||
.to_string();
|
||||
let body = sse(vec![
|
||||
ev_function_call("call_sleep", "shell", &args),
|
||||
ev_function_call("call_sleep", "shell_command", &args),
|
||||
ev_completed("done"),
|
||||
]);
|
||||
|
||||
let server = start_mock_server().await;
|
||||
mount_sse_once(&server, body).await;
|
||||
|
||||
let codex = test_codex().build(&server).await.unwrap().codex;
|
||||
let codex = test_codex()
|
||||
.with_model("gpt-5.1")
|
||||
.build(&server)
|
||||
.await
|
||||
.unwrap()
|
||||
.codex;
|
||||
|
||||
// Kick off a turn that triggers the function call.
|
||||
codex
|
||||
@@ -67,11 +68,7 @@ async fn interrupt_long_running_tool_emits_turn_aborted() {
|
||||
/// responses server, and ensures the model receives the synthesized abort.
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn interrupt_tool_records_history_entries() {
|
||||
let command = vec![
|
||||
"bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
"sleep 60".to_string(),
|
||||
];
|
||||
let command = "sleep 60";
|
||||
let call_id = "call-history";
|
||||
|
||||
let args = json!({
|
||||
@@ -81,7 +78,7 @@ async fn interrupt_tool_records_history_entries() {
|
||||
.to_string();
|
||||
let first_body = sse(vec![
|
||||
ev_response_created("resp-history"),
|
||||
ev_function_call(call_id, "shell", &args),
|
||||
ev_function_call(call_id, "shell_command", &args),
|
||||
ev_completed("resp-history"),
|
||||
]);
|
||||
let follow_up_body = sse(vec![
|
||||
@@ -92,7 +89,11 @@ async fn interrupt_tool_records_history_entries() {
|
||||
let server = start_mock_server().await;
|
||||
let response_mock = mount_sse_sequence(&server, vec![first_body, follow_up_body]).await;
|
||||
|
||||
let fixture = test_codex().build(&server).await.unwrap();
|
||||
let fixture = test_codex()
|
||||
.with_model("gpt-5.1")
|
||||
.build(&server)
|
||||
.await
|
||||
.unwrap();
|
||||
let codex = Arc::clone(&fixture.codex);
|
||||
|
||||
codex
|
||||
|
||||
@@ -667,7 +667,7 @@ async fn apply_patch_cli_verification_failure_has_no_side_effects(
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn apply_patch_shell_heredoc_with_cd_updates_relative_workdir() -> Result<()> {
|
||||
async fn apply_patch_shell_command_heredoc_with_cd_updates_relative_workdir() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = apply_patch_harness_with(|config| {
|
||||
@@ -684,14 +684,11 @@ async fn apply_patch_shell_heredoc_with_cd_updates_relative_workdir() -> Result<
|
||||
|
||||
let script = "cd sub && apply_patch <<'EOF'\n*** Begin Patch\n*** Update File: in_sub.txt\n@@\n-before\n+after\n*** End Patch\nEOF\n";
|
||||
let call_id = "shell-heredoc-cd";
|
||||
let args = json!({
|
||||
"command": ["bash", "-lc", script],
|
||||
"timeout_ms": 5_000,
|
||||
});
|
||||
let args = json!({ "command": script, "timeout_ms": 5_000 });
|
||||
let bodies = vec![
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
|
||||
ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
sse(vec![
|
||||
@@ -706,14 +703,14 @@ async fn apply_patch_shell_heredoc_with_cd_updates_relative_workdir() -> Result<
|
||||
let out = harness.function_call_stdout(call_id).await;
|
||||
assert!(
|
||||
out.contains("Success."),
|
||||
"expected successful apply_patch invocation via shell: {out}"
|
||||
"expected successful apply_patch invocation via shell_command: {out}"
|
||||
);
|
||||
assert_eq!(fs::read_to_string(&target)?, "after\n");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn apply_patch_shell_failure_propagates_error_and_skips_diff() -> Result<()> {
|
||||
async fn apply_patch_shell_command_failure_propagates_error_and_skips_diff() -> Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let harness = apply_patch_harness_with(|config| {
|
||||
@@ -730,14 +727,11 @@ async fn apply_patch_shell_failure_propagates_error_and_skips_diff() -> Result<(
|
||||
|
||||
let script = "apply_patch <<'EOF'\n*** Begin Patch\n*** Update File: invalid.txt\n@@\n-nope\n+changed\n*** End Patch\nEOF\n";
|
||||
let call_id = "shell-apply-failure";
|
||||
let args = json!({
|
||||
"command": ["bash", "-lc", script],
|
||||
"timeout_ms": 5_000,
|
||||
});
|
||||
let args = json!({ "command": script, "timeout_ms": 5_000 });
|
||||
let bodies = vec![
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
|
||||
ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
sse(vec![
|
||||
@@ -780,10 +774,6 @@ async fn apply_patch_shell_failure_propagates_error_and_skips_diff() -> Result<(
|
||||
);
|
||||
|
||||
let out = harness.function_call_stdout(call_id).await;
|
||||
assert!(
|
||||
out.contains("apply_patch verification failed"),
|
||||
"expected verification failure message"
|
||||
);
|
||||
assert!(
|
||||
out.contains("Failed to find expected lines in"),
|
||||
"expected failure diagnostics: {out}"
|
||||
|
||||
@@ -71,7 +71,7 @@ enum ActionKind {
|
||||
response_body: &'static str,
|
||||
},
|
||||
RunCommand {
|
||||
command: &'static [&'static str],
|
||||
command: &'static str,
|
||||
},
|
||||
RunUnifiedExecCommand {
|
||||
command: &'static str,
|
||||
@@ -97,20 +97,12 @@ impl ActionKind {
|
||||
server: &MockServer,
|
||||
call_id: &str,
|
||||
with_escalated_permissions: bool,
|
||||
) -> Result<(Value, Option<Vec<String>>)> {
|
||||
) -> Result<(Value, Option<String>)> {
|
||||
match self {
|
||||
ActionKind::WriteFile { target, content } => {
|
||||
let (path, _) = target.resolve_for_patch(test);
|
||||
let _ = fs::remove_file(&path);
|
||||
let command = vec![
|
||||
"/bin/sh".to_string(),
|
||||
"-c".to_string(),
|
||||
format!(
|
||||
"printf {content:?} > {path:?} && cat {path:?}",
|
||||
content = content,
|
||||
path = path
|
||||
),
|
||||
];
|
||||
let command = format!("printf {content:?} > {path:?} && cat {path:?}");
|
||||
let event = shell_event(call_id, &command, 1_000, with_escalated_permissions)?;
|
||||
Ok((event, Some(command)))
|
||||
}
|
||||
@@ -127,21 +119,18 @@ impl ActionKind {
|
||||
.await;
|
||||
|
||||
let url = format!("{}{}", server.uri(), endpoint);
|
||||
let escaped_url = url.replace('\'', "\\'");
|
||||
let script = format!(
|
||||
"import sys\nimport urllib.request\nurl = {url:?}\ntry:\n data = urllib.request.urlopen(url, timeout=2).read().decode()\n print('OK:' + data.strip())\nexcept Exception as exc:\n print('ERR:' + exc.__class__.__name__)\n sys.exit(1)",
|
||||
"import sys\nimport urllib.request\nurl = '{escaped_url}'\ntry:\n data = urllib.request.urlopen(url, timeout=2).read().decode()\n print('OK:' + data.strip())\nexcept Exception as exc:\n print('ERR:' + exc.__class__.__name__)\n sys.exit(1)",
|
||||
);
|
||||
|
||||
let command = vec!["python3".to_string(), "-c".to_string(), script];
|
||||
let command = format!("python3 -c \"{script}\"");
|
||||
let event = shell_event(call_id, &command, 1_000, with_escalated_permissions)?;
|
||||
Ok((event, Some(command)))
|
||||
}
|
||||
ActionKind::RunCommand { command } => {
|
||||
let command: Vec<String> = command
|
||||
.iter()
|
||||
.map(std::string::ToString::to_string)
|
||||
.collect();
|
||||
let event = shell_event(call_id, &command, 1_000, with_escalated_permissions)?;
|
||||
Ok((event, Some(command)))
|
||||
let event = shell_event(call_id, command, 1_000, with_escalated_permissions)?;
|
||||
Ok((event, Some(command.to_string())))
|
||||
}
|
||||
ActionKind::RunUnifiedExecCommand {
|
||||
command,
|
||||
@@ -154,14 +143,7 @@ impl ActionKind {
|
||||
with_escalated_permissions,
|
||||
*justification,
|
||||
)?;
|
||||
Ok((
|
||||
event,
|
||||
Some(vec![
|
||||
"/bin/bash".to_string(),
|
||||
"-lc".to_string(),
|
||||
command.to_string(),
|
||||
]),
|
||||
))
|
||||
Ok((event, Some(command.to_string())))
|
||||
}
|
||||
ActionKind::ApplyPatchFunction { target, content } => {
|
||||
let (path, patch_path) = target.resolve_for_patch(test);
|
||||
@@ -185,19 +167,19 @@ fn build_add_file_patch(patch_path: &str, content: &str) -> String {
|
||||
format!("*** Begin Patch\n*** Add File: {patch_path}\n+{content}\n*** End Patch\n")
|
||||
}
|
||||
|
||||
fn shell_apply_patch_command(patch: &str) -> Vec<String> {
|
||||
fn shell_apply_patch_command(patch: &str) -> String {
|
||||
let mut script = String::from("apply_patch <<'PATCH'\n");
|
||||
script.push_str(patch);
|
||||
if !patch.ends_with('\n') {
|
||||
script.push('\n');
|
||||
}
|
||||
script.push_str("PATCH\n");
|
||||
vec!["bash".to_string(), "-lc".to_string(), script]
|
||||
script
|
||||
}
|
||||
|
||||
fn shell_event(
|
||||
call_id: &str,
|
||||
command: &[String],
|
||||
command: &str,
|
||||
timeout_ms: u64,
|
||||
with_escalated_permissions: bool,
|
||||
) -> Result<Value> {
|
||||
@@ -209,7 +191,7 @@ fn shell_event(
|
||||
args["with_escalated_permissions"] = json!(true);
|
||||
}
|
||||
let args_str = serde_json::to_string(&args)?;
|
||||
Ok(ev_function_call(call_id, "shell", &args_str))
|
||||
Ok(ev_function_call(call_id, "shell_command", &args_str))
|
||||
}
|
||||
|
||||
fn exec_command_event(
|
||||
@@ -296,7 +278,10 @@ impl Expectation {
|
||||
}
|
||||
Expectation::FileCreatedNoExitCode { target, content } => {
|
||||
let (path, _) = target.resolve_for_patch(test);
|
||||
assert_eq!(result.exit_code, None, "expected no exit code for {path:?}");
|
||||
assert!(
|
||||
result.exit_code.is_none() || result.exit_code == Some(0),
|
||||
"expected no exit code for {path:?}",
|
||||
);
|
||||
assert!(
|
||||
result.stdout.contains(content),
|
||||
"stdout missing {content:?}: {}",
|
||||
@@ -385,8 +370,8 @@ impl Expectation {
|
||||
);
|
||||
}
|
||||
Expectation::NetworkSuccessNoExitCode { body_contains } => {
|
||||
assert_eq!(
|
||||
result.exit_code, None,
|
||||
assert!(
|
||||
result.exit_code.is_none() || result.exit_code == Some(0),
|
||||
"expected no exit code for successful network call: {}",
|
||||
result.stdout
|
||||
);
|
||||
@@ -433,8 +418,8 @@ impl Expectation {
|
||||
);
|
||||
}
|
||||
Expectation::CommandSuccessNoExitCode { stdout_contains } => {
|
||||
assert_eq!(
|
||||
result.exit_code, None,
|
||||
assert!(
|
||||
result.exit_code.is_none() || result.exit_code == Some(0),
|
||||
"expected no exit code for trusted command: {}",
|
||||
result.stdout
|
||||
);
|
||||
@@ -531,10 +516,18 @@ fn parse_result(item: &Value) -> CommandResult {
|
||||
CommandResult { exit_code, stdout }
|
||||
}
|
||||
Err(_) => {
|
||||
let structured = Regex::new(r"(?s)^Exit code:\s*(-?\d+).*?Output:\n(.*)$").unwrap();
|
||||
let regex =
|
||||
Regex::new(r"(?s)^.*?Process exited with code (\d+)\n.*?Output:\n(.*)$").unwrap();
|
||||
// parse freeform output
|
||||
if let Some(captures) = regex.captures(output_str) {
|
||||
if let Some(captures) = structured.captures(output_str) {
|
||||
let exit_code = captures.get(1).unwrap().as_str().parse::<i64>().unwrap();
|
||||
let output = captures.get(2).unwrap().as_str();
|
||||
CommandResult {
|
||||
exit_code: Some(exit_code),
|
||||
stdout: output.to_string(),
|
||||
}
|
||||
} else if let Some(captures) = regex.captures(output_str) {
|
||||
let exit_code = captures.get(1).unwrap().as_str().parse::<i64>().unwrap();
|
||||
let output = captures.get(2).unwrap().as_str();
|
||||
CommandResult {
|
||||
@@ -553,7 +546,7 @@ fn parse_result(item: &Value) -> CommandResult {
|
||||
|
||||
async fn expect_exec_approval(
|
||||
test: &TestCodex,
|
||||
expected_command: &[String],
|
||||
expected_command: &str,
|
||||
) -> ExecApprovalRequestEvent {
|
||||
let event = wait_for_event(&test.codex, |event| {
|
||||
matches!(
|
||||
@@ -565,7 +558,12 @@ async fn expect_exec_approval(
|
||||
|
||||
match event {
|
||||
EventMsg::ExecApprovalRequest(approval) => {
|
||||
assert_eq!(approval.command, expected_command);
|
||||
let last_arg = approval
|
||||
.command
|
||||
.last()
|
||||
.map(std::string::String::as_str)
|
||||
.unwrap_or_default();
|
||||
assert_eq!(last_arg, expected_command);
|
||||
approval
|
||||
}
|
||||
EventMsg::TaskComplete(_) => panic!("expected approval request before completion"),
|
||||
@@ -660,7 +658,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
features: vec![],
|
||||
model_override: Some("gpt-5.1"),
|
||||
outcome: Outcome::Auto,
|
||||
expectation: Expectation::FileCreatedNoExitCode {
|
||||
expectation: Expectation::FileCreated {
|
||||
target: TargetPath::OutsideWorkspace("dfa_on_request_5_1.txt"),
|
||||
content: "danger-on-request",
|
||||
},
|
||||
@@ -702,7 +700,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
approval_policy: UnlessTrusted,
|
||||
sandbox_policy: SandboxPolicy::DangerFullAccess,
|
||||
action: ActionKind::RunCommand {
|
||||
command: &["echo", "trusted-unless"],
|
||||
command: "echo trusted-unless",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
features: vec![],
|
||||
@@ -717,7 +715,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
approval_policy: UnlessTrusted,
|
||||
sandbox_policy: SandboxPolicy::DangerFullAccess,
|
||||
action: ActionKind::RunCommand {
|
||||
command: &["echo", "trusted-unless"],
|
||||
command: "echo trusted-unless",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
features: vec![],
|
||||
@@ -880,7 +878,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
approval_policy: OnRequest,
|
||||
sandbox_policy: SandboxPolicy::ReadOnly,
|
||||
action: ActionKind::RunCommand {
|
||||
command: &["echo", "trusted-read-only"],
|
||||
command: "echo trusted-read-only",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
features: vec![],
|
||||
@@ -895,7 +893,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
approval_policy: OnRequest,
|
||||
sandbox_policy: SandboxPolicy::ReadOnly,
|
||||
action: ActionKind::RunCommand {
|
||||
command: &["echo", "trusted-read-only"],
|
||||
command: "echo trusted-read-only",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
features: vec![],
|
||||
@@ -1020,7 +1018,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
},
|
||||
},
|
||||
ScenarioSpec {
|
||||
name: "apply_patch_shell_requires_patch_approval",
|
||||
name: "apply_patch_shell_command_requires_patch_approval",
|
||||
approval_policy: UnlessTrusted,
|
||||
sandbox_policy: workspace_write(false),
|
||||
action: ActionKind::ApplyPatchShell {
|
||||
@@ -1114,7 +1112,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
},
|
||||
},
|
||||
ScenarioSpec {
|
||||
name: "apply_patch_shell_outside_requires_patch_approval",
|
||||
name: "apply_patch_shell_command_outside_requires_patch_approval",
|
||||
approval_policy: OnRequest,
|
||||
sandbox_policy: workspace_write(false),
|
||||
action: ActionKind::ApplyPatchShell {
|
||||
@@ -1229,7 +1227,10 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
message_contains: if cfg!(target_os = "linux") {
|
||||
&["Permission denied"]
|
||||
} else {
|
||||
&["Permission denied|Operation not permitted|Read-only file system"]
|
||||
&[
|
||||
"Permission denied|Operation not permitted|operation not permitted|\
|
||||
Read-only file system",
|
||||
]
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1238,7 +1239,7 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
approval_policy: Never,
|
||||
sandbox_policy: SandboxPolicy::ReadOnly,
|
||||
action: ActionKind::RunCommand {
|
||||
command: &["echo", "trusted-never"],
|
||||
command: "echo trusted-never",
|
||||
},
|
||||
with_escalated_permissions: false,
|
||||
features: vec![],
|
||||
@@ -1373,7 +1374,10 @@ fn scenarios() -> Vec<ScenarioSpec> {
|
||||
message_contains: if cfg!(target_os = "linux") {
|
||||
&["Permission denied"]
|
||||
} else {
|
||||
&["Permission denied|Operation not permitted|Read-only file system"]
|
||||
&[
|
||||
"Permission denied|Operation not permitted|operation not permitted|\
|
||||
Read-only file system",
|
||||
]
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1509,7 +1513,7 @@ async fn run_scenario(scenario: &ScenarioSpec) -> Result<()> {
|
||||
expected_reason,
|
||||
} => {
|
||||
let command = expected_command
|
||||
.as_ref()
|
||||
.as_deref()
|
||||
.expect("exec approval requires shell command");
|
||||
let approval = expect_exec_approval(&test, command).await;
|
||||
if let Some(expected_reason) = expected_reason {
|
||||
|
||||
@@ -499,9 +499,20 @@ async fn integration_git_info_unit_test() {
|
||||
"Git info should contain repository_url"
|
||||
);
|
||||
let repo_url = git_info.repository_url.as_ref().unwrap();
|
||||
// Some hosts rewrite remotes (e.g., github.com → git@github.com), so assert against
|
||||
// the actual remote reported by git instead of a static URL.
|
||||
let expected_remote_url = std::process::Command::new("git")
|
||||
.args(["remote", "get-url", "origin"])
|
||||
.current_dir(&git_repo)
|
||||
.output()
|
||||
.unwrap();
|
||||
let expected_remote_url = String::from_utf8(expected_remote_url.stdout)
|
||||
.unwrap()
|
||||
.trim()
|
||||
.to_string();
|
||||
assert_eq!(
|
||||
repo_url, "https://github.com/example/integration-test.git",
|
||||
"Repository URL should match what we configured"
|
||||
repo_url, &expected_remote_url,
|
||||
"Repository URL should match git remote get-url output"
|
||||
);
|
||||
|
||||
println!("✅ Git info collection test passed!");
|
||||
|
||||
@@ -992,7 +992,7 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {
|
||||
id: Some("web-search-id".into()),
|
||||
status: Some("completed".into()),
|
||||
action: WebSearchAction::Search {
|
||||
query: "weather".into(),
|
||||
query: Some("weather".into()),
|
||||
},
|
||||
});
|
||||
prompt.input.push(ResponseItem::FunctionCall {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use codex_core::model_family::find_family_for_model;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Op;
|
||||
@@ -25,17 +26,17 @@ use pretty_assertions::assert_eq;
|
||||
async fn codex_delegate_forwards_exec_approval_and_proceeds_on_approval() {
|
||||
skip_if_no_network!();
|
||||
|
||||
// Sub-agent turn 1: emit a shell function_call requiring approval, then complete.
|
||||
// Sub-agent turn 1: emit a shell_command function_call requiring approval, then complete.
|
||||
let call_id = "call-exec-1";
|
||||
let args = serde_json::json!({
|
||||
"command": ["bash", "-lc", "rm -rf delegated"],
|
||||
"command": "rm -rf delegated",
|
||||
"timeout_ms": 1000,
|
||||
"with_escalated_permissions": true,
|
||||
})
|
||||
.to_string();
|
||||
let sse1 = sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_function_call(call_id, "shell", &args),
|
||||
ev_function_call(call_id, "shell_command", &args),
|
||||
ev_completed("resp-1"),
|
||||
]);
|
||||
|
||||
@@ -61,6 +62,8 @@ async fn codex_delegate_forwards_exec_approval_and_proceeds_on_approval() {
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config.approval_policy = AskForApproval::OnRequest;
|
||||
config.sandbox_policy = SandboxPolicy::ReadOnly;
|
||||
config.model = "gpt-5.1".to_string();
|
||||
config.model_family = find_family_for_model("gpt-5.1").expect("gpt-5.1 is a valid model");
|
||||
});
|
||||
let test = builder.build(&server).await.expect("build test codex");
|
||||
|
||||
@@ -138,6 +141,8 @@ async fn codex_delegate_forwards_patch_approval_and_proceeds_on_decision() {
|
||||
// Use a restricted sandbox so patch approval is required
|
||||
config.sandbox_policy = SandboxPolicy::ReadOnly;
|
||||
config.include_apply_patch_tool = true;
|
||||
config.model = "gpt-5.1".to_string();
|
||||
config.model_family = find_family_for_model("gpt-5.1").expect("gpt-5.1 is a valid model");
|
||||
});
|
||||
let test = builder.build(&server).await.expect("build test codex");
|
||||
|
||||
|
||||
@@ -384,7 +384,7 @@ async fn manual_compact_uses_custom_prompt() {
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn manual_compact_emits_estimated_token_usage_event() {
|
||||
async fn manual_compact_emits_api_and_local_token_usage_events() {
|
||||
skip_if_no_network!();
|
||||
|
||||
let server = start_mock_server().await;
|
||||
|
||||
@@ -32,7 +32,7 @@ async fn run_test_cmd(tmp: TempDir, cmd: Vec<&str>) -> Result<ExecToolCallOutput
|
||||
let params = ExecParams {
|
||||
command: cmd.iter().map(ToString::to_string).collect(),
|
||||
cwd: tmp.path().to_path_buf(),
|
||||
timeout_ms: Some(1000),
|
||||
expiration: 1000.into(),
|
||||
env: HashMap::new(),
|
||||
with_escalated_permissions: None,
|
||||
justification: None,
|
||||
|
||||
codex-rs/core/tests/suite/exec_policy.rs — new file, 110 lines
@@ -0,0 +1,110 @@
+#![allow(clippy::unwrap_used, clippy::expect_used)]
+
+use anyhow::Result;
+use codex_core::model_family::find_family_for_model;
+use codex_core::protocol::AskForApproval;
+use codex_core::protocol::EventMsg;
+use codex_core::protocol::Op;
+use codex_core::protocol::SandboxPolicy;
+use codex_protocol::config_types::ReasoningSummary;
+use codex_protocol::user_input::UserInput;
+use core_test_support::responses::ev_assistant_message;
+use core_test_support::responses::ev_completed;
+use core_test_support::responses::ev_function_call;
+use core_test_support::responses::ev_response_created;
+use core_test_support::responses::mount_sse_once;
+use core_test_support::responses::sse;
+use core_test_support::responses::start_mock_server;
+use core_test_support::test_codex::test_codex;
+use core_test_support::wait_for_event;
+use serde_json::json;
+use std::fs;
+
+#[tokio::test]
+async fn execpolicy_blocks_shell_invocation() -> Result<()> {
+    // TODO execpolicy doesn't parse powershell commands yet
+    if cfg!(windows) {
+        return Ok(());
+    }
+
+    let mut builder = test_codex().with_config(|config| {
+        let policy_path = config.codex_home.join("policy").join("policy.codexpolicy");
+        fs::create_dir_all(
+            policy_path
+                .parent()
+                .expect("policy directory must have a parent"),
+        )
+        .expect("create policy directory");
+        fs::write(
+            &policy_path,
+            r#"prefix_rule(pattern=["echo"], decision="forbidden")"#,
+        )
+        .expect("write policy file");
+        config.model = "gpt-5.1".to_string();
+        config.model_family =
+            find_family_for_model("gpt-5.1").expect("gpt-5.1 should have a model family");
+    });
+    let server = start_mock_server().await;
+    let test = builder.build(&server).await?;
+
+    let call_id = "shell-forbidden";
+    let args = json!({
+        "command": "echo blocked",
+        "timeout_ms": 1_000,
+    });
+
+    mount_sse_once(
+        &server,
+        sse(vec![
+            ev_response_created("resp-1"),
+            ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
+            ev_completed("resp-1"),
+        ]),
+    )
+    .await;
+    mount_sse_once(
+        &server,
+        sse(vec![
+            ev_assistant_message("msg-1", "done"),
+            ev_completed("resp-2"),
+        ]),
+    )
+    .await;
+
+    let session_model = test.session_configured.model.clone();
+    test.codex
+        .submit(Op::UserTurn {
+            items: vec![UserInput::Text {
+                text: "run shell command".into(),
+            }],
+            final_output_json_schema: None,
+            cwd: test.cwd_path().to_path_buf(),
+            approval_policy: AskForApproval::Never,
+            sandbox_policy: SandboxPolicy::DangerFullAccess,
+            model: session_model,
+            effort: None,
+            summary: ReasoningSummary::Auto,
+        })
+        .await?;
+
+    let EventMsg::ExecCommandEnd(end) = wait_for_event(&test.codex, |event| {
+        matches!(event, EventMsg::ExecCommandEnd(_))
+    })
+    .await
+    else {
+        unreachable!()
+    };
+    wait_for_event(&test.codex, |event| {
+        matches!(event, EventMsg::TaskComplete(_))
+    })
+    .await;
+
+    assert!(
+        end.aggregated_output
+            .contains("execpolicy forbids this command"),
+        "unexpected output: {}",
+        end.aggregated_output
+    );
+
+    Ok(())
+}
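For reference, the policy wiring in this test boils down to writing one DSL line into `$CODEX_HOME/policy/policy.codexpolicy`. A standalone Rust sketch of the same setup — only the `["echo"]`/`"forbidden"` rule is evidenced by this diff; the second rule is a hypothetical extrapolation of the same shape:

    use std::fs;
    use std::path::Path;

    fn write_policy(codex_home: &Path) -> std::io::Result<()> {
        let policy_dir = codex_home.join("policy");
        fs::create_dir_all(&policy_dir)?;
        // Only the "forbidden" decision for ["echo"] appears in this diff;
        // the ["git", "push"] rule below is an assumed variant.
        fs::write(
            policy_dir.join("policy.codexpolicy"),
            concat!(
                "prefix_rule(pattern=[\"echo\"], decision=\"forbidden\")\n",
                "prefix_rule(pattern=[\"git\", \"push\"], decision=\"forbidden\")\n",
            ),
        )
    }
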
@@ -28,6 +28,7 @@ mod compact_remote;
 mod compact_resume_fork;
 mod deprecation_notice;
 mod exec;
+mod exec_policy;
 mod fork_conversation;
 mod grep_files;
 mod items;
@@ -48,6 +49,7 @@ mod seatbelt;
 mod shell_serialization;
 mod stream_error_allows_next_turn;
 mod stream_no_completed;
 mod text_encoding_fix;
+mod tool_harness;
 mod tool_parallelism;
 mod tools;

@@ -1,21 +1,10 @@
 #![allow(clippy::unwrap_used)]

-use codex_core::CodexAuth;
-use codex_core::ConversationManager;
-use codex_core::ModelProviderInfo;
-use codex_core::built_in_model_providers;
-use codex_core::features::Feature;
-use codex_core::model_family::find_family_for_model;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::Op;
 use codex_protocol::user_input::UserInput;
-use core_test_support::load_default_config_for_test;
 use core_test_support::load_sse_fixture_with_id;
 use core_test_support::responses;
 use core_test_support::responses::start_mock_server;
 use core_test_support::skip_if_no_network;
 use core_test_support::wait_for_event;
-use tempfile::TempDir;
-use wiremock::MockServer;
 use core_test_support::test_codex::test_codex;

 fn sse_completed(id: &str) -> String {
     load_sse_fixture_with_id("tests/fixtures/completed_template.json", id)
@@ -39,46 +28,17 @@ fn tool_identifiers(body: &serde_json::Value) -> Vec<String> {

 #[allow(clippy::expect_used)]
 async fn collect_tool_identifiers_for_model(model: &str) -> Vec<String> {
-    let server = MockServer::start().await;
-
+    let server = start_mock_server().await;
     let sse = sse_completed(model);
     let resp_mock = responses::mount_sse_once(&server, sse).await;

-    let model_provider = ModelProviderInfo {
-        base_url: Some(format!("{}/v1", server.uri())),
-        ..built_in_model_providers()["openai"].clone()
-    };
-
-    let cwd = TempDir::new().unwrap();
-    let codex_home = TempDir::new().unwrap();
-    let mut config = load_default_config_for_test(&codex_home);
-    config.cwd = cwd.path().to_path_buf();
-    config.model_provider = model_provider;
-    config.model = model.to_string();
-    config.model_family =
-        find_family_for_model(model).unwrap_or_else(|| panic!("unknown model family for {model}"));
-    config.features.disable(Feature::ApplyPatchFreeform);
-    config.features.disable(Feature::ViewImageTool);
-    config.features.disable(Feature::WebSearchRequest);
-    config.features.disable(Feature::UnifiedExec);
-
-    let conversation_manager =
-        ConversationManager::with_auth(CodexAuth::from_api_key("Test API Key"));
-    let codex = conversation_manager
-        .new_conversation(config)
-        .await
-        .expect("create new conversation")
-        .conversation;
+    let mut builder = test_codex().with_model(model);
+    let test = builder
+        .build(&server)
+        .await
+        .expect("create test Codex conversation");

-    codex
-        .submit(Op::UserInput {
-            items: vec![UserInput::Text {
-                text: "hello tools".into(),
-            }],
-        })
-        .await
-        .unwrap();
-    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+    test.submit_turn("hello tools").await.expect("submit turn");

     let body = resp_mock.single_request().body_json();
     tool_identifiers(&body)
@@ -97,7 +57,8 @@ async fn model_selects_expected_tools() {
            "list_mcp_resources".to_string(),
            "list_mcp_resource_templates".to_string(),
            "read_mcp_resource".to_string(),
            "update_plan".to_string()
            "update_plan".to_string(),
            "view_image".to_string()
        ],
        "codex-mini-latest should expose the local shell tool",
    );
@@ -111,7 +72,8 @@ async fn model_selects_expected_tools() {
            "list_mcp_resource_templates".to_string(),
            "read_mcp_resource".to_string(),
            "update_plan".to_string(),
            "apply_patch".to_string()
            "apply_patch".to_string(),
            "view_image".to_string()
        ],
        "gpt-5-codex should expose the apply_patch tool",
    );
@@ -125,7 +87,8 @@ async fn model_selects_expected_tools() {
            "list_mcp_resource_templates".to_string(),
            "read_mcp_resource".to_string(),
            "update_plan".to_string(),
            "apply_patch".to_string()
            "apply_patch".to_string(),
            "view_image".to_string()
        ],
        "gpt-5.1-codex should expose the apply_patch tool",
    );
@@ -139,6 +102,7 @@ async fn model_selects_expected_tools() {
            "list_mcp_resource_templates".to_string(),
            "read_mcp_resource".to_string(),
            "update_plan".to_string(),
            "view_image".to_string()
        ],
        "gpt-5 should expose the apply_patch tool",
    );
@@ -152,7 +116,8 @@ async fn model_selects_expected_tools() {
            "list_mcp_resource_templates".to_string(),
            "read_mcp_resource".to_string(),
            "update_plan".to_string(),
            "apply_patch".to_string()
            "apply_patch".to_string(),
            "view_image".to_string()
        ],
        "gpt-5.1 should expose the apply_patch tool",
    );

@@ -30,18 +30,15 @@ fn text_user_input(text: String) -> serde_json::Value {
}

fn default_env_context_str(cwd: &str, shell: &Shell) -> String {
    let shell_name = shell.name();
    format!(
        r#"<environment_context>
<cwd>{}</cwd>
<cwd>{cwd}</cwd>
<approval_policy>on-request</approval_policy>
<sandbox_mode>read-only</sandbox_mode>
<network_access>restricted</network_access>
{}</environment_context>"#,
        cwd,
        match shell.name() {
            Some(name) => format!(" <shell>{name}</shell>\n"),
            None => String::new(),
        }
<shell>{shell_name}</shell>
</environment_context>"#
    )
}

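For illustration (the cwd and shell values here are hypothetical, and the exact whitespace inside the raw string is not visible in this diff), the reworked helper renders roughly:

<environment_context>
<cwd>/tmp/work</cwd>
<approval_policy>on-request</approval_policy>
<sandbox_mode>read-only</sandbox_mode>
<network_access>restricted</network_access>
<shell>zsh</shell>
</environment_context>

The practical change is that the <shell> element is now always emitted via shell.name(), so the match over the optional shell name disappears.
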
@@ -227,7 +224,7 @@ async fn prefixes_context_and_instructions_once_and_consistently_across_requests
        .await?;
    wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    let shell = default_user_shell().await;
    let shell = default_user_shell();
    let cwd_str = config.cwd.to_string_lossy();
    let expected_env_text = default_env_context_str(&cwd_str, &shell);
    let expected_ui_text = format!(
@@ -345,6 +342,7 @@ async fn overrides_turn_context_but_keeps_cached_prefix_and_key_constant() -> an
    // After overriding the turn context, the environment context should be emitted again
    // reflecting the new approval policy and sandbox settings. Omit cwd because it did
    // not change.
    let shell = default_user_shell();
    let expected_env_text_2 = format!(
        r#"<environment_context>
<approval_policy>never</approval_policy>
@@ -353,8 +351,10 @@ async fn overrides_turn_context_but_keeps_cached_prefix_and_key_constant() -> an
<writable_roots>
<root>{}</root>
</writable_roots>
<shell>{}</shell>
</environment_context>"#,
        writable.path().to_string_lossy(),
        writable.path().display(),
        shell.name()
    );
    let expected_env_msg_2 = serde_json::json!({
        "type": "message",
@@ -522,6 +522,8 @@ async fn per_turn_overrides_keep_cached_prefix_and_key_constant() -> anyhow::Res
        "role": "user",
        "content": [ { "type": "input_text", "text": "hello 2" } ]
    });
    let shell = default_user_shell();

    let expected_env_text_2 = format!(
        r#"<environment_context>
<cwd>{}</cwd>
@@ -531,9 +533,11 @@ async fn per_turn_overrides_keep_cached_prefix_and_key_constant() -> anyhow::Res
<writable_roots>
<root>{}</root>
</writable_roots>
<shell>{}</shell>
</environment_context>"#,
        new_cwd.path().to_string_lossy(),
        writable.path().to_string_lossy(),
        new_cwd.path().display(),
        writable.path().display(),
        shell.name(),
    );
    let expected_env_msg_2 = serde_json::json!({
        "type": "message",
@@ -610,7 +614,7 @@ async fn send_user_turn_with_no_changes_does_not_send_environment_context() -> a
    let body1 = req1.single_request().body_json();
    let body2 = req2.single_request().body_json();

    let shell = default_user_shell().await;
    let shell = default_user_shell();
    let default_cwd_lossy = default_cwd.to_string_lossy();
    let expected_ui_text = format!(
        "# AGENTS.md instructions for {default_cwd_lossy}\n\n<INSTRUCTIONS>\nbe consistent and helpful\n</INSTRUCTIONS>"
@@ -697,7 +701,7 @@ async fn send_user_turn_with_changes_sends_environment_context() -> anyhow::Resu
    let body1 = req1.single_request().body_json();
    let body2 = req2.single_request().body_json();

    let shell = default_user_shell().await;
    let shell = default_user_shell();
    let expected_ui_text = format!(
        "# AGENTS.md instructions for {}\n\n<INSTRUCTIONS>\nbe consistent and helpful\n</INSTRUCTIONS>",
        default_cwd.to_string_lossy()
@@ -717,14 +721,15 @@ async fn send_user_turn_with_changes_sends_environment_context() -> anyhow::Resu
    ]);
    assert_eq!(body1["input"], expected_input_1);

    let expected_env_msg_2 = text_user_input(
    let shell_name = shell.name();
    let expected_env_msg_2 = text_user_input(format!(
        r#"<environment_context>
<approval_policy>never</approval_policy>
<sandbox_mode>danger-full-access</sandbox_mode>
<network_access>enabled</network_access>
<shell>{shell_name}</shell>
</environment_context>"#
        .to_string(),
    );
    ));
    let expected_user_message_2 = text_user_input("hello 2".to_string());
    let expected_input_2 = serde_json::Value::Array(vec![
        expected_ui_msg,

@@ -2,6 +2,7 @@
#![allow(clippy::expect_used)]

use anyhow::Result;
use codex_core::config::Config;
use codex_core::features::Feature;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::SandboxPolicy;
@@ -40,6 +41,20 @@ const FIXTURE_JSON: &str = r#"{
}
"#;

fn configure_shell_command_model(output_type: ShellModelOutput, config: &mut Config) {
    if !matches!(output_type, ShellModelOutput::ShellCommand) {
        return;
    }

    if let Some(shell_command_family) = find_family_for_model("test-gpt-5-codex") {
        if config.model_family.shell_type == shell_command_family.shell_type {
            return;
        }
        config.model = shell_command_family.slug.clone();
        config.model_family = shell_command_family;
    }
}

fn shell_responses(
    call_id: &str,
    command: Vec<&str>,
@@ -112,10 +127,7 @@ async fn shell_output_stays_json_without_freeform_apply_patch(
        config.features.disable(Feature::ApplyPatchFreeform);
        config.model = "gpt-5".to_string();
        config.model_family = find_family_for_model("gpt-5").expect("gpt-5 is a model family");
        if matches!(output_type, ShellModelOutput::ShellCommand) {
            config.features.enable(Feature::ShellCommandTool);
        }
        let _ = output_type;
        configure_shell_command_model(output_type, config);
    });
    let test = builder.build(&server).await?;

@@ -170,10 +182,7 @@ async fn shell_output_is_structured_with_freeform_apply_patch(
    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(move |config| {
        config.features.enable(Feature::ApplyPatchFreeform);
        if matches!(output_type, ShellModelOutput::ShellCommand) {
            config.features.enable(Feature::ShellCommandTool);
        }
        let _ = output_type;
        configure_shell_command_model(output_type, config);
    });
    let test = builder.build(&server).await?;

@@ -223,10 +232,7 @@ async fn shell_output_preserves_fixture_json_without_serialization(
        config.features.disable(Feature::ApplyPatchFreeform);
        config.model = "gpt-5".to_string();
        config.model_family = find_family_for_model("gpt-5").expect("gpt-5 is a model family");
        if matches!(output_type, ShellModelOutput::ShellCommand) {
            config.features.enable(Feature::ShellCommandTool);
        }
        let _ = output_type;
        configure_shell_command_model(output_type, config);
    });
    let test = builder.build(&server).await?;

@@ -293,10 +299,7 @@ async fn shell_output_structures_fixture_with_serialization(
    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(move |config| {
        config.features.enable(Feature::ApplyPatchFreeform);
        if matches!(output_type, ShellModelOutput::ShellCommand) {
            config.features.enable(Feature::ShellCommandTool);
        }
        let _ = output_type;
        configure_shell_command_model(output_type, config);
    });
    let test = builder.build(&server).await?;

@@ -358,15 +361,12 @@ async fn shell_output_for_freeform_tool_records_duration(
    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(move |config| {
        config.include_apply_patch_tool = true;
        if matches!(output_type, ShellModelOutput::ShellCommand) {
            config.features.enable(Feature::ShellCommandTool);
        }
        let _ = output_type;
        configure_shell_command_model(output_type, config);
    });
    let test = builder.build(&server).await?;

    let call_id = "shell-structured";
    let responses = shell_responses(call_id, vec!["/bin/bash", "-c", "sleep 1"], output_type)?;
    let responses = shell_responses(call_id, vec!["/bin/sh", "-c", "sleep 1"], output_type)?;
    let mock = mount_sse_sequence(&server, responses).await;

    test.submit_turn_with_policy(
@@ -417,10 +417,7 @@ async fn shell_output_reserializes_truncated_content(output_type: ShellModelOutp
        config.model_family =
            find_family_for_model("gpt-5.1-codex").expect("gpt-5.1-codex is a model family");
        config.tool_output_token_limit = Some(200);
        if matches!(output_type, ShellModelOutput::ShellCommand) {
            config.features.enable(Feature::ShellCommandTool);
        }
        let _ = output_type;
        configure_shell_command_model(output_type, config);
    });
    let test = builder.build(&server).await?;

@@ -722,9 +719,7 @@ async fn shell_output_is_structured_for_nonzero_exit(output_type: ShellModelOutp
        config.model_family =
            find_family_for_model("gpt-5.1-codex").expect("gpt-5.1-codex is a model family");
        config.include_apply_patch_tool = true;
        if matches!(output_type, ShellModelOutput::ShellCommand) {
            config.features.enable(Feature::ShellCommandTool);
        }
        configure_shell_command_model(output_type, config);
    });
    let test = builder.build(&server).await?;

@@ -760,7 +755,7 @@ async fn shell_command_output_is_freeform() -> Result<()> {

    let server = start_mock_server().await;
    let mut builder = test_codex().with_config(move |config| {
        config.features.enable(Feature::ShellCommandTool);
        configure_shell_command_model(ShellModelOutput::ShellCommand, config);
    });
    let test = builder.build(&server).await?;

@@ -812,11 +807,7 @@ async fn shell_command_output_is_not_truncated_under_10k_bytes() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex()
        .with_model("gpt-5.1")
        .with_config(move |config| {
            config.features.enable(Feature::ShellCommandTool);
        });
    let mut builder = test_codex().with_model("gpt-5.1");
    let test = builder.build(&server).await?;

    let call_id = "shell-command";
@@ -866,11 +857,7 @@ async fn shell_command_output_is_not_truncated_over_10k_bytes() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let mut builder = test_codex()
        .with_model("gpt-5.1")
        .with_config(move |config| {
            config.features.enable(Feature::ShellCommandTool);
        });
    let mut builder = test_codex().with_model("gpt-5.1");
    let test = builder.build(&server).await?;

    let call_id = "shell-command";

77
codex-rs/core/tests/suite/text_encoding_fix.rs
Normal file
@@ -0,0 +1,77 @@
//! Integration test for the text encoding fix for issue #6178.
//!
//! These tests simulate VSCode's shell preview on Windows/WSL where the output
//! may be encoded with a legacy code page before it reaches Codex.

use codex_core::exec::StreamOutput;
use pretty_assertions::assert_eq;

#[test]
fn test_utf8_shell_output() {
    // Baseline: UTF-8 output should bypass the detector and remain unchanged.
    assert_eq!(decode_shell_output("пример".as_bytes()), "пример");
}

#[test]
fn test_cp1251_shell_output() {
    // VS Code shells on Windows frequently surface CP1251 bytes for Cyrillic text.
    assert_eq!(decode_shell_output(b"\xEF\xF0\xE8\xEC\xE5\xF0"), "пример");
}

#[test]
fn test_cp866_shell_output() {
    // Native cmd.exe still defaults to CP866; make sure we recognize that too.
    assert_eq!(decode_shell_output(b"\xAF\xE0\xA8\xAC\xA5\xE0"), "пример");
}

#[test]
fn test_windows_1252_smart_decoding() {
    // Smart detection should turn fancy quotes/dashes into the proper Unicode glyphs.
    assert_eq!(
        decode_shell_output(b"\x93\x94 test \x96 dash"),
        "\u{201C}\u{201D} test \u{2013} dash"
    );
}

#[test]
fn test_smart_decoding_improves_over_lossy_utf8() {
    // Regression guard: String::from_utf8_lossy() alone used to emit replacement chars here.
    let bytes = b"\x93\x94 test \x96 dash";
    assert!(
        String::from_utf8_lossy(bytes).contains('\u{FFFD}'),
        "lossy UTF-8 should inject replacement chars"
    );
    assert_eq!(
        decode_shell_output(bytes),
        "\u{201C}\u{201D} test \u{2013} dash",
        "smart decoding should keep curly quotes intact"
    );
}

#[test]
fn test_mixed_ascii_and_legacy_encoding() {
    // Commands tend to mix ASCII status text with Latin-1 bytes (e.g. café).
    assert_eq!(decode_shell_output(b"Output: caf\xE9"), "Output: café"); // codespell:ignore caf
}

#[test]
fn test_pure_latin1_shell_output() {
    // Latin-1 by itself should still decode correctly (regression coverage for the older tests).
    assert_eq!(decode_shell_output(b"caf\xE9"), "café"); // codespell:ignore caf
}

#[test]
fn test_invalid_bytes_still_fall_back_to_lossy() {
    // If detection fails, we still want the user to see replacement characters.
    let bytes = b"\xFF\xFE\xFD";
    assert_eq!(decode_shell_output(bytes), String::from_utf8_lossy(bytes));
}

fn decode_shell_output(bytes: &[u8]) -> String {
    StreamOutput {
        text: bytes.to_vec(),
        truncated_after_lines: None,
    }
    .from_utf8_lossy()
    .text
}
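A minimal standalone sketch (plain std, no Codex types) of the failure mode these tests guard against — byte-for-byte lossy decoding of the CP1251 bytes for "пример" yields nothing but replacement characters:

fn main() {
    // CP1251 encoding of "пример"; none of these bytes form valid UTF-8.
    let cp1251: &[u8] = b"\xEF\xF0\xE8\xEC\xE5\xF0";
    let lossy = String::from_utf8_lossy(cp1251);
    // Plain lossy decoding yields only U+FFFD replacement characters,
    // which is exactly what the smart detector above avoids.
    assert!(lossy.chars().all(|c| c == '\u{FFFD}'));
}
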
@@ -146,10 +146,11 @@ async fn non_parallel_tools_run_serially() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let test = test_codex().build(&server).await?;
    let mut builder = test_codex().with_model("gpt-5.1");
    let test = builder.build(&server).await?;

    let shell_args = json!({
        "command": ["/bin/sh", "-c", "sleep 0.3"],
        "command": "sleep 0.3",
        "timeout_ms": 1_000,
    });
    let args_one = serde_json::to_string(&shell_args)?;
@@ -157,8 +158,8 @@ async fn non_parallel_tools_run_serially() -> anyhow::Result<()> {

    let first_response = sse(vec![
        json!({"type": "response.created", "response": {"id": "resp-1"}}),
        ev_function_call("call-1", "shell", &args_one),
        ev_function_call("call-2", "shell", &args_two),
        ev_function_call("call-1", "shell_command", &args_one),
        ev_function_call("call-2", "shell_command", &args_two),
        ev_completed("resp-1"),
    ]);
    let second_response = sse(vec![
@@ -167,7 +168,7 @@ async fn non_parallel_tools_run_serially() -> anyhow::Result<()> {
    ]);
    mount_sse_sequence(&server, vec![first_response, second_response]).await;

    let duration = run_turn_and_measure(&test, "run shell twice").await?;
    let duration = run_turn_and_measure(&test, "run shell_command twice").await?;
    assert_serial_duration(duration);

    Ok(())
@@ -185,14 +186,14 @@ async fn mixed_tools_fall_back_to_serial() -> anyhow::Result<()> {
    })
    .to_string();
    let shell_args = serde_json::to_string(&json!({
        "command": ["/bin/sh", "-c", "sleep 0.3"],
        "command": "sleep 0.3",
        "timeout_ms": 1_000,
    }))?;

    let first_response = sse(vec![
        json!({"type": "response.created", "response": {"id": "resp-1"}}),
        ev_function_call("call-1", "test_sync_tool", &sync_args),
        ev_function_call("call-2", "shell", &shell_args),
        ev_function_call("call-2", "shell_command", &shell_args),
        ev_completed("resp-1"),
    ]);
    let second_response = sse(vec![
@@ -215,7 +216,7 @@ async fn tool_results_grouped() -> anyhow::Result<()> {
    let test = build_codex_with_test_tool(&server).await?;

    let shell_args = serde_json::to_string(&json!({
        "command": ["/bin/sh", "-c", "echo 'shell output'"],
        "command": "echo 'shell output'",
        "timeout_ms": 1_000,
    }))?;

@@ -223,9 +224,9 @@ async fn tool_results_grouped() -> anyhow::Result<()> {
        &server,
        sse(vec![
            json!({"type": "response.created", "response": {"id": "resp-1"}}),
            ev_function_call("call-1", "shell", &shell_args),
            ev_function_call("call-2", "shell", &shell_args),
            ev_function_call("call-3", "shell", &shell_args),
            ev_function_call("call-1", "shell_command", &shell_args),
            ev_function_call("call-2", "shell_command", &shell_args),
            ev_function_call("call-3", "shell_command", &shell_args),
            ev_completed("resp-1"),
        ]),
    )

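The recurring edit in these hunks is the tool-call payload shape: the generic `shell` tool took an argv array, while `shell_command` takes a single command string. A minimal serde_json sketch of the before/after (illustrative only, not part of the diff):

use serde_json::json;

fn main() {
    // Old `shell` tool: explicit argv vector, executed directly.
    let shell = json!({
        "command": ["/bin/sh", "-c", "sleep 0.3"],
        "timeout_ms": 1_000,
    });
    // New `shell_command` tool: one command string, run through the shell.
    let shell_command = json!({
        "command": "sleep 0.3",
        "timeout_ms": 1_000,
    });
    println!("{shell}\n{shell_command}");
}
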
@@ -98,7 +98,7 @@ async fn truncate_function_error_trims_respond_to_model() -> Result<()> {
    Ok(())
}

// Verifies that a standard tool call (shell) exceeding the model formatting
// Verifies that a standard tool call (shell_command) exceeding the model formatting
// limits is truncated before being sent back to the model.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn tool_call_output_configured_limit_chars_type() -> Result<()> {
@@ -106,7 +106,7 @@ async fn tool_call_output_configured_limit_chars_type() -> Result<()> {

    let server = start_mock_server().await;

    // Use a model that exposes the generic shell tool.
    // Use a model that exposes the shell_command tool.
    let mut builder = test_codex().with_model("gpt-5.1").with_config(|config| {
        config.tool_output_token_limit = Some(100_000);
    });
@@ -114,28 +114,22 @@ async fn tool_call_output_configured_limit_chars_type() -> Result<()> {
    let fixture = builder.build(&server).await?;

    let call_id = "shell-too-large";
    let args = if cfg!(windows) {
        serde_json::json!({
            "command": [
                "powershell",
                "-Command",
                "for ($i=1; $i -le 100000; $i++) { Write-Output $i }"
            ],
            "timeout_ms": 5_000,
        })
    let command = if cfg!(windows) {
        "for ($i=1; $i -le 100000; $i++) { Write-Output $i }"
    } else {
        serde_json::json!({
            "command": ["/bin/sh", "-c", "seq 1 100000"],
            "timeout_ms": 5_000,
        })
        "seq 1 100000"
    };
    let args = serde_json::json!({
        "command": command,
        "timeout_ms": 5_000,
    });

    // First response: model tells us to run the tool; second: complete the turn.
    mount_sse_once(
        &server,
        sse(vec![
            responses::ev_response_created("resp-1"),
            responses::ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            responses::ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
            responses::ev_completed("resp-1"),
        ]),
    )
@@ -167,7 +161,10 @@ async fn tool_call_output_configured_limit_chars_type() -> Result<()> {
        "expected truncated shell output to be plain text"
    );

    assert_eq!(output.len(), 400097, "we should be almost 100k tokens");
    assert!(
        (400000..=401000).contains(&output.len()),
        "we should be almost 100k tokens"
    );

    assert!(
        !output.contains("tokens truncated"),
@@ -177,7 +174,7 @@ async fn tool_call_output_configured_limit_chars_type() -> Result<()> {
    Ok(())
}

// Verifies that a standard tool call (shell) exceeding the model formatting
// Verifies that a standard tool call (shell_command) exceeding the model formatting
// limits is truncated before being sent back to the model.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn tool_call_output_exceeds_limit_truncated_chars_limit() -> Result<()> {
@@ -185,34 +182,28 @@ async fn tool_call_output_exceeds_limit_truncated_chars_limit() -> Result<()> {

    let server = start_mock_server().await;

    // Use a model that exposes the generic shell tool.
    // Use a model that exposes the shell_command tool.
    let mut builder = test_codex().with_model("gpt-5.1");

    let fixture = builder.build(&server).await?;

    let call_id = "shell-too-large";
    let args = if cfg!(windows) {
        serde_json::json!({
            "command": [
                "powershell",
                "-Command",
                "for ($i=1; $i -le 100000; $i++) { Write-Output $i }"
            ],
            "timeout_ms": 5_000,
        })
    let command = if cfg!(windows) {
        "for ($i=1; $i -le 100000; $i++) { Write-Output $i }"
    } else {
        serde_json::json!({
            "command": ["/bin/sh", "-c", "seq 1 100000"],
            "timeout_ms": 5_000,
        })
        "seq 1 100000"
    };
    let args = serde_json::json!({
        "command": command,
        "timeout_ms": 5_000,
    });

    // First response: model tells us to run the tool; second: complete the turn.
    mount_sse_once(
        &server,
        sse(vec![
            responses::ev_response_created("resp-1"),
            responses::ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            responses::ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
            responses::ev_completed("resp-1"),
        ]),
    )
@@ -244,15 +235,20 @@ async fn tool_call_output_exceeds_limit_truncated_chars_limit() -> Result<()> {
        "expected truncated shell output to be plain text"
    );

    assert_eq!(output.len(), 9976); // ~10k characters
    let truncated_pattern = r#"(?s)^Exit code: 0\nWall time: 0 seconds\nTotal output lines: 100000\nOutput:\n.*?…\d+ chars truncated….*$"#;
    let truncated_pattern = r#"(?s)^Exit code: 0\nWall time: [0-9]+(?:\.[0-9]+)? seconds\nTotal output lines: 100000\nOutput:\n.*?…\d+ chars truncated….*$"#;

    assert_regex_match(truncated_pattern, &output);

    let len = output.len();
    assert!(
        (9_900..=10_100).contains(&len),
        "expected ~10k chars after truncation, got {len}"
    );

    Ok(())
}

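The loosened pattern no longer pins the wall time to the literal `0 seconds`; `[0-9]+(?:\.[0-9]+)?` accepts fractional timings as well. A quick check of that fragment with the regex crate (a sketch, not part of the diff):

use regex::Regex;

fn main() {
    let re = Regex::new(r"^Wall time: [0-9]+(?:\.[0-9]+)? seconds$").unwrap();
    assert!(re.is_match("Wall time: 0 seconds"));
    assert!(re.is_match("Wall time: 1.25 seconds")); // now accepted
    assert!(!re.is_match("Wall time: ? seconds"));
}
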
// Verifies that a standard tool call (shell) exceeding the model formatting
// Verifies that a standard tool call (shell_command) exceeding the model formatting
// limits is truncated before being sent back to the model.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn tool_call_output_exceeds_limit_truncated_for_model() -> Result<()> {
@@ -260,7 +256,7 @@ async fn tool_call_output_exceeds_limit_truncated_for_model() -> Result<()> {

    let server = start_mock_server().await;

    // Use a model that exposes the generic shell tool.
    // Use a model that exposes the shell_command tool.
    let mut builder = test_codex().with_config(|config| {
        config.model = "gpt-5.1-codex".to_string();
        config.model_family =
@@ -269,28 +265,22 @@ async fn tool_call_output_exceeds_limit_truncated_for_model() -> Result<()> {
    let fixture = builder.build(&server).await?;

    let call_id = "shell-too-large";
    let args = if cfg!(windows) {
        serde_json::json!({
            "command": [
                "powershell",
                "-Command",
                "for ($i=1; $i -le 100000; $i++) { Write-Output $i }"
            ],
            "timeout_ms": 5_000,
        })
    let command = if cfg!(windows) {
        "for ($i=1; $i -le 100000; $i++) { Write-Output $i }"
    } else {
        serde_json::json!({
            "command": ["/bin/sh", "-c", "seq 1 100000"],
            "timeout_ms": 5_000,
        })
        "seq 1 100000"
    };
    let args = serde_json::json!({
        "command": command,
        "timeout_ms": 5_000,
    });

    // First response: model tells us to run the tool; second: complete the turn.
    mount_sse_once(
        &server,
        sse(vec![
            responses::ev_response_created("resp-1"),
            responses::ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            responses::ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
            responses::ev_completed("resp-1"),
        ]),
    )
@@ -340,7 +330,7 @@ $"#;
    Ok(())
}

// Ensures shell tool outputs that exceed the line limit are truncated only once.
// Ensures shell_command outputs that exceed the line limit are truncated only once.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn tool_call_output_truncated_only_once() -> Result<()> {
    skip_if_no_network!(Ok(()));
@@ -354,27 +344,21 @@ async fn tool_call_output_truncated_only_once() -> Result<()> {
    });
    let fixture = builder.build(&server).await?;
    let call_id = "shell-single-truncation";
    let args = if cfg!(windows) {
        serde_json::json!({
            "command": [
                "powershell",
                "-Command",
                "for ($i=1; $i -le 10000; $i++) { Write-Output $i }"
            ],
            "timeout_ms": 5_000,
        })
    let command = if cfg!(windows) {
        "for ($i=1; $i -le 10000; $i++) { Write-Output $i }"
    } else {
        serde_json::json!({
            "command": ["/bin/sh", "-c", "seq 1 10000"],
            "timeout_ms": 5_000,
        })
        "seq 1 10000"
    };
    let args = serde_json::json!({
        "command": command,
        "timeout_ms": 5_000,
    });

    mount_sse_once(
        &server,
        sse(vec![
            responses::ev_response_created("resp-1"),
            responses::ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            responses::ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
            responses::ev_completed("resp-1"),
        ]),
    )
@@ -614,7 +598,7 @@ async fn token_policy_marker_reports_tokens() -> Result<()> {

    let call_id = "shell-token-marker";
    let args = json!({
        "command": ["/bin/sh", "-c", "seq 1 150"],
        "command": "seq 1 150",
        "timeout_ms": 5_000,
    });

@@ -622,7 +606,7 @@ async fn token_policy_marker_reports_tokens() -> Result<()> {
        &server,
        sse(vec![
            ev_response_created("resp-1"),
            ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
            ev_completed("resp-1"),
        ]),
    )
@@ -645,7 +629,7 @@ async fn token_policy_marker_reports_tokens() -> Result<()> {
        .function_call_output_text(call_id)
        .context("shell output present")?;

    let pattern = r#"(?s)^\{"output":"Total output lines: 150\\n\\n1\\n2\\n3\\n4\\n5\\n.*?…\d+ tokens truncated…7\\n138\\n139\\n140\\n141\\n142\\n143\\n144\\n145\\n146\\n147\\n148\\n149\\n150\\n","metadata":\{"exit_code":0,"duration_seconds":0\.0\}\}$"#;
    let pattern = r"(?s)^Exit code: 0\nWall time: [0-9]+(?:\.[0-9]+)? seconds\nTotal output lines: 150\nOutput:\n1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19.*tokens truncated.*129\n130\n131\n132\n133\n134\n135\n136\n137\n138\n139\n140\n141\n142\n143\n144\n145\n146\n147\n148\n149\n150\n$";

    assert_regex_match(pattern, &output);

@@ -667,7 +651,7 @@ async fn byte_policy_marker_reports_bytes() -> Result<()> {

    let call_id = "shell-byte-marker";
    let args = json!({
        "command": ["/bin/sh", "-c", "seq 1 150"],
        "command": "seq 1 150",
        "timeout_ms": 5_000,
    });

@@ -675,7 +659,7 @@ async fn byte_policy_marker_reports_bytes() -> Result<()> {
        &server,
        sse(vec![
            ev_response_created("resp-1"),
            ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
            ev_completed("resp-1"),
        ]),
    )
@@ -698,16 +682,16 @@ async fn byte_policy_marker_reports_bytes() -> Result<()> {
        .function_call_output_text(call_id)
        .context("shell output present")?;

    let pattern = r#"(?s)^\{"output":"Total output lines: 150\\n\\n1\\n2\\n3\\n4\\n5.*?…\d+ chars truncated…7\\n138\\n139\\n140\\n141\\n142\\n143\\n144\\n145\\n146\\n147\\n148\\n149\\n150\\n","metadata":\{"exit_code":0,"duration_seconds":0\.0\}\}$"#;
    let pattern = r"(?s)^Exit code: 0\nWall time: [0-9]+(?:\.[0-9]+)? seconds\nTotal output lines: 150\nOutput:\n1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19.*chars truncated.*129\n130\n131\n132\n133\n134\n135\n136\n137\n138\n139\n140\n141\n142\n143\n144\n145\n146\n147\n148\n149\n150\n$";

    assert_regex_match(pattern, &output);

    Ok(())
}

// Shell tool output should remain intact when the config opts into a large token budget.
// shell_command output should remain intact when the config opts into a large token budget.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn shell_tool_output_not_truncated_with_custom_limit() -> Result<()> {
async fn shell_command_output_not_truncated_with_custom_limit() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
@@ -721,7 +705,7 @@ async fn shell_command_output_not_truncated_with_custom_limit() -> Result<()> {

    let call_id = "shell-no-trunc";
    let args = json!({
        "command": ["/bin/sh", "-c", "seq 1 1000"],
        "command": "seq 1 1000",
        "timeout_ms": 5_000,
    });
    let expected_body: String = (1..=1000).map(|i| format!("{i}\n")).collect();
@@ -730,7 +714,7 @@ async fn shell_command_output_not_truncated_with_custom_limit() -> Result<()> {
        &server,
        sse(vec![
            ev_response_created("resp-1"),
            ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
            ev_completed("resp-1"),
        ]),
    )

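As an aside, the 400000..=401000 window asserted earlier looks like the common ~4-characters-per-token rule of thumb applied to the 100_000-token limit — an assumption on my part, not something the diff states:

fn main() {
    let token_limit: usize = 100_000;
    let approx_chars = token_limit * 4; // assumed ~4 chars/token heuristic
    assert!((400_000..=401_000).contains(&approx_chars));
}
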
@@ -904,6 +904,98 @@ async fn exec_command_reports_chunk_and_exit_metadata() -> Result<()> {
    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn unified_exec_respects_early_exit_notifications() -> Result<()> {
    skip_if_no_network!(Ok(()));
    skip_if_sandbox!(Ok(()));

    let server = start_mock_server().await;

    let mut builder = test_codex().with_config(|config| {
        config.features.enable(Feature::UnifiedExec);
    });
    let TestCodex {
        codex,
        cwd,
        session_configured,
        ..
    } = builder.build(&server).await?;

    let call_id = "uexec-early-exit";
    let args = serde_json::json!({
        "cmd": "sleep 0.05",
        "yield_time_ms": 31415,
    });

    let responses = vec![
        sse(vec![
            ev_response_created("resp-1"),
            ev_function_call(call_id, "exec_command", &serde_json::to_string(&args)?),
            ev_completed("resp-1"),
        ]),
        sse(vec![
            ev_assistant_message("msg-1", "done"),
            ev_completed("resp-2"),
        ]),
    ];
    mount_sse_sequence(&server, responses).await;

    let session_model = session_configured.model.clone();

    codex
        .submit(Op::UserTurn {
            items: vec![UserInput::Text {
                text: "watch early exit timing".into(),
            }],
            final_output_json_schema: None,
            cwd: cwd.path().to_path_buf(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: session_model,
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;

    wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;

    let requests = server.received_requests().await.expect("recorded requests");
    assert!(!requests.is_empty(), "expected at least one POST request");

    let bodies = requests
        .iter()
        .map(|req| req.body_json::<Value>().expect("request json"))
        .collect::<Vec<_>>();

    let outputs = collect_tool_outputs(&bodies)?;
    let output = outputs
        .get(call_id)
        .expect("missing early exit unified_exec output");

    assert!(
        output.session_id.is_none(),
        "short-lived process should not keep a session alive"
    );
    assert_eq!(
        output.exit_code,
        Some(0),
        "short-lived process should exit successfully"
    );

    let wall_time = output.wall_time_seconds;
    assert!(
        wall_time < 0.75,
        "wall_time should reflect early exit rather than the full yield time; got {wall_time}"
    );
    assert!(
        output.output.is_empty(),
        "sleep command should not emit output, got {:?}",
        output.output
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn write_stdin_returns_exit_metadata_and_clears_session() -> Result<()> {
    skip_if_no_network!(Ok(()));
@@ -1530,8 +1622,8 @@ async fn unified_exec_formats_large_output_summary() -> Result<()> {
    } = builder.build(&server).await?;

    let script = r#"python3 - <<'PY'
for i in range(10000):
    print("token token ")
import sys
sys.stdout.write("token token \n" * 5000)
PY
"#;

@@ -279,23 +279,19 @@ async fn user_shell_command_is_truncated_only_once() -> anyhow::Result<()> {
        config.tool_output_token_limit = Some(100);
        config.model = "gpt-5.1-codex".to_string();
        config.model_family =
            find_family_for_model("gpt-5-codex").expect("gpt-5-codex is a model family");
            find_family_for_model("gpt-5.1-codex").expect("gpt-5.1-codex is a model family");
    });
    let fixture = builder.build(&server).await?;

    let call_id = "user-shell-double-truncation";
    let args = if cfg!(windows) {
        serde_json::json!({
            "command": [
                "powershell",
                "-Command",
                "for ($i=1; $i -le 2000; $i++) { Write-Output $i }"
            ],
            "command": "for ($i=1; $i -le 2000; $i++) { Write-Output $i }",
            "timeout_ms": 5_000,
        })
    } else {
        serde_json::json!({
            "command": ["/bin/sh", "-c", "seq 1 2000"],
            "command": "seq 1 2000",
            "timeout_ms": 5_000,
        })
    };
@@ -304,7 +300,7 @@ async fn user_shell_command_is_truncated_only_once() -> anyhow::Result<()> {
        &server,
        sse(vec![
            ev_response_created("resp-1"),
            ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
            ev_function_call(call_id, "shell_command", &serde_json::to_string(&args)?),
            ev_completed("resp-1"),
        ]),
    )
@@ -319,19 +315,22 @@ async fn user_shell_command_is_truncated_only_once() -> anyhow::Result<()> {
    .await;

    fixture
        .submit_turn_with_policy("trigger big shell output", SandboxPolicy::DangerFullAccess)
        .submit_turn_with_policy(
            "trigger big shell_command output",
            SandboxPolicy::DangerFullAccess,
        )
        .await?;

    let output = mock2
        .single_request()
        .function_call_output_text(call_id)
        .context("function_call_output present for shell call")?;
        .context("function_call_output present for shell_command call")?;

    let truncation_headers = output.matches("Total output lines:").count();

    assert_eq!(
        truncation_headers, 1,
        "shell output should carry only one truncation header: {output}"
        "shell_command output should carry only one truncation header: {output}"
    );

    Ok(())

@@ -72,7 +72,7 @@ For complete documentation of the `Op` and `EventMsg` variants, refer to [protoc
- `EventMsg::AgentMessage` – Messages from the `Model`
- `EventMsg::ExecApprovalRequest` – Request approval from user to execute a command
- `EventMsg::TaskComplete` – A task completed successfully
- `EventMsg::Error` – A task stopped with an error (includes an optional `http_status_code` when available)
- `EventMsg::Error` – A task stopped with an error
- `EventMsg::Warning` – A non-fatal warning that the client should surface to the user
- `EventMsg::TurnComplete` – Contains a `response_id` bookmark for the last `response_id` executed by the task. This can be used to continue the task at a later point in time, perhaps with additional user input.

@@ -4,14 +4,23 @@ name = "codex-exec-server"
version = { workspace = true }

[[bin]]
name = "codex-exec-server"
path = "src/main.rs"
name = "codex-execve-wrapper"
path = "src/bin/main_execve_wrapper.rs"

[[bin]]
name = "codex-exec-mcp-server"
path = "src/bin/main_mcp_server.rs"

[lib]
name = "codex_exec_server"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
async-trait = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-core = { workspace = true }
libc = { workspace = true }
@@ -31,6 +40,7 @@ rmcp = { workspace = true, default-features = false, features = [
] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
shlex = { workspace = true }
socket2 = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
@@ -39,6 +49,7 @@ tokio = { workspace = true, features = [
    "rt-multi-thread",
    "signal",
] }
tokio-util = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }

8
codex-rs/exec-server/src/bin/main_execve_wrapper.rs
Normal file
@@ -0,0 +1,8 @@
#[cfg(not(unix))]
fn main() {
    eprintln!("codex-execve-wrapper is only implemented for UNIX");
    std::process::exit(1);
}

#[cfg(unix)]
pub use codex_exec_server::main_execve_wrapper as main;
8
codex-rs/exec-server/src/bin/main_mcp_server.rs
Normal file
@@ -0,0 +1,8 @@
#[cfg(not(unix))]
fn main() {
    eprintln!("codex-exec-mcp-server is only implemented for UNIX");
    std::process::exit(1);
}

#[cfg(unix)]
pub use codex_exec_server::main_mcp_server as main;
8
codex-rs/exec-server/src/lib.rs
Normal file
@@ -0,0 +1,8 @@
#[cfg(unix)]
mod posix;

#[cfg(unix)]
pub use posix::main_execve_wrapper;

#[cfg(unix)]
pub use posix::main_mcp_server;
@@ -1,11 +0,0 @@
#[cfg(target_os = "windows")]
fn main() {
    eprintln!("codex-exec-server is not implemented on Windows targets");
    std::process::exit(1);
}

#[cfg(not(target_os = "windows"))]
mod posix;

#[cfg(not(target_os = "windows"))]
pub use posix::main;
@@ -56,109 +56,115 @@
//! o<-----x
//!
use std::path::Path;
use std::path::PathBuf;

use clap::Parser;
use clap::Subcommand;
use tracing_subscriber::EnvFilter;
use tracing_subscriber::{self};

use crate::posix::escalate_protocol::EscalateAction;
use crate::posix::escalate_server::EscalateServer;
use crate::posix::mcp_escalation_policy::ExecPolicyOutcome;

mod escalate_client;
mod escalate_protocol;
mod escalate_server;
mod escalation_policy;
mod mcp;
mod mcp_escalation_policy;
mod socket;
mod stopwatch;

fn dummy_exec_policy(file: &Path, argv: &[String], _workdir: &Path) -> EscalateAction {
    // TODO: execpolicy
    if file == Path::new("/opt/homebrew/bin/gh")
        && let [_, arg1, arg2, ..] = argv
        && arg1 == "issue"
        && arg2 == "list"
    {
        return EscalateAction::Escalate;
    }
    EscalateAction::Run
}
/// Default value of --execve option relative to the current executable.
/// Note this must match the name of the binary as specified in Cargo.toml.
const CODEX_EXECVE_WRAPPER_EXE_NAME: &str = "codex-execve-wrapper";

#[derive(Parser)]
#[command(version)]
pub struct Cli {
    #[command(subcommand)]
    subcommand: Option<Commands>,
}
struct McpServerCli {
    /// Executable to delegate execve(2) calls to in Bash.
    #[arg(long = "execve")]
    execve_wrapper: Option<PathBuf>,

#[derive(Subcommand)]
enum Commands {
    Escalate(EscalateArgs),
    ShellExec(ShellExecArgs),
}

/// Invoked from within the sandbox to (potentially) escalate permissions.
#[derive(Parser, Debug)]
struct EscalateArgs {
    file: String,

    #[arg(trailing_var_arg = true)]
    argv: Vec<String>,
}

impl EscalateArgs {
    /// This is the escalate client. It talks to the escalate server to determine whether to exec()
    /// the command directly or to proxy to the escalate server.
    async fn run(self) -> anyhow::Result<i32> {
        let EscalateArgs { file, argv } = self;
        escalate_client::run(file, argv).await
    }
}

/// Debugging command to emulate an MCP "shell" tool call.
#[derive(Parser, Debug)]
struct ShellExecArgs {
    command: String,
    /// Path to Bash that has been patched to support execve() wrapping.
    #[arg(long = "bash")]
    bash_path: Option<PathBuf>,
}

#[tokio::main]
pub async fn main() -> anyhow::Result<()> {
    let cli = Cli::parse();
pub async fn main_mcp_server() -> anyhow::Result<()> {
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .with_writer(std::io::stderr)
        .with_ansi(false)
        .init();

    match cli.subcommand {
        Some(Commands::Escalate(args)) => {
            std::process::exit(args.run().await?);
        }
        Some(Commands::ShellExec(args)) => {
            let bash_path = mcp::get_bash_path()?;
            let escalate_server = EscalateServer::new(bash_path, dummy_exec_policy);
            let result = escalate_server
                .exec(
                    args.command.clone(),
                    std::env::vars().collect(),
                    std::env::current_dir()?,
                    None,
                )
                .await?;
            println!("{result:?}");
            std::process::exit(result.exit_code);
        }
    let cli = McpServerCli::parse();
    let execve_wrapper = match cli.execve_wrapper {
        Some(path) => path,
        None => {
            let bash_path = mcp::get_bash_path()?;
            let cwd = std::env::current_exe()?;
            cwd.parent()
                .map(|p| p.join(CODEX_EXECVE_WRAPPER_EXE_NAME))
                .ok_or_else(|| {
                    anyhow::anyhow!("failed to determine execve wrapper path from current exe")
                })?
        }
    };
    let bash_path = match cli.bash_path {
        Some(path) => path,
        None => mcp::get_bash_path()?,
    };

    tracing::info!("Starting MCP server");
    let service = mcp::serve(bash_path, dummy_exec_policy)
        .await
        .inspect_err(|e| {
            tracing::error!("serving error: {:?}", e);
        })?;
    tracing::info!("Starting MCP server");
    let service = mcp::serve(bash_path, execve_wrapper, dummy_exec_policy)
        .await
        .inspect_err(|e| {
            tracing::error!("serving error: {:?}", e);
        })?;

    service.waiting().await?;
    Ok(())
    service.waiting().await?;
    Ok(())
}

#[derive(Parser)]
pub struct ExecveWrapperCli {
    file: String,

    #[arg(trailing_var_arg = true)]
    argv: Vec<String>,
}

#[tokio::main]
pub async fn main_execve_wrapper() -> anyhow::Result<()> {
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .with_writer(std::io::stderr)
        .with_ansi(false)
        .init();

    let ExecveWrapperCli { file, argv } = ExecveWrapperCli::parse();
    let exit_code = escalate_client::run(file, argv).await?;
    std::process::exit(exit_code);
}

// TODO: replace with execpolicy

fn dummy_exec_policy(file: &Path, argv: &[String], _workdir: &Path) -> ExecPolicyOutcome {
    if file.ends_with("rm") {
        ExecPolicyOutcome::Forbidden
    } else if file.ends_with("git") {
        ExecPolicyOutcome::Prompt {
            run_with_escalated_permissions: false,
        }
    } else if file == Path::new("/opt/homebrew/bin/gh")
        && let [_, arg1, arg2, ..] = argv
        && arg1 == "issue"
        && arg2 == "list"
    {
        ExecPolicyOutcome::Allow {
            run_with_escalated_permissions: true,
        }
    } else {
        ExecPolicyOutcome::Allow {
            run_with_escalated_permissions: false,
        }
    }
}

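A hypothetical call against the new policy above (an illustrative fragment, assuming the surrounding module is in scope; note that `Path::ends_with` matches whole trailing components, so any `.../git` binary prompts):

// Illustrative only — not part of the diff.
let outcome = dummy_exec_policy(
    Path::new("/usr/bin/git"),
    &["git".to_string(), "status".to_string()],
    Path::new("/tmp"),
);
assert!(matches!(outcome, ExecPolicyOutcome::Prompt { .. }));
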
@@ -98,5 +98,12 @@ pub(crate) async fn run(file: String, argv: Vec<String>) -> anyhow::Result<i32>

            Err(err.into())
        }
        EscalateAction::Deny { reason } => {
            match reason {
                Some(reason) => eprintln!("Execution denied: {reason}"),
                None => eprintln!("Execution denied"),
            }
            Ok(1)
        }
    }
}

@@ -34,6 +34,8 @@ pub(super) enum EscalateAction {
    Run,
    /// The command should be escalated to the server for execution.
    Escalate,
    /// The command should not be executed.
    Deny { reason: Option<String> },
}

/// The client sends this to the server to forward its open FDs.

@@ -1,8 +1,8 @@
|
||||
use std::collections::HashMap;
|
||||
use std::os::fd::AsRawFd;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Stdio;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Context as _;
|
||||
@@ -13,6 +13,7 @@ use codex_core::exec::process_exec_tool_call;
|
||||
use codex_core::get_platform_sandbox;
|
||||
use codex_core::protocol::SandboxPolicy;
|
||||
use tokio::process::Command;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
use crate::posix::escalate_protocol::BASH_EXEC_WRAPPER_ENV_VAR;
|
||||
use crate::posix::escalate_protocol::ESCALATE_SOCKET_ENV_VAR;
|
||||
@@ -21,25 +22,27 @@ use crate::posix::escalate_protocol::EscalateRequest;
|
||||
use crate::posix::escalate_protocol::EscalateResponse;
|
||||
use crate::posix::escalate_protocol::SuperExecMessage;
|
||||
use crate::posix::escalate_protocol::SuperExecResult;
|
||||
use crate::posix::escalation_policy::EscalationPolicy;
|
||||
use crate::posix::socket::AsyncDatagramSocket;
|
||||
use crate::posix::socket::AsyncSocket;
|
||||
|
||||
/// This is the policy which decides how to handle an exec() call.
|
||||
///
|
||||
/// `file` is the absolute, canonical path to the executable to run, i.e. the first arg to exec.
|
||||
/// `argv` is the argv, including the program name (`argv[0]`).
|
||||
/// `workdir` is the absolute, canonical path to the working directory in which to execute the
|
||||
/// command.
|
||||
pub(crate) type ExecPolicy = fn(file: &Path, argv: &[String], workdir: &Path) -> EscalateAction;
|
||||
use codex_core::exec::ExecExpiration;
|
||||
|
||||
pub(crate) struct EscalateServer {
|
||||
bash_path: PathBuf,
|
||||
policy: ExecPolicy,
|
||||
execve_wrapper: PathBuf,
|
||||
policy: Arc<dyn EscalationPolicy>,
|
||||
}
|
||||
|
||||
impl EscalateServer {
|
||||
pub fn new(bash_path: PathBuf, policy: ExecPolicy) -> Self {
|
||||
Self { bash_path, policy }
|
||||
pub fn new<P>(bash_path: PathBuf, execve_wrapper: PathBuf, policy: P) -> Self
|
||||
where
|
||||
P: EscalationPolicy + Send + Sync + 'static,
|
||||
{
|
||||
Self {
|
||||
bash_path,
|
||||
execve_wrapper,
|
||||
policy: Arc::new(policy),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn exec(
|
||||
@@ -47,13 +50,13 @@ impl EscalateServer {
|
||||
command: String,
|
||||
env: HashMap<String, String>,
|
||||
workdir: PathBuf,
|
||||
timeout_ms: Option<u64>,
|
||||
cancel_rx: CancellationToken,
|
||||
) -> anyhow::Result<ExecResult> {
|
||||
let (escalate_server, escalate_client) = AsyncDatagramSocket::pair()?;
|
||||
let client_socket = escalate_client.into_inner();
|
||||
client_socket.set_cloexec(false)?;
|
||||
|
||||
let escalate_task = tokio::spawn(escalate_task(escalate_server, self.policy));
|
||||
let escalate_task = tokio::spawn(escalate_task(escalate_server, self.policy.clone()));
|
||||
let mut env = env.clone();
|
||||
env.insert(
|
||||
ESCALATE_SOCKET_ENV_VAR.to_string(),
|
||||
@@ -61,8 +64,15 @@ impl EscalateServer {
|
||||
);
|
||||
env.insert(
|
||||
BASH_EXEC_WRAPPER_ENV_VAR.to_string(),
|
||||
format!("{} escalate", std::env::current_exe()?.to_string_lossy()),
|
||||
self.execve_wrapper.to_string_lossy().to_string(),
|
||||
);
|
||||
|
||||
// TODO: use the sandbox policy and cwd from the calling client.
|
||||
// Note that sandbox_cwd is ignored for ReadOnly, but needs to be legit
|
||||
// for `SandboxPolicy::WorkspaceWrite`.
|
||||
let sandbox_policy = SandboxPolicy::ReadOnly;
|
||||
let sandbox_cwd = PathBuf::from("/__NONEXISTENT__");
|
||||
|
||||
let result = process_exec_tool_call(
|
||||
codex_core::exec::ExecParams {
|
||||
command: vec![
|
||||
@@ -71,16 +81,15 @@ impl EscalateServer {
|
||||
command,
|
||||
],
|
||||
cwd: PathBuf::from(&workdir),
|
||||
timeout_ms,
|
||||
expiration: ExecExpiration::Cancellation(cancel_rx),
|
||||
env,
|
||||
with_escalated_permissions: None,
|
||||
justification: None,
|
||||
arg0: None,
|
||||
},
|
||||
get_platform_sandbox().unwrap_or(SandboxType::None),
|
||||
// TODO: use the sandbox policy and cwd from the calling client
|
||||
&SandboxPolicy::ReadOnly,
|
||||
&PathBuf::from("/__NONEXISTENT__"), // This is ignored for ReadOnly
|
||||
&sandbox_policy,
|
||||
&sandbox_cwd,
|
||||
&None,
|
||||
None,
|
||||
)
|
||||
@@ -96,7 +105,10 @@ impl EscalateServer {
|
||||
}
|
||||
}
|
||||
|
||||
async fn escalate_task(socket: AsyncDatagramSocket, policy: ExecPolicy) -> anyhow::Result<()> {
|
||||
async fn escalate_task(
|
||||
socket: AsyncDatagramSocket,
|
||||
policy: Arc<dyn EscalationPolicy>,
|
||||
) -> anyhow::Result<()> {
|
||||
loop {
|
||||
let (_, mut fds) = socket.receive_with_fds().await?;
|
||||
if fds.len() != 1 {
|
||||
@@ -104,6 +116,7 @@ async fn escalate_task(socket: AsyncDatagramSocket, policy: ExecPolicy) -> anyho
|
||||
continue;
|
||||
}
|
||||
let stream_socket = AsyncSocket::from_fd(fds.remove(0))?;
|
||||
let policy = policy.clone();
|
||||
tokio::spawn(async move {
|
||||
if let Err(err) = handle_escalate_session_with_policy(stream_socket, policy).await {
|
||||
tracing::error!("escalate session failed: {err:?}");
|
||||
@@ -122,7 +135,7 @@ pub(crate) struct ExecResult {
|
||||
|
||||
async fn handle_escalate_session_with_policy(
|
||||
socket: AsyncSocket,
|
||||
policy: ExecPolicy,
|
||||
policy: Arc<dyn EscalationPolicy>,
|
||||
) -> anyhow::Result<()> {
|
||||
let EscalateRequest {
|
||||
file,
|
||||
@@ -132,8 +145,12 @@ async fn handle_escalate_session_with_policy(
|
||||
} = socket.receive::<EscalateRequest>().await?;
|
||||
let file = PathBuf::from(&file).absolutize()?.into_owned();
|
||||
let workdir = PathBuf::from(&workdir).absolutize()?.into_owned();
|
||||
let action = policy(file.as_path(), &argv, &workdir);
|
||||
let action = policy
|
||||
.determine_action(file.as_path(), &argv, &workdir)
|
||||
.await?;
|
||||
|
||||
tracing::debug!("decided {action:?} for {file:?} {argv:?} {workdir:?}");
|
||||
|
||||
match action {
|
||||
EscalateAction::Run => {
|
||||
socket
|
||||
@@ -195,6 +212,13 @@ async fn handle_escalate_session_with_policy(
|
||||
})
|
||||
.await?;
|
||||
}
|
||||
EscalateAction::Deny { reason } => {
|
||||
socket
|
||||
.send(EscalateResponse {
|
||||
action: EscalateAction::Deny { reason },
|
||||
})
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -204,14 +228,33 @@ mod tests {
     use super::*;
     use pretty_assertions::assert_eq;
     use std::collections::HashMap;
+    use std::path::Path;
     use std::path::PathBuf;

+    struct DeterministicEscalationPolicy {
+        action: EscalateAction,
+    }
+
+    #[async_trait::async_trait]
+    impl EscalationPolicy for DeterministicEscalationPolicy {
+        async fn determine_action(
+            &self,
+            _file: &Path,
+            _argv: &[String],
+            _workdir: &Path,
+        ) -> Result<EscalateAction, rmcp::ErrorData> {
+            Ok(self.action.clone())
+        }
+    }
+
     #[tokio::test]
     async fn handle_escalate_session_respects_run_in_sandbox_decision() -> anyhow::Result<()> {
         let (server, client) = AsyncSocket::pair()?;
         let server_task = tokio::spawn(handle_escalate_session_with_policy(
             server,
-            |_file, _argv, _workdir| EscalateAction::Run,
+            Arc::new(DeterministicEscalationPolicy {
+                action: EscalateAction::Run,
+            }),
         ));

         client
@@ -238,7 +281,9 @@ mod tests {
         let (server, client) = AsyncSocket::pair()?;
         let server_task = tokio::spawn(handle_escalate_session_with_policy(
             server,
-            |_file, _argv, _workdir| EscalateAction::Escalate,
+            Arc::new(DeterministicEscalationPolicy {
+                action: EscalateAction::Escalate,
+            }),
         ));

         client
14
codex-rs/exec-server/src/posix/escalation_policy.rs
Normal file
@@ -0,0 +1,14 @@
+use std::path::Path;
+
+use crate::posix::escalate_protocol::EscalateAction;
+
+/// Decides what action to take in response to an execve request from a client.
+#[async_trait::async_trait]
+pub(crate) trait EscalationPolicy: Send + Sync {
+    async fn determine_action(
+        &self,
+        file: &Path,
+        argv: &[String],
+        workdir: &Path,
+    ) -> Result<EscalateAction, rmcp::ErrorData>;
+}
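The new EscalationPolicy trait is async (via async_trait) and object-safe, so the server can hold an Arc<dyn EscalationPolicy> and swap implementations without touching the dispatch loop. As a minimal sketch of a custom implementation (the AllowlistPolicy name and its allowlist field are invented for illustration, not part of this diff), a policy that sandboxes a fixed set of binaries and escalates everything else could look like:

    use std::collections::HashSet;
    use std::path::Path;

    use crate::posix::escalate_protocol::EscalateAction;
    use crate::posix::escalation_policy::EscalationPolicy;

    /// Hypothetical policy (not in this diff): run allowlisted binaries in the
    /// sandbox, escalate everything else to the calling client.
    struct AllowlistPolicy {
        sandboxed: HashSet<String>,
    }

    #[async_trait::async_trait]
    impl EscalationPolicy for AllowlistPolicy {
        async fn determine_action(
            &self,
            file: &Path,
            _argv: &[String],
            _workdir: &Path,
        ) -> Result<EscalateAction, rmcp::ErrorData> {
            // Compare on the binary's file name; `file` is already absolutized
            // by the caller in handle_escalate_session_with_policy.
            let name = file
                .file_name()
                .map(|n| n.to_string_lossy().into_owned())
                .unwrap_or_default();
            if self.sandboxed.contains(&name) {
                Ok(EscalateAction::Run)
            } else {
                Ok(EscalateAction::Escalate)
            }
        }
    }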
@@ -18,9 +18,11 @@ use rmcp::tool_handler;
 use rmcp::tool_router;
 use rmcp::transport::stdio;

-use crate::posix::escalate_server;
 use crate::posix::escalate_server::EscalateServer;
-use crate::posix::escalate_server::ExecPolicy;
+use crate::posix::escalate_server::{self};
+use crate::posix::mcp_escalation_policy::ExecPolicy;
+use crate::posix::mcp_escalation_policy::McpEscalationPolicy;
+use crate::posix::stopwatch::Stopwatch;

 /// Path to our patched bash.
 const CODEX_BASH_PATH_ENV_VAR: &str = "CODEX_BASH_PATH";
@@ -64,15 +66,17 @@ impl From<escalate_server::ExecResult> for ExecResult {
 pub struct ExecTool {
     tool_router: ToolRouter<ExecTool>,
     bash_path: PathBuf,
+    execve_wrapper: PathBuf,
     policy: ExecPolicy,
 }

 #[tool_router]
 impl ExecTool {
-    pub fn new(bash_path: PathBuf, policy: ExecPolicy) -> Self {
+    pub fn new(bash_path: PathBuf, execve_wrapper: PathBuf, policy: ExecPolicy) -> Self {
         Self {
             tool_router: Self::tool_router(),
             bash_path,
+            execve_wrapper,
             policy,
         }
     }
@@ -81,17 +85,28 @@ impl ExecTool {
     #[tool]
     async fn shell(
         &self,
-        _context: RequestContext<RoleServer>,
+        context: RequestContext<RoleServer>,
         Parameters(params): Parameters<ExecParams>,
     ) -> Result<CallToolResult, McpError> {
-        let escalate_server = EscalateServer::new(self.bash_path.clone(), self.policy);
+        let effective_timeout = Duration::from_millis(
+            params
+                .timeout_ms
+                .unwrap_or(codex_core::exec::DEFAULT_EXEC_COMMAND_TIMEOUT_MS),
+        );
+        let stopwatch = Stopwatch::new(effective_timeout);
+        let cancel_token = stopwatch.cancellation_token();
+        let escalate_server = EscalateServer::new(
+            self.bash_path.clone(),
+            self.execve_wrapper.clone(),
+            McpEscalationPolicy::new(self.policy, context, stopwatch.clone()),
+        );
         let result = escalate_server
             .exec(
                 params.command,
                 // TODO: use ShellEnvironmentPolicy
                 std::env::vars().collect(),
                 PathBuf::from(&params.workdir),
                 params.timeout_ms,
+                cancel_token,
             )
             .await
             .map_err(|e| McpError::internal_error(e.to_string(), None))?;
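Stopwatch itself is not shown in this diff, but the call sites above imply a shared deadline whose cancellation token is handed to exec and cloned into the escalation policy. A rough stand-in under that assumption could be built on tokio_util's CancellationToken; DeadlineStopwatch and its behavior are illustrative, not the actual crate::posix::stopwatch API:

    use std::time::Duration;
    use tokio_util::sync::CancellationToken;

    /// Illustrative stand-in for the Stopwatch used above; the real type in
    /// crate::posix::stopwatch may differ.
    #[derive(Clone)]
    struct DeadlineStopwatch {
        token: CancellationToken,
    }

    impl DeadlineStopwatch {
        fn new(timeout: Duration) -> Self {
            let token = CancellationToken::new();
            let deadline = token.clone();
            // Fire the token once the deadline elapses; every clone observes it.
            tokio::spawn(async move {
                tokio::time::sleep(timeout).await;
                deadline.cancel();
            });
            Self { token }
        }

        fn cancellation_token(&self) -> CancellationToken {
            self.token.clone()
        }
    }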
@@ -99,27 +114,6 @@ impl ExecTool {
             ExecResult::from(result),
         )?]))
     }
-
-    #[allow(dead_code)]
-    async fn prompt(
-        &self,
-        command: String,
-        workdir: String,
-        context: RequestContext<RoleServer>,
-    ) -> Result<CreateElicitationResult, McpError> {
-        context
-            .peer
-            .create_elicitation(CreateElicitationRequestParam {
-                message: format!("Allow Codex to run `{command:?}` in `{workdir:?}`?"),
-                #[allow(clippy::expect_used)]
-                requested_schema: ElicitationSchema::builder()
-                    .property("dummy", PrimitiveSchema::String(StringSchema::new()))
-                    .build()
-                    .expect("failed to build elicitation schema"),
-            })
-            .await
-            .map_err(|e| McpError::internal_error(e.to_string(), None))
-    }
 }

 #[tool_handler]
@@ -147,8 +141,9 @@ impl ServerHandler for ExecTool {

 pub(crate) async fn serve(
     bash_path: PathBuf,
+    execve_wrapper: PathBuf,
     policy: ExecPolicy,
 ) -> Result<RunningService<RoleServer, ExecTool>, rmcp::service::ServerInitializeError> {
-    let tool = ExecTool::new(bash_path, policy);
+    let tool = ExecTool::new(bash_path, execve_wrapper, policy);
     tool.serve(stdio()).await
 }
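With the extra execve_wrapper parameter, a caller of serve must now supply the wrapper binary's location alongside the patched bash. A hypothetical caller might look like the sketch below; the real wiring in the binary's entry point is not part of this diff, and the wrapper path is a placeholder:

    use std::path::PathBuf;

    /// Hypothetical entry point; the actual main.rs wiring is not shown here.
    async fn run(policy: ExecPolicy) -> anyhow::Result<()> {
        // CODEX_BASH_PATH matches the env-var name defined above.
        let bash_path = PathBuf::from(std::env::var("CODEX_BASH_PATH")?);
        let execve_wrapper = PathBuf::from("/path/to/execve-wrapper"); // placeholder
        let service = serve(bash_path, execve_wrapper, policy).await?;
        // Block until the client disconnects or the service shuts down.
        service.waiting().await?;
        Ok(())
    }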
Some files were not shown because too many files have changed in this diff