Mirror of https://github.com/openai/codex.git (synced 2026-02-03 15:33:41 +00:00)

Compare commits: 21 commits, pakrym/pyt... → remote-tas...
| SHA1 |
|---|
| 065fa50f10 |
| 25ab9f5e10 |
| f5ab495189 |
| 4923df37ea |
| 8858ed1090 |
| f0491f4826 |
| e1d6531103 |
| 5fa64b7ae1 |
| e20e4edbab |
| 16ac10f9d3 |
| 3d12b46b18 |
| 36803606a0 |
| 21ef6be571 |
| acb706b553 |
| 35dec89d8a |
| d1cf46b09f |
| e17d794a4e |
| 83dfb43dbd |
| e5d31d5ccc |
| 9be247e41e |
| d2fcf4314e |

.codespellrc (1 line changed)

```diff
@@ -1,6 +1,6 @@
 [codespell]
 # Ref: https://github.com/codespell-project/codespell#using-a-config-file
-skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
+skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl
 check-hidden = true
 ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
 ignore-words-list = ratatui,ser
```

.github/dotslash-config.json (vendored, 28 lines changed)

```diff
@@ -27,34 +27,6 @@
         "path": "codex.exe"
       }
     }
   },
-  "codex-responses-api-proxy": {
-    "platforms": {
-      "macos-aarch64": {
-        "regex": "^codex-responses-api-proxy-aarch64-apple-darwin\\.zst$",
-        "path": "codex-responses-api-proxy"
-      },
-      "macos-x86_64": {
-        "regex": "^codex-responses-api-proxy-x86_64-apple-darwin\\.zst$",
-        "path": "codex-responses-api-proxy"
-      },
-      "linux-x86_64": {
-        "regex": "^codex-responses-api-proxy-x86_64-unknown-linux-musl\\.zst$",
-        "path": "codex-responses-api-proxy"
-      },
-      "linux-aarch64": {
-        "regex": "^codex-responses-api-proxy-aarch64-unknown-linux-musl\\.zst$",
-        "path": "codex-responses-api-proxy"
-      },
-      "windows-x86_64": {
-        "regex": "^codex-responses-api-proxy-x86_64-pc-windows-msvc\\.exe\\.zst$",
-        "path": "codex-responses-api-proxy.exe"
-      },
-      "windows-aarch64": {
-        "regex": "^codex-responses-api-proxy-aarch64-pc-windows-msvc\\.exe\\.zst$",
-        "path": "codex-responses-api-proxy.exe"
-      }
-    }
-  }
   }
 }
```

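For reference, each DotSlash platform entry selects exactly one release artifact by filename regex. A quick sketch (illustrative only, not part of the repo) of how such a mapping resolves an artifact name:

```python
import re

# Hypothetical helper mirroring the platform regexes removed above: each
# DotSlash platform entry matches exactly one release artifact by name.
PLATFORM_REGEXES = {
    "macos-aarch64": r"^codex-responses-api-proxy-aarch64-apple-darwin\.zst$",
    "linux-x86_64": r"^codex-responses-api-proxy-x86_64-unknown-linux-musl\.zst$",
    "windows-x86_64": r"^codex-responses-api-proxy-x86_64-pc-windows-msvc\.exe\.zst$",
}

def match_platform(artifact_name: str) -> str | None:
    """Return the platform whose regex matches the artifact, if any."""
    for platform, pattern in PLATFORM_REGEXES.items():
        if re.match(pattern, artifact_name):
            return platform
    return None

assert match_platform("codex-responses-api-proxy-aarch64-apple-darwin.zst") == "macos-aarch64"
assert match_platform("codex-x86_64-unknown-linux-musl.zst") is None
```
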
.github/workflows/ci.yml (vendored, 7 lines changed)

```diff
@@ -1,7 +1,7 @@
 name: ci
 
 on:
-  pull_request: {}
+  pull_request: { branches: [main] }
   push: { branches: [main] }
 
 jobs:
@@ -31,7 +31,6 @@ jobs:
       - uses: facebook/install-dotslash@v2
 
       - name: Stage npm package
-        id: stage_npm_package
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
@@ -41,13 +40,13 @@
          python3 ./codex-cli/scripts/build_npm_package.py \
            --release-version "$CODEX_VERSION" \
            --pack-output "$PACK_OUTPUT"
-          echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
+          echo "PACK_OUTPUT=$PACK_OUTPUT" >> "$GITHUB_ENV"
 
       - name: Upload staged npm package artifact
        uses: actions/upload-artifact@v4
        with:
          name: codex-npm-staging
-          path: ${{ steps.stage_npm_package.outputs.pack_output }}
+          path: ${{ env.PACK_OUTPUT }}
 
       - name: Ensure root README.md contains only ASCII and certain Unicode code points
        run: ./scripts/asciicheck.py README.md
```

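The ci.yml change stops publishing the pack path as a step output and exports it through `$GITHUB_ENV` instead; both are standard GitHub Actions mechanisms that append `NAME=value` lines to a file the runner points at. A hedged Python equivalent of the shell `echo "PACK_OUTPUT=$PACK_OUTPUT" >> "$GITHUB_ENV"`:

```python
import os

def export_to_github_env(name: str, value: str) -> None:
    """Append NAME=value to the file $GITHUB_ENV points at, so later steps
    in the same job see it as an environment variable."""
    env_file = os.environ.get("GITHUB_ENV")
    if env_file is None:
        raise RuntimeError("not running inside GitHub Actions")
    with open(env_file, "a", encoding="utf-8") as fh:
        fh.write(f"{name}={value}\n")

# Later steps can then reference it as ${{ env.PACK_OUTPUT }}, as the
# upload-artifact step above does.
```
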
.github/workflows/codespell.yml (vendored, 1 line changed)

```diff
@@ -25,3 +25,4 @@ jobs:
         uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1
         with:
           ignore_words_file: .codespellignore
+          skip: frame*.txt
```

.github/workflows/rust-release.yml (vendored, 34 lines changed)

```diff
@@ -97,7 +97,7 @@ jobs:
           sudo apt install -y musl-tools pkg-config
 
       - name: Cargo build
-        run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy
+        run: cargo build --target ${{ matrix.target }} --release --bin codex
 
       - name: Stage artifacts
         shell: bash
@@ -107,10 +107,8 @@
 
           if [[ "${{ matrix.runner }}" == windows* ]]; then
             cp target/${{ matrix.target }}/release/codex.exe "$dest/codex-${{ matrix.target }}.exe"
-            cp target/${{ matrix.target }}/release/codex-responses-api-proxy.exe "$dest/codex-responses-api-proxy-${{ matrix.target }}.exe"
           else
             cp target/${{ matrix.target }}/release/codex "$dest/codex-${{ matrix.target }}"
-            cp target/${{ matrix.target }}/release/codex-responses-api-proxy "$dest/codex-responses-api-proxy-${{ matrix.target }}"
           fi
 
       - if: ${{ matrix.runner == 'windows-11-arm' }}
@@ -218,30 +216,17 @@
 
       # build_npm_package.py requires DotSlash when staging releases.
       - uses: facebook/install-dotslash@v2
-      - name: Stage codex CLI npm package
+      - name: Stage npm package
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -euo pipefail
          TMP_DIR="${RUNNER_TEMP}/npm-stage"
          ./codex-cli/scripts/build_npm_package.py \
-            --package codex \
            --release-version "${{ steps.release_name.outputs.name }}" \
            --staging-dir "${TMP_DIR}" \
            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
-
-      - name: Stage responses API proxy npm package
-        env:
-          GH_TOKEN: ${{ github.token }}
-        run: |
-          set -euo pipefail
-          TMP_DIR="${RUNNER_TEMP}/npm-stage-responses"
-          ./codex-cli/scripts/build_npm_package.py \
-            --package codex-responses-api-proxy \
-            --release-version "${{ steps.release_name.outputs.name }}" \
-            --staging-dir "${TMP_DIR}" \
-            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-responses-api-proxy-npm-${{ steps.release_name.outputs.name }}.tgz"
 
       - name: Create GitHub Release
        uses: softprops/action-gh-release@v2
        with:
@@ -284,7 +269,7 @@
       - name: Update npm
        run: npm install -g npm@latest
 
-      - name: Download npm tarballs from release
+      - name: Download npm tarball from release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
@@ -296,10 +281,6 @@
            --repo "${GITHUB_REPOSITORY}" \
            --pattern "codex-npm-${version}.tgz" \
            --dir dist/npm
-          gh release download "$tag" \
-            --repo "${GITHUB_REPOSITORY}" \
-            --pattern "codex-responses-api-proxy-npm-${version}.tgz" \
-            --dir dist/npm
 
       # No NODE_AUTH_TOKEN needed because we use OIDC.
       - name: Publish to npm
@@ -313,14 +294,7 @@
            tag_args+=(--tag "${NPM_TAG}")
          fi
 
-          tarballs=(
-            "codex-npm-${VERSION}.tgz"
-            "codex-responses-api-proxy-npm-${VERSION}.tgz"
-          )
-
-          for tarball in "${tarballs[@]}"; do
-            npm publish "${GITHUB_WORKSPACE}/dist/npm/${tarball}" "${tag_args[@]}"
-          done
+          npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${VERSION}.tgz" "${tag_args[@]}"
 
   update-branch:
     name: Update latest-alpha-cli branch
```

.github/workflows/sdk.yml (vendored, 43 lines changed)

```diff
@@ -1,43 +0,0 @@
-name: sdk
-
-on:
-  push:
-    branches: [main]
-  pull_request: {}
-
-jobs:
-  sdks:
-    runs-on: ubuntu-latest
-    timeout-minutes: 10
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v5
-
-      - name: Setup pnpm
-        uses: pnpm/action-setup@v4
-        with:
-          run_install: false
-
-      - name: Setup Node.js
-        uses: actions/setup-node@v5
-        with:
-          node-version: 22
-          cache: pnpm
-
-      - uses: dtolnay/rust-toolchain@1.90
-
-      - name: build codex
-        run: cargo build --bin codex
-        working-directory: codex-rs
-
-      - name: Install dependencies
-        run: pnpm install --frozen-lockfile
-
-      - name: Build SDK packages
-        run: pnpm -r --filter ./sdk/typescript run build
-
-      - name: Lint SDK packages
-        run: pnpm -r --filter ./sdk/typescript run lint
-
-      - name: Test SDK packages
-        run: pnpm -r --filter ./sdk/typescript run test
```

README.md

```diff
@@ -1,3 +1,4 @@
 <h1 align="center">OpenAI Codex CLI</h1>
 
+<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install codex</code></p>
 
@@ -101,3 +102,4 @@
 ## License
 
 This repository is licensed under the [Apache-2.0 License](LICENSE).
+
```

```diff
@@ -208,7 +208,7 @@ The hardening mechanism Codex uses depends on your OS:
 
 | Requirement                 | Details                                                         |
 | --------------------------- | --------------------------------------------------------------- |
 | Operating systems           | macOS 12+, Ubuntu 20.04+/Debian 10+, or Windows 11 **via WSL2** |
-| Node.js                     | **16 or newer** (Node 20 LTS recommended)                       |
+| Node.js                     | **22 or newer** (LTS recommended)                               |
 | Git (optional, recommended) | 2.23+ for built-in PR helpers                                   |
 | RAM                         | 4-GB minimum (8-GB recommended)                                 |
@@ -513,7 +513,7 @@ Codex runs model-generated commands in a sandbox. If a proposed command or file
 <details>
 <summary>Does it work on Windows?</summary>
 
-Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex is regularly tested on macOS and Linux with Node 20+, and also supports Node 16.
+Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex has been tested on macOS and Linux with Node 22.
 
 </details>
```

codex-cli/bin/codex.js (2 lines changed)

```diff
@@ -1,7 +1,6 @@
 #!/usr/bin/env node
 // Unified entry point for the Codex CLI.
 
-import { spawn } from "node:child_process";
 import { existsSync } from "fs";
 import path from "path";
 import { fileURLToPath } from "url";
@@ -69,6 +68,7 @@ const binaryPath = path.join(archRoot, "codex", codexBinaryName);
 // executing. This allows us to forward those signals to the child process
 // and guarantees that when either the child terminates or the parent
 // receives a fatal signal, both processes exit in a predictable manner.
+const { spawn } = await import("child_process");
 
 function getUpdatedPath(newDirs) {
   const pathSep = process.platform === "win32" ? ";" : ":";
```

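The launcher's comment describes the pattern: spawn the native binary, forward fatal signals to it, and exit with the child's status. A minimal Python sketch of the same idea (illustrative only; the real launcher is the JS file above):

```python
import signal
import subprocess
import sys

def run_and_forward_signals(argv: list[str]) -> int:
    """Spawn a child process, forward SIGINT/SIGTERM to it, and return the
    child's exit status so parent and child terminate together."""
    child = subprocess.Popen(argv)

    def forward(signum, _frame):
        child.send_signal(signum)

    signal.signal(signal.SIGINT, forward)
    signal.signal(signal.SIGTERM, forward)
    return child.wait()

if __name__ == "__main__":
    sys.exit(run_and_forward_signals(sys.argv[1:]))
```
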
codex-cli/package-lock.json (generated, 2 lines changed)

```diff
@@ -11,7 +11,7 @@
       "codex": "bin/codex.js"
     },
     "engines": {
-      "node": ">=16"
+      "node": ">=20"
     }
   }
 }
```

codex-cli/package.json (2 lines changed)

```diff
@@ -7,7 +7,7 @@
   },
   "type": "module",
   "engines": {
-    "node": ">=16"
+    "node": ">=20"
   },
   "files": [
     "bin",
```

codex-cli/scripts/build_npm_package.py

```diff
@@ -13,7 +13,6 @@ from pathlib import Path
 SCRIPT_DIR = Path(__file__).resolve().parent
 CODEX_CLI_ROOT = SCRIPT_DIR.parent
 REPO_ROOT = CODEX_CLI_ROOT.parent
-RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm"
 GITHUB_REPO = "openai/codex"
 
 # The docs are not clear on what the expected value/format of
@@ -24,12 +23,6 @@ WORKFLOW_NAME = ".github/workflows/rust-release.yml"
 
 def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
-    parser.add_argument(
-        "--package",
-        choices=("codex", "codex-responses-api-proxy"),
-        default="codex",
-        help="Which npm package to stage (default: codex).",
-    )
     parser.add_argument(
         "--version",
         help="Version number to write to package.json inside the staged package.",
@@ -70,7 +63,6 @@ def parse_args() -> argparse.Namespace:
 def main() -> int:
     args = parse_args()
 
-    package = args.package
     version = args.version
     release_version = args.release_version
     if release_version:
@@ -84,7 +76,7 @@ def main() -> int:
     staging_dir, created_temp = prepare_staging_dir(args.staging_dir)
 
     try:
-        stage_sources(staging_dir, version, package)
+        stage_sources(staging_dir, version)
 
         workflow_url = args.workflow_url
         resolved_head_sha: str | None = None
@@ -108,23 +100,16 @@ def main() -> int:
         if not workflow_url:
             raise RuntimeError("Unable to determine workflow URL for native binaries.")
 
-        install_native_binaries(staging_dir, workflow_url, package)
+        install_native_binaries(staging_dir, workflow_url)
 
     if release_version:
         staging_dir_str = str(staging_dir)
-        if package == "codex":
-            print(
-                f"Staged version {version} for release in {staging_dir_str}\n\n"
-                "Verify the CLI:\n"
-                f"  node {staging_dir_str}/bin/codex.js --version\n"
-                f"  node {staging_dir_str}/bin/codex.js --help\n\n"
-            )
-        else:
-            print(
-                f"Staged version {version} for release in {staging_dir_str}\n\n"
-                "Verify the responses API proxy:\n"
-                f"  node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n"
-            )
+        print(
+            f"Staged version {version} for release in {staging_dir_str}\n\n"
+            "Verify the CLI:\n"
+            f"  node {staging_dir_str}/bin/codex.js --version\n"
+            f"  node {staging_dir_str}/bin/codex.js --help\n\n"
+        )
     else:
         print(f"Staged package in {staging_dir}")
@@ -151,34 +136,20 @@ def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
     return temp_dir, True
 
 
-def stage_sources(staging_dir: Path, version: str, package: str) -> None:
+def stage_sources(staging_dir: Path, version: str) -> None:
     bin_dir = staging_dir / "bin"
     bin_dir.mkdir(parents=True, exist_ok=True)
 
-    if package == "codex":
-        shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
-        rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
-        if rg_manifest.exists():
-            shutil.copy2(rg_manifest, bin_dir / "rg")
+    shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
+    rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
+    if rg_manifest.exists():
+        shutil.copy2(rg_manifest, bin_dir / "rg")
 
-        readme_src = REPO_ROOT / "README.md"
-        if readme_src.exists():
-            shutil.copy2(readme_src, staging_dir / "README.md")
+    readme_src = REPO_ROOT / "README.md"
+    if readme_src.exists():
+        shutil.copy2(readme_src, staging_dir / "README.md")
 
-        package_json_path = CODEX_CLI_ROOT / "package.json"
-    elif package == "codex-responses-api-proxy":
-        launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js"
-        shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js")
-
-        readme_src = RESPONSES_API_PROXY_NPM_ROOT / "README.md"
-        if readme_src.exists():
-            shutil.copy2(readme_src, staging_dir / "README.md")
-
-        package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json"
-    else:
-        raise RuntimeError(f"Unknown package '{package}'.")
-
-    with open(package_json_path, "r", encoding="utf-8") as fh:
+    with open(CODEX_CLI_ROOT / "package.json", "r", encoding="utf-8") as fh:
         package_json = json.load(fh)
     package_json["version"] = version
 
@@ -187,19 +158,10 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
         out.write("\n")
 
 
-def install_native_binaries(staging_dir: Path, workflow_url: str, package: str) -> None:
-    package_components = {
-        "codex": ["codex", "rg"],
-        "codex-responses-api-proxy": ["codex-responses-api-proxy"],
-    }
-
-    components = package_components.get(package)
-    if components is None:
-        raise RuntimeError(f"Unknown package '{package}'.")
-
-    cmd = ["./scripts/install_native_deps.py", "--workflow-url", workflow_url]
-    for component in components:
-        cmd.extend(["--component", component])
+def install_native_binaries(staging_dir: Path, workflow_url: str | None) -> None:
+    cmd = ["./scripts/install_native_deps.py"]
+    if workflow_url:
+        cmd.extend(["--workflow-url", workflow_url])
     cmd.append(str(staging_dir))
     subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT)
```

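The removed `--package` flag chose between two npm tarballs whose names the release workflow hard-codes in its `--pack-output` arguments. A small sketch of that naming scheme (a hypothetical helper, not in the repo):

```python
from pathlib import Path

def pack_output_path(dist_dir: Path, package: str, version: str) -> Path:
    """Mirror the tarball names the release workflow passed to
    --pack-output: codex-npm-<v>.tgz and codex-responses-api-proxy-npm-<v>.tgz."""
    prefix = "codex-npm" if package == "codex" else f"{package}-npm"
    return dist_dir / f"{prefix}-{version}.tgz"

print(pack_output_path(Path("dist/npm"), "codex", "0.40.0"))
# dist/npm/codex-npm-0.40.0.tgz
print(pack_output_path(Path("dist/npm"), "codex-responses-api-proxy", "0.40.0"))
# dist/npm/codex-responses-api-proxy-npm-0.40.0.tgz
```
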
codex-cli/scripts/install_native_deps.py

```diff
@@ -9,7 +9,6 @@ import subprocess
 import tarfile
 import tempfile
 import zipfile
-from dataclasses import dataclass
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from pathlib import Path
 from typing import Iterable, Sequence
@@ -21,7 +20,7 @@ CODEX_CLI_ROOT = SCRIPT_DIR.parent
 DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351"  # rust-v0.40.0
 VENDOR_DIR_NAME = "vendor"
 RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
-BINARY_TARGETS = (
+CODEX_TARGETS = (
     "x86_64-unknown-linux-musl",
     "aarch64-unknown-linux-musl",
     "x86_64-apple-darwin",
@@ -30,27 +29,6 @@ BINARY_TARGETS = (
     "aarch64-pc-windows-msvc",
 )
 
-
-@dataclass(frozen=True)
-class BinaryComponent:
-    artifact_prefix: str  # matches the artifact filename prefix (e.g. codex-<target>.zst)
-    dest_dir: str  # directory under vendor/<target>/ where the binary is installed
-    binary_basename: str  # executable name inside dest_dir (before optional .exe)
-
-
-BINARY_COMPONENTS = {
-    "codex": BinaryComponent(
-        artifact_prefix="codex",
-        dest_dir="codex",
-        binary_basename="codex",
-    ),
-    "codex-responses-api-proxy": BinaryComponent(
-        artifact_prefix="codex-responses-api-proxy",
-        dest_dir="codex-responses-api-proxy",
-        binary_basename="codex-responses-api-proxy",
-    ),
-}
-
 RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
     ("x86_64-unknown-linux-musl", "linux-x86_64"),
     ("aarch64-unknown-linux-musl", "linux-aarch64"),
@@ -72,16 +50,6 @@ def parse_args() -> argparse.Namespace:
             "known good run when omitted."
         ),
     )
-    parser.add_argument(
-        "--component",
-        dest="components",
-        action="append",
-        choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
-        help=(
-            "Limit installation to the specified components."
-            " May be repeated. Defaults to 'codex' and 'rg'."
-        ),
-    )
     parser.add_argument(
         "root",
         nargs="?",
@@ -101,28 +69,18 @@ def main() -> int:
     vendor_dir = codex_cli_root / VENDOR_DIR_NAME
     vendor_dir.mkdir(parents=True, exist_ok=True)
 
-    components = args.components or ["codex", "rg"]
-
     workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
     if not workflow_url:
         workflow_url = DEFAULT_WORKFLOW_URL
 
     workflow_id = workflow_url.rstrip("/").split("/")[-1]
     print(f"Downloading native artifacts from workflow {workflow_id}...")
 
     with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
         artifacts_dir = Path(artifacts_dir_str)
         _download_artifacts(workflow_id, artifacts_dir)
-        install_binary_components(
-            artifacts_dir,
-            vendor_dir,
-            BINARY_TARGETS,
-            [name for name in components if name in BINARY_COMPONENTS],
-        )
+        install_codex_binaries(artifacts_dir, vendor_dir, CODEX_TARGETS)
 
-        if "rg" in components:
-            print("Fetching ripgrep binaries...")
-            fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
+        fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
 
     print(f"Installed native dependencies into {vendor_dir}")
     return 0
@@ -166,8 +124,6 @@ def fetch_rg(
     results: dict[str, Path] = {}
     max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1)))
 
-    print("Installing ripgrep binaries for targets: " + ", ".join(targets))
-
     with ThreadPoolExecutor(max_workers=max_workers) as executor:
         future_map = {
             executor.submit(
@@ -184,7 +140,6 @@
         for future in as_completed(future_map):
             target = future_map[future]
             results[target] = future.result()
-            print(f"  installed ripgrep for {target}")
 
     return [results[target] for target in targets]
@@ -203,60 +158,40 @@ def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
     subprocess.check_call(cmd)
 
 
-def install_binary_components(
-    artifacts_dir: Path,
-    vendor_dir: Path,
-    targets: Iterable[str],
-    component_names: Sequence[str],
-) -> None:
-    selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS]
-    if not selected_components:
-        return
-
+def install_codex_binaries(
+    artifacts_dir: Path, vendor_dir: Path, targets: Iterable[str]
+) -> list[Path]:
     targets = list(targets)
     if not targets:
-        return
+        return []
 
-    for component in selected_components:
-        print(
-            f"Installing {component.binary_basename} binaries for targets: "
-            + ", ".join(targets)
-        )
-        max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
-        with ThreadPoolExecutor(max_workers=max_workers) as executor:
-            futures = {
-                executor.submit(
-                    _install_single_binary,
-                    artifacts_dir,
-                    vendor_dir,
-                    target,
-                    component,
-                ): target
-                for target in targets
-            }
-            for future in as_completed(futures):
-                installed_path = future.result()
-                print(f"  installed {installed_path}")
+    results: dict[str, Path] = {}
+    max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
+
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        future_map = {
+            executor.submit(_install_single_codex_binary, artifacts_dir, vendor_dir, target): target
+            for target in targets
+        }
+
+        for future in as_completed(future_map):
+            target = future_map[future]
+            results[target] = future.result()
+
+    return [results[target] for target in targets]
 
 
-def _install_single_binary(
-    artifacts_dir: Path,
-    vendor_dir: Path,
-    target: str,
-    component: BinaryComponent,
-) -> Path:
+def _install_single_codex_binary(artifacts_dir: Path, vendor_dir: Path, target: str) -> Path:
     artifact_subdir = artifacts_dir / target
-    archive_name = _archive_name_for_target(component.artifact_prefix, target)
+    archive_name = _archive_name_for_target(target)
     archive_path = artifact_subdir / archive_name
     if not archive_path.exists():
         raise FileNotFoundError(f"Expected artifact not found: {archive_path}")
 
-    dest_dir = vendor_dir / target / component.dest_dir
+    dest_dir = vendor_dir / target / "codex"
     dest_dir.mkdir(parents=True, exist_ok=True)
 
-    binary_name = (
-        f"{component.binary_basename}.exe" if "windows" in target else component.binary_basename
-    )
+    binary_name = "codex.exe" if "windows" in target else "codex"
    dest = dest_dir / binary_name
    dest.unlink(missing_ok=True)
    extract_archive(archive_path, "zst", None, dest)
@@ -265,10 +200,10 @@
     return dest
 
 
-def _archive_name_for_target(artifact_prefix: str, target: str) -> str:
+def _archive_name_for_target(target: str) -> str:
     if "windows" in target:
-        return f"{artifact_prefix}-{target}.exe.zst"
-    return f"{artifact_prefix}-{target}.zst"
+        return f"codex-{target}.exe.zst"
+    return f"codex-{target}.zst"
 
 
 def _fetch_single_rg(
```

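Both the old and new installers share the same concurrency shape: submit one job per target to a `ThreadPoolExecutor`, drain completions with `as_completed`, and return results in the original target order. A self-contained sketch of that pattern:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def install_all(targets: list[str], install_one) -> list[str]:
    """Fan out install_one(target) across a thread pool, then return the
    results in the original target order, as install_codex_binaries does."""
    if not targets:
        return []
    results: dict[str, str] = {}
    with ThreadPoolExecutor(max_workers=min(len(targets), 4)) as executor:
        future_map = {executor.submit(install_one, t): t for t in targets}
        for future in as_completed(future_map):
            target = future_map[future]
            results[target] = future.result()  # re-raises worker exceptions
    return [results[t] for t in targets]

print(install_all(["a", "b", "c"], lambda t: f"installed-{t}"))
```
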
codex-rs/Cargo.lock (generated, 457 lines changed)

The generated lockfile diff tracks the manifest edits shown elsewhere in this comparison. Package entries removed outright: app_test_support, codex-app-server, codex-otel, codex-process-hardening, codex-utils-json-to-toml, axum, axum-core, hyper-timeout, matchit, opentelemetry, opentelemetry-appender-tracing, opentelemetry-http, opentelemetry-otlp, opentelemetry-proto, opentelemetry-semantic-conventions, opentelemetry_sdk, pin-project, pin-project-internal, prost, prost-derive, rustls-native-certs, security-framework 3.3.0, socket2 0.5.10, sse-stream, tonic, tracing-test, and tracing-test-macro. Version bumps: quinn 0.11.8 to 0.11.9, quinn-proto 0.11.12 to 0.11.13, and quinn-udp 0.5.13 to 0.5.14 (its windows-sys dependency moves from 0.52.0 to 0.60.2). The remaining packages' dependency lists are adjusted to match: codex-cli, codex-core, codex-mcp-server, codex-tui, hyper-rustls, hyper-util, native-tls, reqwest, rmcp, tokio, and tower drop references to the removed crates; duplicated socket2 and security-framework references collapse to a single version each; codex-responses-api-proxy swaps codex-process-hardening and ctor for codex-arg0; and codex-backend-openapi-models, codex-cloud-tasks, codex-cloud-tasks-client, codex-mcp-server, and mcp_test_support pick up a handful of entries (such as uuid, toml, codex-protocol, and several TUI-related crates).

codex-rs/Cargo.toml

```diff
@@ -2,7 +2,6 @@
 members = [
     "backend-client",
     "ansi-escape",
-    "app-server",
     "apply-patch",
     "arg0",
     "codex-backend-openapi-models",
@@ -21,15 +20,12 @@ members = [
     "mcp-server",
     "mcp-types",
     "ollama",
-    "process-hardening",
     "protocol",
     "protocol-ts",
     "rmcp-client",
-    "responses-api-proxy",
-    "otel",
     "tui",
     "git-apply",
     "utils/json-to-toml",
     "utils/readiness",
 ]
 resolver = "2"
@@ -44,9 +40,7 @@ edition = "2024"
 
 [workspace.dependencies]
 # Internal
-app_test_support = { path = "app-server/tests/common" }
 codex-ansi-escape = { path = "ansi-escape" }
-codex-app-server = { path = "app-server" }
 codex-apply-patch = { path = "apply-patch" }
 codex-arg0 = { path = "arg0" }
 codex-chatgpt = { path = "chatgpt" }
@@ -60,14 +54,11 @@ codex-login = { path = "login" }
 codex-mcp-client = { path = "mcp-client" }
 codex-mcp-server = { path = "mcp-server" }
 codex-ollama = { path = "ollama" }
-codex-otel = { path = "otel" }
-codex-process-hardening = { path = "process-hardening" }
 codex-protocol = { path = "protocol" }
-codex-rmcp-client = { path = "rmcp-client" }
 codex-protocol-ts = { path = "protocol-ts" }
-codex-responses-api-proxy = { path = "responses-api-proxy" }
+codex-rmcp-client = { path = "rmcp-client" }
 codex-tui = { path = "tui" }
 codex-utils-json-to-toml = { path = "utils/json-to-toml" }
 codex-utils-readiness = { path = "utils/readiness" }
 core_test_support = { path = "core/tests/common" }
 mcp-types = { path = "mcp-types" }
@@ -97,8 +88,8 @@ dirs = "6"
 dotenvy = "0.15.7"
 env-flags = "0.1.1"
 env_logger = "0.11.5"
-escargot = "0.5"
 eventsource-stream = "0.2.3"
+escargot = "0.5"
 futures = "0.3"
 icu_decimal = "2.0.0"
 icu_locale_core = "2.0.0"
@@ -116,11 +107,6 @@ mime_guess = "2.0.5"
 multimap = "0.10.0"
 nucleo-matcher = "0.3.1"
 openssl-sys = "*"
-opentelemetry = "0.30.0"
-opentelemetry-appender-tracing = "0.30.0"
-opentelemetry-otlp = "0.30.0"
-opentelemetry-semantic-conventions = "0.30.0"
-opentelemetry_sdk = "0.30.0"
 os_info = "3.12.0"
 owo-colors = "4.2.0"
 path-absolutize = "3.1.1"
@@ -159,11 +145,9 @@ tokio-test = "0.4"
 tokio-util = "0.7.16"
 toml = "0.9.5"
 toml_edit = "0.23.4"
-tonic = "0.13.1"
 tracing = "0.1.41"
 tracing-appender = "0.2.3"
 tracing-subscriber = "0.3.20"
-tracing-test = "0.2.5"
 tree-sitter = "0.25.9"
 tree-sitter-bash = "0.25.0"
 ts-rs = "11"
```

codex-rs/README.md

````diff
@@ -4,18 +4,18 @@ We provide Codex CLI as a standalone, native executable to ensure a zero-depende
 
 ## Installing Codex
 
-Today, the easiest way to install Codex is via `npm`:
+Today, the easiest way to install Codex is via `npm`, though we plan to publish Codex to other package managers soon.
 
 ```shell
-npm i -g @openai/codex
+npm i -g @openai/codex@native
 codex
 ```
 
-You can also install via Homebrew (`brew install codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).
+You can also download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).
 
 ## What's new in the Rust CLI
 
-The Rust implementation is now the maintained Codex CLI and serves as the default experience. It includes a number of features that the legacy TypeScript CLI never supported.
+While we are [working to close the gap between the TypeScript and Rust implementations of Codex CLI](https://github.com/openai/codex/issues/1262), note that the Rust CLI has a number of features that the TypeScript CLI does not!
 
 ### Config
@@ -25,14 +25,12 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses
 
 Codex CLI functions as an MCP client that can connect to MCP servers on startup. See the [`mcp_servers`](../docs/config.md#mcp_servers) section in the configuration documentation for details.
 
-It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp-server`. Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:
+It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp`. Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:
 
 ```shell
-npx @modelcontextprotocol/inspector codex mcp-server
+npx @modelcontextprotocol/inspector codex mcp
 ```
 
-Use `codex mcp` to add/list/get/remove MCP server launchers defined in `config.toml`, and `codex mcp-server` to run the MCP server directly.
-
 ### Notifications
 
 You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS.
````

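As a sketch of the notification hook described above (assuming, per the config docs, that Codex invokes the configured program with a JSON payload as its last argument; the field names below are illustrative):

```python
#!/usr/bin/env python3
"""Hedged sketch of a notify hook wired into `notify = [...]` in config.toml."""
import json
import sys

def main() -> int:
    if len(sys.argv) < 2:
        return 1
    payload = json.loads(sys.argv[-1])  # JSON payload assumed as last argument
    # Print instead of calling terminal-notifier so the sketch runs anywhere.
    print(f"codex notification: {payload.get('type', 'unknown event')}")
    return 0

if __name__ == "__main__":
    raise SystemExit(main())
```
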
codex-rs/app-server/Cargo.toml (deleted)

```diff
@@ -1,51 +0,0 @@
-[package]
-edition = "2024"
-name = "codex-app-server"
-version = { workspace = true }
-
-[[bin]]
-name = "codex-app-server"
-path = "src/main.rs"
-
-[lib]
-name = "codex_app_server"
-path = "src/lib.rs"
-
-[lints]
-workspace = true
-
-[dependencies]
-anyhow = { workspace = true }
-codex-arg0 = { workspace = true }
-codex-common = { workspace = true, features = ["cli"] }
-codex-core = { workspace = true }
-codex-file-search = { workspace = true }
-codex-login = { workspace = true }
-codex-protocol = { workspace = true }
-codex-utils-json-to-toml = { workspace = true }
-# We should only be using mcp-types for JSON-RPC types: it would be nice to
-# split this out into a separate crate at some point.
-mcp-types = { workspace = true }
-serde = { workspace = true, features = ["derive"] }
-serde_json = { workspace = true }
-tokio = { workspace = true, features = [
-    "io-std",
-    "macros",
-    "process",
-    "rt-multi-thread",
-    "signal",
-] }
-tracing = { workspace = true, features = ["log"] }
-tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
-uuid = { workspace = true, features = ["serde", "v7"] }
-
-[dev-dependencies]
-app_test_support = { workspace = true }
-assert_cmd = { workspace = true }
-base64 = { workspace = true }
-core_test_support = { workspace = true }
-os_info = { workspace = true }
-pretty_assertions = { workspace = true }
-tempfile = { workspace = true }
-toml = { workspace = true }
-wiremock = { workspace = true }
```

codex-rs/app-server/src/error_code.rs (deleted)

```diff
@@ -1,2 +0,0 @@
-pub(crate) const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
-pub(crate) const INTERNAL_ERROR_CODE: i64 = -32603;
```

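The two constants in the deleted module are the standard JSON-RPC 2.0 codes for Invalid Request (-32600) and Internal Error (-32603). A minimal sketch of how such codes appear in an error response:

```python
import json

# Standard JSON-RPC 2.0 error codes, matching the deleted Rust constants.
INVALID_REQUEST_ERROR_CODE = -32600
INTERNAL_ERROR_CODE = -32603

def error_response(request_id, code: int, message: str) -> str:
    """Build a JSON-RPC 2.0 error response for the given request id."""
    return json.dumps({
        "jsonrpc": "2.0",
        "id": request_id,
        "error": {"code": code, "message": message},
    })

print(error_response(1, INVALID_REQUEST_ERROR_CODE, "missing method"))
```
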
codex-rs/app-server/src/fuzzy_file_search.rs (deleted)

```diff
@@ -1,84 +0,0 @@
-use std::num::NonZero;
-use std::num::NonZeroUsize;
-use std::path::PathBuf;
-use std::sync::Arc;
-use std::sync::atomic::AtomicBool;
-
-use codex_file_search as file_search;
-use codex_protocol::mcp_protocol::FuzzyFileSearchResult;
-use tokio::task::JoinSet;
-use tracing::warn;
-
-const LIMIT_PER_ROOT: usize = 50;
-const MAX_THREADS: usize = 12;
-const COMPUTE_INDICES: bool = true;
-
-pub(crate) async fn run_fuzzy_file_search(
-    query: String,
-    roots: Vec<String>,
-    cancellation_flag: Arc<AtomicBool>,
-) -> Vec<FuzzyFileSearchResult> {
-    #[expect(clippy::expect_used)]
-    let limit_per_root =
-        NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize");
-
-    let cores = std::thread::available_parallelism()
-        .map(std::num::NonZero::get)
-        .unwrap_or(1);
-    let threads = cores.min(MAX_THREADS);
-    let threads_per_root = (threads / roots.len()).max(1);
-    let threads = NonZero::new(threads_per_root).unwrap_or(NonZeroUsize::MIN);
-
-    let mut files: Vec<FuzzyFileSearchResult> = Vec::new();
-    let mut join_set = JoinSet::new();
-
-    for root in roots {
-        let search_dir = PathBuf::from(&root);
-        let query = query.clone();
-        let cancel_flag = cancellation_flag.clone();
-        join_set.spawn_blocking(move || {
-            match file_search::run(
-                query.as_str(),
-                limit_per_root,
-                &search_dir,
-                Vec::new(),
-                threads,
-                cancel_flag,
-                COMPUTE_INDICES,
-            ) {
-                Ok(res) => Ok((root, res)),
-                Err(err) => Err((root, err)),
-            }
-        });
-    }
-
-    while let Some(res) = join_set.join_next().await {
-        match res {
-            Ok(Ok((root, res))) => {
-                for m in res.matches {
-                    let result = FuzzyFileSearchResult {
-                        root: root.clone(),
-                        path: m.path,
-                        score: m.score,
-                        indices: m.indices,
-                    };
-                    files.push(result);
-                }
-            }
-            Ok(Err((root, err))) => {
-                warn!("fuzzy-file-search in dir '{root}' failed: {err}");
-            }
-            Err(err) => {
-                warn!("fuzzy-file-search join_next failed: {err}");
-            }
-        }
-    }
-
-    files.sort_by(file_search::cmp_by_score_desc_then_path_asc::<
-        FuzzyFileSearchResult,
-        _,
-        _,
-    >(|f| f.score, |f| f.path.as_str()));
-
-    files
-}
```

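The deleted search code budgets threads as: cap the available cores at `MAX_THREADS`, divide across the search roots, and clamp to at least one thread per root. The same arithmetic in Python:

```python
MAX_THREADS = 12

def threads_per_root(num_roots: int, cores: int) -> int:
    """Mirror of the deleted Rust thread budgeting: cap total threads at
    MAX_THREADS, split across roots, never drop below one per root."""
    threads = min(cores, MAX_THREADS)
    return max(threads // num_roots, 1)

assert threads_per_root(num_roots=3, cores=16) == 4   # min(16, 12) // 3
assert threads_per_root(num_roots=20, cores=8) == 1   # floor, clamped to 1
```
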
@@ -1,139 +0,0 @@
#![deny(clippy::print_stdout, clippy::print_stderr)]

use std::io::ErrorKind;
use std::io::Result as IoResult;
use std::path::PathBuf;

use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;

use mcp_types::JSONRPCMessage;
use tokio::io::AsyncBufReadExt;
use tokio::io::AsyncWriteExt;
use tokio::io::BufReader;
use tokio::io::{self};
use tokio::sync::mpsc;
use tracing::debug;
use tracing::error;
use tracing::info;
use tracing_subscriber::EnvFilter;

use crate::message_processor::MessageProcessor;
use crate::outgoing_message::OutgoingMessage;
use crate::outgoing_message::OutgoingMessageSender;

mod codex_message_processor;
mod error_code;
mod fuzzy_file_search;
mod message_processor;
mod outgoing_message;

/// Size of the bounded channels used to communicate between tasks. The value
/// is a balance between throughput and memory usage – 128 messages should be
/// plenty for an interactive CLI.
const CHANNEL_CAPACITY: usize = 128;

pub async fn run_main(
    codex_linux_sandbox_exe: Option<PathBuf>,
    cli_config_overrides: CliConfigOverrides,
) -> IoResult<()> {
    // Install a simple subscriber so `tracing` output is visible. Users can
    // control the log level with `RUST_LOG`.
    tracing_subscriber::fmt()
        .with_writer(std::io::stderr)
        .with_env_filter(EnvFilter::from_default_env())
        .init();

    // Set up channels.
    let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
    let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded_channel::<OutgoingMessage>();
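    // Sends on the bounded incoming channel await when full (backpressure on
    // stdin); sends on the unbounded outgoing channel never block.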

    // Task: read from stdin, push to `incoming_tx`.
    let stdin_reader_handle = tokio::spawn({
        async move {
            let stdin = io::stdin();
            let reader = BufReader::new(stdin);
            let mut lines = reader.lines();

            while let Some(line) = lines.next_line().await.unwrap_or_default() {
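                // An I/O error is treated like EOF: `unwrap_or_default()`
                // yields `None` and the loop ends.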
                match serde_json::from_str::<JSONRPCMessage>(&line) {
                    Ok(msg) => {
                        if incoming_tx.send(msg).await.is_err() {
                            // Receiver gone – nothing left to do.
                            break;
                        }
                    }
                    Err(e) => error!("Failed to deserialize JSONRPCMessage: {e}"),
                }
            }

            debug!("stdin reader finished (EOF)");
        }
    });

    // Parse CLI overrides once and derive the base Config eagerly so later
    // components do not need to work with raw TOML values.
    let cli_kv_overrides = cli_config_overrides.parse_overrides().map_err(|e| {
        std::io::Error::new(
            ErrorKind::InvalidInput,
            format!("error parsing -c overrides: {e}"),
        )
    })?;
    let config = Config::load_with_cli_overrides(cli_kv_overrides, ConfigOverrides::default())
        .map_err(|e| {
            std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}"))
        })?;

    // Task: process incoming messages.
    let processor_handle = tokio::spawn({
        let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
        let mut processor = MessageProcessor::new(
            outgoing_message_sender,
            codex_linux_sandbox_exe,
            std::sync::Arc::new(config),
        );
        async move {
            while let Some(msg) = incoming_rx.recv().await {
                match msg {
                    JSONRPCMessage::Request(r) => processor.process_request(r).await,
                    JSONRPCMessage::Response(r) => processor.process_response(r).await,
                    JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
                    JSONRPCMessage::Error(e) => processor.process_error(e),
                }
            }

            info!("processor task exited (channel closed)");
        }
    });

    // Task: write outgoing messages to stdout.
    let stdout_writer_handle = tokio::spawn(async move {
        let mut stdout = io::stdout();
        while let Some(outgoing_message) = outgoing_rx.recv().await {
            let msg: JSONRPCMessage = outgoing_message.into();
            match serde_json::to_string(&msg) {
                Ok(json) => {
                    if let Err(e) = stdout.write_all(json.as_bytes()).await {
                        error!("Failed to write to stdout: {e}");
                        break;
                    }
                    if let Err(e) = stdout.write_all(b"\n").await {
                        error!("Failed to write newline to stdout: {e}");
                        break;
                    }
                }
                Err(e) => error!("Failed to serialize JSONRPCMessage: {e}"),
            }
        }

        info!("stdout writer exited (channel closed)");
    });

    // Wait for all tasks to finish. The typical exit path is the stdin reader
    // hitting EOF which, once it drops `incoming_tx`, propagates shutdown to
    // the processor and then to the stdout task.
    let _ = tokio::join!(stdin_reader_handle, processor_handle, stdout_writer_handle);

    Ok(())
}
@@ -1,10 +0,0 @@
use codex_app_server::run_main;
use codex_arg0::arg0_dispatch_or_else;
use codex_common::CliConfigOverrides;

fn main() -> anyhow::Result<()> {
    arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
        run_main(codex_linux_sandbox_exe, CliConfigOverrides::default()).await?;
        Ok(())
    })
}
@@ -1,133 +0,0 @@
use std::path::PathBuf;

use crate::codex_message_processor::CodexMessageProcessor;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::OutgoingMessageSender;
use codex_protocol::mcp_protocol::ClientInfo;
use codex_protocol::mcp_protocol::ClientRequest;
use codex_protocol::mcp_protocol::InitializeResponse;

use codex_core::AuthManager;
use codex_core::ConversationManager;
use codex_core::config::Config;
use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
use mcp_types::JSONRPCError;
use mcp_types::JSONRPCErrorError;
use mcp_types::JSONRPCNotification;
use mcp_types::JSONRPCRequest;
use mcp_types::JSONRPCResponse;
use std::sync::Arc;

pub(crate) struct MessageProcessor {
    outgoing: Arc<OutgoingMessageSender>,
    codex_message_processor: CodexMessageProcessor,
    initialized: bool,
}

impl MessageProcessor {
    /// Create a new `MessageProcessor`, retaining a handle to the outgoing
    /// `Sender` so handlers can enqueue messages to be written to stdout.
    pub(crate) fn new(
        outgoing: OutgoingMessageSender,
        codex_linux_sandbox_exe: Option<PathBuf>,
        config: Arc<Config>,
    ) -> Self {
        let outgoing = Arc::new(outgoing);
        let auth_manager = AuthManager::shared(config.codex_home.clone());
        let conversation_manager = Arc::new(ConversationManager::new(auth_manager.clone()));
        let codex_message_processor = CodexMessageProcessor::new(
            auth_manager,
            conversation_manager,
            outgoing.clone(),
            codex_linux_sandbox_exe,
            config,
        );

        Self {
            outgoing,
            codex_message_processor,
            initialized: false,
        }
    }

    pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) {
        let request_id = request.id.clone();
        if let Ok(request_json) = serde_json::to_value(request)
            && let Ok(codex_request) = serde_json::from_value::<ClientRequest>(request_json)
        {
            match codex_request {
                // Handle Initialize internally so CodexMessageProcessor does not have to concern
                // itself with the `initialized` bool.
                ClientRequest::Initialize { request_id, params } => {
                    if self.initialized {
                        let error = JSONRPCErrorError {
                            code: INVALID_REQUEST_ERROR_CODE,
                            message: "Already initialized".to_string(),
                            data: None,
                        };
                        self.outgoing.send_error(request_id, error).await;
                        return;
                    } else {
                        let ClientInfo {
                            name,
                            title: _title,
                            version,
                        } = params.client_info;
                        let user_agent_suffix = format!("{name}; {version}");
                        if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
                            *suffix = Some(user_agent_suffix);
                        }

                        let user_agent = get_codex_user_agent();
                        let response = InitializeResponse { user_agent };
                        self.outgoing.send_response(request_id, response).await;

                        self.initialized = true;
                        return;
                    }
                }
                _ => {
                    if !self.initialized {
                        let error = JSONRPCErrorError {
                            code: INVALID_REQUEST_ERROR_CODE,
                            message: "Not initialized".to_string(),
                            data: None,
                        };
                        self.outgoing.send_error(request_id, error).await;
                        return;
                    }
                }
            }

            self.codex_message_processor
                .process_request(codex_request)
                .await;
        } else {
            let error = JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,
                message: "Invalid request".to_string(),
                data: None,
            };
            self.outgoing.send_error(request_id, error).await;
        }
    }

    pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) {
        // Currently, we do not expect to receive any notifications from the
        // client, so we just log them.
        tracing::info!("<- notification: {:?}", notification);
    }

    /// Handle a standalone JSON-RPC response originating from the peer.
    pub(crate) async fn process_response(&mut self, response: JSONRPCResponse) {
        tracing::info!("<- response: {:?}", response);
        let JSONRPCResponse { id, result, .. } = response;
        self.outgoing.notify_client_response(id, result).await
    }

    /// Handle an error object received from the peer.
    pub(crate) fn process_error(&mut self, err: JSONRPCError) {
        tracing::error!("<- error: {:?}", err);
    }
}
@@ -1,239 +0,0 @@
use std::collections::HashMap;
use std::sync::atomic::AtomicI64;
use std::sync::atomic::Ordering;

use codex_protocol::mcp_protocol::ServerNotification;
use mcp_types::JSONRPC_VERSION;
use mcp_types::JSONRPCError;
use mcp_types::JSONRPCErrorError;
use mcp_types::JSONRPCMessage;
use mcp_types::JSONRPCNotification;
use mcp_types::JSONRPCRequest;
use mcp_types::JSONRPCResponse;
use mcp_types::RequestId;
use mcp_types::Result;
use serde::Serialize;
use tokio::sync::Mutex;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use tracing::warn;

use crate::error_code::INTERNAL_ERROR_CODE;

/// Sends messages to the client and manages request callbacks.
pub(crate) struct OutgoingMessageSender {
    next_request_id: AtomicI64,
    sender: mpsc::UnboundedSender<OutgoingMessage>,
    request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<Result>>>,
}

impl OutgoingMessageSender {
    pub(crate) fn new(sender: mpsc::UnboundedSender<OutgoingMessage>) -> Self {
        Self {
            next_request_id: AtomicI64::new(0),
            sender,
            request_id_to_callback: Mutex::new(HashMap::new()),
        }
    }

    pub(crate) async fn send_request(
        &self,
        method: &str,
        params: Option<serde_json::Value>,
    ) -> oneshot::Receiver<Result> {
        let id = RequestId::Integer(self.next_request_id.fetch_add(1, Ordering::Relaxed));
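        // Relaxed ordering is enough here: ids only need to be unique, not
        // ordered with respect to other memory operations.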
        let outgoing_message_id = id.clone();
        let (tx_approve, rx_approve) = oneshot::channel();
        {
            let mut request_id_to_callback = self.request_id_to_callback.lock().await;
            request_id_to_callback.insert(id, tx_approve);
        }

        let outgoing_message = OutgoingMessage::Request(OutgoingRequest {
            id: outgoing_message_id,
            method: method.to_string(),
            params,
        });
        let _ = self.sender.send(outgoing_message);
        rx_approve
    }

    pub(crate) async fn notify_client_response(&self, id: RequestId, result: Result) {
        let entry = {
            let mut request_id_to_callback = self.request_id_to_callback.lock().await;
            request_id_to_callback.remove_entry(&id)
        };

        match entry {
            Some((id, sender)) => {
                if let Err(err) = sender.send(result) {
                    warn!("could not notify callback for {id:?} due to: {err:?}");
                }
            }
            None => {
                warn!("could not find callback for {id:?}");
            }
        }
    }

    pub(crate) async fn send_response<T: Serialize>(&self, id: RequestId, response: T) {
        match serde_json::to_value(response) {
            Ok(result) => {
                let outgoing_message = OutgoingMessage::Response(OutgoingResponse { id, result });
                let _ = self.sender.send(outgoing_message);
            }
            Err(err) => {
                self.send_error(
                    id,
                    JSONRPCErrorError {
                        code: INTERNAL_ERROR_CODE,
                        message: format!("failed to serialize response: {err}"),
                        data: None,
                    },
                )
                .await;
            }
        }
    }

    pub(crate) async fn send_server_notification(&self, notification: ServerNotification) {
        let _ = self
            .sender
            .send(OutgoingMessage::AppServerNotification(notification));
    }

    /// All notifications should be migrated to [`ServerNotification`] and
    /// [`OutgoingMessage::Notification`] should be removed.
    pub(crate) async fn send_notification(&self, notification: OutgoingNotification) {
        let outgoing_message = OutgoingMessage::Notification(notification);
        let _ = self.sender.send(outgoing_message);
    }

    pub(crate) async fn send_error(&self, id: RequestId, error: JSONRPCErrorError) {
        let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error });
        let _ = self.sender.send(outgoing_message);
    }
}

/// Outgoing message from the server to the client.
pub(crate) enum OutgoingMessage {
    Request(OutgoingRequest),
    Notification(OutgoingNotification),
    /// AppServerNotification is specific to the case where this is run as an
    /// "app server" as opposed to an MCP server.
    AppServerNotification(ServerNotification),
    Response(OutgoingResponse),
    Error(OutgoingError),
}

impl From<OutgoingMessage> for JSONRPCMessage {
    fn from(val: OutgoingMessage) -> Self {
        use OutgoingMessage::*;
        match val {
            Request(OutgoingRequest { id, method, params }) => {
                JSONRPCMessage::Request(JSONRPCRequest {
                    jsonrpc: JSONRPC_VERSION.into(),
                    id,
                    method,
                    params,
                })
            }
            Notification(OutgoingNotification { method, params }) => {
                JSONRPCMessage::Notification(JSONRPCNotification {
                    jsonrpc: JSONRPC_VERSION.into(),
                    method,
                    params,
                })
            }
            AppServerNotification(notification) => {
                let method = notification.to_string();
                let params = match notification.to_params() {
                    Ok(params) => Some(params),
                    Err(err) => {
                        warn!("failed to serialize notification params: {err}");
                        None
                    }
                };
                JSONRPCMessage::Notification(JSONRPCNotification {
                    jsonrpc: JSONRPC_VERSION.into(),
                    method,
                    params,
                })
            }
            Response(OutgoingResponse { id, result }) => {
                JSONRPCMessage::Response(JSONRPCResponse {
                    jsonrpc: JSONRPC_VERSION.into(),
                    id,
                    result,
                })
            }
            Error(OutgoingError { id, error }) => JSONRPCMessage::Error(JSONRPCError {
                jsonrpc: JSONRPC_VERSION.into(),
                id,
                error,
            }),
        }
    }
}

#[derive(Debug, Clone, PartialEq, Serialize)]
pub(crate) struct OutgoingRequest {
    pub id: RequestId,
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub params: Option<serde_json::Value>,
}

#[derive(Debug, Clone, PartialEq, Serialize)]
pub(crate) struct OutgoingNotification {
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub params: Option<serde_json::Value>,
}

#[derive(Debug, Clone, PartialEq, Serialize)]
pub(crate) struct OutgoingResponse {
    pub id: RequestId,
    pub result: Result,
}

#[derive(Debug, Clone, PartialEq, Serialize)]
pub(crate) struct OutgoingError {
    pub error: JSONRPCErrorError,
    pub id: RequestId,
}

#[cfg(test)]
mod tests {
    use codex_protocol::mcp_protocol::LoginChatGptCompleteNotification;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use uuid::Uuid;

    use super::*;

    #[test]
    fn verify_server_notification_serialization() {
        let notification =
            ServerNotification::LoginChatGptComplete(LoginChatGptCompleteNotification {
                login_id: Uuid::nil(),
                success: true,
                error: None,
            });

        let jsonrpc_notification: JSONRPCMessage =
            OutgoingMessage::AppServerNotification(notification).into();
        assert_eq!(
            JSONRPCMessage::Notification(JSONRPCNotification {
                jsonrpc: "2.0".into(),
                method: "loginChatGptComplete".into(),
                params: Some(json!({
                    "loginId": Uuid::nil(),
                    "success": true,
                })),
            }),
            jsonrpc_notification,
            "ensure the strum macros serialize the method field correctly"
        );
    }
}
@@ -1,3 +0,0 @@
// Single integration test binary that aggregates all test modules.
// The submodules live in `tests/suite/`.
mod suite;
@@ -1,22 +0,0 @@
[package]
edition = "2024"
name = "app_test_support"
version = { workspace = true }

[lib]
path = "lib.rs"

[dependencies]
anyhow = { workspace = true }
assert_cmd = { workspace = true }
codex-protocol = { workspace = true }
mcp-types = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
    "macros",
    "process",
    "rt-multi-thread",
] }
wiremock = { workspace = true }
@@ -1,17 +0,0 @@
mod mcp_process;
mod mock_model_server;
mod responses;

pub use mcp_process::McpProcess;
use mcp_types::JSONRPCResponse;
pub use mock_model_server::create_mock_chat_completions_server;
pub use responses::create_apply_patch_sse_response;
pub use responses::create_final_assistant_message_sse_response;
pub use responses::create_shell_sse_response;
use serde::de::DeserializeOwned;

pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {
    let value = serde_json::to_value(response.result)?;
    let codex_response = serde_json::from_value(value)?;
    Ok(codex_response)
}
@@ -1,477 +0,0 @@
use std::path::Path;
use std::process::Stdio;
use std::sync::atomic::AtomicI64;
use std::sync::atomic::Ordering;
use tokio::io::AsyncBufReadExt;
use tokio::io::AsyncWriteExt;
use tokio::io::BufReader;
use tokio::process::Child;
use tokio::process::ChildStdin;
use tokio::process::ChildStdout;

use anyhow::Context;
use assert_cmd::prelude::*;
use codex_protocol::mcp_protocol::AddConversationListenerParams;
use codex_protocol::mcp_protocol::ArchiveConversationParams;
use codex_protocol::mcp_protocol::CancelLoginChatGptParams;
use codex_protocol::mcp_protocol::ClientInfo;
use codex_protocol::mcp_protocol::ClientNotification;
use codex_protocol::mcp_protocol::GetAuthStatusParams;
use codex_protocol::mcp_protocol::InitializeParams;
use codex_protocol::mcp_protocol::InterruptConversationParams;
use codex_protocol::mcp_protocol::ListConversationsParams;
use codex_protocol::mcp_protocol::LoginApiKeyParams;
use codex_protocol::mcp_protocol::NewConversationParams;
use codex_protocol::mcp_protocol::RemoveConversationListenerParams;
use codex_protocol::mcp_protocol::ResumeConversationParams;
use codex_protocol::mcp_protocol::SendUserMessageParams;
use codex_protocol::mcp_protocol::SendUserTurnParams;
use codex_protocol::mcp_protocol::SetDefaultModelParams;

use mcp_types::JSONRPC_VERSION;
use mcp_types::JSONRPCMessage;
use mcp_types::JSONRPCNotification;
use mcp_types::JSONRPCRequest;
use mcp_types::JSONRPCResponse;
use mcp_types::RequestId;
use std::process::Command as StdCommand;
use tokio::process::Command;

pub struct McpProcess {
    next_request_id: AtomicI64,
    /// Retain this child process until the client is dropped. The Tokio runtime
    /// will make a "best effort" to reap the process after it exits, but it is
    /// not a guarantee. See the `kill_on_drop` documentation for details.
    #[allow(dead_code)]
    process: Child,
    stdin: ChildStdin,
    stdout: BufReader<ChildStdout>,
}

impl McpProcess {
    pub async fn new(codex_home: &Path) -> anyhow::Result<Self> {
        Self::new_with_env(codex_home, &[]).await
    }

    /// Creates a new MCP process, allowing tests to override or remove
    /// specific environment variables for the child process only.
    ///
    /// Pass a tuple of (key, Some(value)) to set/override, or (key, None) to
    /// remove a variable from the child's environment.
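    ///
    /// Example (illustrative values): `&[("RUST_LOG", Some("trace")), ("NO_COLOR", None)]`.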
    pub async fn new_with_env(
        codex_home: &Path,
        env_overrides: &[(&str, Option<&str>)],
    ) -> anyhow::Result<Self> {
        // Use assert_cmd to locate the binary path and then switch to tokio::process::Command
        let std_cmd = StdCommand::cargo_bin("codex-app-server")
            .context("should find binary for codex-app-server")?;

        let program = std_cmd.get_program().to_owned();

        let mut cmd = Command::new(program);

        cmd.stdin(Stdio::piped());
        cmd.stdout(Stdio::piped());
        cmd.stderr(Stdio::piped());
        cmd.env("CODEX_HOME", codex_home);
        cmd.env("RUST_LOG", "debug");

        for (k, v) in env_overrides {
            match v {
                Some(val) => {
                    cmd.env(k, val);
                }
                None => {
                    cmd.env_remove(k);
                }
            }
        }

        let mut process = cmd
            .kill_on_drop(true)
            .spawn()
            .context("codex-app-server proc should start")?;
        let stdin = process
            .stdin
            .take()
            .ok_or_else(|| anyhow::format_err!("mcp should have stdin fd"))?;
        let stdout = process
            .stdout
            .take()
            .ok_or_else(|| anyhow::format_err!("mcp should have stdout fd"))?;
        let stdout = BufReader::new(stdout);

        // Forward child's stderr to our stderr so failures are visible even
        // when stdout/stderr are captured by the test harness.
        if let Some(stderr) = process.stderr.take() {
            let mut stderr_reader = BufReader::new(stderr).lines();
            tokio::spawn(async move {
                while let Ok(Some(line)) = stderr_reader.next_line().await {
                    eprintln!("[mcp stderr] {line}");
                }
            });
        }
        Ok(Self {
            next_request_id: AtomicI64::new(0),
            process,
            stdin,
            stdout,
        })
    }

    /// Performs the initialization handshake with the MCP server.
    pub async fn initialize(&mut self) -> anyhow::Result<()> {
        let params = Some(serde_json::to_value(InitializeParams {
            client_info: ClientInfo {
                name: "codex-app-server-tests".to_string(),
                title: None,
                version: "0.1.0".to_string(),
            },
        })?);
        let req_id = self.send_request("initialize", params).await?;
        let initialized = self.read_jsonrpc_message().await?;
        let JSONRPCMessage::Response(response) = initialized else {
            unreachable!("expected JSONRPCMessage::Response for initialize, got {initialized:?}");
        };
        if response.id != RequestId::Integer(req_id) {
            anyhow::bail!(
                "initialize response id mismatch: expected {}, got {:?}",
                req_id,
                response.id
            );
        }

        // Send notifications/initialized to ack the response.
        self.send_notification(ClientNotification::Initialized)
            .await?;

        Ok(())
    }

    /// Send a `newConversation` JSON-RPC request.
    pub async fn send_new_conversation_request(
        &mut self,
        params: NewConversationParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("newConversation", params).await
    }

    /// Send an `archiveConversation` JSON-RPC request.
    pub async fn send_archive_conversation_request(
        &mut self,
        params: ArchiveConversationParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("archiveConversation", params).await
    }

    /// Send an `addConversationListener` JSON-RPC request.
    pub async fn send_add_conversation_listener_request(
        &mut self,
        params: AddConversationListenerParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("addConversationListener", params).await
    }

    /// Send a `sendUserMessage` JSON-RPC request with a single text item.
    pub async fn send_send_user_message_request(
        &mut self,
        params: SendUserMessageParams,
    ) -> anyhow::Result<i64> {
        // Wire format expects variants in camelCase; text item uses external tagging.
        let params = Some(serde_json::to_value(params)?);
        self.send_request("sendUserMessage", params).await
    }

    /// Send a `removeConversationListener` JSON-RPC request.
    pub async fn send_remove_conversation_listener_request(
        &mut self,
        params: RemoveConversationListenerParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("removeConversationListener", params)
            .await
    }

    /// Send a `sendUserTurn` JSON-RPC request.
    pub async fn send_send_user_turn_request(
        &mut self,
        params: SendUserTurnParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("sendUserTurn", params).await
    }

    /// Send an `interruptConversation` JSON-RPC request.
    pub async fn send_interrupt_conversation_request(
        &mut self,
        params: InterruptConversationParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("interruptConversation", params).await
    }

    /// Send a `getAuthStatus` JSON-RPC request.
    pub async fn send_get_auth_status_request(
        &mut self,
        params: GetAuthStatusParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("getAuthStatus", params).await
    }

    /// Send a `getUserSavedConfig` JSON-RPC request.
    pub async fn send_get_user_saved_config_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("getUserSavedConfig", None).await
    }

    /// Send a `getUserAgent` JSON-RPC request.
    pub async fn send_get_user_agent_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("getUserAgent", None).await
    }

    /// Send a `userInfo` JSON-RPC request.
    pub async fn send_user_info_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("userInfo", None).await
    }

    /// Send a `setDefaultModel` JSON-RPC request.
    pub async fn send_set_default_model_request(
        &mut self,
        params: SetDefaultModelParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("setDefaultModel", params).await
    }

    /// Send a `listConversations` JSON-RPC request.
    pub async fn send_list_conversations_request(
        &mut self,
        params: ListConversationsParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("listConversations", params).await
    }

    /// Send a `resumeConversation` JSON-RPC request.
    pub async fn send_resume_conversation_request(
        &mut self,
        params: ResumeConversationParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("resumeConversation", params).await
    }

    /// Send a `loginApiKey` JSON-RPC request.
    pub async fn send_login_api_key_request(
        &mut self,
        params: LoginApiKeyParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("loginApiKey", params).await
    }

    /// Send a `loginChatGpt` JSON-RPC request.
    pub async fn send_login_chat_gpt_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("loginChatGpt", None).await
    }

    /// Send a `cancelLoginChatGpt` JSON-RPC request.
    pub async fn send_cancel_login_chat_gpt_request(
        &mut self,
        params: CancelLoginChatGptParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("cancelLoginChatGpt", params).await
    }

    /// Send a `logoutChatGpt` JSON-RPC request.
    pub async fn send_logout_chat_gpt_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("logoutChatGpt", None).await
    }

    /// Send a `fuzzyFileSearch` JSON-RPC request.
    pub async fn send_fuzzy_file_search_request(
        &mut self,
        query: &str,
        roots: Vec<String>,
        cancellation_token: Option<String>,
    ) -> anyhow::Result<i64> {
        let mut params = serde_json::json!({
            "query": query,
            "roots": roots,
        });
        if let Some(token) = cancellation_token {
            params["cancellationToken"] = serde_json::json!(token);
        }
        self.send_request("fuzzyFileSearch", Some(params)).await
    }

    async fn send_request(
        &mut self,
        method: &str,
        params: Option<serde_json::Value>,
    ) -> anyhow::Result<i64> {
        let request_id = self.next_request_id.fetch_add(1, Ordering::Relaxed);

        let message = JSONRPCMessage::Request(JSONRPCRequest {
            jsonrpc: JSONRPC_VERSION.into(),
            id: RequestId::Integer(request_id),
            method: method.to_string(),
            params,
        });
        self.send_jsonrpc_message(message).await?;
        Ok(request_id)
    }

    pub async fn send_response(
        &mut self,
        id: RequestId,
        result: serde_json::Value,
    ) -> anyhow::Result<()> {
        self.send_jsonrpc_message(JSONRPCMessage::Response(JSONRPCResponse {
            jsonrpc: JSONRPC_VERSION.into(),
            id,
            result,
        }))
        .await
    }

    pub async fn send_notification(
        &mut self,
        notification: ClientNotification,
    ) -> anyhow::Result<()> {
        let value = serde_json::to_value(notification)?;
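        // `ClientNotification` serializes as `{"method": ..., "params": ...}`;
        // pull those fields back out to build the JSON-RPC frame.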
        self.send_jsonrpc_message(JSONRPCMessage::Notification(JSONRPCNotification {
            jsonrpc: JSONRPC_VERSION.into(),
            method: value
                .get("method")
                .and_then(|m| m.as_str())
                .ok_or_else(|| anyhow::format_err!("notification missing method field"))?
                .to_string(),
            params: value.get("params").cloned(),
        }))
        .await
    }

    async fn send_jsonrpc_message(&mut self, message: JSONRPCMessage) -> anyhow::Result<()> {
        eprintln!("writing message to stdin: {message:?}");
        let payload = serde_json::to_string(&message)?;
        self.stdin.write_all(payload.as_bytes()).await?;
        self.stdin.write_all(b"\n").await?;
        self.stdin.flush().await?;
        Ok(())
    }

    async fn read_jsonrpc_message(&mut self) -> anyhow::Result<JSONRPCMessage> {
        let mut line = String::new();
        self.stdout.read_line(&mut line).await?;
        let message = serde_json::from_str::<JSONRPCMessage>(&line)?;
        eprintln!("read message from stdout: {message:?}");
        Ok(message)
    }

    pub async fn read_stream_until_request_message(&mut self) -> anyhow::Result<JSONRPCRequest> {
        eprintln!("in read_stream_until_request_message()");

        loop {
            let message = self.read_jsonrpc_message().await?;

            match message {
                JSONRPCMessage::Notification(_) => {
                    eprintln!("notification: {message:?}");
                }
                JSONRPCMessage::Request(jsonrpc_request) => {
                    return Ok(jsonrpc_request);
                }
                JSONRPCMessage::Error(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
                }
                JSONRPCMessage::Response(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
                }
            }
        }
    }

    pub async fn read_stream_until_response_message(
        &mut self,
        request_id: RequestId,
    ) -> anyhow::Result<JSONRPCResponse> {
        eprintln!("in read_stream_until_response_message({request_id:?})");

        loop {
            let message = self.read_jsonrpc_message().await?;
            match message {
                JSONRPCMessage::Notification(_) => {
                    eprintln!("notification: {message:?}");
                }
                JSONRPCMessage::Request(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
                }
                JSONRPCMessage::Error(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
                }
                JSONRPCMessage::Response(jsonrpc_response) => {
                    if jsonrpc_response.id == request_id {
                        return Ok(jsonrpc_response);
                    }
                }
            }
        }
    }

    pub async fn read_stream_until_error_message(
        &mut self,
        request_id: RequestId,
    ) -> anyhow::Result<mcp_types::JSONRPCError> {
        loop {
            let message = self.read_jsonrpc_message().await?;
            match message {
                JSONRPCMessage::Notification(_) => {
                    eprintln!("notification: {message:?}");
                }
                JSONRPCMessage::Request(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
                }
                JSONRPCMessage::Response(_) => {
                    // Keep scanning; we're waiting for an error with matching id.
                }
                JSONRPCMessage::Error(err) => {
                    if err.id == request_id {
                        return Ok(err);
                    }
                }
            }
        }
    }

    pub async fn read_stream_until_notification_message(
        &mut self,
        method: &str,
    ) -> anyhow::Result<JSONRPCNotification> {
        eprintln!("in read_stream_until_notification_message({method})");

        loop {
            let message = self.read_jsonrpc_message().await?;
            match message {
                JSONRPCMessage::Notification(notification) => {
                    if notification.method == method {
                        return Ok(notification);
                    }
                }
                JSONRPCMessage::Request(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
                }
                JSONRPCMessage::Error(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
                }
                JSONRPCMessage::Response(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
                }
            }
        }
    }
}
@@ -1,47 +0,0 @@
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;

use wiremock::Mock;
use wiremock::MockServer;
use wiremock::Respond;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;

/// Create a mock server that will provide the responses, in order, for
/// requests to the `/v1/chat/completions` endpoint.
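/// Each response is served once, in order; requests beyond the supplied
/// list panic inside `SeqResponder::respond`.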
pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> MockServer {
    let server = MockServer::start().await;

    let num_calls = responses.len();
    let seq_responder = SeqResponder {
        num_calls: AtomicUsize::new(0),
        responses,
    };

    Mock::given(method("POST"))
        .and(path("/v1/chat/completions"))
        .respond_with(seq_responder)
        .expect(num_calls as u64)
        .mount(&server)
        .await;

    server
}

struct SeqResponder {
    num_calls: AtomicUsize,
    responses: Vec<String>,
}

impl Respond for SeqResponder {
    fn respond(&self, _: &wiremock::Request) -> ResponseTemplate {
        let call_num = self.num_calls.fetch_add(1, Ordering::SeqCst);
        match self.responses.get(call_num) {
            Some(response) => ResponseTemplate::new(200)
                .insert_header("content-type", "text/event-stream")
                .set_body_raw(response.clone(), "text/event-stream"),
            None => panic!("no response for {call_num}"),
        }
    }
}
@@ -1,95 +0,0 @@
use serde_json::json;
use std::path::Path;

pub fn create_shell_sse_response(
    command: Vec<String>,
    workdir: Option<&Path>,
    timeout_ms: Option<u64>,
    call_id: &str,
) -> anyhow::Result<String> {
    // The `arguments` field for the `shell` tool is a serialized JSON object.
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": command,
        "workdir": workdir.map(|w| w.to_string_lossy()),
        "timeout": timeout_ms
    }))?;
    let tool_call = json!({
        "choices": [
            {
                "delta": {
                    "tool_calls": [
                        {
                            "id": call_id,
                            "function": {
                                "name": "shell",
                                "arguments": tool_call_arguments
                            }
                        }
                    ]
                },
                "finish_reason": "tool_calls"
            }
        ]
    });

    let sse = format!(
        "data: {}\n\ndata: DONE\n\n",
        serde_json::to_string(&tool_call)?
    );
    Ok(sse)
}

pub fn create_final_assistant_message_sse_response(message: &str) -> anyhow::Result<String> {
    let assistant_message = json!({
        "choices": [
            {
                "delta": {
                    "content": message
                },
                "finish_reason": "stop"
            }
        ]
    });

    let sse = format!(
        "data: {}\n\ndata: DONE\n\n",
        serde_json::to_string(&assistant_message)?
    );
    Ok(sse)
}

pub fn create_apply_patch_sse_response(
    patch_content: &str,
    call_id: &str,
) -> anyhow::Result<String> {
    // Use shell command to call apply_patch with heredoc format
    let shell_command = format!("apply_patch <<'EOF'\n{patch_content}\nEOF");
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": ["bash", "-lc", shell_command]
    }))?;

    let tool_call = json!({
        "choices": [
            {
                "delta": {
                    "tool_calls": [
                        {
                            "id": call_id,
                            "function": {
                                "name": "shell",
                                "arguments": tool_call_arguments
                            }
                        }
                    ]
                },
                "finish_reason": "tool_calls"
            }
        ]
    });

    let sse = format!(
        "data: {}\n\ndata: DONE\n\n",
        serde_json::to_string(&tool_call)?
    );
    Ok(sse)
}
@@ -1,104 +0,0 @@
use app_test_support::McpProcess;
use mcp_types::JSONRPCResponse;
use mcp_types::RequestId;
use pretty_assertions::assert_eq;
use serde_json::json;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_sorts_and_includes_indices() {
    // Prepare a temporary Codex home and a separate root with test files.
    let codex_home = TempDir::new().expect("create temp codex home");
    let root = TempDir::new().expect("create temp search root");

    // Create files designed to have deterministic ordering for query "abe".
    std::fs::write(root.path().join("abc"), "x").expect("write file abc");
    std::fs::write(root.path().join("abcde"), "x").expect("write file abcde");
    std::fs::write(root.path().join("abexy"), "x").expect("write file abexy");
    std::fs::write(root.path().join("zzz.txt"), "x").expect("write file zzz");

    // Start MCP server and initialize.
    let mut mcp = McpProcess::new(codex_home.path()).await.expect("spawn mcp");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timeout")
        .expect("init failed");

    let root_path = root.path().to_string_lossy().to_string();
    // Send fuzzyFileSearch request.
    let request_id = mcp
        .send_fuzzy_file_search_request("abe", vec![root_path.clone()], None)
        .await
        .expect("send fuzzyFileSearch");

    // Read response and verify shape and ordering.
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await
    .expect("fuzzyFileSearch timeout")
    .expect("fuzzyFileSearch resp");

    let value = resp.result;
    assert_eq!(
        value,
        json!({
            "files": [
                { "root": root_path.clone(), "path": "abexy", "score": 88, "indices": [0, 1, 2] },
                { "root": root_path.clone(), "path": "abcde", "score": 74, "indices": [0, 1, 4] },
            ]
        })
    );
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_accepts_cancellation_token() {
    let codex_home = TempDir::new().expect("create temp codex home");
    let root = TempDir::new().expect("create temp search root");

    std::fs::write(root.path().join("alpha.txt"), "contents").expect("write alpha");

    let mut mcp = McpProcess::new(codex_home.path()).await.expect("spawn mcp");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timeout")
        .expect("init failed");

    let root_path = root.path().to_string_lossy().to_string();
    let request_id = mcp
        .send_fuzzy_file_search_request("alp", vec![root_path.clone()], None)
        .await
        .expect("send fuzzyFileSearch");

    let request_id_2 = mcp
        .send_fuzzy_file_search_request(
            "alp",
            vec![root_path.clone()],
            Some(request_id.to_string()),
        )
        .await
        .expect("send fuzzyFileSearch");

    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id_2)),
    )
    .await
    .expect("fuzzyFileSearch timeout")
    .expect("fuzzyFileSearch resp");

    let files = resp
        .result
        .get("files")
        .and_then(|value| value.as_array())
        .cloned()
        .expect("files array");

    assert_eq!(files.len(), 1);
    assert_eq!(files[0]["root"], root_path);
    assert_eq!(files[0]["path"], "alpha.txt");
}
@@ -1,13 +0,0 @@
mod archive_conversation;
mod auth;
mod codex_message_processor_flow;
mod config;
mod create_conversation;
mod fuzzy_file_search;
mod interrupt;
mod list_resume;
mod login;
mod send_message;
mod set_default_model;
mod user_agent;
mod user_info;
@@ -12,7 +12,5 @@ anyhow = "1"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
tokio = { version = "1", features = ["macros", "rt"] }
codex-backend-openapi-models = { path = "../codex-backend-openapi-models" }

[dev-dependencies]
pretty_assertions = "1"
@@ -12,10 +12,8 @@ use serde::de::DeserializeOwned;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PathStyle {
    /// /api/codex/…
    CodexApi,
    /// /wham/…
    ChatGptApi,
    CodexApi, // /api/codex/...
    ChatGptApi, // /wham/...
}

impl PathStyle {
@@ -1,257 +1,13 @@
pub use codex_backend_openapi_models::models::CodeTaskDetailsResponse;
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
pub use codex_backend_openapi_models::models::TaskListItem;

use serde::Deserialize;
use serde::de::Deserializer;
use serde_json::Value;
use std::collections::HashMap;

/// Hand-rolled models for the Cloud Tasks task-details response.
/// The generated OpenAPI models are pretty bad. This is a half-step
/// towards hand-rolling them.
#[derive(Clone, Debug, Deserialize)]
pub struct CodeTaskDetailsResponse {
    #[serde(default)]
    pub current_user_turn: Option<Turn>,
    #[serde(default)]
    pub current_assistant_turn: Option<Turn>,
    #[serde(default)]
    pub current_diff_task_turn: Option<Turn>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct Turn {
    #[serde(default)]
    pub id: Option<String>,
    #[serde(default)]
    pub attempt_placement: Option<i64>,
    #[serde(default, rename = "turn_status")]
    pub turn_status: Option<String>,
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub sibling_turn_ids: Vec<String>,
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub input_items: Vec<TurnItem>,
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub output_items: Vec<TurnItem>,
    #[serde(default)]
    pub worklog: Option<Worklog>,
    #[serde(default)]
    pub error: Option<TurnError>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct TurnItem {
    #[serde(rename = "type", default)]
    pub kind: String,
    #[serde(default)]
    pub role: Option<String>,
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub content: Vec<ContentFragment>,
    #[serde(default)]
    pub diff: Option<String>,
    #[serde(default)]
    pub output_diff: Option<DiffPayload>,
}

#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub enum ContentFragment {
    Structured(StructuredContent),
    Text(String),
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct StructuredContent {
    #[serde(rename = "content_type", default)]
    pub content_type: Option<String>,
    #[serde(default)]
    pub text: Option<String>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct DiffPayload {
    #[serde(default)]
    pub diff: Option<String>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct Worklog {
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub messages: Vec<WorklogMessage>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct WorklogMessage {
    #[serde(default)]
    pub author: Option<Author>,
    #[serde(default)]
    pub content: Option<WorklogContent>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct Author {
    #[serde(default)]
    pub role: Option<String>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct WorklogContent {
    #[serde(default)]
    pub parts: Vec<ContentFragment>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct TurnError {
    #[serde(default)]
    pub code: Option<String>,
    #[serde(default)]
    pub message: Option<String>,
}

impl ContentFragment {
    fn text(&self) -> Option<&str> {
        match self {
            ContentFragment::Structured(inner) => {
                if inner
                    .content_type
                    .as_deref()
                    .map(|ct| ct.eq_ignore_ascii_case("text"))
                    .unwrap_or(false)
                {
                    inner.text.as_deref().filter(|s| !s.is_empty())
                } else {
                    None
                }
            }
            ContentFragment::Text(raw) => {
                if raw.trim().is_empty() {
                    None
                } else {
                    Some(raw.as_str())
                }
            }
        }
    }
}

impl TurnItem {
    fn text_values(&self) -> Vec<String> {
        self.content
            .iter()
            .filter_map(|fragment| fragment.text().map(str::to_string))
            .collect()
    }

    fn diff_text(&self) -> Option<String> {
        if self.kind == "output_diff" {
            if let Some(diff) = &self.diff
                && !diff.is_empty()
            {
                return Some(diff.clone());
            }
        } else if self.kind == "pr"
            && let Some(payload) = &self.output_diff
            && let Some(diff) = &payload.diff
            && !diff.is_empty()
        {
            return Some(diff.clone());
        }
        None
    }
}

impl Turn {
    fn unified_diff(&self) -> Option<String> {
        self.output_items.iter().find_map(TurnItem::diff_text)
    }

    fn message_texts(&self) -> Vec<String> {
        let mut out: Vec<String> = self
            .output_items
            .iter()
            .filter(|item| item.kind == "message")
            .flat_map(TurnItem::text_values)
            .collect();

        if let Some(log) = &self.worklog {
            for message in &log.messages {
                if message.is_assistant() {
                    out.extend(message.text_values());
                }
            }
        }

        out
    }

    fn user_prompt(&self) -> Option<String> {
        let parts: Vec<String> = self
            .input_items
            .iter()
            .filter(|item| item.kind == "message")
            .filter(|item| {
                item.role
                    .as_deref()
                    .map(|r| r.eq_ignore_ascii_case("user"))
                    .unwrap_or(true)
            })
            .flat_map(TurnItem::text_values)
            .collect();

        if parts.is_empty() {
            None
        } else {
            Some(parts.join("\n\n"))
        }
    }

    fn error_summary(&self) -> Option<String> {
        self.error.as_ref().and_then(TurnError::summary)
    }
}

impl WorklogMessage {
    fn is_assistant(&self) -> bool {
        self.author
            .as_ref()
            .and_then(|a| a.role.as_deref())
            .map(|role| role.eq_ignore_ascii_case("assistant"))
            .unwrap_or(false)
    }

    fn text_values(&self) -> Vec<String> {
        self.content
            .as_ref()
            .map(|content| {
                content
                    .parts
                    .iter()
                    .filter_map(|fragment| fragment.text().map(str::to_string))
                    .collect()
            })
            .unwrap_or_default()
    }
}

impl TurnError {
    fn summary(&self) -> Option<String> {
        let code = self.code.as_deref().unwrap_or("");
        let message = self.message.as_deref().unwrap_or("");
        match (code.is_empty(), message.is_empty()) {
            (true, true) => None,
            (false, true) => Some(code.to_string()),
            (true, false) => Some(message.to_string()),
            (false, false) => Some(format!("{code}: {message}")),
        }
    }
}

/// Extension helpers on generated types.
pub trait CodeTaskDetailsResponseExt {
    /// Attempt to extract a unified diff string from the assistant or diff turn.
    /// Attempt to extract a unified diff string from `current_diff_task_turn`.
    fn unified_diff(&self) -> Option<String>;
    /// Extract assistant text output messages (no diff) from current turns.
    fn assistant_text_messages(&self) -> Vec<String>;
@@ -260,110 +16,126 @@ pub trait CodeTaskDetailsResponseExt {
|
||||
/// Extract an assistant error message (if the turn failed and provided one).
|
||||
fn assistant_error_message(&self) -> Option<String>;
|
||||
}
|
||||
|
||||
impl CodeTaskDetailsResponseExt for CodeTaskDetailsResponse {
|
||||
fn unified_diff(&self) -> Option<String> {
|
||||
[
|
||||
self.current_diff_task_turn.as_ref(),
|
||||
self.current_assistant_turn.as_ref(),
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.find_map(Turn::unified_diff)
|
||||
}
|
||||
// `current_diff_task_turn` is an object; look for `output_items`.
|
||||
// Prefer explicit diff turn; fallback to assistant turn if needed.
|
||||
let candidates: [&Option<std::collections::HashMap<String, Value>>; 2] =
|
||||
[&self.current_diff_task_turn, &self.current_assistant_turn];
|
||||
|
||||
for map in candidates {
|
||||
let items = map
|
||||
.as_ref()
|
||||
.and_then(|m| m.get("output_items"))
|
||||
.and_then(|v| v.as_array());
|
||||
if let Some(items) = items {
|
||||
for item in items {
|
||||
match item.get("type").and_then(Value::as_str) {
|
||||
Some("output_diff") => {
|
||||
if let Some(s) = item.get("diff").and_then(Value::as_str) {
|
||||
return Some(s.to_string());
|
||||
}
|
||||
}
|
||||
Some("pr") => {
|
||||
if let Some(s) = item
|
||||
.get("output_diff")
|
||||
.and_then(|od| od.get("diff"))
|
||||
.and_then(Value::as_str)
|
||||
{
|
||||
return Some(s.to_string());
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
fn assistant_text_messages(&self) -> Vec<String> {
|
||||
let mut out = Vec::new();
|
||||
for turn in [
|
||||
self.current_diff_task_turn.as_ref(),
|
||||
self.current_assistant_turn.as_ref(),
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
{
|
||||
out.extend(turn.message_texts());
|
||||
let candidates: [&Option<std::collections::HashMap<String, Value>>; 2] =
|
||||
[&self.current_diff_task_turn, &self.current_assistant_turn];
|
||||
for map in candidates {
|
||||
let items = map
|
||||
.as_ref()
|
||||
.and_then(|m| m.get("output_items"))
|
||||
.and_then(|v| v.as_array());
|
||||
if let Some(items) = items {
|
||||
for item in items {
|
||||
if item.get("type").and_then(Value::as_str) == Some("message")
|
||||
&& let Some(content) = item.get("content").and_then(Value::as_array)
|
||||
{
|
||||
for part in content {
|
||||
if part.get("content_type").and_then(Value::as_str) == Some("text")
|
||||
&& let Some(txt) = part.get("text").and_then(Value::as_str)
|
||||
{
|
||||
out.push(txt.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
fn user_text_prompt(&self) -> Option<String> {
|
||||
self.current_user_turn.as_ref().and_then(Turn::user_prompt)
|
||||
use serde_json::Value;
|
||||
let map = self.current_user_turn.as_ref()?;
|
||||
let items = map.get("input_items").and_then(Value::as_array)?;
|
||||
let mut parts: Vec<String> = Vec::new();
|
||||
for item in items {
|
||||
if item.get("type").and_then(Value::as_str) == Some("message") {
|
||||
// optional role filter (prefer user)
|
||||
let is_user = item
|
||||
.get("role")
|
||||
.and_then(Value::as_str)
|
||||
.map(|r| r.eq_ignore_ascii_case("user"))
|
||||
.unwrap_or(true);
|
||||
if !is_user {
|
||||
continue;
|
||||
}
|
||||
if let Some(content) = item.get("content").and_then(Value::as_array) {
|
||||
for c in content {
|
||||
if c.get("content_type").and_then(Value::as_str) == Some("text")
|
||||
&& let Some(txt) = c.get("text").and_then(Value::as_str)
|
||||
{
|
||||
parts.push(txt.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if parts.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(parts.join("\n\n"))
|
||||
}
|
||||
}
|
||||
|
||||
    fn assistant_error_message(&self) -> Option<String> {
        self.current_assistant_turn
            .as_ref()
            .and_then(Turn::error_summary)
        let map = self.current_assistant_turn.as_ref()?;
        let err = map.get("error")?.as_object()?;
        let message = err.get("message").and_then(Value::as_str).unwrap_or("");
        let code = err.get("code").and_then(Value::as_str).unwrap_or("");
        if message.is_empty() && code.is_empty() {
            None
        } else if message.is_empty() {
            Some(code.to_string())
        } else if code.is_empty() {
            Some(message.to_string())
        } else {
            Some(format!("{code}: {message}"))
        }
    }
}

fn deserialize_vec<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
where
    D: Deserializer<'de>,
    T: Deserialize<'de>,
{
    Option::<Vec<T>>::deserialize(deserializer).map(|opt| opt.unwrap_or_default())
}
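// Illustrative sketch (editor's addition, not part of the diff): with this helper,
// a missing or `null` JSON array field decodes as an empty Vec instead of erroring.
//
//     #[derive(Deserialize)]
//     struct Sketch {
//         #[serde(default, deserialize_with = "deserialize_vec")]
//         items: Vec<String>,
//     }
//     // Both `{}` and `{"items": null}` yield `items == Vec::<String>::new()`.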
// Removed unused helpers `single_file_paths` and `extract_file_paths_list` to reduce
// surface area; reintroduce as needed near call sites.

#[derive(Clone, Debug, Deserialize)]
pub struct TurnAttemptsSiblingTurnsResponse {
    #[serde(default)]
    pub sibling_turns: Vec<HashMap<String, Value>>,
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    fn fixture(name: &str) -> CodeTaskDetailsResponse {
        let json = match name {
            "diff" => include_str!("../tests/fixtures/task_details_with_diff.json"),
            "error" => include_str!("../tests/fixtures/task_details_with_error.json"),
            other => panic!("unknown fixture {other}"),
        };
        serde_json::from_str(json).expect("fixture should deserialize")
    }

    #[test]
    fn unified_diff_prefers_current_diff_task_turn() {
        let details = fixture("diff");
        let diff = details.unified_diff().expect("diff present");
        assert!(diff.contains("diff --git"));
    }

    #[test]
    fn unified_diff_falls_back_to_pr_output_diff() {
        let details = fixture("error");
        let diff = details.unified_diff().expect("diff from pr output");
        assert!(diff.contains("lib.rs"));
    }

    #[test]
    fn assistant_text_messages_extracts_text_content() {
        let details = fixture("diff");
        let messages = details.assistant_text_messages();
        assert_eq!(messages, vec!["Assistant response".to_string()]);
    }

    #[test]
    fn user_text_prompt_joins_parts_with_spacing() {
        let details = fixture("diff");
        let prompt = details.user_text_prompt().expect("prompt present");
        assert_eq!(
            prompt,
            "First line

Second line"
        );
    }

    #[test]
    fn assistant_error_message_combines_code_and_message() {
        let details = fixture("error");
        let msg = details
            .assistant_error_message()
            .expect("error should be present");
        assert_eq!(msg, "APPLY_FAILED: Patch could not be applied");
    }
    pub sibling_turns: Vec<std::collections::HashMap<String, Value>>,
}

@@ -1,38 +0,0 @@
{
  "task": {
    "id": "task_123",
    "title": "Refactor cloud task client",
    "archived": false,
    "external_pull_requests": []
  },
  "current_user_turn": {
    "input_items": [
      {
        "type": "message",
        "role": "user",
        "content": [
          { "content_type": "text", "text": "First line" },
          { "content_type": "text", "text": "Second line" }
        ]
      }
    ]
  },
  "current_assistant_turn": {
    "output_items": [
      {
        "type": "message",
        "content": [
          { "content_type": "text", "text": "Assistant response" }
        ]
      }
    ]
  },
  "current_diff_task_turn": {
    "output_items": [
      {
        "type": "output_diff",
        "diff": "diff --git a/src/main.rs b/src/main.rs\n+fn main() { println!(\"hi\"); }\n"
      }
    ]
  }
}
@@ -1,22 +0,0 @@
{
  "task": {
    "id": "task_456",
    "title": "Investigate failure",
    "archived": false,
    "external_pull_requests": []
  },
  "current_assistant_turn": {
    "output_items": [
      {
        "type": "pr",
        "output_diff": {
          "diff": "diff --git a/lib.rs b/lib.rs\n+pub fn hello() {}\n"
        }
      }
    ],
    "error": {
      "code": "APPLY_FAILED",
      "message": "Patch could not be applied"
    }
  }
}
@@ -18,7 +18,6 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
clap_complete = { workspace = true }
codex-app-server = { workspace = true }
codex-arg0 = { workspace = true }
codex-chatgpt = { workspace = true }
codex-common = { workspace = true, features = ["cli"] }
@@ -26,7 +25,6 @@ codex-core = { workspace = true }
codex-exec = { workspace = true }
codex-login = { workspace = true }
codex-mcp-server = { workspace = true }
codex-process-hardening = { workspace = true }
codex-protocol = { workspace = true }
codex-protocol-ts = { workspace = true }
codex-responses-api-proxy = { workspace = true }
@@ -46,6 +44,15 @@ tokio = { workspace = true, features = [
tracing = { workspace = true }
tracing-subscriber = { workspace = true }

[target.'cfg(target_os = "linux")'.dependencies]
libc = { workspace = true }

[target.'cfg(target_os = "android")'.dependencies]
libc = { workspace = true }

[target.'cfg(target_os = "macos")'.dependencies]
libc = { workspace = true }

[dev-dependencies]
assert_cmd = { workspace = true }
predicates = { workspace = true }

@@ -6,7 +6,6 @@ use codex_core::auth::logout;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_login::ServerOptions;
use codex_login::run_device_code_login;
use codex_login::run_login_server;
use codex_protocol::mcp_protocol::AuthMode;
use std::path::PathBuf;
@@ -56,32 +55,6 @@ pub async fn run_login_with_api_key(
    }
}

/// Login using the OAuth device code flow.
pub async fn run_login_with_device_code(
    cli_config_overrides: CliConfigOverrides,
    issuer_base_url: Option<String>,
    client_id: Option<String>,
) -> ! {
    let config = load_config_or_exit(cli_config_overrides);
    let mut opts = ServerOptions::new(
        config.codex_home,
        client_id.unwrap_or(CLIENT_ID.to_string()),
    );
    if let Some(iss) = issuer_base_url {
        opts.issuer = iss;
    }
    match run_device_code_login(opts).await {
        Ok(()) => {
            eprintln!("Successfully logged in");
            std::process::exit(0);
        }
        Err(e) => {
            eprintln!("Error logging in with device code: {e}");
            std::process::exit(1);
        }
    }
}

pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
    let config = load_config_or_exit(cli_config_overrides);


@@ -1,3 +1,4 @@
use anyhow::Context;
use clap::CommandFactory;
use clap::Parser;
use clap_complete::Shell;
@@ -10,7 +11,6 @@ use codex_cli::SeatbeltCommand;
use codex_cli::login::run_login_status;
use codex_cli::login::run_login_with_api_key;
use codex_cli::login::run_login_with_chatgpt;
use codex_cli::login::run_login_with_device_code;
use codex_cli::login::run_logout;
use codex_cli::proto;
use codex_cloud_tasks::Cli as CloudTasksCli;
@@ -24,6 +24,7 @@ use std::path::PathBuf;
use supports_color::Stream;

mod mcp_cmd;
mod pre_main_hardening;

use crate::mcp_cmd::McpCli;
use crate::proto::ProtoCli;
@@ -68,12 +69,6 @@ enum Subcommand {
    /// [experimental] Run Codex as an MCP server and manage MCP servers.
    Mcp(McpCli),

    /// [experimental] Run the Codex MCP server (stdio transport).
    McpServer,

    /// [experimental] Run the app server.
    AppServer,

    /// Run the Protocol stream via stdin/stdout
    #[clap(visible_alias = "p")]
    Proto(ProtoCli),
@@ -94,7 +89,8 @@ enum Subcommand {
    /// Internal: generate TypeScript protocol bindings.
    #[clap(hide = true)]
    GenerateTs(GenerateTsCommand),
    /// [EXPERIMENTAL] Browse tasks from Codex Cloud and apply changes locally.

    /// Browse and apply tasks from the cloud.
    #[clap(name = "cloud", alias = "cloud-tasks")]
    Cloud(CloudTasksCli),

@@ -148,20 +144,6 @@ struct LoginCommand {
    #[arg(long = "api-key", value_name = "API_KEY")]
    api_key: Option<String>,

    /// EXPERIMENTAL: Use device code flow (not yet supported)
    /// This feature is experimental and may change in future releases.
    #[arg(long = "experimental_use-device-code", hide = true)]
    use_device_code: bool,

    /// EXPERIMENTAL: Use custom OAuth issuer base URL (advanced)
    /// Override the OAuth issuer base URL (advanced)
    #[arg(long = "experimental_issuer", value_name = "URL", hide = true)]
    issuer_base_url: Option<String>,

    /// EXPERIMENTAL: Use custom OAuth client ID (advanced)
    #[arg(long = "experimental_client-id", value_name = "CLIENT_ID", hide = true)]
    client_id: Option<String>,

    #[command(subcommand)]
    action: Option<LoginSubcommand>,
}
@@ -211,7 +193,7 @@ fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec<Stri
    } else {
        resume_cmd
    };
    lines.push(format!("To continue this session, run {command}"));
    lines.push(format!("To continue this session, run {command}."));
    }

    lines
@@ -236,7 +218,14 @@ fn pre_main_hardening() {
    };

    if secure_mode == "1" {
        codex_process_hardening::pre_main_hardening();
        #[cfg(any(target_os = "linux", target_os = "android"))]
        crate::pre_main_hardening::pre_main_hardening_linux();

        #[cfg(target_os = "macos")]
        crate::pre_main_hardening::pre_main_hardening_macos();

        #[cfg(windows)]
        crate::pre_main_hardening::pre_main_hardening_windows();
    }

    // Always clear this env var so child processes don't inherit it.
@@ -275,16 +264,10 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
            );
            codex_exec::run_main(exec_cli, codex_linux_sandbox_exe).await?;
        }
        Some(Subcommand::McpServer) => {
            codex_mcp_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
        }
        Some(Subcommand::Mcp(mut mcp_cli)) => {
            // Propagate any root-level config overrides (e.g. `-c key=value`).
            prepend_config_flags(&mut mcp_cli.config_overrides, root_config_overrides.clone());
            mcp_cli.run().await?;
        }
        Some(Subcommand::AppServer) => {
            codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
            mcp_cli.run(codex_linux_sandbox_exe).await?;
        }
        Some(Subcommand::Resume(ResumeCommand {
            session_id,
@@ -310,14 +293,7 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
                run_login_status(login_cli.config_overrides).await;
            }
            None => {
                if login_cli.use_device_code {
                    run_login_with_device_code(
                        login_cli.config_overrides,
                        login_cli.issuer_base_url,
                        login_cli.client_id,
                    )
                    .await;
                } else if let Some(api_key) = login_cli.api_key {
                if let Some(api_key) = login_cli.api_key {
                    run_login_with_api_key(login_cli.config_overrides, api_key).await;
                } else {
                    run_login_with_chatgpt(login_cli.config_overrides).await;
@@ -380,13 +356,14 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
            );
            run_apply_command(apply_cli, None).await?;
        }
        Some(Subcommand::ResponsesApiProxy(args)) => {
            tokio::task::spawn_blocking(move || codex_responses_api_proxy::run_main(args))
                .await??;
        }
        Some(Subcommand::GenerateTs(gen_cli)) => {
            codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
        }
        Some(Subcommand::ResponsesApiProxy(args)) => {
            tokio::task::spawn_blocking(move || codex_responses_api_proxy::run_main(args))
                .await
                .context("responses-api-proxy blocking task panicked")??;
        }
    }

    Ok(())
@@ -535,7 +512,7 @@ mod tests {
        lines,
        vec![
            "Token usage: total=2 input=0 output=2".to_string(),
            "To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000"
            "To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000."
                .to_string(),
        ]
    );

@@ -1,4 +1,6 @@
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::path::PathBuf;

use anyhow::Context;
use anyhow::Result;
@@ -11,7 +13,6 @@ use codex_core::config::find_codex_home;
use codex_core::config::load_global_mcp_servers;
use codex_core::config::write_global_mcp_servers;
use codex_core::config_types::McpServerConfig;
use codex_core::config_types::McpServerTransportConfig;

/// [experimental] Launch Codex as an MCP server or manage configured MCP servers.
///
@@ -27,11 +28,14 @@ pub struct McpCli {
    pub config_overrides: CliConfigOverrides,

    #[command(subcommand)]
    pub subcommand: McpSubcommand,
    pub cmd: Option<McpSubcommand>,
}

#[derive(Debug, clap::Subcommand)]
pub enum McpSubcommand {
    /// [experimental] Run the Codex MCP server (stdio transport).
    Serve,

    /// [experimental] List configured MCP servers.
    List(ListArgs),

@@ -83,13 +87,17 @@ pub struct RemoveArgs {
}

impl McpCli {
    pub async fn run(self) -> Result<()> {
    pub async fn run(self, codex_linux_sandbox_exe: Option<PathBuf>) -> Result<()> {
        let McpCli {
            config_overrides,
            subcommand,
            cmd,
        } = self;
        let subcommand = cmd.unwrap_or(McpSubcommand::Serve);

        match subcommand {
            McpSubcommand::Serve => {
                codex_mcp_server::run_main(codex_linux_sandbox_exe, config_overrides).await?;
            }
            McpSubcommand::List(args) => {
                run_list(&config_overrides, args)?;
            }
@@ -137,11 +145,9 @@ fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<(
        .with_context(|| format!("failed to load MCP servers from {}", codex_home.display()))?;

    let new_entry = McpServerConfig {
        transport: McpServerTransportConfig::Stdio {
            command: command_bin,
            args: command_args,
            env: env_map,
        },
        command: command_bin,
        args: command_args,
        env: env_map,
        startup_timeout_sec: None,
        tool_timeout_sec: None,
    };
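// Editor's note: with `cmd: Option<McpSubcommand>` and the
// `cmd.unwrap_or(McpSubcommand::Serve)` default above, a bare `codex mcp`
// now behaves like the removed top-level `codex mcp-server` subcommand, e.g.:
//
//     codex mcp          # serves MCP over stdio (Serve is the default)
//     codex mcp list     # lists configured servers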
@@ -195,25 +201,16 @@ fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Resul
    let json_entries: Vec<_> = entries
        .into_iter()
        .map(|(name, cfg)| {
            let transport = match &cfg.transport {
                McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
                    "type": "stdio",
                    "command": command,
                    "args": args,
                    "env": env,
                }),
                McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                    serde_json::json!({
                        "type": "streamable_http",
                        "url": url,
                        "bearer_token": bearer_token,
                    })
                }
            };

            let env = cfg.env.as_ref().map(|env| {
                env.iter()
                    .map(|(k, v)| (k.clone(), v.clone()))
                    .collect::<BTreeMap<_, _>>()
            });
            serde_json::json!({
                "name": name,
                "transport": transport,
                "command": cfg.command,
                "args": cfg.args,
                "env": env,
                "startup_timeout_sec": cfg
                    .startup_timeout_sec
                    .map(|timeout| timeout.as_secs_f64()),
@@ -233,111 +230,62 @@ fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Resul
        return Ok(());
    }

    let mut stdio_rows: Vec<[String; 4]> = Vec::new();
    let mut http_rows: Vec<[String; 3]> = Vec::new();

    let mut rows: Vec<[String; 4]> = Vec::new();
    for (name, cfg) in entries {
        match &cfg.transport {
            McpServerTransportConfig::Stdio { command, args, env } => {
                let args_display = if args.is_empty() {
                    "-".to_string()
                } else {
                    args.join(" ")
                };
                let env_display = match env.as_ref() {
                    None => "-".to_string(),
                    Some(map) if map.is_empty() => "-".to_string(),
                    Some(map) => {
                        let mut pairs: Vec<_> = map.iter().collect();
                        pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                        pairs
                            .into_iter()
                            .map(|(k, v)| format!("{k}={v}"))
                            .collect::<Vec<_>>()
                            .join(", ")
                    }
                };
                stdio_rows.push([name.clone(), command.clone(), args_display, env_display]);
            }
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                let has_bearer = if bearer_token.is_some() {
                    "True"
                } else {
                    "False"
                };
                http_rows.push([name.clone(), url.clone(), has_bearer.into()]);
        let args = if cfg.args.is_empty() {
            "-".to_string()
        } else {
            cfg.args.join(" ")
        };

        let env = match cfg.env.as_ref() {
            None => "-".to_string(),
            Some(map) if map.is_empty() => "-".to_string(),
            Some(map) => {
                let mut pairs: Vec<_> = map.iter().collect();
                pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                pairs
                    .into_iter()
                    .map(|(k, v)| format!("{k}={v}"))
                    .collect::<Vec<_>>()
                    .join(", ")
            }
        };

        rows.push([name.clone(), cfg.command.clone(), args, env]);
    }

    let mut widths = ["Name".len(), "Command".len(), "Args".len(), "Env".len()];
    for row in &rows {
        for (i, cell) in row.iter().enumerate() {
            widths[i] = widths[i].max(cell.len());
        }
    }

    if !stdio_rows.is_empty() {
        let mut widths = ["Name".len(), "Command".len(), "Args".len(), "Env".len()];
        for row in &stdio_rows {
            for (i, cell) in row.iter().enumerate() {
                widths[i] = widths[i].max(cell.len());
            }
        }
        println!(
            "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
            "Name",
            "Command",
            "Args",
            "Env",
            name_w = widths[0],
            cmd_w = widths[1],
            args_w = widths[2],
            env_w = widths[3],
        );

        for row in rows {
            println!(
                "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
                "Name",
                "Command",
                "Args",
                "Env",
                row[0],
                row[1],
                row[2],
                row[3],
                name_w = widths[0],
                cmd_w = widths[1],
                args_w = widths[2],
                env_w = widths[3],
            );

        for row in &stdio_rows {
            println!(
                "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
                row[0],
                row[1],
                row[2],
                row[3],
                name_w = widths[0],
                cmd_w = widths[1],
                args_w = widths[2],
                env_w = widths[3],
            );
        }
    }

    if !stdio_rows.is_empty() && !http_rows.is_empty() {
        println!();
    }

    if !http_rows.is_empty() {
        let mut widths = ["Name".len(), "Url".len(), "Has Bearer Token".len()];
        for row in &http_rows {
            for (i, cell) in row.iter().enumerate() {
                widths[i] = widths[i].max(cell.len());
            }
        }

        println!(
            "{:<name_w$} {:<url_w$} {:<token_w$}",
            "Name",
            "Url",
            "Has Bearer Token",
            name_w = widths[0],
            url_w = widths[1],
            token_w = widths[2],
        );

        for row in &http_rows {
            println!(
                "{:<name_w$} {:<url_w$} {:<token_w$}",
                row[0],
                row[1],
                row[2],
                name_w = widths[0],
                url_w = widths[1],
                token_w = widths[2],
            );
        }
    }

    Ok(())
@@ -353,22 +301,16 @@ fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<(
    };

    if get_args.json {
        let transport = match &server.transport {
            McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
                "type": "stdio",
                "command": command,
                "args": args,
                "env": env,
            }),
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => serde_json::json!({
                "type": "streamable_http",
                "url": url,
                "bearer_token": bearer_token,
            }),
        };
        let env = server.env.as_ref().map(|env| {
            env.iter()
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect::<BTreeMap<_, _>>()
        });
        let output = serde_json::to_string_pretty(&serde_json::json!({
            "name": get_args.name,
            "transport": transport,
            "command": server.command,
            "args": server.args,
            "env": env,
            "startup_timeout_sec": server
                .startup_timeout_sec
                .map(|timeout| timeout.as_secs_f64()),
@@ -381,38 +323,27 @@ fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<(
    }

    println!("{}", get_args.name);
    match &server.transport {
        McpServerTransportConfig::Stdio { command, args, env } => {
            println!(" transport: stdio");
            println!(" command: {command}");
            let args_display = if args.is_empty() {
                "-".to_string()
            } else {
                args.join(" ")
            };
            println!(" args: {args_display}");
            let env_display = match env.as_ref() {
                None => "-".to_string(),
                Some(map) if map.is_empty() => "-".to_string(),
                Some(map) => {
                    let mut pairs: Vec<_> = map.iter().collect();
                    pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                    pairs
                        .into_iter()
                        .map(|(k, v)| format!("{k}={v}"))
                        .collect::<Vec<_>>()
                        .join(", ")
                }
            };
            println!(" env: {env_display}");
    println!(" command: {}", server.command);
    let args = if server.args.is_empty() {
        "-".to_string()
    } else {
        server.args.join(" ")
    };
    println!(" args: {args}");
    let env_display = match server.env.as_ref() {
        None => "-".to_string(),
        Some(map) if map.is_empty() => "-".to_string(),
        Some(map) => {
            let mut pairs: Vec<_> = map.iter().collect();
            pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
            pairs
                .into_iter()
                .map(|(k, v)| format!("{k}={v}"))
                .collect::<Vec<_>>()
                .join(", ")
        }
        McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
            println!(" transport: streamable_http");
            println!(" url: {url}");
            let bearer = bearer_token.as_deref().unwrap_or("-");
            println!(" bearer_token: {bearer}");
        }
    }
    };
    println!(" env: {env_display}");
    if let Some(timeout) = server.startup_timeout_sec {
        println!(" startup_timeout_sec: {}", timeout.as_secs_f64());
    }

@@ -1,19 +1,3 @@
/// This is designed to be called pre-main() (using `#[ctor::ctor]`) to perform
/// various process hardening steps, such as
/// - disabling core dumps
/// - disabling ptrace attach on Linux and macOS.
/// - removing dangerous environment variables such as LD_PRELOAD and DYLD_*
pub fn pre_main_hardening() {
    #[cfg(any(target_os = "linux", target_os = "android"))]
    pre_main_hardening_linux();

    #[cfg(target_os = "macos")]
    pre_main_hardening_macos();

    #[cfg(windows)]
    pre_main_hardening_windows();
}

#[cfg(any(target_os = "linux", target_os = "android"))]
const PRCTL_FAILED_EXIT_CODE: i32 = 5;

@@ -2,7 +2,6 @@ use std::path::Path;

use anyhow::Result;
use codex_core::config::load_global_mcp_servers;
use codex_core::config_types::McpServerTransportConfig;
use predicates::str::contains;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
@@ -27,14 +26,9 @@ fn add_and_remove_server_updates_global_config() -> Result<()> {
    let servers = load_global_mcp_servers(codex_home.path())?;
    assert_eq!(servers.len(), 1);
    let docs = servers.get("docs").expect("server should exist");
    match &docs.transport {
        McpServerTransportConfig::Stdio { command, args, env } => {
            assert_eq!(command, "echo");
            assert_eq!(args, &vec!["hello".to_string()]);
            assert!(env.is_none());
        }
        other => panic!("unexpected transport: {other:?}"),
    }
    assert_eq!(docs.command, "echo");
    assert_eq!(docs.args, vec!["hello".to_string()]);
    assert!(docs.env.is_none());

    let mut remove_cmd = codex_command(codex_home.path())?;
    remove_cmd
@@ -82,10 +76,7 @@ fn add_with_env_preserves_key_order_and_values() -> Result<()> {

    let servers = load_global_mcp_servers(codex_home.path())?;
    let envy = servers.get("envy").expect("server should exist");
    let env = match &envy.transport {
        McpServerTransportConfig::Stdio { env: Some(env), .. } => env,
        other => panic!("unexpected transport: {other:?}"),
    };
    let env = envy.env.as_ref().expect("env should be present");

    assert_eq!(env.len(), 2);
    assert_eq!(env.get("FOO"), Some(&"bar".to_string()));

@@ -4,7 +4,6 @@ use anyhow::Result;
use predicates::str::contains;
use pretty_assertions::assert_eq;
use serde_json::Value as JsonValue;
use serde_json::json;
use tempfile::TempDir;

fn codex_command(codex_home: &Path) -> Result<assert_cmd::Command> {
@@ -59,35 +58,38 @@ fn list_and_get_render_expected_output() -> Result<()> {
    assert!(json_output.status.success());
    let stdout = String::from_utf8(json_output.stdout)?;
    let parsed: JsonValue = serde_json::from_str(&stdout)?;
    let array = parsed.as_array().expect("expected array");
    assert_eq!(array.len(), 1);
    let entry = &array[0];
    assert_eq!(entry.get("name"), Some(&JsonValue::String("docs".into())));
    assert_eq!(
        parsed,
        json!([
            {
                "name": "docs",
                "transport": {
                    "type": "stdio",
                    "command": "docs-server",
                    "args": [
                        "--port",
                        "4000"
                    ],
                    "env": {
                        "TOKEN": "secret"
                    }
                },
                "startup_timeout_sec": null,
                "tool_timeout_sec": null
            }
        ]
        )
        entry.get("command"),
        Some(&JsonValue::String("docs-server".into()))
    );

    let args = entry
        .get("args")
        .and_then(|v| v.as_array())
        .expect("args array");
    assert_eq!(
        args,
        &vec![
            JsonValue::String("--port".into()),
            JsonValue::String("4000".into())
        ]
    );

    let env = entry
        .get("env")
        .and_then(|v| v.as_object())
        .expect("env map");
    assert_eq!(env.get("TOKEN"), Some(&JsonValue::String("secret".into())));

    let mut get_cmd = codex_command(codex_home.path())?;
    let get_output = get_cmd.args(["mcp", "get", "docs"]).output()?;
    assert!(get_output.status.success());
    let stdout = String::from_utf8(get_output.stdout)?;
    assert!(stdout.contains("docs"));
    assert!(stdout.contains("transport: stdio"));
    assert!(stdout.contains("command: docs-server"));
    assert!(stdout.contains("args: --port 4000"));
    assert!(stdout.contains("env: TOKEN=secret"));

@@ -12,7 +12,7 @@ workspace = true

[features]
default = ["online"]
online = ["dep:codex-backend-client"]
online = ["dep:reqwest", "dep:tokio", "dep:codex-backend-client"]
mock = []

[dependencies]
@@ -20,8 +20,11 @@ anyhow = "1"
async-trait = "0.1"
chrono = { version = "0.4", features = ["serde"] }
diffy = "0.4.2"
reqwest = { version = "0.12", features = ["json"], optional = true }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "2.0.12"
tokio = { version = "1", features = ["macros", "rt-multi-thread"], optional = true }
codex-backend-client = { path = "../backend-client", optional = true }
codex-git-apply = { path = "../git-apply" }
dirs = { workspace = true }

@@ -94,6 +94,32 @@ pub struct CreatedTask {
    pub id: TaskId,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum AttachmentKind {
    File,
    Image,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentReference {
    pub sediment_id: String,
    pub asset_pointer: String,
    pub path: Option<String>,
    pub display_name: Option<String>,
    pub kind: AttachmentKind,
    pub size_bytes: Option<u64>,
    pub width: Option<u32>,
    pub height: Option<u32>,
}

#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct FileServiceConfig {
    pub base_url: String,
    pub bearer_token: Option<String>,
    pub chatgpt_account_id: Option<String>,
    pub user_agent: Option<String>,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct DiffSummary {
    pub files_changed: usize,
@@ -153,6 +179,10 @@ pub trait CloudBackend: Send + Sync {
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        best_of_n: usize,
        attachments: &[AttachmentReference],
    ) -> Result<CreatedTask>;

    fn file_service_config(&self) -> Option<FileServiceConfig> {
        None
    }
}

File diff suppressed because it is too large
@@ -2,11 +2,14 @@ mod api;

pub use api::ApplyOutcome;
pub use api::ApplyStatus;
pub use api::AttachmentKind;
pub use api::AttachmentReference;
pub use api::AttemptStatus;
pub use api::CloudBackend;
pub use api::CloudTaskError;
pub use api::CreatedTask;
pub use api::DiffSummary;
pub use api::FileServiceConfig;
pub use api::Result;
pub use api::TaskId;
pub use api::TaskStatus;

@@ -129,9 +129,9 @@ impl CloudBackend for MockClient {
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        best_of_n: usize,
        attachments: &[crate::AttachmentReference],
    ) -> Result<crate::CreatedTask> {
        let _ = (env_id, prompt, git_ref, qa_mode, best_of_n);
        let _ = (env_id, prompt, git_ref, qa_mode, attachments);
        let id = format!("task_local_{}", chrono::Utc::now().timestamp_millis());
        Ok(crate::CreatedTask { id: TaskId(id) })
    }

@@ -14,7 +14,7 @@ workspace = true
anyhow = "1"
clap = { version = "4", features = ["derive"] }
codex-common = { path = "../common", features = ["cli"] }
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
tokio = { version = "1", features = ["fs", "macros", "rt-multi-thread"] }
tracing = { version = "0.1.41", features = ["log"] }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
codex-cloud-tasks-client = { path = "../cloud-tasks-client", features = ["mock", "online"] }
@@ -24,6 +24,7 @@ tokio-stream = "0.1.17"
chrono = { version = "0.4", features = ["serde"] }
codex-login = { path = "../login" }
codex-core = { path = "../core" }
codex-backend-client = { path = "../backend-client" }
throbber-widgets-tui = "0.8.0"
base64 = "0.22"
serde_json = "1"
@@ -31,6 +32,23 @@ reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1", features = ["derive"] }
unicode-width = "0.1"
codex-tui = { path = "../tui" }
codex-file-search = { path = "../file-search" }
mime_guess = "2"
url = "2"
image = { workspace = true }

[dev-dependencies]
async-trait = "0.1"
tempfile = "3"

[[bin]]
name = "conncheck"
path = "src/bin/conncheck.rs"

[[bin]]
name = "newtask"
path = "src/bin/newtask.rs"

[[bin]]
name = "envcheck"
path = "src/bin/envcheck.rs"

@@ -7,6 +7,7 @@ pub struct EnvironmentRow {
    pub label: Option<String>,
    pub is_pinned: bool,
    pub repo_hints: Option<String>, // e.g., "openai/codex"
    pub default_branch: Option<String>,
}

#[derive(Clone, Debug, Default)]
@@ -15,11 +16,6 @@ pub struct EnvModalState {
    pub selected: usize,
}

#[derive(Clone, Debug, Default)]
pub struct BestOfModalState {
    pub selected: usize,
}

#[derive(Clone, Debug, Copy, PartialEq, Eq)]
pub enum ApplyResultLevel {
    Success,
@@ -57,14 +53,12 @@ pub struct App {
    pub env_filter: Option<String>,
    pub env_modal: Option<EnvModalState>,
    pub apply_modal: Option<ApplyModalState>,
    pub best_of_modal: Option<BestOfModalState>,
    pub environments: Vec<EnvironmentRow>,
    pub env_last_loaded: Option<std::time::Instant>,
    pub env_loading: bool,
    pub env_error: Option<String>,
    // New Task page
    pub new_task: Option<crate::new_task::NewTaskPage>,
    pub best_of_n: usize,
    // Apply preflight spinner state
    pub apply_preflight_inflight: bool,
    // Apply action spinner state
@@ -88,13 +82,11 @@ impl App {
            env_filter: None,
            env_modal: None,
            apply_modal: None,
            best_of_modal: None,
            environments: Vec::new(),
            env_last_loaded: None,
            env_loading: false,
            env_error: None,
            new_task: None,
            best_of_n: 1,
            apply_preflight_inflight: false,
            apply_inflight: false,
            list_generation: 0,
@@ -449,7 +441,7 @@ mod tests {
            _prompt: &str,
            _git_ref: &str,
            _qa_mode: bool,
            _best_of_n: usize,
            _attachments: &[codex_cloud_tasks_client::AttachmentReference],
        ) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::CreatedTask> {
            Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
                "not used in test",

226 codex-rs/cloud-tasks/src/attachments/mod.rs Normal file
@@ -0,0 +1,226 @@
pub mod upload;

pub use upload::AttachmentAssetPointer;
pub use upload::AttachmentId;
pub use upload::AttachmentUploadError;
pub use upload::AttachmentUploadMode;
pub use upload::AttachmentUploadProgress;
pub use upload::AttachmentUploadState;
pub use upload::AttachmentUploadUpdate;
pub use upload::AttachmentUploader;
pub use upload::HttpConfig as AttachmentUploadHttpConfig;
pub use upload::pointer_id_from_value;

use serde::Deserialize;
use serde::Serialize;

const MAX_SUGGESTIONS: usize = 5;

/// The type of attachment included alongside a composer submission.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum AttachmentKind {
    File,
    Image,
}

/// Metadata describing a file or asset attached via an `@` mention.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ComposerAttachment {
    pub kind: AttachmentKind,
    pub label: String,
    pub path: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fs_path: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub start_line: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub end_line: Option<u32>,
    #[serde(skip, default)]
    pub id: AttachmentId,
    #[serde(skip_serializing, skip_deserializing)]
    pub upload: AttachmentUploadState,
}

impl ComposerAttachment {
    pub fn from_suggestion(id: AttachmentId, suggestion: &MentionSuggestion) -> Self {
        Self {
            kind: AttachmentKind::File,
            label: suggestion.label.clone(),
            path: suggestion.path.clone(),
            fs_path: suggestion.fs_path.clone(),
            start_line: suggestion.start_line,
            end_line: suggestion.end_line,
            id,
            upload: AttachmentUploadState::default(),
        }
    }
}

/// UI state for the active `@` mention query inside the composer.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MentionQueryState {
    pub current: Option<MentionToken>,
}

impl MentionQueryState {
    /// Returns true when the stored token changed.
    pub fn update_from(&mut self, token: Option<String>) -> bool {
        let next = token.map(MentionToken::from_query);
        if next != self.current {
            self.current = next;
            return true;
        }
        false
    }
}

/// Represents an `@` mention currently under the user's cursor.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MentionToken {
    /// Query string without the leading `@`.
    pub query: String,
    /// Raw token including the `@` prefix.
    pub raw: String,
}

impl MentionToken {
    pub(crate) fn from_query(query: String) -> Self {
        let raw = format!("@{query}");
        Self { query, raw }
    }
}

/// A suggested file (or range within a file) that matches the active `@` token.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MentionSuggestion {
    pub label: String,
    pub path: String,
    pub fs_path: Option<String>,
    pub start_line: Option<u32>,
    pub end_line: Option<u32>,
}

impl MentionSuggestion {
    pub fn new(label: impl Into<String>, path: impl Into<String>) -> Self {
        Self {
            label: label.into(),
            path: path.into(),
            fs_path: None,
            start_line: None,
            end_line: None,
        }
    }
}

/// Tracks suggestion list + selection for the mention picker overlay.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MentionPickerState {
    suggestions: Vec<MentionSuggestion>,
    selected: usize,
}

impl MentionPickerState {
    pub fn clear(&mut self) -> bool {
        if self.suggestions.is_empty() {
            return false;
        }
        self.suggestions.clear();
        self.selected = 0;
        true
    }

    pub fn move_selection(&mut self, delta: isize) {
        if self.suggestions.is_empty() {
            return;
        }
        let len = self.suggestions.len() as isize;
        let mut idx = self.selected as isize + delta;
        if idx < 0 {
            idx = len - 1;
        }
        if idx >= len {
            idx = 0;
        }
        self.selected = idx as usize;
    }

    pub fn selected_index(&self) -> usize {
        self.selected.min(self.suggestions.len().saturating_sub(1))
    }

    pub fn current(&self) -> Option<&MentionSuggestion> {
        self.suggestions.get(self.selected_index())
    }

    pub fn render_height(&self) -> u16 {
        let rows = self.suggestions.len().clamp(1, MAX_SUGGESTIONS) as u16;
        // Add borders + padding space.
        rows.saturating_add(2)
    }

    pub fn items(&self) -> &[MentionSuggestion] {
        &self.suggestions
    }

    pub fn set_suggestions(&mut self, suggestions: Vec<MentionSuggestion>) -> bool {
        let mut trimmed = suggestions;
        if trimmed.len() > MAX_SUGGESTIONS {
            trimmed.truncate(MAX_SUGGESTIONS);
        }
        if trimmed == self.suggestions {
            return false;
        }
        self.suggestions = trimmed;
        self.selected = 0;
        true
    }
}

#[cfg(test)]
mod tests {
    use super::AttachmentUploadState;
    use super::*;

    #[test]
    fn compose_attachment_from_suggestion_copies_fields() {
        let mut suggestion = MentionSuggestion::new("src/main.rs", "src/main.rs");
        suggestion.fs_path = Some("/repo/src/main.rs".to_string());
        suggestion.start_line = Some(10);
        suggestion.end_line = Some(20);
        let att = ComposerAttachment::from_suggestion(AttachmentId::new(42), &suggestion);
        assert_eq!(att.label, "src/main.rs");
        assert_eq!(att.path, "src/main.rs");
        assert_eq!(att.fs_path.as_deref(), Some("/repo/src/main.rs"));
        assert_eq!(att.start_line, Some(10));
        assert_eq!(att.end_line, Some(20));
        assert!(matches!(att.upload, AttachmentUploadState::NotStarted));
        assert_eq!(att.id.raw(), 42);
    }

    #[test]
    fn move_selection_wraps() {
        let _token = MentionToken::from_query("foo".to_string());
        let mut picker = MentionPickerState::default();
        assert!(picker.set_suggestions(vec![
            MentionSuggestion::new("src/foo.rs", "src/foo.rs"),
            MentionSuggestion::new("src/main.rs", "src/main.rs"),
        ]));
        picker.move_selection(1);
        assert_eq!(
            picker.selected_index(),
            1.min(picker.items().len().saturating_sub(1))
        );
        picker.move_selection(-1);
        assert_eq!(picker.selected_index(), 0);
    }

    #[test]
    fn refresh_none_clears_suggestions() {
        let _token = MentionToken::from_query("bar".to_string());
        let mut picker = MentionPickerState::default();
        assert!(
            picker.set_suggestions(vec![MentionSuggestion::new("docs/bar.md", "docs/bar.md",)])
        );
        assert!(picker.clear());
        assert!(picker.items().is_empty());
    }
}
605 codex-rs/cloud-tasks/src/attachments/upload.rs Normal file
@@ -0,0 +1,605 @@
use std::collections::HashMap;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;

use crate::util::append_error_log;
use chrono::Local;
use mime_guess::MimeGuess;
use reqwest::Client;
use serde::Deserialize;
use serde::Serialize;
use tokio::sync::mpsc;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::mpsc::UnboundedSender;
use tracing::debug;
use tracing::warn;
use url::Url;

const UPLOAD_USE_CASE: &str = "codex";

/// Stable identifier assigned to each staged attachment.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Serialize, Deserialize)]
#[serde(transparent)]
pub struct AttachmentId(pub u64);

impl AttachmentId {
    pub const fn new(raw: u64) -> Self {
        Self(raw)
    }

    pub const fn raw(self) -> u64 {
        self.0
    }
}

/// Represents the lifecycle of an attachment upload initiated after an `@` mention.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum AttachmentUploadState {
    NotStarted,
    Uploading(AttachmentUploadProgress),
    Uploaded(AttachmentUploadSuccess),
    Failed(AttachmentUploadError),
}

impl Default for AttachmentUploadState {
    fn default() -> Self {
        Self::NotStarted
    }
}

impl AttachmentUploadState {
    pub fn is_pending(&self) -> bool {
        matches!(self, Self::NotStarted | Self::Uploading(_))
    }

    pub fn is_uploaded(&self) -> bool {
        matches!(self, Self::Uploaded(_))
    }

    pub fn is_failed(&self) -> bool {
        matches!(self, Self::Failed(_))
    }
}

/// Progress for uploads where the total size is known.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentUploadProgress {
    pub uploaded_bytes: u64,
    pub total_bytes: Option<u64>,
}

impl AttachmentUploadProgress {
    pub fn new(uploaded_bytes: u64, total_bytes: Option<u64>) -> Self {
        Self {
            uploaded_bytes,
            total_bytes,
        }
    }
}

/// Successful upload metadata containing the remote pointer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentUploadSuccess {
    pub asset_pointer: AttachmentAssetPointer,
    pub display_name: String,
}

impl AttachmentUploadSuccess {
    pub fn new(asset_pointer: AttachmentAssetPointer, display_name: impl Into<String>) -> Self {
        Self {
            asset_pointer,
            display_name: display_name.into(),
        }
    }
}

/// Describes the remote asset pointer returned by the file service.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentAssetPointer {
    pub kind: AttachmentPointerKind,
    pub value: String,
}

impl AttachmentAssetPointer {
    pub fn new(kind: AttachmentPointerKind, value: impl Into<String>) -> Self {
        Self {
            kind,
            value: value.into(),
        }
    }
}

/// High-level pointer type so we can support both single file and container uploads.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AttachmentPointerKind {
    File,
    Image,
    #[allow(dead_code)]
    Container,
}

impl fmt::Display for AttachmentPointerKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::File => write!(f, "file"),
            Self::Image => write!(f, "image"),
            Self::Container => write!(f, "container"),
        }
    }
}

/// Captures a user-visible error when uploading an attachment fails.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentUploadError {
    pub message: String,
}

impl AttachmentUploadError {
    pub fn new(message: impl Into<String>) -> Self {
        Self {
            message: message.into(),
        }
    }
}

impl fmt::Display for AttachmentUploadError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.message)
    }
}

/// Internal update emitted by the background uploader task.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum AttachmentUploadUpdate {
    Started {
        id: AttachmentId,
        total_bytes: Option<u64>,
    },
    Finished {
        id: AttachmentId,
        result: Result<AttachmentUploadSuccess, AttachmentUploadError>,
    },
}

/// Configuration for attachment uploads.
#[derive(Clone, Debug)]
pub enum AttachmentUploadMode {
    Disabled,
    #[cfg_attr(not(test), allow(dead_code))]
    ImmediateSuccess,
    Http(HttpConfig),
}

#[derive(Clone, Debug)]
pub struct HttpConfig {
    pub base_url: String,
    pub bearer_token: Option<String>,
    pub chatgpt_account_id: Option<String>,
    pub user_agent: Option<String>,
}

impl HttpConfig {
    fn trimmed_base(&self) -> String {
        self.base_url.trim_end_matches('/').to_string()
    }
}

#[derive(Clone)]
enum AttachmentUploadBackend {
    Disabled,
    ImmediateSuccess,
    Http(Arc<AttachmentUploadHttp>),
}

#[derive(Clone)]
struct AttachmentUploadHttp {
    client: Client,
    base_url: String,
    bearer_token: Option<String>,
    chatgpt_account_id: Option<String>,
    user_agent: Option<String>,
}

impl AttachmentUploadHttp {
    fn apply_default_headers(&self, builder: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
        let mut b = builder;
        if let Some(token) = &self.bearer_token {
            b = b.bearer_auth(token);
        }
        if let Some(acc) = &self.chatgpt_account_id {
            b = b.header("ChatGPT-Account-Id", acc);
        }
        if let Some(ua) = &self.user_agent {
            b = b.header(reqwest::header::USER_AGENT, ua.clone());
        }
        b
    }
}

/// Bookkeeping for in-flight attachment uploads, providing polling APIs for the UI thread.
pub struct AttachmentUploader {
    update_tx: UnboundedSender<AttachmentUploadUpdate>,
    update_rx: UnboundedReceiver<AttachmentUploadUpdate>,
    inflight: HashMap<AttachmentId, Arc<AtomicBool>>,
    backend: AttachmentUploadBackend,
}

impl AttachmentUploader {
    pub fn new(mode: AttachmentUploadMode) -> Self {
        let (tx, rx) = mpsc::unbounded_channel();
        let backend = match mode {
            AttachmentUploadMode::Disabled => AttachmentUploadBackend::Disabled,
            AttachmentUploadMode::ImmediateSuccess => AttachmentUploadBackend::ImmediateSuccess,
            AttachmentUploadMode::Http(cfg) => match Client::builder().build() {
                Ok(client) => AttachmentUploadBackend::Http(Arc::new(AttachmentUploadHttp {
                    client,
                    base_url: cfg.trimmed_base(),
                    bearer_token: cfg.bearer_token,
                    chatgpt_account_id: cfg.chatgpt_account_id,
                    user_agent: cfg.user_agent,
                })),
                Err(err) => {
                    warn!("attachment_upload.http_client_init_failed: {err}");
                    AttachmentUploadBackend::Disabled
                }
            },
        };
        Self {
            update_tx: tx,
            update_rx: rx,
            inflight: HashMap::new(),
            backend,
        }
    }

    pub fn start_upload(
        &mut self,
        id: AttachmentId,
        display_name: impl Into<String>,
        fs_path: PathBuf,
    ) -> Result<(), AttachmentUploadError> {
        if self.inflight.contains_key(&id) {
            return Err(AttachmentUploadError::new("upload already queued"));
        }
        if let AttachmentUploadBackend::Disabled = &self.backend {
            return Err(AttachmentUploadError::new(
                "file uploads are not available in this environment",
            ));
        }

        if !is_supported_image(&fs_path) {
            return Err(AttachmentUploadError::new(
                "only image files can be uploaded",
            ));
        }

        let cancel_token = Arc::new(AtomicBool::new(false));
        self.inflight.insert(id, cancel_token.clone());
        let tx = self.update_tx.clone();
        let backend = self.backend.clone();
        let path_clone = fs_path.clone();
        let label = display_name.into();
        tokio::spawn(async move {
            let metadata = tokio::fs::metadata(&fs_path).await.ok();
            let total_bytes = metadata.as_ref().map(std::fs::Metadata::len);
            let _ = tx.send(AttachmentUploadUpdate::Started { id, total_bytes });

            if cancel_token.load(Ordering::Relaxed) {
                let _ = tx.send(AttachmentUploadUpdate::Finished {
                    id,
                    result: Err(AttachmentUploadError::new("upload canceled")),
                });
                return;
            }

            let result = match backend {
                AttachmentUploadBackend::Disabled => Err(AttachmentUploadError::new(
                    "file uploads are not available in this environment",
                )),
                AttachmentUploadBackend::ImmediateSuccess => {
                    let pointer = AttachmentAssetPointer::new(
                        AttachmentPointerKind::File,
                        format!("file-service://mock-{}", id.raw()),
                    );
                    Ok(AttachmentUploadSuccess::new(pointer, label.clone()))
                }
                AttachmentUploadBackend::Http(http) => {
                    perform_http_upload(
                        http,
                        &path_clone,
                        &label,
                        total_bytes,
                        cancel_token.clone(),
                    )
                    .await
                }
            };

            let _ = tx.send(AttachmentUploadUpdate::Finished { id, result });
        });
        Ok(())
    }

    #[cfg_attr(not(test), allow(dead_code))]
    pub fn cancel_all(&mut self) {
        for cancel in self.inflight.values() {
            cancel.store(true, Ordering::Relaxed);
        }
    }

    pub fn poll(&mut self) -> Vec<AttachmentUploadUpdate> {
        let mut out = Vec::new();
        while let Ok(update) = self.update_rx.try_recv() {
            if let AttachmentUploadUpdate::Finished { id, .. } = &update {
                self.inflight.remove(id);
            }
            out.push(update);
        }
        out
    }
}

impl Default for AttachmentUploader {
    fn default() -> Self {
        Self::new(AttachmentUploadMode::Disabled)
    }
}

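// Illustrative sketch (editor's addition): a UI tick can drain updates without
// blocking, since `poll` only calls `try_recv` on the unbounded channel.
//
//     let mut uploader = AttachmentUploader::new(AttachmentUploadMode::Disabled);
//     for update in uploader.poll() {
//         match update {
//             AttachmentUploadUpdate::Started { id, total_bytes } => {
//                 // mark the attachment row as uploading
//             }
//             AttachmentUploadUpdate::Finished { id, result } => {
//                 // record the asset pointer on success, or surface the error
//             }
//         }
//     }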
async fn perform_http_upload(
|
||||
http: Arc<AttachmentUploadHttp>,
|
||||
fs_path: &Path,
|
||||
display_label: &str,
|
||||
total_bytes: Option<u64>,
|
||||
cancel_token: Arc<AtomicBool>,
|
||||
) -> Result<AttachmentUploadSuccess, AttachmentUploadError> {
    let file_bytes = tokio::fs::read(fs_path)
        .await
        .map_err(|e| AttachmentUploadError::new(format!("failed to read file: {e}")))?;

    if cancel_token.load(Ordering::Relaxed) {
        return Err(AttachmentUploadError::new("upload canceled"));
    }

    let file_name = fs_path
        .file_name()
        .and_then(|s| s.to_str())
        .map(std::string::ToString::to_string)
        .unwrap_or_else(|| display_label.to_string());

    let create_url = format!("{}/files", http.base_url);
    let body = CreateFileRequest {
        file_name: &file_name,
        file_size: total_bytes.unwrap_or(file_bytes.len() as u64),
        use_case: UPLOAD_USE_CASE,
        timezone_offset_min: (Local::now().offset().utc_minus_local() / 60),
        reset_rate_limits: false,
    };

    let create_resp = http
        .apply_default_headers(http.client.post(&create_url))
        .json(&body)
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("file create failed: {e}")))?;
    if !create_resp.status().is_success() {
        let status = create_resp.status();
        let text = create_resp.text().await.unwrap_or_default();
        return Err(AttachmentUploadError::new(format!(
            "file create request failed status={status} body={text}"
        )));
    }
    let created: CreateFileResponse = create_resp
        .json()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("decode file create response: {e}")))?;

    if cancel_token.load(Ordering::Relaxed) {
        return Err(AttachmentUploadError::new("upload canceled"));
    }

    let upload_url = resolve_upload_url(&created.upload_url)
        .ok_or_else(|| AttachmentUploadError::new("invalid upload url"))?;

    let mime = infer_image_mime(fs_path)
        .ok_or_else(|| AttachmentUploadError::new("only image files can be uploaded"))?;
    let mut azure_req = http.client.put(&upload_url);
    azure_req = azure_req
        .header("x-ms-blob-type", "BlockBlob")
        .header("x-ms-version", "2020-04-08");

    azure_req = azure_req
        .header(reqwest::header::CONTENT_TYPE, mime.as_str())
        .header("x-ms-blob-content-type", mime.as_str());

    let azure_resp = azure_req
        .body(file_bytes)
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("blob upload failed: {e}")))?;

    if !(200..300).contains(&azure_resp.status().as_u16()) {
        let status = azure_resp.status();
        let text = azure_resp.text().await.unwrap_or_default();
        return Err(AttachmentUploadError::new(format!(
            "blob upload failed status={status} body={text}"
        )));
    }

    if cancel_token.load(Ordering::Relaxed) {
        return Err(AttachmentUploadError::new("upload canceled"));
    }

    // Finalization must succeed so the pointer can be used; surface any failure
    // to the caller after logging for easier debugging.
    if let Err(err) = finalize_upload(http.clone(), &created.file_id, &file_name).await {
        let reason = err.message.clone();
        warn!(
            "mention.attachment.upload.finalize_failed file_id={} reason={reason}",
            created.file_id
        );
        append_error_log(format!(
            "mention.attachment.upload.finalize_failed file_id={} reason={reason}",
            created.file_id
        ));
        return Err(err);
    }

    let pointer = asset_pointer_from_id(&created.file_id);
    debug!(
        "mention.attachment.upload.success file_id={} pointer={}",
        created.file_id, pointer
    );
    let pointer_kind = AttachmentPointerKind::Image;

    Ok(AttachmentUploadSuccess::new(
        AttachmentAssetPointer::new(pointer_kind, pointer),
        display_label,
    ))
}
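Taken together, the function above implements a three-step protocol: register the file (`POST {base}/files`), `PUT` the bytes to the returned blob URL with Azure block-blob headers, then finalize (`POST {base}/files/process_upload_stream`). A condensed, illustrative sketch of the happy path using the same types, with cancellation checks and HTTP status handling elided:

```rust
// Illustrative sketch only: the three-step flow implemented above, minus
// cancellation checks and status handling.
async fn upload_outline(
    http: Arc<AttachmentUploadHttp>,
    bytes: Vec<u8>,
    file_name: &str,
    mime: &str,
) -> Result<String, AttachmentUploadError> {
    // 1) Register the file and learn where to upload it.
    let created: CreateFileResponse = http
        .apply_default_headers(http.client.post(format!("{}/files", http.base_url)))
        .json(&CreateFileRequest {
            file_name,
            file_size: bytes.len() as u64,
            use_case: UPLOAD_USE_CASE,
            timezone_offset_min: 0, // illustrative; the real code derives this from Local::now()
            reset_rate_limits: false,
        })
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(e.to_string()))?
        .json()
        .await
        .map_err(|e| AttachmentUploadError::new(e.to_string()))?;

    // 2) PUT the bytes to the (possibly estuary-wrapped) blob URL.
    let url = resolve_upload_url(&created.upload_url)
        .ok_or_else(|| AttachmentUploadError::new("invalid upload url"))?;
    http.client
        .put(&url)
        .header("x-ms-blob-type", "BlockBlob")
        .header(reqwest::header::CONTENT_TYPE, mime)
        .body(bytes)
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(e.to_string()))?;

    // 3) Finalize so the returned pointer becomes usable.
    finalize_upload(http.clone(), &created.file_id, file_name).await?;
    Ok(asset_pointer_from_id(&created.file_id))
}
```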
fn asset_pointer_from_id(file_id: &str) -> String {
    if file_id.starts_with("file_") {
        format!("sediment://{file_id}")
    } else {
        format!("file-service://{file_id}")
    }
}

pub fn pointer_id_from_value(pointer: &str) -> Option<String> {
    pointer
        .strip_prefix("file-service://")
        .or_else(|| pointer.strip_prefix("sediment://"))
        .map(str::to_string)
        .or_else(|| (!pointer.is_empty()).then(|| pointer.to_string()))
}
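The two helpers above are intended to round-trip: ids minted with a `file_` prefix use the `sediment://` scheme, everything else `file-service://`, and the parser also tolerates bare ids. A small illustrative check:

```rust
// Illustrative only: exercises the two helpers exactly as defined above.
fn pointer_round_trip() {
    assert_eq!(asset_pointer_from_id("file_abc"), "sediment://file_abc");
    assert_eq!(asset_pointer_from_id("abc"), "file-service://abc");

    // Both schemes decode back to the raw id.
    assert_eq!(pointer_id_from_value("sediment://file_abc").as_deref(), Some("file_abc"));
    assert_eq!(pointer_id_from_value("file-service://abc").as_deref(), Some("abc"));
    // Bare, non-empty ids pass through; empty input yields None.
    assert_eq!(pointer_id_from_value("abc").as_deref(), Some("abc"));
    assert_eq!(pointer_id_from_value(""), None);
}
```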
async fn finalize_upload(
    http: Arc<AttachmentUploadHttp>,
    file_id: &str,
    file_name: &str,
) -> Result<(), AttachmentUploadError> {
    let finalize_url = format!("{}/files/process_upload_stream", http.base_url);
    let body = FinalizeUploadRequest {
        file_id,
        use_case: UPLOAD_USE_CASE,
        index_for_retrieval: false,
        file_name,
    };
    let finalize_resp = http
        .apply_default_headers(http.client.post(&finalize_url))
        .json(&body)
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("finalize upload failed: {e}")))?;
    if !finalize_resp.status().is_success() {
        let status = finalize_resp.status();
        let text = finalize_resp.text().await.unwrap_or_default();
        return Err(AttachmentUploadError::new(format!(
            "finalize upload failed status={status} body={text}"
        )));
    }
    Ok(())
}

fn resolve_upload_url(url: &str) -> Option<String> {
    let parsed = Url::parse(url).ok()?;
    if !parsed.as_str().to_lowercase().contains("estuary") {
        return Some(parsed.into());
    }
    parsed
        .query_pairs()
        .find(|(k, _)| k == "upload_url")
        .map(|(_, v)| v.into_owned())
}
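`resolve_upload_url` passes non-estuary URLs through untouched; for estuary wrapper URLs it pulls the real destination out of the `upload_url` query parameter (percent-decoded by `query_pairs`). An illustrative sketch with made-up hostnames:

```rust
// Illustrative only; the hostnames are hypothetical.
fn resolve_examples() {
    // Direct blob URL: returned as-is.
    let direct = "https://blob.example.com/container/file?sig=abc";
    assert_eq!(resolve_upload_url(direct).as_deref(), Some(direct));

    // Estuary wrapper: the inner upload_url query parameter wins.
    let wrapped =
        "https://estuary.example.com/upload?upload_url=https%3A%2F%2Fblob.example.com%2Ff";
    assert_eq!(
        resolve_upload_url(wrapped).as_deref(),
        Some("https://blob.example.com/f")
    );
}
```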
#[derive(Serialize)]
struct CreateFileRequest<'a> {
    file_name: &'a str,
    file_size: u64,
    use_case: &'a str,
    timezone_offset_min: i32,
    reset_rate_limits: bool,
}

#[derive(Serialize)]
struct FinalizeUploadRequest<'a> {
    file_id: &'a str,
    use_case: &'a str,
    index_for_retrieval: bool,
    file_name: &'a str,
}

#[derive(Deserialize)]
struct CreateFileResponse {
    file_id: String,
    upload_url: String,
}

fn is_supported_image(path: &Path) -> bool {
    infer_image_mime(path).is_some()
}

fn infer_image_mime(path: &Path) -> Option<String> {
    let guess = MimeGuess::from_path(path)
        .first_raw()
        .map(std::string::ToString::to_string);
    if let Some(m) = guess {
        if m.starts_with("image/") {
            return Some(m);
        }
    }

    let ext = path
        .extension()
        .and_then(|ext| ext.to_str())
        .map(|ext| ext.trim().to_ascii_lowercase())?;

    let mime = match ext.as_str() {
        "png" => "image/png",
        "jpg" | "jpeg" => "image/jpeg",
        "gif" => "image/gif",
        "webp" => "image/webp",
        "bmp" => "image/bmp",
        "svg" => "image/svg+xml",
        "heic" => "image/heic",
        "heif" => "image/heif",
        _ => return None,
    };

    Some(mime.to_string())
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;

    #[test]
    fn infer_image_mime_accepts_common_extensions() {
        let cases = [
            ("foo.png", Some("image/png")),
            ("bar.JPG", Some("image/jpeg")),
            ("baz.jpeg", Some("image/jpeg")),
            ("img.gif", Some("image/gif")),
            ("slide.WEBP", Some("image/webp")),
            ("art.bmp", Some("image/bmp")),
            ("vector.svg", Some("image/svg+xml")),
            ("photo.heic", Some("image/heic")),
            ("photo.heif", Some("image/heif")),
        ];

        for (path, expected) in cases {
            let actual = infer_image_mime(Path::new(path));
            assert_eq!(actual.as_deref(), expected, "case {path}");
        }
    }

    #[test]
    fn infer_image_mime_rejects_unknown_extension() {
        assert!(infer_image_mime(Path::new("doc.txt")).is_none());
    }
}
codex-rs/cloud-tasks/src/bin/conncheck.rs (new file, 106 lines)
@@ -0,0 +1,106 @@
use codex_backend_client::Client as BackendClient;
use codex_cloud_tasks::util::extract_chatgpt_account_id;
use codex_cloud_tasks::util::normalize_base_url;
use codex_cloud_tasks::util::set_user_agent_suffix;
use codex_core::config::find_codex_home;
use codex_core::default_client::get_codex_user_agent;
use codex_login::AuthManager;
use std::time::Duration;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Base URL (default to ChatGPT backend API) and normalize to canonical form
    let raw_base = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
        .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
    let base_url = normalize_base_url(&raw_base);
    println!("base_url: {base_url}");
    let path_style = if base_url.contains("/backend-api") {
        "wham"
    } else {
        "codex-api"
    };
    println!("path_style: {path_style}");

    // Locate CODEX_HOME and try to load ChatGPT auth
    let codex_home = match find_codex_home() {
        Ok(p) => {
            println!("codex_home: {}", p.display());
            Some(p)
        }
        Err(e) => {
            println!("codex_home: <not found> ({e})");
            None
        }
    };

    // Build backend client with UA
    set_user_agent_suffix("codex_cloud_tasks_conncheck");
    let ua = get_codex_user_agent();
    let mut client = BackendClient::new(base_url.clone())?.with_user_agent(ua);

    // Attach bearer token if available from ChatGPT auth
    let mut have_auth = false;
    if let Some(home) = codex_home {
        let authm = AuthManager::new(home);
        if let Some(auth) = authm.auth() {
            match auth.get_token().await {
                Ok(token) if !token.is_empty() => {
                    have_auth = true;
                    println!("auth: ChatGPT token present ({} chars)", token.len());
                    // Add Authorization header
                    client = client.with_bearer_token(&token);

                    // Attempt to extract ChatGPT account id from the JWT and set header.
                    if let Some(account_id) = extract_chatgpt_account_id(&token) {
                        println!("auth: ChatGPT-Account-Id: {account_id}");
                        client = client.with_chatgpt_account_id(account_id);
                    } else if let Some(acc) = auth.get_account_id() {
                        // Fallback: some older auth.jsons persist account_id
                        println!("auth: ChatGPT-Account-Id (from auth.json): {acc}");
                        client = client.with_chatgpt_account_id(acc);
                    }
                }
                Ok(_) => {
                    println!("auth: ChatGPT token empty");
                }
                Err(e) => {
                    println!("auth: failed to load ChatGPT token: {e}");
                }
            }
        } else {
            println!("auth: no ChatGPT auth.json");
        }
    }

    if !have_auth {
        println!("note: Online endpoints typically require ChatGPT sign-in. Run: `codex login`");
    }

    // Attempt the /list call with a short timeout to avoid hanging
    match path_style {
        "wham" => println!("request: GET /wham/tasks/list?limit=5&task_filter=current"),
        _ => println!("request: GET /api/codex/tasks/list?limit=5&task_filter=current"),
    }
    let fut = client.list_tasks(Some(5), Some("current"), None);
    let res = tokio::time::timeout(Duration::from_secs(30), fut).await;
    match res {
        Err(_) => {
            println!("error: request timed out after 30s");
            std::process::exit(2);
        }
        Ok(Err(e)) => {
            // backend-client includes HTTP status and body in errors.
            println!("error: {e}");
            std::process::exit(1);
        }
        Ok(Ok(list)) => {
            println!("ok: received {} tasks", list.items.len());
            for item in list.items.iter().take(5) {
                println!("- {} — {}", item.id, item.title);
            }
            // Keep output concise; omit full JSON payload to stay readable.
        }
    }

    Ok(())
}
codex-rs/cloud-tasks/src/bin/detailcheck.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
use codex_backend_client::Client as BackendClient;
use codex_cloud_tasks::util::set_user_agent_suffix;
use codex_core::config::find_codex_home;
use codex_core::default_client::get_codex_user_agent;
use codex_login::AuthManager;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
        .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
    set_user_agent_suffix("codex_cloud_tasks_detailcheck");
    let ua = get_codex_user_agent();
    let mut client = BackendClient::new(base_url)?.with_user_agent(ua);

    if let Ok(home) = find_codex_home() {
        let am = AuthManager::new(home);
        if let Some(auth) = am.auth()
            && let Ok(tok) = auth.get_token().await
        {
            client = client.with_bearer_token(tok);
        }
    }

    let list = client.list_tasks(Some(5), Some("current"), None).await?;
    println!("items: {}", list.items.len());
    for item in list.items.iter().take(5) {
        println!("item: {} {}", item.id, item.title);
        let (details, body, ct) = client.get_task_details_with_body(&item.id).await?;
        let diff = codex_backend_client::CodeTaskDetailsResponseExt::unified_diff(&details);
        match diff {
            Some(d) => println!(
                "unified diff len={} sample=\n{}",
                d.len(),
                &d.lines().take(10).collect::<Vec<_>>().join("\n")
            ),
            None => {
                println!(
                    "no unified diff found; ct={ct}; body sample=\n{}",
                    &body.chars().take(5000).collect::<String>()
                );
            }
        }
    }
    Ok(())
}
codex-rs/cloud-tasks/src/bin/envcheck.rs (new file, 136 lines)
@@ -0,0 +1,136 @@
use base64::Engine;
use clap::Parser;
use codex_cloud_tasks::util::set_user_agent_suffix;
use codex_core::config::find_codex_home;
use codex_core::default_client::get_codex_user_agent;
use codex_login::AuthManager;
use reqwest::header::AUTHORIZATION;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;

#[derive(Debug, Parser)]
#[command(version, about = "Resolve Codex environment id (debug helper)")]
struct Args {
    /// Optional override for environment id; if present we just echo it.
    #[arg(long = "env-id")]
    environment_id: Option<String>,
    /// Optional label to select a matching environment (case-insensitive exact match).
    #[arg(long = "env-label")]
    environment_label: Option<String>,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let args = Args::parse();

    // Base URL (default to ChatGPT backend API) with normalization
    let mut base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
        .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
    while base_url.ends_with('/') {
        base_url.pop();
    }
    if (base_url.starts_with("https://chatgpt.com")
        || base_url.starts_with("https://chat.openai.com"))
        && !base_url.contains("/backend-api")
    {
        base_url = format!("{base_url}/backend-api");
    }
    println!("base_url: {base_url}");
    println!(
        "path_style: {}",
        if base_url.contains("/backend-api") {
            "wham"
        } else {
            "codex-api"
        }
    );

    // Build headers: UA + ChatGPT auth if available
    set_user_agent_suffix("codex_cloud_tasks_envcheck");
    let ua = get_codex_user_agent();
    let mut headers = HeaderMap::new();
    headers.insert(
        reqwest::header::USER_AGENT,
        HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
    );

    // Locate CODEX_HOME and try to load ChatGPT auth
    if let Ok(home) = find_codex_home() {
        println!("codex_home: {}", home.display());
        let authm = AuthManager::new(home);
        if let Some(auth) = authm.auth() {
            match auth.get_token().await {
                Ok(token) if !token.is_empty() => {
                    println!("auth: ChatGPT token present ({} chars)", token.len());
                    let value = format!("Bearer {token}");
                    if let Ok(hv) = HeaderValue::from_str(&value) {
                        headers.insert(AUTHORIZATION, hv);
                    }
                    if let Some(account_id) = auth
                        .get_account_id()
                        .or_else(|| extract_chatgpt_account_id(&token))
                    {
                        println!("auth: ChatGPT-Account-Id: {account_id}");
                        if let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
                            && let Ok(hv) = HeaderValue::from_str(&account_id)
                        {
                            headers.insert(name, hv);
                        }
                    }
                }
                Ok(_) => println!("auth: ChatGPT token empty"),
                Err(e) => println!("auth: failed to load ChatGPT token: {e}"),
            }
        } else {
            println!("auth: no ChatGPT auth.json");
        }
    } else {
        println!("codex_home: <not found>");
    }

    // If user supplied an environment id, just echo it and exit.
    if let Some(id) = args.environment_id {
        println!("env: provided env-id={id}");
        return Ok(());
    }

    // Auto-detect environment id using shared env_detect
    match codex_cloud_tasks::env_detect::autodetect_environment_id(
        &base_url,
        &headers,
        args.environment_label,
    )
    .await
    {
        Ok(sel) => {
            println!(
                "env: selected environment_id={} label={}",
                sel.id,
                sel.label.unwrap_or_else(|| "<none>".to_string())
            );
            Ok(())
        }
        Err(e) => {
            println!("env: failed: {e}");
            std::process::exit(2)
        }
    }
}

fn extract_chatgpt_account_id(token: &str) -> Option<String> {
    // JWT: header.payload.signature
    let mut parts = token.split('.');
    let (_h, payload_b64, _s) = match (parts.next(), parts.next(), parts.next()) {
        (Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
        _ => return None,
    };
    let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD
        .decode(payload_b64)
        .ok()?;
    let v: serde_json::Value = serde_json::from_slice(&payload_bytes).ok()?;
    v.get("https://api.openai.com/auth")
        .and_then(|auth| auth.get("chatgpt_account_id"))
        .and_then(|id| id.as_str())
        .map(str::to_string)
}
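The inline normalization in these debug binaries mirrors `normalize_base_url` from `util`: trim trailing slashes, then append `/backend-api` for the two ChatGPT hosts, which in turn selects the "wham" path style. An illustrative restatement:

```rust
// Illustrative only: the same normalization logic as the inline code above.
fn normalize(raw: &str) -> String {
    let mut base_url = raw.trim_end_matches('/').to_string();
    if (base_url.starts_with("https://chatgpt.com")
        || base_url.starts_with("https://chat.openai.com"))
        && !base_url.contains("/backend-api")
    {
        base_url = format!("{base_url}/backend-api");
    }
    base_url
}

fn normalize_examples() {
    assert_eq!(normalize("https://chatgpt.com/"), "https://chatgpt.com/backend-api");
    assert_eq!(
        normalize("https://chatgpt.com/backend-api/"),
        "https://chatgpt.com/backend-api"
    );
    // Other hosts pass through unchanged and select the "codex-api" path style.
    assert_eq!(normalize("https://codex.example.com"), "https://codex.example.com");
}
```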
codex-rs/cloud-tasks/src/bin/newtask.rs (new file, 206 lines)
@@ -0,0 +1,206 @@
use base64::Engine;
use clap::Parser;
use codex_cloud_tasks::util::set_user_agent_suffix;
use codex_core::config::find_codex_home;
use codex_core::default_client::get_codex_user_agent;
use codex_login::AuthManager;
use reqwest::header::AUTHORIZATION;
use reqwest::header::CONTENT_TYPE;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;

#[derive(Debug, Parser)]
#[command(version, about = "Create a new Codex cloud task (debug helper)")]
struct Args {
    /// Optional override for environment id; if absent we auto-detect.
    #[arg(long = "env-id")]
    environment_id: Option<String>,
    /// Optional label match for environment selection (case-insensitive, exact match).
    #[arg(long = "env-label")]
    environment_label: Option<String>,
    /// Branch or ref to use (e.g., main)
    #[arg(long = "ref", default_value = "main")]
    git_ref: String,
    /// Run environment in QA (ask) mode
    #[arg(long = "qa-mode", default_value_t = false)]
    qa_mode: bool,
    /// Task prompt text
    #[arg(required = true)]
    prompt: Vec<String>,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let args = Args::parse();
    let prompt = args.prompt.join(" ");

    // Base URL (default to ChatGPT backend API)
    let mut base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
        .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
    while base_url.ends_with('/') {
        base_url.pop();
    }
    if (base_url.starts_with("https://chatgpt.com")
        || base_url.starts_with("https://chat.openai.com"))
        && !base_url.contains("/backend-api")
    {
        base_url = format!("{base_url}/backend-api");
    }
    println!("base_url: {base_url}");
    let is_wham = base_url.contains("/backend-api");
    println!("path_style: {}", if is_wham { "wham" } else { "codex-api" });

    // Build headers: UA + ChatGPT auth if available
    set_user_agent_suffix("codex_cloud_tasks_newtask");
    let ua = get_codex_user_agent();
    let mut headers = HeaderMap::new();
    headers.insert(
        reqwest::header::USER_AGENT,
        HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
    );
    let mut have_auth = false;
    // Locate CODEX_HOME and try to load ChatGPT auth
    if let Ok(home) = find_codex_home() {
        let authm = AuthManager::new(home);
        if let Some(auth) = authm.auth() {
            match auth.get_token().await {
                Ok(token) if !token.is_empty() => {
                    have_auth = true;
                    println!("auth: ChatGPT token present ({} chars)", token.len());
                    let value = format!("Bearer {token}");
                    if let Ok(hv) = HeaderValue::from_str(&value) {
                        headers.insert(AUTHORIZATION, hv);
                    }
                    if let Some(account_id) = auth
                        .get_account_id()
                        .or_else(|| extract_chatgpt_account_id(&token))
                    {
                        println!("auth: ChatGPT-Account-Id: {account_id}");
                        if let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
                            && let Ok(hv) = HeaderValue::from_str(&account_id)
                        {
                            headers.insert(name, hv);
                        }
                    }
                }
                Ok(_) => println!("auth: ChatGPT token empty"),
                Err(e) => println!("auth: failed to load ChatGPT token: {e}"),
            }
        } else {
            println!("auth: no ChatGPT auth.json");
        }
    }
    if !have_auth {
        println!("note: Online endpoints typically require ChatGPT sign-in. Run: `codex login`");
    }

    // Determine environment id: prefer flag, then by-repo lookup, then full list.
    let env_id = if let Some(id) = args.environment_id.clone() {
        println!("env: using provided env-id={id}");
        id
    } else {
        match codex_cloud_tasks::env_detect::autodetect_environment_id(
            &base_url,
            &headers,
            args.environment_label.clone(),
        )
        .await
        {
            Ok(sel) => sel.id,
            Err(e) => {
                println!("env: failed to auto-detect environment: {e}");
                std::process::exit(2);
            }
        }
    };
    println!("env: selected environment_id={env_id}");

    // Build request payload patterned after VSCode: POST /wham/tasks
    let url = if is_wham {
        format!("{base_url}/wham/tasks")
    } else {
        format!("{base_url}/api/codex/tasks")
    };
    println!(
        "request: POST {}",
        url.strip_prefix(&base_url).unwrap_or(&url)
    );

    // input_items
    let mut input_items: Vec<serde_json::Value> = Vec::new();
    input_items.push(serde_json::json!({
        "type": "message",
        "role": "user",
        "content": [{ "content_type": "text", "text": prompt }]
    }));

    // Optional: starting diff via env var for quick testing
    if let Ok(diff) = std::env::var("CODEX_STARTING_DIFF")
        && !diff.is_empty()
    {
        input_items.push(serde_json::json!({
            "type": "pre_apply_patch",
            "output_diff": { "diff": diff }
        }));
    }

    let request_body = serde_json::json!({
        "new_task": {
            "environment_id": env_id,
            "branch": args.git_ref,
            "run_environment_in_qa_mode": args.qa_mode,
        },
        "input_items": input_items,
    });

    let http = reqwest::Client::builder().build()?;
    let res = http
        .post(&url)
        .headers(headers)
        .header(CONTENT_TYPE, HeaderValue::from_static("application/json"))
        .json(&request_body)
        .send()
        .await?;

    let status = res.status();
    let ct = res
        .headers()
        .get(CONTENT_TYPE)
        .and_then(|v| v.to_str().ok())
        .unwrap_or("")
        .to_string();
    let body = res.text().await.unwrap_or_default();
    println!("status: {status}");
    println!("content-type: {ct}");
    match serde_json::from_str::<serde_json::Value>(&body) {
        Ok(v) => println!(
            "response (pretty JSON):\n{}",
            serde_json::to_string_pretty(&v).unwrap_or(body)
        ),
        Err(_) => println!("response (raw):\n{body}"),
    }

    if !status.is_success() {
        // Exit non-zero on failure
        std::process::exit(1);
    }
    Ok(())
}

fn extract_chatgpt_account_id(token: &str) -> Option<String> {
    // JWT: header.payload.signature
    let mut parts = token.split('.');
    let (_h, payload_b64, _s) = match (parts.next(), parts.next(), parts.next()) {
        (Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
        _ => return None,
    };
    let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD
        .decode(payload_b64)
        .ok()?;
    let v: serde_json::Value = serde_json::from_slice(&payload_bytes).ok()?;
    v.get("https://api.openai.com/auth")
        .and_then(|auth| auth.get("chatgpt_account_id"))
        .and_then(|id| id.as_str())
        .map(str::to_string)
}
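Both binaries carry a private copy of `extract_chatgpt_account_id`; it only inspects the payload segment, so an unsigned, hand-built token is enough to exercise it. Illustrative sketch (the token below is fake, not a credential):

```rust
// Illustrative only: builds a fake unsigned JWT whose payload carries the
// claim that extract_chatgpt_account_id looks for.
fn jwt_extraction_example() {
    use base64::Engine as _;
    let payload = serde_json::json!({
        "https://api.openai.com/auth": { "chatgpt_account_id": "acct_123" }
    });
    let b64 = base64::engine::general_purpose::URL_SAFE_NO_PAD
        .encode(serde_json::to_vec(&payload).unwrap());
    let token = format!("header.{b64}.signature");
    assert_eq!(extract_chatgpt_account_id(&token).as_deref(), Some("acct_123"));
}
```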
@@ -13,12 +13,79 @@ struct CodeEnvironment {
    is_pinned: Option<bool>,
    #[serde(default)]
    task_count: Option<i64>,
    #[serde(default)]
    repo_map: Option<HashMap<String, GitRepository>>,
}

#[derive(Debug, Clone, serde::Deserialize)]
struct GitRepository {
    #[serde(default)]
    repository_full_name: Option<String>,
    #[serde(default)]
    default_branch: Option<String>,
}

#[derive(Debug, Clone)]
pub struct AutodetectSelection {
    pub id: String,
    pub label: Option<String>,
    pub default_branch: Option<String>,
}

fn clean_branch(branch: Option<&str>) -> Option<String> {
    branch
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .map(std::string::ToString::to_string)
}

fn default_branch_from_env(env: &CodeEnvironment, repo_hint: Option<&str>) -> Option<String> {
    let repo_map = env.repo_map.as_ref()?;
    if let Some(hint) = repo_hint {
        if let Some(repo) = repo_map
            .values()
            .find(|repo| repo.repository_full_name.as_deref() == Some(hint))
            && let Some(branch) = clean_branch(repo.default_branch.as_deref())
        {
            return Some(branch);
        }
        if let Some(repo) = repo_map.get(hint)
            && let Some(branch) = clean_branch(repo.default_branch.as_deref())
        {
            return Some(branch);
        }
    }
    repo_map
        .values()
        .find_map(|repo| clean_branch(repo.default_branch.as_deref()))
}

fn merge_environment_row(
    map: &mut HashMap<String, crate::app::EnvironmentRow>,
    env: &CodeEnvironment,
    repo_hint: Option<&str>,
) {
    let default_branch = default_branch_from_env(env, repo_hint);
    let repo_hint_owned = repo_hint.map(str::to_string);
    let entry = map
        .entry(env.id.clone())
        .or_insert_with(|| crate::app::EnvironmentRow {
            id: env.id.clone(),
            label: env.label.clone(),
            is_pinned: env.is_pinned.unwrap_or(false),
            repo_hints: repo_hint_owned.clone(),
            default_branch: default_branch.clone(),
        });
    if entry.label.is_none() {
        entry.label = env.label.clone();
    }
    entry.is_pinned = entry.is_pinned || env.is_pinned.unwrap_or(false);
    if entry.repo_hints.is_none() {
        entry.repo_hints = repo_hint_owned;
    }
    if let Some(branch) = default_branch {
        entry.default_branch = Some(branch);
    }
}

pub async fn autodetect_environment_id(

@@ -62,6 +129,7 @@ pub async fn autodetect_environment_id(
        return Ok(AutodetectSelection {
            id: env.id.clone(),
            label: env.label.as_deref().map(str::to_owned),
            default_branch: default_branch_from_env(&env, None),
        });
    }

@@ -101,6 +169,7 @@ pub async fn autodetect_environment_id(
        return Ok(AutodetectSelection {
            id: env.id.clone(),
            label: env.label.as_deref().map(str::to_owned),
            default_branch: default_branch_from_env(&env, None),
        });
    }
    anyhow::bail!("no environments available")

@@ -276,23 +345,9 @@ pub async fn list_environments(
    match get_json::<Vec<CodeEnvironment>>(&url, headers).await {
        Ok(list) => {
            info!("env_tui: by-repo {}:{} -> {} envs", owner, repo, list.len());
            for e in list {
                let entry =
                    map.entry(e.id.clone())
                        .or_insert_with(|| crate::app::EnvironmentRow {
                            id: e.id.clone(),
                            label: e.label.clone(),
                            is_pinned: e.is_pinned.unwrap_or(false),
                            repo_hints: Some(format!("{owner}/{repo}")),
                        });
                // Merge: keep label if present, or use new; accumulate pinned flag
                if entry.label.is_none() {
                    entry.label = e.label.clone();
                }
                entry.is_pinned = entry.is_pinned || e.is_pinned.unwrap_or(false);
                if entry.repo_hints.is_none() {
                    entry.repo_hints = Some(format!("{owner}/{repo}"));
                }
            for env in list {
                let repo_hint = format!("{owner}/{repo}");
                merge_environment_row(&mut map, &env, Some(repo_hint.as_str()));
            }
        }
        Err(e) => {

@@ -314,19 +369,8 @@ pub async fn list_environments(
    match get_json::<Vec<CodeEnvironment>>(&list_url, headers).await {
        Ok(list) => {
            info!("env_tui: global list -> {} envs", list.len());
            for e in list {
                let entry = map
                    .entry(e.id.clone())
                    .or_insert_with(|| crate::app::EnvironmentRow {
                        id: e.id.clone(),
                        label: e.label.clone(),
                        is_pinned: e.is_pinned.unwrap_or(false),
                        repo_hints: None,
                    });
                if entry.label.is_none() {
                    entry.label = e.label.clone();
                }
                entry.is_pinned = entry.is_pinned || e.is_pinned.unwrap_or(false);
            for env in list {
                merge_environment_row(&mut map, &env, None);
            }
        }
        Err(e) => {
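`default_branch_from_env` prefers a repo whose `repository_full_name` matches the hint, then a repo keyed by the hint itself, then any repo with a usable branch, with `clean_branch` filtering out blank values at each step. A quick illustrative check of that filter:

```rust
// Illustrative only: clean_branch as defined in the hunk above.
fn clean_branch_examples() {
    assert_eq!(clean_branch(Some("  main  ")).as_deref(), Some("main"));
    assert_eq!(clean_branch(Some("   ")), None);
    assert_eq!(clean_branch(None), None);
}
```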
File diff suppressed because it is too large
@@ -15,15 +15,17 @@ use ratatui::widgets::ListItem;
use ratatui::widgets::ListState;
use ratatui::widgets::Padding;
use ratatui::widgets::Paragraph;
use ratatui::widgets::Wrap;
use std::sync::OnceLock;

use crate::app::App;
use crate::app::AttemptView;
use crate::new_task::AttachmentUploadDisplay;
use crate::new_task::SubmitPhase;
use chrono::Local;
use chrono::Utc;
use codex_cloud_tasks_client::AttemptStatus;
use codex_cloud_tasks_client::TaskStatus;
use codex_tui::render_markdown_text;

pub fn draw(frame: &mut Frame, app: &mut App) {
    let area = frame.area();

@@ -48,9 +50,6 @@ pub fn draw(frame: &mut Frame, app: &mut App) {
    if app.env_modal.is_some() {
        draw_env_modal(frame, area, app);
    }
    if app.best_of_modal.is_some() {
        draw_best_of_modal(frame, area, app);
    }
    if app.apply_modal.is_some() {
        draw_apply_modal(frame, area, app);
    }

@@ -123,16 +122,6 @@ pub fn draw_new_task_page(frame: &mut Frame, area: Rect, app: &mut App) {
        spans.push(" • ".into());
        spans.push("Env: none (press ctrl-o to choose)".red());
    }
    if let Some(page) = app.new_task.as_ref() {
        spans.push(" • ".into());
        let attempts = page.best_of_n;
        let label = format!(
            "{} attempt{}",
            attempts,
            if attempts == 1 { "" } else { "s" }
        );
        spans.push(label.cyan());
    }
    spans
};
let block = Block::default()

@@ -152,24 +141,201 @@ pub fn draw_new_task_page(frame: &mut Frame, area: Rect, app: &mut App) {
        .unwrap_or(3)
        .clamp(3, max_allowed);

    // Anchor the composer to the bottom-left by allocating a flexible spacer
    // above it and a fixed `desired`-height area for the composer.
    let rows = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Min(1), Constraint::Length(desired)])
        .split(content);
    let composer_area = rows[1];
    let (mention_area, composer_area) = if let Some(page) = app.new_task.as_ref() {
        compute_new_task_areas(content, desired, page)
    } else {
        let rows = Layout::default()
            .direction(Direction::Vertical)
            .constraints([Constraint::Min(1), Constraint::Length(desired)])
            .split(content);
        (None, rows[1])
    };

    if let Some(page) = app.new_task.as_ref() {
        page.composer.render_ref(composer_area, frame.buffer_mut());
        // Composer renders its own footer hints; no extra row here.
    let submitting = app
        .new_task
        .as_ref()
        .map(|p| p.submit_phase() != SubmitPhase::Idle)
        .unwrap_or(false);

    if let Some(area) = mention_area
        && !submitting
        && let Some(page) = app.new_task.as_ref()
    {
        draw_mention_picker(frame, area, page);
    }

    // Place cursor where composer wants it
    if let Some(page) = app.new_task.as_ref()
        && let Some((x, y)) = page.composer.cursor_pos(composer_area)
    {
        frame.set_cursor_position((x, y));
    if submitting {
        if let Some(page) = app.new_task.as_mut() {
            draw_submission_status(frame, composer_area, page);
        }
    } else if let Some(page) = app.new_task.as_ref() {
        page.composer.render_ref(composer_area, frame.buffer_mut());
        if let Some((x, y)) = page.composer.cursor_pos(composer_area) {
            frame.set_cursor_position((x, y));
        }
    }
}

fn compute_new_task_areas(
    content: Rect,
    desired: u16,
    page: &crate::new_task::NewTaskPage,
) -> (Option<Rect>, Rect) {
    let available_for_mention = content.height.saturating_sub(desired);
    let mention_height = if page.mention_state.current.is_some() && available_for_mention >= 3 {
        page.mention_picker
            .render_height()
            .min(available_for_mention)
            .max(3)
    } else {
        0
    };

    if mention_height > 0 {
        let rows = Layout::default()
            .direction(Direction::Vertical)
            .constraints([
                Constraint::Min(1),
                Constraint::Length(mention_height),
                Constraint::Length(desired),
            ])
            .split(content);
        (Some(rows[1]), rows[2])
    } else {
        let rows = Layout::default()
            .direction(Direction::Vertical)
            .constraints([Constraint::Min(1), Constraint::Length(desired)])
            .split(content);
        (None, rows[1])
    }
}

fn draw_mention_picker(frame: &mut Frame, area: Rect, page: &crate::new_task::NewTaskPage) {
    use ratatui::widgets::ListState;

    let mut state = ListState::default().with_selected(Some(page.mention_picker.selected_index()));
    let block = Block::default()
        .borders(Borders::ALL)
        .title("Files".magenta().bold());
    frame.render_widget(block.clone(), area);
    let inner = block.inner(area);

    if page.mention_picker.items().is_empty() {
        let message = if page.mention_search_pending {
            "Searching…"
        } else if page
            .mention_state
            .current
            .as_ref()
            .is_some_and(|tok| tok.query.is_empty())
        {
            "Type to search"
        } else {
            "No matches"
        };
        frame.render_widget(
            Paragraph::new(Line::from(message.dim())).wrap(Wrap { trim: true }),
            inner,
        );
        return;
    }

    let items: Vec<ListItem> = page
        .mention_picker
        .items()
        .iter()
        .map(|s| {
            let mut spans: Vec<ratatui::text::Span> = vec![s.label.clone().into()];
            if s.path != s.label {
                spans.push("  ".into());
                spans.push(s.path.clone().dim());
            }
            ListItem::new(Line::from(spans))
        })
        .collect();
    frame.render_stateful_widget(
        List::new(items)
            .highlight_style(Style::default().add_modifier(Modifier::BOLD))
            .block(block),
        area,
        &mut state,
    );
}

fn draw_submission_status(frame: &mut Frame, area: Rect, page: &mut crate::new_task::NewTaskPage) {
    use ratatui::text::Span;
    use ratatui::widgets::Paragraph;

    let attachments = page.attachment_display_items();
    let mut constraints: Vec<Constraint> = Vec::new();
    constraints.push(Constraint::Length(1));
    for _ in 0..attachments.len() {
        constraints.push(Constraint::Length(1));
    }
    constraints.push(Constraint::Min(0));

    let rows = Layout::default()
        .direction(Direction::Vertical)
        .constraints(constraints)
        .split(area);

    let head_label = match page.submit_phase() {
        SubmitPhase::WaitingForUploads => "Waiting for uploads…",
        SubmitPhase::Sending | SubmitPhase::Idle => "Submitting…",
    };
    draw_inline_spinner(frame, rows[0], page.submit_throbber_mut(), head_label);

    for (idx, (label, state)) in attachments.iter().enumerate() {
        let row = rows[idx + 1];
        match state {
            AttachmentUploadDisplay::Pending => {
                draw_inline_spinner(frame, row, page.submit_throbber_mut(), label);
            }
            AttachmentUploadDisplay::Uploaded => {
                let line = Line::from(vec!["✔".green(), " ".into(), Span::from(label.clone())]);
                frame.render_widget(Paragraph::new(line), row);
            }
            AttachmentUploadDisplay::Failed(msg) => {
                let line = Line::from(vec![
                    "✖".red(),
                    " ".into(),
                    Span::from(label.clone()).red(),
                    ": ".into(),
                    Span::from(msg.clone()).red(),
                ]);
                frame.render_widget(Paragraph::new(line), row);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::attachments::AttachmentUploadMode;
    use crate::new_task::NewTaskPage;

    #[test]
    fn mention_area_allocated_when_token_active() {
        let mut page = NewTaskPage::new(None, AttachmentUploadMode::Disabled);
        page.mention_state.update_from(Some("@foo".to_string()));
        page.mention_search_pending = true;
        let content = Rect::new(0, 0, 80, 8);
        let desired = page.composer.desired_height(content.width);
        let (mention, composer) = compute_new_task_areas(content, desired, &page);
        assert!(mention.is_some());
        assert!(composer.height > 0);
    }

    #[test]
    fn mention_area_not_allocated_when_no_space() {
        let mut page = NewTaskPage::new(None, AttachmentUploadMode::Disabled);
        page.mention_state.update_from(Some("@foo".to_string()));
        page.mention_search_pending = true;
        let content = Rect::new(0, 0, 80, 3);
        let desired = page.composer.desired_height(content.width);
        let (mention, _composer) = compute_new_task_areas(content, desired, &page);
        assert!(mention.is_none());
    }
}

@@ -179,10 +345,7 @@ fn draw_list(frame: &mut Frame, area: Rect, app: &mut App) {
    // Selection reflects the actual task index (no artificial spacer item).
    let mut state = ListState::default().with_selected(Some(app.selected));
    // Dim task list when a modal/overlay is active to emphasize focus.
    let dim_bg = app.env_modal.is_some()
        || app.apply_modal.is_some()
        || app.best_of_modal.is_some()
        || app.diff_overlay.is_some();
    let dim_bg = app.env_modal.is_some() || app.apply_modal.is_some() || app.diff_overlay.is_some();
    // Dynamic title includes current environment filter
    let suffix_span = if let Some(ref id) = app.env_filter {
        let label = app

@@ -261,12 +424,10 @@ fn draw_footer(frame: &mut Frame, area: Rect, app: &mut App) {
        help.push("a".dim());
        help.push(": Apply  ".dim());
    }
    help.push("o : Set Env  ".dim());
    help.push("Ctrl+N".dim());
    help.push(format!(": Attempts {}x  ", app.best_of_n).dim());
    if app.new_task.is_some() {
        help.push("(editing new task)  ".dim());
        help.push("o : Set Env  ".dim());
    } else {
        help.push("o : Set Env  ".dim());
        help.push("n : New Task  ".dim());
    }
    help.extend(vec!["q".dim(), ": Quit ".dim()]);

@@ -721,14 +882,7 @@ fn conversation_text_spans(
        )];
    }

    let mut rendered = render_markdown_text(display);
    if rendered.lines.is_empty() {
        return vec![Span::raw(display.to_string())];
    }
    // `render_markdown_text` can yield multiple lines when the input contains
    // explicit breaks. We only expect a single line here; join the spans of the
    // first rendered line for styling.
    rendered.lines.remove(0).spans.into_iter().collect()
    vec![Span::raw(display.to_string())]
}

fn attempt_status_span(status: AttemptStatus) -> Option<ratatui::text::Span<'static>> {

@@ -737,7 +891,7 @@ fn attempt_status_span(status: AttemptStatus) -> Option<ratatui::text::Span<'static>> {
        AttemptStatus::Failed => Some("Failed".red().bold()),
        AttemptStatus::InProgress => Some("In progress".magenta()),
        AttemptStatus::Pending => Some("Pending".cyan()),
        AttemptStatus::Cancelled => Some("Cancelled".dim()),
        AttemptStatus::Cancelled => Some("Cancelled".red().dim()),
        AttemptStatus::Unknown => None,
    }
}

@@ -999,50 +1153,3 @@ pub fn draw_env_modal(frame: &mut Frame, area: Rect, app: &mut App) {
        .block(Block::default().borders(Borders::NONE));
    frame.render_stateful_widget(list, rows[2], &mut list_state);
}

pub fn draw_best_of_modal(frame: &mut Frame, area: Rect, app: &mut App) {
    use ratatui::widgets::Wrap;

    let inner = overlay_outer(area);
    let title = Line::from(vec!["Parallel Attempts".magenta().bold()]);
    let block = overlay_block().title(title);

    frame.render_widget(Clear, inner);
    frame.render_widget(block.clone(), inner);
    let content = overlay_content(inner);

    let rows = Layout::default()
        .direction(Direction::Vertical)
        .constraints([Constraint::Length(2), Constraint::Min(1)])
        .split(content);

    let hint = Paragraph::new(Line::from(
        "Use ↑/↓ to choose, 1-4 jump; Enter confirm, Esc cancel"
            .cyan()
            .dim(),
    ))
    .wrap(Wrap { trim: true });
    frame.render_widget(hint, rows[0]);

    let selected = app.best_of_modal.as_ref().map(|m| m.selected).unwrap_or(0);
    let options = [1usize, 2, 3, 4];
    let mut items: Vec<ListItem> = Vec::new();
    for &attempts in &options {
        let mut spans: Vec<ratatui::text::Span> =
            vec![format!("{attempts} attempt{}", if attempts == 1 { "" } else { "s" }).into()];
        spans.push("  ".into());
        spans.push(format!("{attempts}x parallel").dim());
        if attempts == app.best_of_n {
            spans.push("  ".into());
            spans.push("Current".magenta().bold());
        }
        items.push(ListItem::new(Line::from(spans)));
    }
    let sel = selected.min(options.len().saturating_sub(1));
    let mut list_state = ListState::default().with_selected(Some(sel));
    let list = List::new(items)
        .highlight_symbol("› ")
        .highlight_style(Style::default().bold())
        .block(Block::default().borders(Borders::NONE));
    frame.render_stateful_widget(list, rows[1], &mut list_state);
}
|
||||
use base64::Engine as _;
|
||||
use chrono::Utc;
|
||||
use reqwest::header::HeaderMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
pub fn set_user_agent_suffix(suffix: &str) {
|
||||
if let Ok(mut guard) = codex_core::default_client::USER_AGENT_SUFFIX.lock() {
|
||||
@@ -9,15 +12,17 @@ pub fn set_user_agent_suffix(suffix: &str) {
|
||||
}
|
||||
|
||||
pub fn append_error_log(message: impl AsRef<str>) {
|
||||
let ts = Utc::now().to_rfc3339();
|
||||
if let Ok(mut f) = std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open("error.log")
|
||||
let message = message.as_ref();
|
||||
let timestamp = Utc::now().to_rfc3339();
|
||||
|
||||
if let Some(path) = log_file_path()
|
||||
&& write_log_line(&path, ×tamp, message)
|
||||
{
|
||||
use std::io::Write as _;
|
||||
let _ = writeln!(f, "[{ts}] {}", message.as_ref());
|
||||
return;
|
||||
}
|
||||
|
||||
let fallback = Path::new("error.log");
|
||||
let _ = write_log_line(fallback, ×tamp, message);
|
||||
}
|
||||
|
||||
/// Normalize the configured base URL to a canonical form used by the backend client.
|
||||
@@ -37,6 +42,31 @@ pub fn normalize_base_url(input: &str) -> String {
|
||||
base_url
|
||||
}
|
||||
|
||||
fn log_file_path() -> Option<PathBuf> {
|
||||
let mut log_dir = codex_core::config::find_codex_home().ok()?;
|
||||
log_dir.push("log");
|
||||
std::fs::create_dir_all(&log_dir).ok()?;
|
||||
Some(log_dir.join("codex-cloud-tasks.log"))
|
||||
}
|
||||
|
||||
fn write_log_line(path: &Path, timestamp: &str, message: &str) -> bool {
|
||||
let mut opts = std::fs::OpenOptions::new();
|
||||
opts.create(true).append(true);
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::fs::OpenOptionsExt;
|
||||
opts.mode(0o600);
|
||||
}
|
||||
|
||||
match opts.open(path) {
|
||||
Ok(mut file) => {
|
||||
use std::io::Write as _;
|
||||
writeln!(file, "[{timestamp}] {message}").is_ok()
|
||||
}
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the ChatGPT account id from a JWT token, when present.
|
||||
pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
|
||||
let mut parts = token.split('.');
|
||||
@@ -54,6 +84,90 @@ pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
|
||||
.map(str::to_string)
|
||||
}
|
||||
|
||||
pub fn switch_to_branch(branch: &str) -> Result<(), String> {
|
||||
let branch = branch.trim();
|
||||
if branch.is_empty() {
|
||||
return Err("default branch name is empty".to_string());
|
||||
}
|
||||
|
||||
if let Ok(current) = current_branch()
|
||||
&& current == branch
|
||||
{
|
||||
append_error_log(format!("git.switch: already on branch {branch}"));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
append_error_log(format!("git.switch: switching to branch {branch}"));
|
||||
match ensure_success(&["checkout", branch]) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(err) => {
|
||||
append_error_log(format!("git.switch: checkout {branch} failed: {err}"));
|
||||
if ensure_success(&["rev-parse", "--verify", branch]).is_ok() {
|
||||
return Err(err);
|
||||
}
|
||||
if let Err(fetch_err) = ensure_success(&["fetch", "origin", branch]) {
|
||||
append_error_log(format!(
|
||||
"git.switch: fetch origin/{branch} failed: {fetch_err}"
|
||||
));
|
||||
return Err(err);
|
||||
}
|
||||
let tracking = format!("origin/{branch}");
|
||||
ensure_success(&["checkout", "-b", branch, &tracking]).map_err(|create_err| {
|
||||
append_error_log(format!(
|
||||
"git.switch: checkout -b {branch} {tracking} failed: {create_err}"
|
||||
));
|
||||
create_err
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
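`switch_to_branch` tries a plain `git checkout` first and only falls back to `git fetch origin <branch>` plus `git checkout -b <branch> origin/<branch>` when the branch is missing locally. An illustrative call site:

```rust
// Illustrative only: checkout falls back to fetch + create-tracking-branch.
fn go_to_default_branch() {
    match switch_to_branch("main") {
        Ok(()) => println!("on default branch"),
        Err(err) => eprintln!("could not switch: {err}"),
    }
}
```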
fn current_branch() -> Result<String, String> {
    let output = run_git(&["rev-parse", "--abbrev-ref", "HEAD"])?;
    if !output.status.success() {
        return Err(format!(
            "git rev-parse --abbrev-ref failed: {}",
            format_command_failure(output, &["rev-parse", "--abbrev-ref", "HEAD"])
        ));
    }
    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}

fn ensure_success(args: &[&str]) -> Result<(), String> {
    let output = run_git(args)?;
    if output.status.success() {
        return Ok(());
    }
    Err(format_command_failure(output, args))
}

fn run_git(args: &[&str]) -> Result<std::process::Output, String> {
    Command::new("git")
        .args(args)
        .output()
        .map_err(|e| format!("failed to launch git {}: {e}", join_args(args)))
}

fn format_command_failure(output: std::process::Output, args: &[&str]) -> String {
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    format!(
        "git {} exited with status {}. stdout: {} stderr: {}",
        join_args(args),
        output
            .status
            .code()
            .map(|c| c.to_string())
            .unwrap_or_else(|| "<signal>".to_string()),
        stdout.trim(),
        stderr.trim()
    )
}

fn join_args(args: &[&str]) -> String {
    args.join(" ")
}

/// Build headers for ChatGPT-backed requests: `User-Agent`, optional `Authorization`,
/// and optional `ChatGPT-Account-Id`.
pub async fn build_chatgpt_headers() -> HeaderMap {
@@ -15,3 +15,4 @@ path = "src/lib.rs"

[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
uuid = { version = "1", features = ["serde"] }
|
||||
label: "gpt-5-codex medium",
|
||||
description: "",
|
||||
model: "gpt-5-codex",
|
||||
effort: Some(ReasoningEffort::Medium),
|
||||
effort: None,
|
||||
},
|
||||
ModelPreset {
|
||||
id: "gpt-5-codex-high",
|
||||
|
||||
@@ -24,7 +24,6 @@ codex-file-search = { workspace = true }
codex-mcp-client = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-protocol = { workspace = true }
codex-otel = { workspace = true, features = ["otel"] }
dirs = { workspace = true }
env-flags = { workspace = true }
eventsource-stream = { workspace = true }

@@ -92,7 +91,6 @@ tempfile = { workspace = true }
tokio-test = { workspace = true }
walkdir = { workspace = true }
wiremock = { workspace = true }
tracing-test = { workspace = true, features = ["no-env-filter"] }

[package.metadata.cargo-shear]
ignored = ["openssl-sys"]
@@ -5,7 +5,6 @@ You are Codex, based on GPT-5. You are running as a coding agent in the Codex CL
- The arguments to `shell` will be passed to execvp(). Most terminal commands should be prefixed with ["bash", "-lc"].
- Always set the `workdir` param when using the shell function. Do not use `cd` unless absolutely necessary.
- When searching for text or files, prefer using `rg` or `rg --files` respectively because `rg` is much faster than alternatives like `grep`. (If the `rg` command is not found, then use alternatives.)
- When editing or creating files, you MUST use apply_patch as a standalone tool without going through ["bash", "-lc"], `Python`, `cat`, `sed`, ... Example: functions.shell({"command":["apply_patch","*** Begin Patch\nAdd File: hello.txt\n+Hello, world!\n*** End Patch"]}).

## Editing constraints
@@ -45,13 +45,12 @@ pub(crate) async fn apply_patch(
    &turn_context.sandbox_policy,
    &turn_context.cwd,
) {
    SafetyCheck::AutoApprove {
        user_explicitly_approved,
        ..
    } => InternalApplyPatchInvocation::DelegateToExec(ApplyPatchExec {
        action,
        user_explicitly_approved_this_action: user_explicitly_approved,
    }),
    SafetyCheck::AutoApprove { .. } => {
        InternalApplyPatchInvocation::DelegateToExec(ApplyPatchExec {
            action,
            user_explicitly_approved_this_action: false,
        })
    }
    SafetyCheck::AskUser => {
        // Compute a readable summary of path changes to include in the
        // approval request so the user can make an informed decision.
@@ -1,19 +1,6 @@
use std::time::Duration;

use crate::ModelProviderInfo;
use crate::client_common::Prompt;
use crate::client_common::ResponseEvent;
use crate::client_common::ResponseStream;
use crate::error::CodexErr;
use crate::error::Result;
use crate::model_family::ModelFamily;
use crate::openai_tools::create_tools_json_for_chat_completions_api;
use crate::util::backoff;
use bytes::Bytes;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ReasoningItemContent;
use codex_protocol::models::ResponseItem;
use eventsource_stream::Eventsource;
use futures::Stream;
use futures::StreamExt;

@@ -28,13 +15,25 @@ use tokio::time::timeout;
use tracing::debug;
use tracing::trace;

use crate::ModelProviderInfo;
use crate::client_common::Prompt;
use crate::client_common::ResponseEvent;
use crate::client_common::ResponseStream;
use crate::error::CodexErr;
use crate::error::Result;
use crate::model_family::ModelFamily;
use crate::openai_tools::create_tools_json_for_chat_completions_api;
use crate::util::backoff;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ReasoningItemContent;
use codex_protocol::models::ResponseItem;

/// Implementation for the classic Chat Completions API.
pub(crate) async fn stream_chat_completions(
    prompt: &Prompt,
    model_family: &ModelFamily,
    client: &reqwest::Client,
    provider: &ModelProviderInfo,
    otel_event_manager: &OtelEventManager,
) -> Result<ResponseStream> {
    if prompt.output_schema.is_some() {
        return Err(CodexErr::UnsupportedOperation(

@@ -295,13 +294,10 @@ pub(crate) async fn stream_chat_completions(

    let req_builder = provider.create_request_builder(client, &None).await?;

    let res = otel_event_manager
        .log_request(attempt, || {
            req_builder
                .header(reqwest::header::ACCEPT, "text/event-stream")
                .json(&payload)
                .send()
        })
    let res = req_builder
        .header(reqwest::header::ACCEPT, "text/event-stream")
        .json(&payload)
        .send()
        .await;

    match res {

@@ -312,7 +308,6 @@ pub(crate) async fn stream_chat_completions(
        stream,
        tx_event,
        provider.stream_idle_timeout(),
        otel_event_manager.clone(),
    ));
    return Ok(ResponseStream { rx_event });
}

@@ -356,7 +351,6 @@ async fn process_chat_sse<S>(
    stream: S,
    tx_event: mpsc::Sender<Result<ResponseEvent>>,
    idle_timeout: Duration,
    otel_event_manager: OtelEventManager,
) where
    S: Stream<Item = Result<Bytes>> + Unpin,
{

@@ -380,10 +374,7 @@ async fn process_chat_sse<S>(
    let mut reasoning_text = String::new();

    loop {
        let sse = match otel_event_manager
            .log_sse_event(|| timeout(idle_timeout, stream.next()))
            .await
        {
        let sse = match timeout(idle_timeout, stream.next()).await {
            Ok(Some(Ok(ev))) => ev,
            Ok(Some(Err(e))) => {
                let _ = tx_event

@@ -47,7 +47,6 @@ use crate::protocol::RateLimitWindow;
use crate::protocol::TokenUsage;
use crate::token_data::PlanType;
use crate::util::backoff;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
use codex_protocol::models::ResponseItem;

@@ -74,7 +73,6 @@ struct Error {
pub struct ModelClient {
    config: Arc<Config>,
    auth_manager: Option<Arc<AuthManager>>,
    otel_event_manager: OtelEventManager,
    client: reqwest::Client,
    provider: ModelProviderInfo,
    conversation_id: ConversationId,

@@ -86,7 +84,6 @@ impl ModelClient {
    pub fn new(
        config: Arc<Config>,
        auth_manager: Option<Arc<AuthManager>>,
        otel_event_manager: OtelEventManager,
        provider: ModelProviderInfo,
        effort: Option<ReasoningEffortConfig>,
        summary: ReasoningSummaryConfig,

@@ -97,7 +94,6 @@ impl ModelClient {
        Self {
            config,
            auth_manager,
            otel_event_manager,
            client,
            provider,
            conversation_id,

@@ -131,7 +127,6 @@ impl ModelClient {
        &self.config.model_family,
        &self.client,
        &self.provider,
        &self.otel_event_manager,
    )
    .await?;

@@ -168,12 +163,7 @@ impl ModelClient {
    if let Some(path) = &*CODEX_RS_SSE_FIXTURE {
        // short circuit for tests
        warn!(path, "Streaming from fixture");
        return stream_from_fixture(
            path,
            self.provider.clone(),
            self.otel_event_manager.clone(),
        )
        .await;
        return stream_from_fixture(path, self.provider.clone()).await;
    }

    let auth_manager = self.auth_manager.clone();

@@ -243,7 +233,7 @@ impl ModelClient {
    let max_attempts = self.provider.request_max_retries();
    for attempt in 0..=max_attempts {
        match self
            .attempt_stream_responses(attempt, &payload_json, &auth_manager)
            .attempt_stream_responses(&payload_json, &auth_manager)
            .await
        {
            Ok(stream) => {

@@ -268,7 +258,6 @@ impl ModelClient {
    /// Single attempt to start a streaming Responses API call.
    async fn attempt_stream_responses(
        &self,
        attempt: u64,
        payload_json: &Value,
        auth_manager: &Option<Arc<AuthManager>>,
    ) -> std::result::Result<ResponseStream, StreamAttemptError> {

@@ -302,11 +291,7 @@ impl ModelClient {
        req_builder = req_builder.header("chatgpt-account-id", account_id);
    }

    let res = self
        .otel_event_manager
        .log_request(attempt, || req_builder.send())
        .await;

    let res = req_builder.send().await;
    if let Ok(resp) = &res {
        trace!(
            "Response status: {}, cf-ray: {}",

@@ -337,7 +322,6 @@ impl ModelClient {
        stream,
        tx_event,
        self.provider.stream_idle_timeout(),
        self.otel_event_manager.clone(),
    ));

    Ok(ResponseStream { rx_event })

@@ -415,10 +399,6 @@ impl ModelClient {
        self.provider.clone()
    }

    pub fn get_otel_event_manager(&self) -> OtelEventManager {
        self.otel_event_manager.clone()
    }

    /// Returns the currently configured model slug.
    pub fn get_model(&self) -> String {
        self.config.model.clone()

@@ -579,6 +559,10 @@ fn parse_rate_limit_snapshot(headers: &HeaderMap) -> Option<RateLimitSnapshot> {
        "x-codex-secondary-reset-after-seconds",
    );

    if primary.is_none() && secondary.is_none() {
        return None;
    }

    Some(RateLimitSnapshot { primary, secondary })
}
|
||||
@@ -625,7 +609,6 @@ async fn process_sse<S>(
|
||||
stream: S,
|
||||
tx_event: mpsc::Sender<Result<ResponseEvent>>,
|
||||
idle_timeout: Duration,
|
||||
otel_event_manager: OtelEventManager,
|
||||
) where
|
||||
S: Stream<Item = Result<Bytes>> + Unpin,
|
||||
{
|
||||
@@ -637,10 +620,7 @@ async fn process_sse<S>(
|
||||
let mut response_error: Option<CodexErr> = None;
|
||||
|
||||
loop {
|
||||
let sse = match otel_event_manager
|
||||
.log_sse_event(|| timeout(idle_timeout, stream.next()))
|
||||
.await
|
||||
{
|
||||
let sse = match timeout(idle_timeout, stream.next()).await {
|
||||
Ok(Some(Ok(sse))) => sse,
|
||||
Ok(Some(Err(e))) => {
|
||||
debug!("SSE Error: {e:#}");
|
||||
@@ -654,21 +634,6 @@ async fn process_sse<S>(
|
||||
id: response_id,
|
||||
usage,
|
||||
}) => {
|
||||
if let Some(token_usage) = &usage {
|
||||
otel_event_manager.sse_event_completed(
|
||||
token_usage.input_tokens,
|
||||
token_usage.output_tokens,
|
||||
token_usage
|
||||
.input_tokens_details
|
||||
.as_ref()
|
||||
.map(|d| d.cached_tokens),
|
||||
token_usage
|
||||
.output_tokens_details
|
||||
.as_ref()
|
||||
.map(|d| d.reasoning_tokens),
|
||||
token_usage.total_tokens,
|
||||
);
|
||||
}
|
||||
let event = ResponseEvent::Completed {
|
||||
response_id,
|
||||
token_usage: usage.map(Into::into),
|
||||
@@ -676,13 +641,12 @@ async fn process_sse<S>(
|
||||
let _ = tx_event.send(Ok(event)).await;
|
||||
}
|
||||
None => {
|
||||
let error = response_error.unwrap_or(CodexErr::Stream(
|
||||
"stream closed before response.completed".into(),
|
||||
None,
|
||||
));
|
||||
otel_event_manager.see_event_completed_failed(&error);
|
||||
|
||||
let _ = tx_event.send(Err(error)).await;
|
||||
let _ = tx_event
|
||||
.send(Err(response_error.unwrap_or(CodexErr::Stream(
|
||||
"stream closed before response.completed".into(),
|
||||
None,
|
||||
))))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
return;
|
||||
@@ -786,9 +750,7 @@ async fn process_sse<S>(
|
||||
response_error = Some(CodexErr::Stream(message, delay));
|
||||
}
|
||||
Err(e) => {
|
||||
let error = format!("failed to parse ErrorResponse: {e}");
|
||||
debug!(error);
|
||||
response_error = Some(CodexErr::Stream(error, None))
|
||||
debug!("failed to parse ErrorResponse: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -802,9 +764,7 @@ async fn process_sse<S>(
|
||||
response_completed = Some(r);
|
||||
}
|
||||
Err(e) => {
|
||||
let error = format!("failed to parse ResponseCompleted: {e}");
|
||||
debug!(error);
|
||||
response_error = Some(CodexErr::Stream(error, None));
|
||||
debug!("failed to parse ResponseCompleted: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
@@ -851,7 +811,6 @@ async fn process_sse<S>(
|
||||
async fn stream_from_fixture(
|
||||
path: impl AsRef<Path>,
|
||||
provider: ModelProviderInfo,
|
||||
otel_event_manager: OtelEventManager,
|
||||
) -> Result<ResponseStream> {
|
||||
let (tx_event, rx_event) = mpsc::channel::<Result<ResponseEvent>>(1600);
|
||||
let f = std::fs::File::open(path.as_ref())?;
|
||||
@@ -870,7 +829,6 @@ async fn stream_from_fixture(
|
||||
stream,
|
||||
tx_event,
|
||||
provider.stream_idle_timeout(),
|
||||
otel_event_manager,
|
||||
));
|
||||
Ok(ResponseStream { rx_event })
|
||||
}
|
||||
@@ -926,7 +884,6 @@ mod tests {
|
||||
async fn collect_events(
|
||||
chunks: &[&[u8]],
|
||||
provider: ModelProviderInfo,
|
||||
otel_event_manager: OtelEventManager,
|
||||
) -> Vec<Result<ResponseEvent>> {
|
||||
let mut builder = IoBuilder::new();
|
||||
for chunk in chunks {
|
||||
@@ -936,12 +893,7 @@ mod tests {
|
||||
let reader = builder.build();
|
||||
let stream = ReaderStream::new(reader).map_err(CodexErr::Io);
|
||||
let (tx, mut rx) = mpsc::channel::<Result<ResponseEvent>>(16);
|
||||
tokio::spawn(process_sse(
|
||||
stream,
|
||||
tx,
|
||||
provider.stream_idle_timeout(),
|
||||
otel_event_manager,
|
||||
));
|
||||
tokio::spawn(process_sse(stream, tx, provider.stream_idle_timeout()));
|
||||
|
||||
let mut events = Vec::new();
|
||||
while let Some(ev) = rx.recv().await {
|
||||
@@ -955,7 +907,6 @@ mod tests {
|
||||
async fn run_sse(
|
||||
events: Vec<serde_json::Value>,
|
||||
provider: ModelProviderInfo,
|
||||
otel_event_manager: OtelEventManager,
|
||||
) -> Vec<ResponseEvent> {
|
||||
let mut body = String::new();
|
||||
for e in events {
|
||||
@@ -972,12 +923,7 @@ mod tests {
|
||||
|
||||
let (tx, mut rx) = mpsc::channel::<Result<ResponseEvent>>(8);
|
||||
let stream = ReaderStream::new(std::io::Cursor::new(body)).map_err(CodexErr::Io);
|
||||
tokio::spawn(process_sse(
|
||||
stream,
|
||||
tx,
|
||||
provider.stream_idle_timeout(),
|
||||
otel_event_manager,
|
||||
));
|
||||
tokio::spawn(process_sse(stream, tx, provider.stream_idle_timeout()));
|
||||
|
||||
let mut out = Vec::new();
|
||||
while let Some(ev) = rx.recv().await {
|
||||
@@ -986,18 +932,6 @@ mod tests {
|
||||
out
|
||||
}
|
||||
|
||||
fn otel_event_manager() -> OtelEventManager {
|
||||
OtelEventManager::new(
|
||||
ConversationId::new(),
|
||||
"test",
|
||||
"test",
|
||||
None,
|
||||
Some(AuthMode::ChatGPT),
|
||||
false,
|
||||
"test".to_string(),
|
||||
)
|
||||
}
|
||||
|
||||
// ────────────────────────────
|
||||
// Tests from `implement-test-for-responses-api-sse-parser`
|
||||
// ────────────────────────────
|
||||
@@ -1049,12 +983,9 @@ mod tests {
|
||||
requires_openai_auth: false,
|
||||
};
|
||||
|
||||
let otel_event_manager = otel_event_manager();
|
||||
|
||||
let events = collect_events(
|
||||
&[sse1.as_bytes(), sse2.as_bytes(), sse3.as_bytes()],
|
||||
provider,
|
||||
otel_event_manager,
|
||||
)
|
||||
.await;
|
||||
|
||||
@@ -1112,9 +1043,7 @@ mod tests {
|
||||
requires_openai_auth: false,
|
||||
};
|
||||
|
||||
let otel_event_manager = otel_event_manager();
|
||||
|
||||
let events = collect_events(&[sse1.as_bytes()], provider, otel_event_manager).await;
|
||||
let events = collect_events(&[sse1.as_bytes()], provider).await;
|
||||
|
||||
assert_eq!(events.len(), 2);
|
||||
|
||||
@@ -1148,9 +1077,7 @@ mod tests {
|
||||
requires_openai_auth: false,
|
||||
};
|
||||
|
||||
let otel_event_manager = otel_event_manager();
|
||||
|
||||
let events = collect_events(&[sse1.as_bytes()], provider, otel_event_manager).await;
|
||||
let events = collect_events(&[sse1.as_bytes()], provider).await;
|
||||
|
||||
assert_eq!(events.len(), 1);
|
||||
|
||||
@@ -1255,9 +1182,7 @@ mod tests {
|
||||
requires_openai_auth: false,
|
||||
};
|
||||
|
||||
let otel_event_manager = otel_event_manager();
|
||||
|
||||
let out = run_sse(evs, provider, otel_event_manager).await;
|
||||
let out = run_sse(evs, provider).await;
|
||||
assert_eq!(out.len(), case.expected_len, "case {}", case.name);
|
||||
assert!(
|
||||
(case.expect_first)(&out[0]),
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::fmt::Debug;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
@@ -12,7 +11,6 @@ use crate::client_common::REVIEW_PROMPT;
|
||||
use crate::event_mapping::map_response_item_to_event_messages;
|
||||
use crate::function_tool::FunctionCallError;
|
||||
use crate::review_format::format_review_findings_block;
|
||||
use crate::terminal;
|
||||
use crate::user_notification::UserNotifier;
|
||||
use async_channel::Receiver;
|
||||
use async_channel::Sender;
|
||||
@@ -127,8 +125,6 @@ use crate::unified_exec::UnifiedExecSessionManager;
|
||||
use crate::user_instructions::UserInstructions;
|
||||
use crate::user_notification::UserNotification;
|
||||
use crate::util::backoff;
|
||||
use codex_otel::otel_event_manager::OtelEventManager;
|
||||
use codex_otel::otel_event_manager::ToolDecisionSource;
|
||||
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
|
||||
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
|
||||
use codex_protocol::custom_prompts::CustomPrompt;
|
||||
@@ -426,35 +422,11 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
let otel_event_manager = OtelEventManager::new(
|
||||
conversation_id,
|
||||
config.model.as_str(),
|
||||
config.model_family.slug.as_str(),
|
||||
auth_manager.auth().and_then(|a| a.get_account_id()),
|
||||
auth_manager.auth().map(|a| a.mode),
|
||||
config.otel.log_user_prompt,
|
||||
terminal::user_agent(),
|
||||
);
|
||||
|
||||
otel_event_manager.conversation_starts(
|
||||
config.model_provider.name.as_str(),
|
||||
config.model_reasoning_effort,
|
||||
config.model_reasoning_summary,
|
||||
config.model_context_window,
|
||||
config.model_max_output_tokens,
|
||||
config.model_auto_compact_token_limit,
|
||||
config.approval_policy,
|
||||
config.sandbox_policy.clone(),
|
||||
config.mcp_servers.keys().map(String::as_str).collect(),
|
||||
config.active_profile.clone(),
|
||||
);
|
||||
|
||||
// Now that the conversation id is final (may have been updated by resume),
|
||||
// construct the model client.
|
||||
let client = ModelClient::new(
|
||||
config.clone(),
|
||||
Some(auth_manager.clone()),
|
||||
otel_event_manager,
|
||||
provider.clone(),
|
||||
model_reasoning_effort,
|
||||
model_reasoning_summary,
|
||||
@@ -1150,15 +1122,9 @@ async fn submission_loop(
|
||||
updated_config.model_context_window = Some(model_info.context_window);
|
||||
}
|
||||
|
||||
let otel_event_manager = prev.client.get_otel_event_manager().with_model(
|
||||
updated_config.model.as_str(),
|
||||
updated_config.model_family.slug.as_str(),
|
||||
);
|
||||
|
||||
let client = ModelClient::new(
|
||||
Arc::new(updated_config),
|
||||
auth_manager,
|
||||
otel_event_manager,
|
||||
provider,
|
||||
effective_effort,
|
||||
effective_summary,
|
||||
@@ -1210,10 +1176,6 @@ async fn submission_loop(
|
||||
}
|
||||
}
|
||||
Op::UserInput { items } => {
|
||||
turn_context
|
||||
.client
|
||||
.get_otel_event_manager()
|
||||
.user_prompt(&items);
|
||||
// attempt to inject input into current task
|
||||
if let Err(items) = sess.inject_input(items).await {
|
||||
// no current task, spawn a new one
|
||||
@@ -1231,10 +1193,6 @@ async fn submission_loop(
|
||||
summary,
|
||||
final_output_json_schema,
|
||||
} => {
|
||||
turn_context
|
||||
.client
|
||||
.get_otel_event_manager()
|
||||
.user_prompt(&items);
|
||||
// attempt to inject input into current task
|
||||
if let Err(items) = sess.inject_input(items).await {
|
||||
// Derive a fresh TurnContext for this turn using the provided overrides.
|
||||
@@ -1253,18 +1211,11 @@ async fn submission_loop(
|
||||
per_turn_config.model_context_window = Some(model_info.context_window);
|
||||
}
|
||||
|
||||
let otel_event_manager =
|
||||
turn_context.client.get_otel_event_manager().with_model(
|
||||
per_turn_config.model.as_str(),
|
||||
per_turn_config.model_family.slug.as_str(),
|
||||
);
|
||||
|
||||
// Build a new client with per‑turn reasoning settings.
|
||||
// Reuse the same provider and session id; auth defaults to env/API key.
|
||||
let client = ModelClient::new(
|
||||
Arc::new(per_turn_config),
|
||||
auth_manager,
|
||||
otel_event_manager,
|
||||
provider,
|
||||
effort,
|
||||
summary,
|
||||
@@ -1521,19 +1472,10 @@ async fn spawn_review_thread(
|
||||
per_turn_config.model_context_window = Some(model_info.context_window);
|
||||
}
|
||||
|
||||
let otel_event_manager = parent_turn_context
|
||||
.client
|
||||
.get_otel_event_manager()
|
||||
.with_model(
|
||||
per_turn_config.model.as_str(),
|
||||
per_turn_config.model_family.slug.as_str(),
|
||||
);
|
||||
|
||||
let per_turn_config = Arc::new(per_turn_config);
|
||||
let client = ModelClient::new(
|
||||
per_turn_config.clone(),
|
||||
auth_manager,
|
||||
otel_event_manager,
|
||||
provider,
|
||||
per_turn_config.model_reasoning_effort,
|
||||
per_turn_config.model_reasoning_summary,
|
||||
@@ -2198,21 +2140,16 @@ async fn handle_response_item(
|
||||
.await;
|
||||
Some(resp)
|
||||
} else {
|
||||
let result = turn_context
|
||||
.client
|
||||
.get_otel_event_manager()
|
||||
.log_tool_result(name.as_str(), call_id.as_str(), arguments.as_str(), || {
|
||||
handle_function_call(
|
||||
sess,
|
||||
turn_context,
|
||||
turn_diff_tracker,
|
||||
sub_id.to_string(),
|
||||
name.to_owned(),
|
||||
arguments.to_owned(),
|
||||
call_id.clone(),
|
||||
)
|
||||
})
|
||||
.await;
|
||||
let result = handle_function_call(
|
||||
sess,
|
||||
turn_context,
|
||||
turn_diff_tracker,
|
||||
sub_id.to_string(),
|
||||
name,
|
||||
arguments,
|
||||
call_id.clone(),
|
||||
)
|
||||
.await;
|
||||
|
||||
let output = match result {
|
||||
Ok(content) => FunctionCallOutputPayload {
|
||||
@@ -2233,7 +2170,6 @@ async fn handle_response_item(
|
||||
status: _,
|
||||
action,
|
||||
} => {
|
||||
let name = "local_shell";
|
||||
let LocalShellAction::Exec(action) = action;
|
||||
tracing::info!("LocalShellCall: {action:?}");
|
||||
let params = ShellToolCallParams {
|
||||
@@ -2247,18 +2183,11 @@ async fn handle_response_item(
|
||||
(Some(call_id), _) => call_id,
|
||||
(None, Some(id)) => id,
|
||||
(None, None) => {
|
||||
let error_message = "LocalShellCall without call_id or id";
|
||||
|
||||
turn_context
|
||||
.client
|
||||
.get_otel_event_manager()
|
||||
.log_tool_failed(name, error_message);
|
||||
|
||||
error!(error_message);
|
||||
error!("LocalShellCall without call_id or id");
|
||||
return Ok(Some(ResponseInputItem::FunctionCallOutput {
|
||||
call_id: "".to_string(),
|
||||
output: FunctionCallOutputPayload {
|
||||
content: error_message.to_string(),
|
||||
content: "LocalShellCall without call_id or id".to_string(),
|
||||
success: None,
|
||||
},
|
||||
}));
|
||||
@@ -2267,26 +2196,15 @@ async fn handle_response_item(
|
||||
|
||||
let exec_params = to_exec_params(params, turn_context);
|
||||
{
|
||||
let result = turn_context
|
||||
.client
|
||||
.get_otel_event_manager()
|
||||
.log_tool_result(
|
||||
name,
|
||||
effective_call_id.as_str(),
|
||||
exec_params.command.join(" ").as_str(),
|
||||
|| {
|
||||
handle_container_exec_with_params(
|
||||
name,
|
||||
exec_params,
|
||||
sess,
|
||||
turn_context,
|
||||
turn_diff_tracker,
|
||||
sub_id.to_string(),
|
||||
effective_call_id.clone(),
|
||||
)
|
||||
},
|
||||
)
|
||||
.await;
|
||||
let result = handle_container_exec_with_params(
|
||||
exec_params,
|
||||
sess,
|
||||
turn_context,
|
||||
turn_diff_tracker,
|
||||
sub_id.to_string(),
|
||||
effective_call_id.clone(),
|
||||
)
|
||||
.await;
|
||||
|
||||
let output = match result {
|
||||
Ok(content) => FunctionCallOutputPayload {
|
||||
@@ -2311,21 +2229,16 @@ async fn handle_response_item(
|
||||
input,
|
||||
status: _,
|
||||
} => {
|
||||
let result = turn_context
|
||||
.client
|
||||
.get_otel_event_manager()
|
||||
.log_tool_result(name.as_str(), call_id.as_str(), input.as_str(), || {
|
||||
handle_custom_tool_call(
|
||||
sess,
|
||||
turn_context,
|
||||
turn_diff_tracker,
|
||||
sub_id.to_string(),
|
||||
name.to_owned(),
|
||||
input.to_owned(),
|
||||
call_id.clone(),
|
||||
)
|
||||
})
|
||||
.await;
|
||||
let result = handle_custom_tool_call(
|
||||
sess,
|
||||
turn_context,
|
||||
turn_diff_tracker,
|
||||
sub_id.to_string(),
|
||||
name,
|
||||
input,
|
||||
call_id.clone(),
|
||||
)
|
||||
.await;
|
||||
|
||||
let output = match result {
|
||||
Ok(content) => content,
|
||||
@@ -2431,7 +2344,6 @@ async fn handle_function_call(
|
||||
"container.exec" | "shell" => {
|
||||
let params = parse_container_exec_arguments(arguments, turn_context, &call_id)?;
|
||||
handle_container_exec_with_params(
|
||||
name.as_str(),
|
||||
params,
|
||||
sess,
|
||||
turn_context,
|
||||
@@ -2495,7 +2407,6 @@ async fn handle_function_call(
|
||||
justification: None,
|
||||
};
|
||||
handle_container_exec_with_params(
|
||||
name.as_str(),
|
||||
exec_params,
|
||||
sess,
|
||||
turn_context,
|
||||
@@ -2568,7 +2479,6 @@ async fn handle_custom_tool_call(
|
||||
};
|
||||
|
||||
handle_container_exec_with_params(
|
||||
name.as_str(),
|
||||
exec_params,
|
||||
sess,
|
||||
turn_context,
|
||||
@@ -2638,7 +2548,6 @@ fn maybe_translate_shell_command(
|
||||
}
|
||||
|
||||
async fn handle_container_exec_with_params(
|
||||
tool_name: &str,
|
||||
params: ExecParams,
|
||||
sess: &Session,
|
||||
turn_context: &TurnContext,
|
||||
@@ -2646,8 +2555,6 @@ async fn handle_container_exec_with_params(
|
||||
sub_id: String,
|
||||
call_id: String,
|
||||
) -> Result<String, FunctionCallError> {
|
||||
let otel_event_manager = turn_context.client.get_otel_event_manager();
|
||||
|
||||
if params.with_escalated_permissions.unwrap_or(false)
|
||||
&& !matches!(turn_context.approval_policy, AskForApproval::OnRequest)
|
||||
{
|
||||
@@ -2711,7 +2618,6 @@ async fn handle_container_exec_with_params(
|
||||
let safety = if *user_explicitly_approved_this_action {
|
||||
SafetyCheck::AutoApprove {
|
||||
sandbox_type: SandboxType::None,
|
||||
user_explicitly_approved: true,
|
||||
}
|
||||
} else {
|
||||
assess_safety_for_untrusted_command(
|
||||
@@ -2743,23 +2649,7 @@ async fn handle_container_exec_with_params(
|
||||
};
|
||||
|
||||
let sandbox_type = match safety {
|
||||
SafetyCheck::AutoApprove {
|
||||
sandbox_type,
|
||||
user_explicitly_approved,
|
||||
} => {
|
||||
otel_event_manager.tool_decision(
|
||||
tool_name,
|
||||
call_id.as_str(),
|
||||
ReviewDecision::Approved,
|
||||
if user_explicitly_approved {
|
||||
ToolDecisionSource::User
|
||||
} else {
|
||||
ToolDecisionSource::Config
|
||||
},
|
||||
);
|
||||
|
||||
sandbox_type
|
||||
}
|
||||
SafetyCheck::AutoApprove { sandbox_type } => sandbox_type,
|
||||
SafetyCheck::AskUser => {
|
||||
let decision = sess
|
||||
.request_command_approval(
|
||||
@@ -2771,45 +2661,15 @@ async fn handle_container_exec_with_params(
|
||||
)
|
||||
.await;
|
||||
match decision {
|
||||
ReviewDecision::Approved => {
|
||||
otel_event_manager.tool_decision(
|
||||
tool_name,
|
||||
call_id.as_str(),
|
||||
ReviewDecision::Approved,
|
||||
ToolDecisionSource::User,
|
||||
);
|
||||
}
|
||||
ReviewDecision::Approved => (),
|
||||
ReviewDecision::ApprovedForSession => {
|
||||
otel_event_manager.tool_decision(
|
||||
tool_name,
|
||||
call_id.as_str(),
|
||||
ReviewDecision::ApprovedForSession,
|
||||
ToolDecisionSource::User,
|
||||
);
|
||||
sess.add_approved_command(params.command.clone()).await;
|
||||
}
|
||||
ReviewDecision::Denied => {
|
||||
otel_event_manager.tool_decision(
|
||||
tool_name,
|
||||
call_id.as_str(),
|
||||
ReviewDecision::Denied,
|
||||
ToolDecisionSource::User,
|
||||
);
|
||||
ReviewDecision::Denied | ReviewDecision::Abort => {
|
||||
return Err(FunctionCallError::RespondToModel(
|
||||
"exec command rejected by user".to_string(),
|
||||
));
|
||||
}
|
||||
ReviewDecision::Abort => {
|
||||
otel_event_manager.tool_decision(
|
||||
tool_name,
|
||||
call_id.as_str(),
|
||||
ReviewDecision::Abort,
|
||||
ToolDecisionSource::User,
|
||||
);
|
||||
return Err(FunctionCallError::RespondToModel(
|
||||
"exec command aborted by user".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
// No sandboxing is applied because the user has given
|
||||
// explicit approval. Often, we end up in this case because
|
||||
@@ -2818,12 +2678,6 @@ async fn handle_container_exec_with_params(
|
||||
SandboxType::None
|
||||
}
|
||||
SafetyCheck::Reject { reason } => {
|
||||
otel_event_manager.tool_decision(
|
||||
tool_name,
|
||||
call_id.as_str(),
|
||||
ReviewDecision::Denied,
|
||||
ToolDecisionSource::Config,
|
||||
);
|
||||
return Err(FunctionCallError::RespondToModel(format!(
|
||||
"exec command rejected: {reason:?}"
|
||||
)));
|
||||
@@ -2882,7 +2736,6 @@ async fn handle_container_exec_with_params(
|
||||
}
|
||||
Err(CodexErr::Sandbox(error)) => {
|
||||
handle_sandbox_error(
|
||||
tool_name,
|
||||
turn_diff_tracker,
|
||||
params,
|
||||
exec_command_context,
|
||||
@@ -2890,7 +2743,6 @@ async fn handle_container_exec_with_params(
|
||||
sandbox_type,
|
||||
sess,
|
||||
turn_context,
|
||||
&otel_event_manager,
|
||||
)
|
||||
.await
|
||||
}
|
||||
@@ -2900,9 +2752,7 @@ async fn handle_container_exec_with_params(
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn handle_sandbox_error(
|
||||
tool_name: &str,
|
||||
turn_diff_tracker: &mut TurnDiffTracker,
|
||||
params: ExecParams,
|
||||
exec_command_context: ExecCommandContext,
|
||||
@@ -2910,7 +2760,6 @@ async fn handle_sandbox_error(
|
||||
sandbox_type: SandboxType,
|
||||
sess: &Session,
|
||||
turn_context: &TurnContext,
|
||||
otel_event_manager: &OtelEventManager,
|
||||
) -> Result<String, FunctionCallError> {
|
||||
let call_id = exec_command_context.call_id.clone();
|
||||
let sub_id = exec_command_context.sub_id.clone();
|
||||
@@ -2965,13 +2814,6 @@ async fn handle_sandbox_error(
|
||||
sess.notify_background_event(&sub_id, "retrying command without sandbox")
|
||||
.await;
|
||||
|
||||
otel_event_manager.tool_decision(
|
||||
tool_name,
|
||||
call_id.as_str(),
|
||||
decision,
|
||||
ToolDecisionSource::User,
|
||||
);
|
||||
|
||||
// This is an escalated retry; the policy will not be
|
||||
// examined and the sandbox has been set to `None`.
|
||||
let retry_output_result = sess
|
||||
@@ -3012,14 +2854,7 @@ async fn handle_sandbox_error(
|
||||
))),
|
||||
}
|
||||
}
|
||||
decision @ (ReviewDecision::Denied | ReviewDecision::Abort) => {
|
||||
otel_event_manager.tool_decision(
|
||||
tool_name,
|
||||
call_id.as_str(),
|
||||
decision,
|
||||
ToolDecisionSource::User,
|
||||
);
|
||||
|
||||
ReviewDecision::Denied | ReviewDecision::Abort => {
|
||||
// Fall through to original failure handling.
|
||||
Err(FunctionCallError::RespondToModel(
|
||||
"exec command rejected by user".to_string(),
|
||||
@@ -3294,17 +3129,13 @@ mod tests {
|
||||
use super::*;
|
||||
use crate::config::ConfigOverrides;
|
||||
use crate::config::ConfigToml;
|
||||
|
||||
use crate::protocol::CompactedItem;
|
||||
use crate::protocol::InitialHistory;
|
||||
use crate::protocol::ResumedHistory;
|
||||
use crate::state::TaskKind;
|
||||
use crate::tasks::SessionTask;
|
||||
use crate::tasks::SessionTaskContext;
|
||||
use codex_protocol::mcp_protocol::AuthMode;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
|
||||
use mcp_types::ContentBlock;
|
||||
use mcp_types::TextContent;
|
||||
use pretty_assertions::assert_eq;
|
||||
@@ -3539,18 +3370,6 @@ mod tests {
|
||||
})
|
||||
}
|
||||
|
||||
fn otel_event_manager(conversation_id: ConversationId, config: &Config) -> OtelEventManager {
|
||||
OtelEventManager::new(
|
||||
conversation_id,
|
||||
config.model.as_str(),
|
||||
config.model_family.slug.as_str(),
|
||||
None,
|
||||
Some(AuthMode::ChatGPT),
|
||||
false,
|
||||
"test".to_string(),
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn make_session_and_context() -> (Session, TurnContext) {
|
||||
let (tx_event, _rx_event) = async_channel::unbounded();
|
||||
let codex_home = tempfile::tempdir().expect("create temp dir");
|
||||
@@ -3562,11 +3381,9 @@ mod tests {
|
||||
.expect("load default test config");
|
||||
let config = Arc::new(config);
|
||||
let conversation_id = ConversationId::default();
|
||||
let otel_event_manager = otel_event_manager(conversation_id, config.as_ref());
|
||||
let client = ModelClient::new(
|
||||
config.clone(),
|
||||
None,
|
||||
otel_event_manager,
|
||||
config.model_provider.clone(),
|
||||
config.model_reasoning_effort,
|
||||
config.model_reasoning_summary,
|
||||
@@ -3631,11 +3448,9 @@ mod tests {
|
||||
.expect("load default test config");
|
||||
let config = Arc::new(config);
|
||||
let conversation_id = ConversationId::default();
|
||||
let otel_event_manager = otel_event_manager(conversation_id, config.as_ref());
|
||||
let client = ModelClient::new(
|
||||
config.clone(),
|
||||
None,
|
||||
otel_event_manager,
|
||||
config.model_provider.clone(),
|
||||
config.model_reasoning_effort,
|
||||
config.model_reasoning_summary,
|
||||
@@ -3926,12 +3741,10 @@ mod tests {
|
||||
|
||||
let mut turn_diff_tracker = TurnDiffTracker::new();
|
||||
|
||||
let tool_name = "shell";
|
||||
let sub_id = "test-sub".to_string();
|
||||
let call_id = "test-call".to_string();
|
||||
|
||||
let resp = handle_container_exec_with_params(
|
||||
tool_name,
|
||||
params,
|
||||
&session,
|
||||
&turn_context,
|
||||
@@ -3957,7 +3770,6 @@ mod tests {
|
||||
turn_context.sandbox_policy = SandboxPolicy::DangerFullAccess;
|
||||
|
||||
let resp2 = handle_container_exec_with_params(
|
||||
tool_name,
|
||||
params2,
|
||||
&session,
|
||||
&turn_context,
|
||||
|
||||
@@ -1,12 +1,7 @@
|
||||
use crate::config_profile::ConfigProfile;
|
||||
use crate::config_types::DEFAULT_OTEL_ENVIRONMENT;
|
||||
use crate::config_types::History;
|
||||
use crate::config_types::McpServerConfig;
|
||||
use crate::config_types::McpServerTransportConfig;
|
||||
use crate::config_types::Notifications;
|
||||
use crate::config_types::OtelConfig;
|
||||
use crate::config_types::OtelConfigToml;
|
||||
use crate::config_types::OtelExporterKind;
|
||||
use crate::config_types::ReasoningSummaryFormat;
|
||||
use crate::config_types::SandboxWorkspaceWrite;
|
||||
use crate::config_types::ShellEnvironmentPolicy;
|
||||
@@ -203,9 +198,6 @@ pub struct Config {
|
||||
/// All characters are inserted as they are received, and no buffering
|
||||
/// or placeholder replacement will occur for fast keypress bursts.
|
||||
pub disable_paste_burst: bool,
|
||||
|
||||
/// OTEL configuration (exporter type, endpoint, headers, etc.).
|
||||
pub otel: crate::config_types::OtelConfig,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
@@ -322,37 +314,27 @@ pub fn write_global_mcp_servers(
|
||||
for (name, config) in servers {
|
||||
let mut entry = TomlTable::new();
|
||||
entry.set_implicit(false);
|
||||
match &config.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => {
|
||||
entry["command"] = toml_edit::value(command.clone());
|
||||
entry["command"] = toml_edit::value(config.command.clone());
|
||||
|
||||
if !args.is_empty() {
|
||||
let mut args_array = TomlArray::new();
|
||||
for arg in args {
|
||||
args_array.push(arg.clone());
|
||||
}
|
||||
entry["args"] = TomlItem::Value(args_array.into());
|
||||
}
|
||||
if !config.args.is_empty() {
|
||||
let mut args = TomlArray::new();
|
||||
for arg in &config.args {
|
||||
args.push(arg.clone());
|
||||
}
|
||||
entry["args"] = TomlItem::Value(args.into());
|
||||
}
|
||||
|
||||
if let Some(env) = env
|
||||
&& !env.is_empty()
|
||||
{
|
||||
let mut env_table = TomlTable::new();
|
||||
env_table.set_implicit(false);
|
||||
let mut pairs: Vec<_> = env.iter().collect();
|
||||
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
for (key, value) in pairs {
|
||||
env_table.insert(key, toml_edit::value(value.clone()));
|
||||
}
|
||||
entry["env"] = TomlItem::Table(env_table);
|
||||
}
|
||||
}
|
||||
McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
|
||||
entry["url"] = toml_edit::value(url.clone());
|
||||
if let Some(token) = bearer_token {
|
||||
entry["bearer_token"] = toml_edit::value(token.clone());
|
||||
}
|
||||
if let Some(env) = &config.env
|
||||
&& !env.is_empty()
|
||||
{
|
||||
let mut env_table = TomlTable::new();
|
||||
env_table.set_implicit(false);
|
||||
let mut pairs: Vec<_> = env.iter().collect();
|
||||
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
for (key, value) in pairs {
|
||||
env_table.insert(key, toml_edit::value(value.clone()));
|
||||
}
|
||||
entry["env"] = TomlItem::Table(env_table);
|
||||
}
|
||||
|
||||
if let Some(timeout) = config.startup_timeout_sec {
|
||||
@@ -726,9 +708,6 @@ pub struct ConfigToml {
|
||||
/// All characters are inserted as they are received, and no buffering
|
||||
/// or placeholder replacement will occur for fast keypress bursts.
|
||||
pub disable_paste_burst: Option<bool>,
|
||||
|
||||
/// OTEL configuration.
|
||||
pub otel: Option<crate::config_types::OtelConfigToml>,
|
||||
}
|
||||
|
||||
impl From<ConfigToml> for UserSavedConfig {
|
||||
@@ -1078,19 +1057,6 @@ impl Config {
|
||||
.as_ref()
|
||||
.map(|t| t.notifications.clone())
|
||||
.unwrap_or_default(),
|
||||
otel: {
|
||||
let t: OtelConfigToml = cfg.otel.unwrap_or_default();
|
||||
let log_user_prompt = t.log_user_prompt.unwrap_or(false);
|
||||
let environment = t
|
||||
.environment
|
||||
.unwrap_or(DEFAULT_OTEL_ENVIRONMENT.to_string());
|
||||
let exporter = t.exporter.unwrap_or(OtelExporterKind::None);
|
||||
OtelConfig {
|
||||
log_user_prompt,
|
||||
environment,
|
||||
exporter,
|
||||
}
|
||||
},
|
||||
};
|
||||
Ok(config)
|
||||
}
|
||||
@@ -1328,11 +1294,9 @@ exclude_slash_tmp = true
|
||||
servers.insert(
|
||||
"docs".to_string(),
|
||||
McpServerConfig {
|
||||
transport: McpServerTransportConfig::Stdio {
|
||||
command: "echo".to_string(),
|
||||
args: vec!["hello".to_string()],
|
||||
env: None,
|
||||
},
|
||||
command: "echo".to_string(),
|
||||
args: vec!["hello".to_string()],
|
||||
env: None,
|
||||
startup_timeout_sec: Some(Duration::from_secs(3)),
|
||||
tool_timeout_sec: Some(Duration::from_secs(5)),
|
||||
},
|
||||
@@ -1343,14 +1307,8 @@ exclude_slash_tmp = true
|
||||
let loaded = load_global_mcp_servers(codex_home.path())?;
|
||||
assert_eq!(loaded.len(), 1);
|
||||
let docs = loaded.get("docs").expect("docs entry");
|
||||
match &docs.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => {
|
||||
assert_eq!(command, "echo");
|
||||
assert_eq!(args, &vec!["hello".to_string()]);
|
||||
assert!(env.is_none());
|
||||
}
|
||||
other => panic!("unexpected transport {other:?}"),
|
||||
}
|
||||
assert_eq!(docs.command, "echo");
|
||||
assert_eq!(docs.args, vec!["hello".to_string()]);
|
||||
assert_eq!(docs.startup_timeout_sec, Some(Duration::from_secs(3)));
|
||||
assert_eq!(docs.tool_timeout_sec, Some(Duration::from_secs(5)));
|
||||
|
||||
@@ -1384,134 +1342,6 @@ startup_timeout_ms = 2500
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn write_global_mcp_servers_serializes_env_sorted() -> anyhow::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let servers = BTreeMap::from([(
|
||||
"docs".to_string(),
|
||||
McpServerConfig {
|
||||
transport: McpServerTransportConfig::Stdio {
|
||||
command: "docs-server".to_string(),
|
||||
args: vec!["--verbose".to_string()],
|
||||
env: Some(HashMap::from([
|
||||
("ZIG_VAR".to_string(), "3".to_string()),
|
||||
("ALPHA_VAR".to_string(), "1".to_string()),
|
||||
])),
|
||||
},
|
||||
startup_timeout_sec: None,
|
||||
tool_timeout_sec: None,
|
||||
},
|
||||
)]);
|
||||
|
||||
write_global_mcp_servers(codex_home.path(), &servers)?;
|
||||
|
||||
let config_path = codex_home.path().join(CONFIG_TOML_FILE);
|
||||
let serialized = std::fs::read_to_string(&config_path)?;
|
||||
assert_eq!(
|
||||
serialized,
|
||||
r#"[mcp_servers.docs]
|
||||
command = "docs-server"
|
||||
args = ["--verbose"]
|
||||
|
||||
[mcp_servers.docs.env]
|
||||
ALPHA_VAR = "1"
|
||||
ZIG_VAR = "3"
|
||||
"#
|
||||
);
|
||||
|
||||
let loaded = load_global_mcp_servers(codex_home.path())?;
|
||||
let docs = loaded.get("docs").expect("docs entry");
|
||||
match &docs.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => {
|
||||
assert_eq!(command, "docs-server");
|
||||
assert_eq!(args, &vec!["--verbose".to_string()]);
|
||||
let env = env
|
||||
.as_ref()
|
||||
.expect("env should be preserved for stdio transport");
|
||||
assert_eq!(env.get("ALPHA_VAR"), Some(&"1".to_string()));
|
||||
assert_eq!(env.get("ZIG_VAR"), Some(&"3".to_string()));
|
||||
}
|
||||
other => panic!("unexpected transport {other:?}"),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn write_global_mcp_servers_serializes_streamable_http() -> anyhow::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
|
||||
let mut servers = BTreeMap::from([(
|
||||
"docs".to_string(),
|
||||
McpServerConfig {
|
||||
transport: McpServerTransportConfig::StreamableHttp {
|
||||
url: "https://example.com/mcp".to_string(),
|
||||
bearer_token: Some("secret-token".to_string()),
|
||||
},
|
||||
startup_timeout_sec: Some(Duration::from_secs(2)),
|
||||
tool_timeout_sec: None,
|
||||
},
|
||||
)]);
|
||||
|
||||
write_global_mcp_servers(codex_home.path(), &servers)?;
|
||||
|
||||
let config_path = codex_home.path().join(CONFIG_TOML_FILE);
|
||||
let serialized = std::fs::read_to_string(&config_path)?;
|
||||
assert_eq!(
|
||||
serialized,
|
||||
r#"[mcp_servers.docs]
|
||||
url = "https://example.com/mcp"
|
||||
bearer_token = "secret-token"
|
||||
startup_timeout_sec = 2.0
|
||||
"#
|
||||
);
|
||||
|
||||
let loaded = load_global_mcp_servers(codex_home.path())?;
|
||||
let docs = loaded.get("docs").expect("docs entry");
|
||||
match &docs.transport {
|
||||
McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
|
||||
assert_eq!(url, "https://example.com/mcp");
|
||||
assert_eq!(bearer_token.as_deref(), Some("secret-token"));
|
||||
}
|
||||
other => panic!("unexpected transport {other:?}"),
|
||||
}
|
||||
assert_eq!(docs.startup_timeout_sec, Some(Duration::from_secs(2)));
|
||||
|
||||
servers.insert(
|
||||
"docs".to_string(),
|
||||
McpServerConfig {
|
||||
transport: McpServerTransportConfig::StreamableHttp {
|
||||
url: "https://example.com/mcp".to_string(),
|
||||
bearer_token: None,
|
||||
},
|
||||
startup_timeout_sec: None,
|
||||
tool_timeout_sec: None,
|
||||
},
|
||||
);
|
||||
write_global_mcp_servers(codex_home.path(), &servers)?;
|
||||
|
||||
let serialized = std::fs::read_to_string(&config_path)?;
|
||||
assert_eq!(
|
||||
serialized,
|
||||
r#"[mcp_servers.docs]
|
||||
url = "https://example.com/mcp"
|
||||
"#
|
||||
);
|
||||
|
||||
let loaded = load_global_mcp_servers(codex_home.path())?;
|
||||
let docs = loaded.get("docs").expect("docs entry");
|
||||
match &docs.transport {
|
||||
McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
|
||||
assert_eq!(url, "https://example.com/mcp");
|
||||
assert!(bearer_token.is_none());
|
||||
}
|
||||
other => panic!("unexpected transport {other:?}"),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn persist_model_selection_updates_defaults() -> anyhow::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
@@ -1832,7 +1662,6 @@ model_verbosity = "high"
|
||||
active_profile: Some("o3".to_string()),
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
otel: OtelConfig::default(),
|
||||
},
|
||||
o3_profile_config
|
||||
);
|
||||
@@ -1892,7 +1721,6 @@ model_verbosity = "high"
|
||||
active_profile: Some("gpt3".to_string()),
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
otel: OtelConfig::default(),
|
||||
};
|
||||
|
||||
assert_eq!(expected_gpt3_profile_config, gpt3_profile_config);
|
||||
@@ -1967,7 +1795,6 @@ model_verbosity = "high"
|
||||
active_profile: Some("zdr".to_string()),
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
otel: OtelConfig::default(),
|
||||
};
|
||||
|
||||
assert_eq!(expected_zdr_profile_config, zdr_profile_config);
|
||||
@@ -2028,7 +1855,6 @@ model_verbosity = "high"
|
||||
active_profile: Some("gpt5".to_string()),
|
||||
disable_paste_burst: false,
|
||||
tui_notifications: Default::default(),
|
||||
otel: OtelConfig::default(),
|
||||
};
|
||||
|
||||
assert_eq!(expected_gpt5_profile_config, gpt5_profile_config);
|
||||
|
||||
@@ -3,22 +3,25 @@
|
||||
// Note this file should generally be restricted to simple struct/enum
|
||||
// definitions that do not contain business logic.
|
||||
|
||||
use serde::Deserializer;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::time::Duration;
|
||||
use wildmatch::WildMatchPattern;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Deserializer;
|
||||
use serde::Serialize;
|
||||
use serde::de::Error as SerdeError;
|
||||
|
||||
pub const DEFAULT_OTEL_ENVIRONMENT: &str = "dev";
|
||||
|
||||
#[derive(Serialize, Debug, Clone, PartialEq)]
|
||||
pub struct McpServerConfig {
|
||||
#[serde(flatten)]
|
||||
pub transport: McpServerTransportConfig,
|
||||
pub command: String,
|
||||
|
||||
#[serde(default)]
|
||||
pub args: Vec<String>,
|
||||
|
||||
#[serde(default)]
|
||||
pub env: Option<HashMap<String, String>>,
|
||||
|
||||
/// Startup timeout in seconds for initializing MCP server & initially listing tools.
|
||||
#[serde(
|
||||
@@ -40,15 +43,11 @@ impl<'de> Deserialize<'de> for McpServerConfig {
|
||||
{
|
||||
#[derive(Deserialize)]
|
||||
struct RawMcpServerConfig {
|
||||
command: Option<String>,
|
||||
command: String,
|
||||
#[serde(default)]
|
||||
args: Option<Vec<String>>,
|
||||
args: Vec<String>,
|
||||
#[serde(default)]
|
||||
env: Option<HashMap<String, String>>,
|
||||
|
||||
url: Option<String>,
|
||||
bearer_token: Option<String>,
|
||||
|
||||
#[serde(default)]
|
||||
startup_timeout_sec: Option<f64>,
|
||||
#[serde(default)]
|
||||
@@ -68,81 +67,16 @@ impl<'de> Deserialize<'de> for McpServerConfig {
|
||||
(None, None) => None,
|
||||
};
|
||||
|
||||
fn throw_if_set<E, T>(transport: &str, field: &str, value: Option<&T>) -> Result<(), E>
|
||||
where
|
||||
E: SerdeError,
|
||||
{
|
||||
if value.is_none() {
|
||||
return Ok(());
|
||||
}
|
||||
Err(E::custom(format!(
|
||||
"{field} is not supported for {transport}",
|
||||
)))
|
||||
}
|
||||
|
||||
let transport = match raw {
|
||||
RawMcpServerConfig {
|
||||
command: Some(command),
|
||||
args,
|
||||
env,
|
||||
url,
|
||||
bearer_token,
|
||||
..
|
||||
} => {
|
||||
throw_if_set("stdio", "url", url.as_ref())?;
|
||||
throw_if_set("stdio", "bearer_token", bearer_token.as_ref())?;
|
||||
McpServerTransportConfig::Stdio {
|
||||
command,
|
||||
args: args.unwrap_or_default(),
|
||||
env,
|
||||
}
|
||||
}
|
||||
RawMcpServerConfig {
|
||||
url: Some(url),
|
||||
bearer_token,
|
||||
command,
|
||||
args,
|
||||
env,
|
||||
..
|
||||
} => {
|
||||
throw_if_set("streamable_http", "command", command.as_ref())?;
|
||||
throw_if_set("streamable_http", "args", args.as_ref())?;
|
||||
throw_if_set("streamable_http", "env", env.as_ref())?;
|
||||
McpServerTransportConfig::StreamableHttp { url, bearer_token }
|
||||
}
|
||||
_ => return Err(SerdeError::custom("invalid transport")),
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
transport,
|
||||
command: raw.command,
|
||||
args: raw.args,
|
||||
env: raw.env,
|
||||
startup_timeout_sec,
|
||||
tool_timeout_sec: raw.tool_timeout_sec,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
|
||||
#[serde(untagged, deny_unknown_fields, rename_all = "snake_case")]
|
||||
pub enum McpServerTransportConfig {
|
||||
/// https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#stdio
|
||||
Stdio {
|
||||
command: String,
|
||||
#[serde(default)]
|
||||
args: Vec<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
env: Option<HashMap<String, String>>,
|
||||
},
|
||||
/// https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http
|
||||
StreamableHttp {
|
||||
url: String,
|
||||
/// A plain text bearer token to use for authentication.
|
||||
/// This bearer token will be included in the HTTP request header as an `Authorization: Bearer <token>` header.
|
||||
/// This should be used with caution because it lives on disk in clear text.
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
bearer_token: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
mod option_duration_secs {
|
||||
use serde::Deserialize;
|
||||
use serde::Deserializer;
|
||||
@@ -221,64 +155,6 @@ pub enum HistoryPersistence {
|
||||
None,
|
||||
}
|
||||
|
||||
// ===== OTEL configuration =====
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum OtelHttpProtocol {
|
||||
/// Binary payload
|
||||
Binary,
|
||||
/// JSON payload
|
||||
Json,
|
||||
}
|
||||
|
||||
/// Which OTEL exporter to use.
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum OtelExporterKind {
|
||||
None,
|
||||
OtlpHttp {
|
||||
endpoint: String,
|
||||
headers: HashMap<String, String>,
|
||||
protocol: OtelHttpProtocol,
|
||||
},
|
||||
OtlpGrpc {
|
||||
endpoint: String,
|
||||
headers: HashMap<String, String>,
|
||||
},
|
||||
}
|
||||
|
||||
/// OTEL settings loaded from config.toml. Fields are optional so we can apply defaults.
|
||||
#[derive(Deserialize, Debug, Clone, PartialEq, Default)]
|
||||
pub struct OtelConfigToml {
|
||||
/// Log user prompt in traces
|
||||
pub log_user_prompt: Option<bool>,
|
||||
|
||||
/// Mark traces with environment (dev, staging, prod, test). Defaults to dev.
|
||||
pub environment: Option<String>,
|
||||
|
||||
/// Exporter to use. Defaults to `otlp-file`.
|
||||
pub exporter: Option<OtelExporterKind>,
|
||||
}
|
||||
|
||||
/// Effective OTEL settings after defaults are applied.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct OtelConfig {
|
||||
pub log_user_prompt: bool,
|
||||
pub environment: String,
|
||||
pub exporter: OtelExporterKind,
|
||||
}
|
||||
|
||||
impl Default for OtelConfig {
|
||||
fn default() -> Self {
|
||||
OtelConfig {
|
||||
log_user_prompt: false,
|
||||
environment: DEFAULT_OTEL_ENVIRONMENT.to_owned(),
|
||||
exporter: OtelExporterKind::None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum Notifications {
|
||||
@@ -427,139 +303,3 @@ pub enum ReasoningSummaryFormat {
|
||||
None,
|
||||
Experimental,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn deserialize_stdio_command_server_config() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
r#"
|
||||
command = "echo"
|
||||
"#,
|
||||
)
|
||||
.expect("should deserialize command config");
|
||||
|
||||
assert_eq!(
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::Stdio {
|
||||
command: "echo".to_string(),
|
||||
args: vec![],
|
||||
env: None
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_stdio_command_server_config_with_args() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
r#"
|
||||
command = "echo"
|
||||
args = ["hello", "world"]
|
||||
"#,
|
||||
)
|
||||
.expect("should deserialize command config");
|
||||
|
||||
assert_eq!(
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::Stdio {
|
||||
command: "echo".to_string(),
|
||||
args: vec!["hello".to_string(), "world".to_string()],
|
||||
env: None
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_stdio_command_server_config_with_arg_with_args_and_env() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
r#"
|
||||
command = "echo"
|
||||
args = ["hello", "world"]
|
||||
env = { "FOO" = "BAR" }
|
||||
"#,
|
||||
)
|
||||
.expect("should deserialize command config");
|
||||
|
||||
assert_eq!(
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::Stdio {
|
||||
command: "echo".to_string(),
|
||||
args: vec!["hello".to_string(), "world".to_string()],
|
||||
env: Some(HashMap::from([("FOO".to_string(), "BAR".to_string())]))
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_streamable_http_server_config() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
r#"
|
||||
url = "https://example.com/mcp"
|
||||
"#,
|
||||
)
|
||||
.expect("should deserialize http config");
|
||||
|
||||
assert_eq!(
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url: "https://example.com/mcp".to_string(),
|
||||
bearer_token: None
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_streamable_http_server_config_with_bearer_token() {
|
||||
let cfg: McpServerConfig = toml::from_str(
|
||||
r#"
|
||||
url = "https://example.com/mcp"
|
||||
bearer_token = "secret"
|
||||
"#,
|
||||
)
|
||||
.expect("should deserialize http config");
|
||||
|
||||
assert_eq!(
|
||||
cfg.transport,
|
||||
McpServerTransportConfig::StreamableHttp {
|
||||
url: "https://example.com/mcp".to_string(),
|
||||
bearer_token: Some("secret".to_string())
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_rejects_command_and_url() {
|
||||
toml::from_str::<McpServerConfig>(
|
||||
r#"
|
||||
command = "echo"
|
||||
url = "https://example.com"
|
||||
"#,
|
||||
)
|
||||
.expect_err("should reject command+url");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_rejects_env_for_http_transport() {
|
||||
toml::from_str::<McpServerConfig>(
|
||||
r#"
|
||||
url = "https://example.com"
|
||||
env = { "FOO" = "BAR" }
|
||||
"#,
|
||||
)
|
||||
.expect_err("should reject env for http transport");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_rejects_bearer_token_for_stdio_transport() {
|
||||
toml::from_str::<McpServerConfig>(
|
||||
r#"
|
||||
command = "echo"
|
||||
bearer_token = "secret"
|
||||
"#,
|
||||
)
|
||||
.expect_err("should reject bearer token for stdio transport");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,88 +63,16 @@ pub async fn discover_prompts_in_excluding(
|
||||
Ok(s) => s,
|
||||
Err(_) => continue,
|
||||
};
|
||||
let (description, argument_hint, body) = parse_frontmatter(&content);
|
||||
out.push(CustomPrompt {
|
||||
name,
|
||||
path,
|
||||
content: body,
|
||||
description,
|
||||
argument_hint,
|
||||
content,
|
||||
});
|
||||
}
|
||||
out.sort_by(|a, b| a.name.cmp(&b.name));
|
||||
out
|
||||
}
|
||||
|
||||
/// Parse optional YAML-like frontmatter at the beginning of `content`.
|
||||
/// Supported keys:
|
||||
/// - `description`: short description shown in the slash popup
|
||||
/// - `argument-hint` or `argument_hint`: brief hint string shown after the description
|
||||
/// Returns (description, argument_hint, body_without_frontmatter).
|
||||
fn parse_frontmatter(content: &str) -> (Option<String>, Option<String>, String) {
|
||||
let mut segments = content.split_inclusive('\n');
|
||||
let Some(first_segment) = segments.next() else {
|
||||
return (None, None, String::new());
|
||||
};
|
||||
let first_line = first_segment.trim_end_matches(['\r', '\n']);
|
||||
if first_line.trim() != "---" {
|
||||
return (None, None, content.to_string());
|
||||
}
|
||||
|
||||
let mut desc: Option<String> = None;
|
||||
let mut hint: Option<String> = None;
|
||||
let mut frontmatter_closed = false;
|
||||
let mut consumed = first_segment.len();
|
||||
|
||||
for segment in segments {
|
||||
let line = segment.trim_end_matches(['\r', '\n']);
|
||||
let trimmed = line.trim();
|
||||
|
||||
if trimmed == "---" {
|
||||
frontmatter_closed = true;
|
||||
consumed += segment.len();
|
||||
break;
|
||||
}
|
||||
|
||||
if trimmed.is_empty() || trimmed.starts_with('#') {
|
||||
consumed += segment.len();
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some((k, v)) = trimmed.split_once(':') {
|
||||
let key = k.trim().to_ascii_lowercase();
|
||||
let mut val = v.trim().to_string();
|
||||
if val.len() >= 2 {
|
||||
let bytes = val.as_bytes();
|
||||
let first = bytes[0];
|
||||
let last = bytes[bytes.len() - 1];
|
||||
if (first == b'\"' && last == b'\"') || (first == b'\'' && last == b'\'') {
|
||||
val = val[1..val.len().saturating_sub(1)].to_string();
|
||||
}
|
||||
}
|
||||
match key.as_str() {
|
||||
"description" => desc = Some(val),
|
||||
"argument-hint" | "argument_hint" => hint = Some(val),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
consumed += segment.len();
|
||||
}
|
||||
|
||||
if !frontmatter_closed {
|
||||
// Unterminated frontmatter: treat input as-is.
|
||||
return (None, None, content.to_string());
|
||||
}
|
||||
|
||||
let body = if consumed >= content.len() {
|
||||
String::new()
|
||||
} else {
|
||||
content[consumed..].to_string()
|
||||
};
|
||||
(desc, hint, body)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -196,31 +124,4 @@ mod tests {
|
||||
let names: Vec<String> = found.into_iter().map(|e| e.name).collect();
|
||||
assert_eq!(names, vec!["good"]);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn parses_frontmatter_and_strips_from_body() {
|
||||
let tmp = tempdir().expect("create TempDir");
|
||||
let dir = tmp.path();
|
||||
let file = dir.join("withmeta.md");
|
||||
let text = "---\nname: ignored\ndescription: \"Quick review command\"\nargument-hint: \"[file] [priority]\"\n---\nActual body with $1 and $ARGUMENTS";
|
||||
fs::write(&file, text).unwrap();
|
||||
|
||||
let found = discover_prompts_in(dir).await;
|
||||
assert_eq!(found.len(), 1);
|
||||
let p = &found[0];
|
||||
assert_eq!(p.name, "withmeta");
|
||||
assert_eq!(p.description.as_deref(), Some("Quick review command"));
|
||||
assert_eq!(p.argument_hint.as_deref(), Some("[file] [priority]"));
|
||||
// Body should not include the frontmatter delimiters.
|
||||
assert_eq!(p.content, "Actual body with $1 and $ARGUMENTS");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_frontmatter_preserves_body_newlines() {
|
||||
let content = "---\r\ndescription: \"Line endings\"\r\nargument_hint: \"[arg]\"\r\n---\r\nFirst line\r\nSecond line\r\n";
|
||||
let (desc, hint, body) = parse_frontmatter(content);
|
||||
assert_eq!(desc.as_deref(), Some("Line endings"));
|
||||
assert_eq!(hint.as_deref(), Some("[arg]"));
|
||||
assert_eq!(body, "First line\r\nSecond line\r\n");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ use crate::spawn::CODEX_SANDBOX_ENV_VAR;
|
||||
use reqwest::header::HeaderValue;
|
||||
use std::sync::LazyLock;
|
||||
use std::sync::Mutex;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
/// Set this to add a suffix to the User-Agent string.
|
||||
///
|
||||
@@ -27,15 +26,8 @@ pub struct Originator {
|
||||
pub value: String,
|
||||
pub header_value: HeaderValue,
|
||||
}
|
||||
static ORIGINATOR: OnceLock<Originator> = OnceLock::new();
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SetOriginatorError {
|
||||
InvalidHeaderValue,
|
||||
AlreadyInitialized,
|
||||
}
|
||||
|
||||
fn init_originator_from_env() -> Originator {
|
||||
pub static ORIGINATOR: LazyLock<Originator> = LazyLock::new(|| {
|
||||
let default = "codex_cli_rs";
|
||||
let value = std::env::var(CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR)
|
||||
.unwrap_or_else(|_| default.to_string());
|
||||
@@ -53,34 +45,14 @@ fn init_originator_from_env() -> Originator {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn build_originator(value: String) -> Result<Originator, SetOriginatorError> {
|
||||
let header_value =
|
||||
HeaderValue::from_str(&value).map_err(|_| SetOriginatorError::InvalidHeaderValue)?;
|
||||
Ok(Originator {
|
||||
value,
|
||||
header_value,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set_default_originator(value: &str) -> Result<(), SetOriginatorError> {
|
||||
let originator = build_originator(value.to_string())?;
|
||||
ORIGINATOR
|
||||
.set(originator)
|
||||
.map_err(|_| SetOriginatorError::AlreadyInitialized)
|
||||
}
|
||||
|
||||
pub fn originator() -> &'static Originator {
|
||||
ORIGINATOR.get_or_init(init_originator_from_env)
|
||||
}
|
||||
});
|
||||
|
||||
pub fn get_codex_user_agent() -> String {
|
||||
let build_version = env!("CARGO_PKG_VERSION");
|
||||
let os_info = os_info::get();
|
||||
let prefix = format!(
|
||||
"{}/{build_version} ({} {}; {}) {}",
|
||||
originator().value.as_str(),
|
||||
ORIGINATOR.value.as_str(),
|
||||
os_info.os_type(),
|
||||
os_info.version(),
|
||||
os_info.architecture().unwrap_or("unknown"),
|
||||
@@ -128,7 +100,7 @@ fn sanitize_user_agent(candidate: String, fallback: &str) -> String {
|
||||
tracing::warn!(
|
||||
"Falling back to default Codex originator because base user agent string is invalid"
|
||||
);
|
||||
originator().value.clone()
|
||||
ORIGINATOR.value.clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -137,7 +109,7 @@ pub fn create_client() -> reqwest::Client {
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert("originator", originator().header_value.clone());
|
||||
headers.insert("originator", ORIGINATOR.header_value.clone());
|
||||
let ua = get_codex_user_agent();
|
||||
|
||||
let mut builder = reqwest::Client::builder()
|
||||
|
||||
89
codex-rs/core/src/internal_storage.rs
Normal file
@@ -0,0 +1,89 @@
|
||||
use anyhow::Context;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::io::ErrorKind;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub(crate) const INTERNAL_STORAGE_FILE: &str = "internal_storage.json";
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct InternalStorage {
|
||||
#[serde(skip)]
|
||||
storage_path: PathBuf,
|
||||
#[serde(default = "default_gpt_5_codex_model_prompt_seen")]
|
||||
pub gpt_5_codex_model_prompt_seen: bool,
|
||||
}
|
||||
|
||||
const fn default_gpt_5_codex_model_prompt_seen() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
impl Default for InternalStorage {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
storage_path: PathBuf::new(),
|
||||
gpt_5_codex_model_prompt_seen: default_gpt_5_codex_model_prompt_seen(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(jif) generalise all the file writers and build proper async channel inserters.
|
||||
impl InternalStorage {
|
||||
pub fn load(codex_home: &Path) -> Self {
|
||||
let storage_path = codex_home.join(INTERNAL_STORAGE_FILE);
|
||||
|
||||
match std::fs::read_to_string(&storage_path) {
|
||||
Ok(serialized) => match serde_json::from_str::<Self>(&serialized) {
|
||||
Ok(mut storage) => {
|
||||
storage.storage_path = storage_path;
|
||||
storage
|
||||
}
|
||||
Err(error) => {
|
||||
tracing::warn!("failed to parse internal storage: {error:?}");
|
||||
Self::empty(storage_path)
|
||||
}
|
||||
},
|
||||
Err(error) => {
|
||||
if error.kind() == ErrorKind::NotFound {
|
||||
tracing::debug!(
|
||||
"internal storage not found at {}; initializing defaults",
|
||||
storage_path.display()
|
||||
);
|
||||
} else {
|
||||
tracing::warn!("failed to read internal storage: {error:?}");
|
||||
}
|
||||
Self::empty(storage_path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn empty(storage_path: PathBuf) -> Self {
|
||||
Self {
|
||||
storage_path,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn persist(&self) -> anyhow::Result<()> {
|
||||
let serialized = serde_json::to_string_pretty(self)?;
|
||||
|
||||
if let Some(parent) = self.storage_path.parent() {
|
||||
tokio::fs::create_dir_all(parent).await.with_context(|| {
|
||||
format!(
|
||||
"failed to create internal storage directory at {}",
|
||||
parent.display()
|
||||
)
|
||||
})?;
|
||||
}
|
||||
|
||||
tokio::fs::write(&self.storage_path, serialized)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"failed to persist internal storage at {}",
|
||||
self.storage_path.display()
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -29,6 +29,7 @@ mod exec_command;
|
||||
pub mod exec_env;
|
||||
mod flags;
|
||||
pub mod git_info;
|
||||
pub mod internal_storage;
|
||||
pub mod landlock;
|
||||
mod mcp_connection_manager;
|
||||
mod mcp_tool_call;
|
||||
@@ -103,5 +104,3 @@ pub use codex_protocol::models::LocalShellExecAction;
|
||||
pub use codex_protocol::models::LocalShellStatus;
|
||||
pub use codex_protocol::models::ReasoningItemContent;
|
||||
pub use codex_protocol::models::ResponseItem;
|
||||
|
||||
pub mod otel_init;
|
||||
|
||||
@@ -29,7 +29,6 @@ use tracing::info;
use tracing::warn;

use crate::config_types::McpServerConfig;
use crate::config_types::McpServerTransportConfig;

/// Delimiter used to separate the server name from the tool name in a fully
/// qualified tool name.
@@ -108,7 +107,7 @@ impl McpClientAdapter {
        params: mcp_types::InitializeRequestParams,
        startup_timeout: Duration,
    ) -> Result<Self> {
        info!(
        tracing::error!(
            "new_stdio_client use_rmcp_client: {use_rmcp_client} program: {program:?} args: {args:?} env: {env:?} params: {params:?} startup_timeout: {startup_timeout:?}"
        );
        if use_rmcp_client {
@@ -122,17 +121,6 @@ impl McpClientAdapter {
        }
    }

    async fn new_streamable_http_client(
        url: String,
        bearer_token: Option<String>,
        params: mcp_types::InitializeRequestParams,
        startup_timeout: Duration,
    ) -> Result<Self> {
        let client = Arc::new(RmcpClient::new_streamable_http_client(url, bearer_token)?);
        client.initialize(params, Some(startup_timeout)).await?;
        Ok(McpClientAdapter::Rmcp(client))
    }

    async fn list_tools(
        &self,
        params: Option<mcp_types::ListToolsRequestParams>,
@@ -188,6 +176,8 @@ impl McpConnectionManager {
            return Ok((Self::default(), ClientStartErrors::default()));
        }

        tracing::error!("new mcp_servers: {mcp_servers:?} use_rmcp_client: {use_rmcp_client}");

        // Launch all configured servers concurrently.
        let mut join_set = JoinSet::new();
        let mut errors = ClientStartErrors::new();
@@ -202,24 +192,16 @@ impl McpConnectionManager {
                continue;
            }

            if matches!(
                cfg.transport,
                McpServerTransportConfig::StreamableHttp { .. }
            ) && !use_rmcp_client
            {
                info!(
                    "skipping MCP server `{}` configured with url because rmcp client is disabled",
                    server_name
                );
                continue;
            }

            let startup_timeout = cfg.startup_timeout_sec.unwrap_or(DEFAULT_STARTUP_TIMEOUT);
            let tool_timeout = cfg.tool_timeout_sec.unwrap_or(DEFAULT_TOOL_TIMEOUT);

            let use_rmcp_client_flag = use_rmcp_client;
            join_set.spawn(async move {
                let McpServerConfig { transport, .. } = cfg;
                let McpServerConfig {
                    command, args, env, ..
                } = cfg;
                let command_os: OsString = command.into();
                let args_os: Vec<OsString> = args.into_iter().map(Into::into).collect();
                let params = mcp_types::InitializeRequestParams {
                    capabilities: ClientCapabilities {
                        experimental: None,
@@ -241,30 +223,15 @@ impl McpConnectionManager {
                    protocol_version: mcp_types::MCP_SCHEMA_VERSION.to_owned(),
                };

                let client = match transport {
                    McpServerTransportConfig::Stdio { command, args, env } => {
                        let command_os: OsString = command.into();
                        let args_os: Vec<OsString> = args.into_iter().map(Into::into).collect();
                        McpClientAdapter::new_stdio_client(
                            use_rmcp_client_flag,
                            command_os,
                            args_os,
                            env,
                            params.clone(),
                            startup_timeout,
                        )
                        .await
                    }
                    McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                        McpClientAdapter::new_streamable_http_client(
                            url,
                            bearer_token,
                            params,
                            startup_timeout,
                        )
                        .await
                    }
                }
                let client = McpClientAdapter::new_stdio_client(
                    use_rmcp_client_flag,
                    command_os,
                    args_os,
                    env,
                    params,
                    startup_timeout,
                )
                .await
                .map(|c| (c, startup_timeout));

                ((server_name, tool_timeout), client)
@@ -1,61 +0,0 @@
use crate::config::Config;
use crate::config_types::OtelExporterKind as Kind;
use crate::config_types::OtelHttpProtocol as Protocol;
use crate::default_client::originator;
use codex_otel::config::OtelExporter;
use codex_otel::config::OtelHttpProtocol;
use codex_otel::config::OtelSettings;
use codex_otel::otel_provider::OtelProvider;
use std::error::Error;

/// Build an OpenTelemetry provider from the app Config.
///
/// Returns `None` when OTEL export is disabled.
pub fn build_provider(
    config: &Config,
    service_version: &str,
) -> Result<Option<OtelProvider>, Box<dyn Error>> {
    let exporter = match &config.otel.exporter {
        Kind::None => OtelExporter::None,
        Kind::OtlpHttp {
            endpoint,
            headers,
            protocol,
        } => {
            let protocol = match protocol {
                Protocol::Json => OtelHttpProtocol::Json,
                Protocol::Binary => OtelHttpProtocol::Binary,
            };

            OtelExporter::OtlpHttp {
                endpoint: endpoint.clone(),
                headers: headers
                    .iter()
                    .map(|(k, v)| (k.clone(), v.clone()))
                    .collect(),
                protocol,
            }
        }
        Kind::OtlpGrpc { endpoint, headers } => OtelExporter::OtlpGrpc {
            endpoint: endpoint.clone(),
            headers: headers
                .iter()
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect(),
        },
    };

    OtelProvider::from(&OtelSettings {
        service_name: originator().value.to_owned(),
        service_version: service_version.to_string(),
        codex_home: config.codex_home.clone(),
        environment: config.otel.environment.to_string(),
        exporter,
    })
}

/// Filter predicate for exporting only Codex-owned events via OTEL.
/// Keeps events that originated from codex_otel module
pub fn codex_export_filter(meta: &tracing::Metadata<'_>) -> bool {
    meta.target().starts_with("codex_otel")
}
@@ -36,16 +36,13 @@ pub struct ConversationsPage {
pub struct ConversationItem {
    /// Absolute path to the rollout file.
    pub path: PathBuf,
    /// First up to `HEAD_RECORD_LIMIT` JSONL records parsed as JSON (includes meta line).
    /// First up to 5 JSONL records parsed as JSON (includes meta line).
    pub head: Vec<serde_json::Value>,
    /// Last up to `TAIL_RECORD_LIMIT` JSONL response records parsed as JSON.
    pub tail: Vec<serde_json::Value>,
}

/// Hard cap to bound worst‑case work per request.
const MAX_SCAN_FILES: usize = 100;
const HEAD_RECORD_LIMIT: usize = 10;
const TAIL_RECORD_LIMIT: usize = 10;

/// Pagination cursor identifying a file by timestamp and UUID.
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -179,13 +176,13 @@ async fn traverse_directories_for_paths(
            }
            // Read head and simultaneously detect message events within the same
            // first N JSONL records to avoid a second file read.
            let (head, tail, saw_session_meta, saw_user_event) =
                read_head_and_tail(&path, HEAD_RECORD_LIMIT, TAIL_RECORD_LIMIT)
            let (head, saw_session_meta, saw_user_event) =
                read_head_and_flags(&path, HEAD_RECORD_LIMIT)
                    .await
                    .unwrap_or((Vec::new(), Vec::new(), false, false));
                    .unwrap_or((Vec::new(), false, false));
            // Apply filters: must have session meta and at least one user message event
            if saw_session_meta && saw_user_event {
                items.push(ConversationItem { path, head, tail });
                items.push(ConversationItem { path, head });
            }
        }
    }
@@ -289,11 +286,10 @@ fn parse_timestamp_uuid_from_filename(name: &str) -> Option<(OffsetDateTime, Uui
    Some((ts, uuid))
}

async fn read_head_and_tail(
async fn read_head_and_flags(
    path: &Path,
    head_limit: usize,
    tail_limit: usize,
) -> io::Result<(Vec<serde_json::Value>, Vec<serde_json::Value>, bool, bool)> {
    max_records: usize,
) -> io::Result<(Vec<serde_json::Value>, bool, bool)> {
    use tokio::io::AsyncBufReadExt;

    let file = tokio::fs::File::open(path).await?;
@@ -303,7 +299,7 @@ async fn read_head_and_tail(
    let mut saw_session_meta = false;
    let mut saw_user_event = false;

    while head.len() < head_limit {
    while head.len() < max_records {
        let line_opt = lines.next_line().await?;
        let Some(line) = line_opt else { break };
        let trimmed = line.trim();
@@ -340,84 +336,7 @@ async fn read_head_and_tail(
        }
    }

    let tail = if tail_limit == 0 {
        Vec::new()
    } else {
        read_tail_records(path, tail_limit).await?
    };

    Ok((head, tail, saw_session_meta, saw_user_event))
}

async fn read_tail_records(path: &Path, max_records: usize) -> io::Result<Vec<serde_json::Value>> {
    use std::io::SeekFrom;
    use tokio::io::AsyncReadExt;
    use tokio::io::AsyncSeekExt;

    if max_records == 0 {
        return Ok(Vec::new());
    }

    const CHUNK_SIZE: usize = 8192;

    let mut file = tokio::fs::File::open(path).await?;
    let mut pos = file.seek(SeekFrom::End(0)).await?;
    if pos == 0 {
        return Ok(Vec::new());
    }

    let mut buffer: Vec<u8> = Vec::new();

    loop {
        let slice_start = match (pos > 0, buffer.iter().position(|&b| b == b'\n')) {
            (true, Some(idx)) => idx + 1,
            _ => 0,
        };
        let tail = collect_last_response_values(&buffer[slice_start..], max_records);
        if tail.len() >= max_records || pos == 0 {
            return Ok(tail);
        }

        let read_size = CHUNK_SIZE.min(pos as usize);
        if read_size == 0 {
            return Ok(tail);
        }
        pos -= read_size as u64;
        file.seek(SeekFrom::Start(pos)).await?;
        let mut chunk = vec![0; read_size];
        file.read_exact(&mut chunk).await?;
        chunk.extend_from_slice(&buffer);
        buffer = chunk;
    }
}

fn collect_last_response_values(buffer: &[u8], max_records: usize) -> Vec<serde_json::Value> {
    use std::borrow::Cow;

    if buffer.is_empty() || max_records == 0 {
        return Vec::new();
    }

    let text: Cow<'_, str> = String::from_utf8_lossy(buffer);
    let mut collected_rev: Vec<serde_json::Value> = Vec::new();
    for line in text.lines().rev() {
        let trimmed = line.trim();
        if trimmed.is_empty() {
            continue;
        }
        let parsed: serde_json::Result<RolloutLine> = serde_json::from_str(trimmed);
        let Ok(rollout_line) = parsed else { continue };
        if let RolloutItem::ResponseItem(item) = rollout_line.item
            && let Ok(val) = serde_json::to_value(item)
        {
            collected_rev.push(val);
            if collected_rev.len() == max_records {
                break;
            }
        }
    }
    collected_rev.reverse();
    collected_rev
    Ok((head, saw_session_meta, saw_user_event))
}

/// Locate a recorded conversation rollout file by its UUID string using the existing
@@ -24,7 +24,7 @@ use super::list::Cursor;
use super::list::get_conversations;
use super::policy::is_persisted_response_item;
use crate::config::Config;
use crate::default_client::originator;
use crate::default_client::ORIGINATOR;
use crate::git_info::collect_git_info;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::protocol::ResumedHistory;
@@ -124,7 +124,7 @@ impl RolloutRecorder {
                id: session_id,
                timestamp,
                cwd: config.cwd.clone(),
                originator: originator().value.clone(),
                originator: ORIGINATOR.value.clone(),
                cli_version: env!("CARGO_PKG_VERSION").to_string(),
                instructions,
            }),
@@ -17,18 +17,6 @@ use crate::rollout::list::ConversationsPage;
use crate::rollout::list::Cursor;
use crate::rollout::list::get_conversation;
use crate::rollout::list::get_conversations;
use anyhow::Result;
use codex_protocol::mcp_protocol::ConversationId;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::CompactedItem;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::InputMessageKind;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionMeta;
use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::UserMessageEvent;

fn write_session_file(
    root: &Path,
@@ -158,17 +146,14 @@ async fn test_list_conversations_latest_first() {
            ConversationItem {
                path: p1,
                head: head_3,
                tail: Vec::new(),
            },
            ConversationItem {
                path: p2,
                head: head_2,
                tail: Vec::new(),
            },
            ConversationItem {
                path: p3,
                head: head_1,
                tail: Vec::new(),
            },
        ],
        next_cursor: Some(expected_cursor),
@@ -234,12 +219,10 @@ async fn test_pagination_cursor() {
            ConversationItem {
                path: p5,
                head: head_5,
                tail: Vec::new(),
            },
            ConversationItem {
                path: p4,
                head: head_4,
                tail: Vec::new(),
            },
        ],
        next_cursor: Some(expected_cursor1.clone()),
@@ -286,12 +269,10 @@ async fn test_pagination_cursor() {
            ConversationItem {
                path: p3,
                head: head_3,
                tail: Vec::new(),
            },
            ConversationItem {
                path: p2,
                head: head_2,
                tail: Vec::new(),
            },
        ],
        next_cursor: Some(expected_cursor2.clone()),
@@ -323,7 +304,6 @@ async fn test_pagination_cursor() {
        items: vec![ConversationItem {
            path: p1,
            head: head_1,
            tail: Vec::new(),
        }],
        next_cursor: Some(expected_cursor3),
        num_scanned_files: 5, // scanned 05, 04 (anchor), 03, 02 (anchor), 01
@@ -366,7 +346,6 @@ async fn test_get_conversation_contents() {
        items: vec![ConversationItem {
            path: expected_path,
            head: expected_head,
            tail: Vec::new(),
        }],
        next_cursor: Some(expected_cursor),
        num_scanned_files: 1,
@@ -387,250 +366,6 @@ async fn test_get_conversation_contents() {
    assert_eq!(content, expected_content);
}

#[tokio::test]
async fn test_tail_includes_last_response_items() -> Result<()> {
    let temp = TempDir::new().unwrap();
    let home = temp.path();

    let ts = "2025-06-01T08-00-00";
    let uuid = Uuid::from_u128(42);
    let day_dir = home.join("sessions").join("2025").join("06").join("01");
    fs::create_dir_all(&day_dir)?;
    let file_path = day_dir.join(format!("rollout-{ts}-{uuid}.jsonl"));
    let mut file = File::create(&file_path)?;

    let conversation_id = ConversationId::from_string(&uuid.to_string())?;
    let meta_line = RolloutLine {
        timestamp: ts.to_string(),
        item: RolloutItem::SessionMeta(SessionMetaLine {
            meta: SessionMeta {
                id: conversation_id,
                timestamp: ts.to_string(),
                instructions: None,
                cwd: ".".into(),
                originator: "test_originator".into(),
                cli_version: "test_version".into(),
            },
            git: None,
        }),
    };
    writeln!(file, "{}", serde_json::to_string(&meta_line)?)?;

    let user_event_line = RolloutLine {
        timestamp: ts.to_string(),
        item: RolloutItem::EventMsg(EventMsg::UserMessage(UserMessageEvent {
            message: "hello".into(),
            kind: Some(InputMessageKind::Plain),
            images: None,
        })),
    };
    writeln!(file, "{}", serde_json::to_string(&user_event_line)?)?;

    let total_messages = 12usize;
    for idx in 0..total_messages {
        let response_line = RolloutLine {
            timestamp: format!("{ts}-{idx:02}"),
            item: RolloutItem::ResponseItem(ResponseItem::Message {
                id: None,
                role: "assistant".into(),
                content: vec![ContentItem::OutputText {
                    text: format!("reply-{idx}"),
                }],
            }),
        };
        writeln!(file, "{}", serde_json::to_string(&response_line)?)?;
    }
    drop(file);

    let page = get_conversations(home, 1, None).await?;
    let item = page.items.first().expect("conversation item");
    let tail_len = item.tail.len();
    assert_eq!(tail_len, 10usize.min(total_messages));

    let expected: Vec<serde_json::Value> = (total_messages - tail_len..total_messages)
        .map(|idx| {
            serde_json::to_value(ResponseItem::Message {
                id: None,
                role: "assistant".into(),
                content: vec![ContentItem::OutputText {
                    text: format!("reply-{idx}"),
                }],
            })
            .expect("serialize response item")
        })
        .collect();

    assert_eq!(item.tail, expected);

    Ok(())
}

#[tokio::test]
async fn test_tail_handles_short_sessions() -> Result<()> {
    let temp = TempDir::new().unwrap();
    let home = temp.path();

    let ts = "2025-06-02T08-30-00";
    let uuid = Uuid::from_u128(7);
    let day_dir = home.join("sessions").join("2025").join("06").join("02");
    fs::create_dir_all(&day_dir)?;
    let file_path = day_dir.join(format!("rollout-{ts}-{uuid}.jsonl"));
    let mut file = File::create(&file_path)?;

    let conversation_id = ConversationId::from_string(&uuid.to_string())?;
    let meta_line = RolloutLine {
        timestamp: ts.to_string(),
        item: RolloutItem::SessionMeta(SessionMetaLine {
            meta: SessionMeta {
                id: conversation_id,
                timestamp: ts.to_string(),
                instructions: None,
                cwd: ".".into(),
                originator: "test_originator".into(),
                cli_version: "test_version".into(),
            },
            git: None,
        }),
    };
    writeln!(file, "{}", serde_json::to_string(&meta_line)?)?;

    let user_event_line = RolloutLine {
        timestamp: ts.to_string(),
        item: RolloutItem::EventMsg(EventMsg::UserMessage(UserMessageEvent {
            message: "hi".into(),
            kind: Some(InputMessageKind::Plain),
            images: None,
        })),
    };
    writeln!(file, "{}", serde_json::to_string(&user_event_line)?)?;

    for idx in 0..3 {
        let response_line = RolloutLine {
            timestamp: format!("{ts}-{idx:02}"),
            item: RolloutItem::ResponseItem(ResponseItem::Message {
                id: None,
                role: "assistant".into(),
                content: vec![ContentItem::OutputText {
                    text: format!("short-{idx}"),
                }],
            }),
        };
        writeln!(file, "{}", serde_json::to_string(&response_line)?)?;
    }
    drop(file);

    let page = get_conversations(home, 1, None).await?;
    let tail = &page.items.first().expect("conversation item").tail;

    assert_eq!(tail.len(), 3);

    let expected: Vec<serde_json::Value> = (0..3)
        .map(|idx| {
            serde_json::to_value(ResponseItem::Message {
                id: None,
                role: "assistant".into(),
                content: vec![ContentItem::OutputText {
                    text: format!("short-{idx}"),
                }],
            })
            .expect("serialize response item")
        })
        .collect();

    assert_eq!(tail, &expected);

    Ok(())
}

#[tokio::test]
async fn test_tail_skips_trailing_non_responses() -> Result<()> {
    let temp = TempDir::new().unwrap();
    let home = temp.path();

    let ts = "2025-06-03T10-00-00";
    let uuid = Uuid::from_u128(11);
    let day_dir = home.join("sessions").join("2025").join("06").join("03");
    fs::create_dir_all(&day_dir)?;
    let file_path = day_dir.join(format!("rollout-{ts}-{uuid}.jsonl"));
    let mut file = File::create(&file_path)?;

    let conversation_id = ConversationId::from_string(&uuid.to_string())?;
    let meta_line = RolloutLine {
        timestamp: ts.to_string(),
        item: RolloutItem::SessionMeta(SessionMetaLine {
            meta: SessionMeta {
                id: conversation_id,
                timestamp: ts.to_string(),
                instructions: None,
                cwd: ".".into(),
                originator: "test_originator".into(),
                cli_version: "test_version".into(),
            },
            git: None,
        }),
    };
    writeln!(file, "{}", serde_json::to_string(&meta_line)?)?;

    let user_event_line = RolloutLine {
        timestamp: ts.to_string(),
        item: RolloutItem::EventMsg(EventMsg::UserMessage(UserMessageEvent {
            message: "hello".into(),
            kind: Some(InputMessageKind::Plain),
            images: None,
        })),
    };
    writeln!(file, "{}", serde_json::to_string(&user_event_line)?)?;

    for idx in 0..4 {
        let response_line = RolloutLine {
            timestamp: format!("{ts}-{idx:02}"),
            item: RolloutItem::ResponseItem(ResponseItem::Message {
                id: None,
                role: "assistant".into(),
                content: vec![ContentItem::OutputText {
                    text: format!("response-{idx}"),
                }],
            }),
        };
        writeln!(file, "{}", serde_json::to_string(&response_line)?)?;
    }

    let compacted_line = RolloutLine {
        timestamp: format!("{ts}-compacted"),
        item: RolloutItem::Compacted(CompactedItem {
            message: "compacted".into(),
        }),
    };
    writeln!(file, "{}", serde_json::to_string(&compacted_line)?)?;

    let shutdown_event = RolloutLine {
        timestamp: format!("{ts}-shutdown"),
        item: RolloutItem::EventMsg(EventMsg::ShutdownComplete),
    };
    writeln!(file, "{}", serde_json::to_string(&shutdown_event)?)?;
    drop(file);

    let page = get_conversations(home, 1, None).await?;
    let tail = &page.items.first().expect("conversation item").tail;

    let expected: Vec<serde_json::Value> = (0..4)
        .map(|idx| {
            serde_json::to_value(ResponseItem::Message {
                id: None,
                role: "assistant".into(),
                content: vec![ContentItem::OutputText {
                    text: format!("response-{idx}"),
                }],
            })
            .expect("serialize response item")
        })
        .collect();

    assert_eq!(tail, &expected);

    Ok(())
}

#[tokio::test]
async fn test_stable_ordering_same_second_pagination() {
    let temp = TempDir::new().unwrap();
@@ -675,12 +410,10 @@ async fn test_stable_ordering_same_second_pagination() {
            ConversationItem {
                path: p3,
                head: head(u3),
                tail: Vec::new(),
            },
            ConversationItem {
                path: p2,
                head: head(u2),
                tail: Vec::new(),
            },
        ],
        next_cursor: Some(expected_cursor1.clone()),
@@ -703,7 +436,6 @@ async fn test_stable_ordering_same_second_pagination() {
        items: vec![ConversationItem {
            path: p1,
            head: head(u1),
            tail: Vec::new(),
        }],
        next_cursor: Some(expected_cursor2),
        num_scanned_files: 3, // scanned u3, u2 (anchor), u1
@@ -15,14 +15,9 @@ use crate::protocol::SandboxPolicy;

#[derive(Debug, PartialEq)]
pub enum SafetyCheck {
    AutoApprove {
        sandbox_type: SandboxType,
        user_explicitly_approved: bool,
    },
    AutoApprove { sandbox_type: SandboxType },
    AskUser,
    Reject {
        reason: String,
    },
    Reject { reason: String },
}

pub fn assess_patch_safety(
@@ -59,16 +54,12 @@ pub fn assess_patch_safety(
        // fall back to asking the user because the patch may touch arbitrary
        // paths outside the project.
        match get_platform_sandbox() {
            Some(sandbox_type) => SafetyCheck::AutoApprove {
                sandbox_type,
                user_explicitly_approved: false,
            },
            Some(sandbox_type) => SafetyCheck::AutoApprove { sandbox_type },
            None if sandbox_policy == &SandboxPolicy::DangerFullAccess => {
                // If the user has explicitly requested DangerFullAccess, then
                // we can auto-approve even without a sandbox.
                SafetyCheck::AutoApprove {
                    sandbox_type: SandboxType::None,
                    user_explicitly_approved: false,
                }
            }
            None => SafetyCheck::AskUser,
@@ -98,15 +89,8 @@ pub fn assess_command_safety(
) -> SafetyCheck {
    // Some commands look dangerous. Even if they are run inside a sandbox,
    // unless the user has explicitly approved them, we should ask,
    // or reject if the approval_policy tells us not to ask.
    // regardless of the approval policy and sandbox policy.
    if command_might_be_dangerous(command) && !approved.contains(command) {
        if approval_policy == AskForApproval::Never {
            return SafetyCheck::Reject {
                reason: "dangerous command detected; rejected by user approval settings"
                    .to_string(),
            };
        }

        return SafetyCheck::AskUser;
    }

@@ -127,7 +111,6 @@ pub fn assess_command_safety(
    if is_known_safe_command(command) || approved.contains(command) {
        return SafetyCheck::AutoApprove {
            sandbox_type: SandboxType::None,
            user_explicitly_approved: false,
        };
    }

@@ -153,17 +136,13 @@ pub(crate) fn assess_safety_for_untrusted_command(
        | (Never, DangerFullAccess)
        | (OnRequest, DangerFullAccess) => SafetyCheck::AutoApprove {
            sandbox_type: SandboxType::None,
            user_explicitly_approved: false,
        },
        (OnRequest, ReadOnly) | (OnRequest, WorkspaceWrite { .. }) => {
            if with_escalated_permissions {
                SafetyCheck::AskUser
            } else {
                match get_platform_sandbox() {
                    Some(sandbox_type) => SafetyCheck::AutoApprove {
                        sandbox_type,
                        user_explicitly_approved: false,
                    },
                    Some(sandbox_type) => SafetyCheck::AutoApprove { sandbox_type },
                    // Fall back to asking since the command is untrusted and
                    // we do not have a sandbox available
                    None => SafetyCheck::AskUser,
@@ -175,10 +154,7 @@ pub(crate) fn assess_safety_for_untrusted_command(
        | (OnFailure, ReadOnly)
        | (OnFailure, WorkspaceWrite { .. }) => {
            match get_platform_sandbox() {
                Some(sandbox_type) => SafetyCheck::AutoApprove {
                    sandbox_type,
                    user_explicitly_approved: false,
                },
                Some(sandbox_type) => SafetyCheck::AutoApprove { sandbox_type },
                None => {
                    if matches!(approval_policy, OnFailure) {
                        // Since the command is not trusted, even though the
@@ -379,8 +355,7 @@ mod tests {
        assert_eq!(
            safety_check,
            SafetyCheck::AutoApprove {
                sandbox_type: SandboxType::None,
                user_explicitly_approved: false,
                sandbox_type: SandboxType::None
            }
        );
    }
@@ -401,13 +376,7 @@ mod tests {
            request_escalated_privileges,
        );

        assert_eq!(
            safety_check,
            SafetyCheck::Reject {
                reason: "dangerous command detected; rejected by user approval settings"
                    .to_string(),
            }
        );
        assert_eq!(safety_check, SafetyCheck::AskUser);
    }

    #[test]
@@ -427,10 +396,7 @@ mod tests {
        );

        let expected = match get_platform_sandbox() {
            Some(sandbox_type) => SafetyCheck::AutoApprove {
                sandbox_type,
                user_explicitly_approved: false,
            },
            Some(sandbox_type) => SafetyCheck::AutoApprove { sandbox_type },
            None => SafetyCheck::AskUser,
        };
        assert_eq!(safety_check, expected);
@@ -11,8 +11,6 @@ use codex_core::ReasoningItemContent;
use codex_core::ResponseItem;
use codex_core::WireApi;
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_protocol::mcp_protocol::AuthMode;
use codex_protocol::mcp_protocol::ConversationId;
use core_test_support::load_default_config_for_test;
use futures::StreamExt;
@@ -72,26 +70,13 @@ async fn run_request(input: Vec<ResponseItem>) -> Value {
    let summary = config.model_reasoning_summary;
    let config = Arc::new(config);

    let conversation_id = ConversationId::new();

    let otel_event_manager = OtelEventManager::new(
        conversation_id,
        config.model.as_str(),
        config.model_family.slug.as_str(),
        None,
        Some(AuthMode::ChatGPT),
        false,
        "test".to_string(),
    );

    let client = ModelClient::new(
        Arc::clone(&config),
        None,
        otel_event_manager,
        provider,
        effort,
        summary,
        conversation_id,
        ConversationId::new(),
    );

    let mut prompt = Prompt::default();
@@ -1,5 +1,4 @@
use std::sync::Arc;
use tracing_test::traced_test;

use codex_core::ContentItem;
use codex_core::ModelClient;
@@ -9,8 +8,6 @@ use codex_core::ResponseEvent;
use codex_core::ResponseItem;
use codex_core::WireApi;
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_protocol::mcp_protocol::AuthMode;
use codex_protocol::mcp_protocol::ConversationId;
use core_test_support::load_default_config_for_test;
use futures::StreamExt;
@@ -26,15 +23,11 @@ fn network_disabled() -> bool {
}

async fn run_stream(sse_body: &str) -> Vec<ResponseEvent> {
    run_stream_with_bytes(sse_body.as_bytes()).await
}

async fn run_stream_with_bytes(sse_body: &[u8]) -> Vec<ResponseEvent> {
    let server = MockServer::start().await;

    let template = ResponseTemplate::new(200)
        .insert_header("content-type", "text/event-stream")
        .set_body_bytes(sse_body.to_vec());
        .set_body_raw(sse_body.to_string(), "text/event-stream");

    Mock::given(method("POST"))
        .and(path("/v1/chat/completions"))
@@ -70,26 +63,13 @@ async fn run_stream_with_bytes(sse_body: &[u8]) -> Vec<ResponseEvent> {
    let summary = config.model_reasoning_summary;
    let config = Arc::new(config);

    let conversation_id = ConversationId::new();

    let otel_event_manager = OtelEventManager::new(
        conversation_id,
        config.model.as_str(),
        config.model_family.slug.as_str(),
        None,
        Some(AuthMode::ChatGPT),
        false,
        "test".to_string(),
    );

    let client = ModelClient::new(
        Arc::clone(&config),
        None,
        otel_event_manager,
        provider,
        effort,
        summary,
        conversation_id,
        ConversationId::new(),
    );

    let mut prompt = Prompt::default();
@@ -109,8 +89,7 @@ async fn run_stream_with_bytes(sse_body: &[u8]) -> Vec<ResponseEvent> {
    while let Some(event) = stream.next().await {
        match event {
            Ok(ev) => events.push(ev),
            // We still collect the error to exercise telemetry and complete the task.
            Err(_e) => break,
            Err(e) => panic!("stream event error: {e}"),
        }
    }
    events
@@ -339,88 +318,3 @@ async fn streams_reasoning_before_tool_call() {

    assert!(matches!(events[3], ResponseEvent::Completed { .. }));
}

#[tokio::test]
#[traced_test]
async fn chat_sse_emits_failed_on_parse_error() {
    if network_disabled() {
        println!(
            "Skipping test because it cannot execute when network is disabled in a Codex sandbox."
        );
        return;
    }

    let sse_body = concat!("data: not-json\n\n", "data: [DONE]\n\n");

    let _ = run_stream(sse_body).await;

    logs_assert(|lines: &[&str]| {
        lines
            .iter()
            .find(|line| {
                line.contains("codex.api_request") && line.contains("http.response.status_code=200")
            })
            .map(|_| Ok(()))
            .unwrap_or(Err("cannot find codex.api_request event".to_string()))
    });

    logs_assert(|lines: &[&str]| {
        lines
            .iter()
            .find(|line| {
                line.contains("codex.sse_event")
                    && line.contains("error.message")
                    && line.contains("expected ident at line 1 column 2")
            })
            .map(|_| Ok(()))
            .unwrap_or(Err("cannot find SSE event".to_string()))
    });
}

#[tokio::test]
#[traced_test]
async fn chat_sse_done_chunk_emits_event() {
    if network_disabled() {
        println!(
            "Skipping test because it cannot execute when network is disabled in a Codex sandbox."
        );
        return;
    }

    let sse_body = "data: [DONE]\n\n";

    let _ = run_stream(sse_body).await;

    logs_assert(|lines: &[&str]| {
        lines
            .iter()
            .find(|line| line.contains("codex.sse_event") && line.contains("event.kind=message"))
            .map(|_| Ok(()))
            .unwrap_or(Err("cannot find SSE event".to_string()))
    });
}

#[tokio::test]
#[traced_test]
async fn chat_sse_emits_error_on_invalid_utf8() {
    if network_disabled() {
        println!(
            "Skipping test because it cannot execute when network is disabled in a Codex sandbox."
        );
        return;
    }

    let _ = run_stream_with_bytes(b"data: \x80\x80\n\n").await;

    logs_assert(|lines: &[&str]| {
        lines
            .iter()
            .find(|line| {
                line.contains("codex.sse_event")
                    && line.contains("error.message")
                    && line.contains("UTF8 error: invalid utf-8 sequence of 1 bytes from index 0")
            })
            .map(|_| Ok(()))
            .unwrap_or(Err("cannot find SSE event".to_string()))
    });
}
@@ -75,33 +75,6 @@ pub fn ev_function_call(call_id: &str, name: &str, arguments: &str) -> Value {
    })
}

pub fn ev_custom_tool_call(call_id: &str, name: &str, input: &str) -> Value {
    serde_json::json!({
        "type": "response.output_item.done",
        "item": {
            "type": "custom_tool_call",
            "call_id": call_id,
            "name": name,
            "input": input
        }
    })
}

pub fn ev_local_shell_call(call_id: &str, status: &str, command: Vec<&str>) -> Value {
    serde_json::json!({
        "type": "response.output_item.done",
        "item": {
            "type": "local_shell_call",
            "call_id": call_id,
            "status": status,
            "action": {
                "type": "exec",
                "command": command,
            }
        }
    })
}

/// Convenience: SSE event for an `apply_patch` custom tool call with raw patch
/// text. This mirrors the payload produced by the Responses API when the model
/// invokes `apply_patch` directly (before we convert it to a function call).
@@ -141,7 +114,7 @@ pub fn sse_response(body: String) -> ResponseTemplate {
        .set_body_raw(body, "text/event-stream")
}

pub async fn mount_sse_once_match<M>(server: &MockServer, matcher: M, body: String)
pub async fn mount_sse_once<M>(server: &MockServer, matcher: M, body: String)
where
    M: wiremock::Match + Send + Sync + 'static,
{
@@ -154,23 +127,6 @@ where
        .await;
}

pub async fn mount_sse_once(server: &MockServer, body: String) {
    Mock::given(method("POST"))
        .and(path("/v1/responses"))
        .respond_with(sse_response(body))
        .expect(1)
        .mount(server)
        .await;
}

pub async fn mount_sse(server: &MockServer, body: String) {
    Mock::given(method("POST"))
        .and(path("/v1/responses"))
        .respond_with(sse_response(body))
        .mount(server)
        .await;
}

pub async fn start_mock_server() -> MockServer {
    MockServer::builder()
        .body_print_limit(BodyPrintLimit::Limited(80_000))
@@ -4,7 +4,7 @@ use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::mount_sse_once_match;
use core_test_support::responses::mount_sse_once;
use core_test_support::responses::sse;
use core_test_support::responses::start_mock_server;
use core_test_support::test_codex::test_codex;
@@ -30,7 +30,7 @@ async fn interrupt_long_running_tool_emits_turn_aborted() {
    let body = sse(vec![ev_function_call("call_sleep", "shell", &args)]);

    let server = start_mock_server().await;
    mount_sse_once_match(&server, body_string_contains("start sleep"), body).await;
    mount_sse_once(&server, body_string_contains("start sleep"), body).await;

    let codex = test_codex().build(&server).await.unwrap().codex;
@@ -16,8 +16,6 @@ use codex_core::built_in_model_providers;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
use codex_core::protocol::Op;
use codex_otel::otel_event_manager::OtelEventManager;
use codex_protocol::mcp_protocol::AuthMode;
use codex_protocol::mcp_protocol::ConversationId;
use codex_protocol::models::ReasoningItemReasoningSummary;
use codex_protocol::models::WebSearchAction;
@@ -363,7 +361,6 @@ async fn includes_conversation_id_and_model_headers_in_request() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn includes_base_instructions_override_in_request() {
    skip_if_no_network!();
    // Mock server
    let server = MockServer::start().await;

@@ -561,7 +558,6 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn includes_user_instructions_message_in_request() {
    skip_if_no_network!();
    let server = MockServer::start().await;

    let first = ResponseTemplate::new(200)
@@ -666,26 +662,13 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {
    let summary = config.model_reasoning_summary;
    let config = Arc::new(config);

    let conversation_id = ConversationId::new();

    let otel_event_manager = OtelEventManager::new(
        conversation_id,
        config.model.as_str(),
        config.model_family.slug.as_str(),
        None,
        Some(AuthMode::ChatGPT),
        false,
        "test".to_string(),
    );

    let client = ModelClient::new(
        Arc::clone(&config),
        None,
        otel_event_manager,
        provider,
        effort,
        summary,
        conversation_id,
        ConversationId::new(),
    );

    let mut prompt = Prompt::default();
@@ -772,7 +755,6 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn token_count_includes_rate_limits_snapshot() {
    skip_if_no_network!();
    let server = MockServer::start().await;

    let sse_body = responses::sse(vec![responses::ev_completed_with_tokens("resp_rate", 123)]);
@@ -917,7 +899,6 @@ async fn token_count_includes_rate_limits_snapshot() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn usage_limit_error_emits_rate_limit_event() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let server = MockServer::start().await;

    let response = ResponseTemplate::new(429)
@@ -997,7 +978,6 @@ async fn usage_limit_error_emits_rate_limit_event() -> anyhow::Result<()> {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn azure_overrides_assign_properties_used_for_responses_url() {
    skip_if_no_network!();
    let existing_env_var_with_random_value = if cfg!(windows) { "USERNAME" } else { "USER" };

    // Mock server
@@ -1074,7 +1054,6 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn env_var_overrides_loaded_auth() {
    skip_if_no_network!();
    let existing_env_var_with_random_value = if cfg!(windows) { "USERNAME" } else { "USER" };

    // Mock server
@@ -25,7 +25,7 @@ use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_completed_with_tokens;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::mount_sse_once_match;
use core_test_support::responses::mount_sse_once;
use core_test_support::responses::sse;
use core_test_support::responses::sse_response;
use core_test_support::responses::start_mock_server;
@@ -79,19 +79,19 @@ async fn summarize_context_three_requests_and_instructions() {
        body.contains("\"text\":\"hello world\"")
            && !body.contains("You have exceeded the maximum number of tokens")
    };
    mount_sse_once_match(&server, first_matcher, sse1).await;
    mount_sse_once(&server, first_matcher, sse1).await;

    let second_matcher = |req: &wiremock::Request| {
        let body = std::str::from_utf8(&req.body).unwrap_or("");
        body.contains("You have exceeded the maximum number of tokens")
    };
    mount_sse_once_match(&server, second_matcher, sse2).await;
    mount_sse_once(&server, second_matcher, sse2).await;

    let third_matcher = |req: &wiremock::Request| {
        let body = std::str::from_utf8(&req.body).unwrap_or("");
        body.contains(&format!("\"text\":\"{THIRD_USER_MSG}\""))
    };
    mount_sse_once_match(&server, third_matcher, sse3).await;
    mount_sse_once(&server, third_matcher, sse3).await;

    // Build config pointing to the mock server and spawn Codex.
    let model_provider = ModelProviderInfo {
@@ -25,7 +25,7 @@ use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use core_test_support::load_default_config_for_test;
use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
use core_test_support::responses::mount_sse_once_match;
use core_test_support::responses::mount_sse_once;
use core_test_support::responses::sse;
use core_test_support::wait_for_event;
use pretty_assertions::assert_eq;
@@ -702,13 +702,13 @@ async fn mount_initial_flow(server: &MockServer) {
            && !body.contains("\"text\":\"AFTER_RESUME\"")
            && !body.contains("\"text\":\"AFTER_FORK\"")
    };
    mount_sse_once_match(server, match_first, sse1).await;
    mount_sse_once(server, match_first, sse1).await;

    let match_compact = |req: &wiremock::Request| {
        let body = std::str::from_utf8(&req.body).unwrap_or("");
        body.contains("You have exceeded the maximum number of tokens")
    };
    mount_sse_once_match(server, match_compact, sse2).await;
    mount_sse_once(server, match_compact, sse2).await;

    let match_after_compact = |req: &wiremock::Request| {
        let body = std::str::from_utf8(&req.body).unwrap_or("");
@@ -716,19 +716,19 @@ async fn mount_initial_flow(server: &MockServer) {
            && !body.contains("\"text\":\"AFTER_RESUME\"")
            && !body.contains("\"text\":\"AFTER_FORK\"")
    };
    mount_sse_once_match(server, match_after_compact, sse3).await;
    mount_sse_once(server, match_after_compact, sse3).await;

    let match_after_resume = |req: &wiremock::Request| {
        let body = std::str::from_utf8(&req.body).unwrap_or("");
        body.contains("\"text\":\"AFTER_RESUME\"")
    };
    mount_sse_once_match(server, match_after_resume, sse4).await;
    mount_sse_once(server, match_after_resume, sse4).await;

    let match_after_fork = |req: &wiremock::Request| {
        let body = std::str::from_utf8(&req.body).unwrap_or("");
        body.contains("\"text\":\"AFTER_FORK\"")
    };
    mount_sse_once_match(server, match_after_fork, sse5).await;
    mount_sse_once(server, match_after_fork, sse5).await;
}

async fn mount_second_compact_flow(server: &MockServer) {
@@ -743,13 +743,13 @@ async fn mount_second_compact_flow(server: &MockServer) {
        body.contains("You have exceeded the maximum number of tokens")
            && body.contains("AFTER_FORK")
    };
    mount_sse_once_match(server, match_second_compact, sse6).await;
    mount_sse_once(server, match_second_compact, sse6).await;

    let match_after_second_resume = |req: &wiremock::Request| {
        let body = std::str::from_utf8(&req.body).unwrap_or("");
        body.contains(&format!("\"text\":\"{AFTER_SECOND_RESUME}\""))
    };
    mount_sse_once_match(server, match_after_second_resume, sse7).await;
    mount_sse_once(server, match_after_second_resume, sse7).await;
}

async fn start_test_conversation(
@@ -67,7 +67,7 @@ async fn codex_returns_json_result(model: String) -> anyhow::Result<()> {
            && format.get("strict") == Some(&serde_json::Value::Bool(true))
            && format.get("schema") == Some(&expected_schema)
    };
    responses::mount_sse_once_match(&server, match_json_text_param, sse1).await;
    responses::mount_sse_once(&server, match_json_text_param, sse1).await;

    let TestCodex { codex, cwd, .. } = test_codex().build(&server).await?;
@@ -12,7 +12,6 @@ mod fork_conversation;
mod json_result;
mod live_cli;
mod model_overrides;
mod otel;
mod prompt_caching;
mod review;
mod rmcp_client;
File diff suppressed because it is too large
@@ -1,10 +1,7 @@
use std::collections::HashMap;
use std::net::TcpListener;
use std::time::Duration;

use codex_core::config_types::McpServerConfig;
use codex_core::config_types::McpServerTransportConfig;

use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
@@ -12,22 +9,17 @@ use codex_core::protocol::Op;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::ReasoningSummary;
use core_test_support::responses;
use core_test_support::responses::mount_sse_once_match;
use core_test_support::responses::mount_sse_once;
use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use core_test_support::wait_for_event_with_timeout;
use escargot::CargoBuild;
use serde_json::Value;
use tokio::net::TcpStream;
use tokio::process::Child;
use tokio::process::Command;
use tokio::time::Instant;
use tokio::time::sleep;
use wiremock::matchers::any;

#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn stdio_server_round_trip() -> anyhow::Result<()> {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn rmcp_tool_call_round_trip() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
@@ -36,7 +28,7 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
    let server_name = "rmcp";
    let tool_name = format!("{server_name}__echo");

    mount_sse_once_match(
    mount_sse_once(
        &server,
        any(),
        responses::sse(vec![
@@ -49,7 +41,7 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
        ]),
    )
    .await;
    mount_sse_once_match(
    mount_sse_once(
        &server,
        any(),
        responses::sse(vec![
@@ -62,7 +54,7 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
    let expected_env_value = "propagated-env";
    let rmcp_test_server_bin = CargoBuild::new()
        .package("codex-rmcp-client")
        .bin("test_stdio_server")
        .bin("rmcp_test_server")
        .run()?
        .path()
        .to_string_lossy()
@@ -74,14 +66,12 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
            config.mcp_servers.insert(
                server_name.to_string(),
                McpServerConfig {
                    transport: McpServerTransportConfig::Stdio {
                        command: rmcp_test_server_bin.clone(),
                        args: Vec::new(),
                        env: Some(HashMap::from([(
                            "MCP_TEST_VALUE".to_string(),
                            expected_env_value.to_string(),
                        )])),
                    },
                    command: rmcp_test_server_bin.clone(),
                    args: Vec::new(),
                    env: Some(HashMap::from([(
                        "MCP_TEST_VALUE".to_string(),
                        expected_env_value.to_string(),
                    )])),
                    startup_timeout_sec: Some(Duration::from_secs(10)),
                    tool_timeout_sec: None,
                },
@@ -107,13 +97,18 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
        })
        .await?;

    eprintln!("waiting for mcp tool call begin event");
    let begin_event = wait_for_event_with_timeout(
        &fixture.codex,
        |ev| matches!(ev, EventMsg::McpToolCallBegin(_)),
        |ev| {
            eprintln!("ev: {ev:?}");
            matches!(ev, EventMsg::McpToolCallBegin(_))
        },
        Duration::from_secs(10),
    )
    .await;

    eprintln!("mcp tool call begin event: {begin_event:?}");
    let EventMsg::McpToolCallBegin(begin) = begin_event else {
        unreachable!("event guard guarantees McpToolCallBegin");
    };
@@ -124,6 +119,7 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
        matches!(ev, EventMsg::McpToolCallEnd(_))
    })
    .await;
    eprintln!("end_event: {end_event:?}");
    let EventMsg::McpToolCallEnd(end) = end_event else {
        unreachable!("event guard guarantees McpToolCallEnd");
    };
@@ -149,223 +145,18 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
        .get("echo")
        .and_then(Value::as_str)
        .expect("echo payload present");
    assert_eq!(echo_value, "ECHOING: ping");
    assert_eq!(echo_value, "ping");
    let env_value = map
        .get("env")
        .and_then(Value::as_str)
        .expect("env snapshot inserted");
    assert_eq!(env_value, expected_env_value);

    wait_for_event(&fixture.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
    let task_complete_event =
        wait_for_event(&fixture.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
    eprintln!("task_complete_event: {task_complete_event:?}");

    server.verify().await;

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn streamable_http_tool_call_round_trip() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;

    let call_id = "call-456";
    let server_name = "rmcp_http";
    let tool_name = format!("{server_name}__echo");

    mount_sse_once_match(
        &server,
        any(),
        responses::sse(vec![
            serde_json::json!({
                "type": "response.created",
                "response": {"id": "resp-1"}
            }),
            responses::ev_function_call(call_id, &tool_name, "{\"message\":\"ping\"}"),
            responses::ev_completed("resp-1"),
        ]),
    )
    .await;
    mount_sse_once_match(
        &server,
        any(),
        responses::sse(vec![
            responses::ev_assistant_message(
                "msg-1",
                "rmcp streamable http echo tool completed successfully.",
            ),
            responses::ev_completed("resp-2"),
        ]),
    )
    .await;

    let expected_env_value = "propagated-env-http";
    let rmcp_http_server_bin = CargoBuild::new()
        .package("codex-rmcp-client")
        .bin("test_streamable_http_server")
        .run()?
        .path()
        .to_string_lossy()
        .into_owned();

    let listener = TcpListener::bind("127.0.0.1:0")?;
    let port = listener.local_addr()?.port();
    drop(listener);
    let bind_addr = format!("127.0.0.1:{port}");
    let server_url = format!("http://{bind_addr}/mcp");

    let mut http_server_child = Command::new(&rmcp_http_server_bin)
        .kill_on_drop(true)
        .env("MCP_STREAMABLE_HTTP_BIND_ADDR", &bind_addr)
        .env("MCP_TEST_VALUE", expected_env_value)
        .spawn()?;

    wait_for_streamable_http_server(&mut http_server_child, &bind_addr, Duration::from_secs(5))
        .await?;

    let fixture = test_codex()
        .with_config(move |config| {
            config.use_experimental_use_rmcp_client = true;
            config.mcp_servers.insert(
                server_name.to_string(),
                McpServerConfig {
                    transport: McpServerTransportConfig::StreamableHttp {
                        url: server_url,
                        bearer_token: None,
                    },
                    startup_timeout_sec: Some(Duration::from_secs(10)),
                    tool_timeout_sec: None,
                },
            );
        })
        .build(&server)
        .await?;
    let session_model = fixture.session_configured.model.clone();

    fixture
        .codex
        .submit(Op::UserTurn {
            items: vec![InputItem::Text {
                text: "call the rmcp streamable http echo tool".into(),
            }],
            final_output_json_schema: None,
            cwd: fixture.cwd.path().to_path_buf(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: session_model,
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;

    let begin_event = wait_for_event_with_timeout(
        &fixture.codex,
        |ev| matches!(ev, EventMsg::McpToolCallBegin(_)),
        Duration::from_secs(10),
    )
    .await;

    let EventMsg::McpToolCallBegin(begin) = begin_event else {
        unreachable!("event guard guarantees McpToolCallBegin");
    };
    assert_eq!(begin.invocation.server, server_name);
    assert_eq!(begin.invocation.tool, "echo");

    let end_event = wait_for_event(&fixture.codex, |ev| {
        matches!(ev, EventMsg::McpToolCallEnd(_))
    })
    .await;
    let EventMsg::McpToolCallEnd(end) = end_event else {
        unreachable!("event guard guarantees McpToolCallEnd");
    };

    let result = end
        .result
        .as_ref()
        .expect("rmcp echo tool should return success");
    assert_eq!(result.is_error, Some(false));
    assert!(
        result.content.is_empty(),
        "content should default to an empty array"
    );

    let structured = result
        .structured_content
        .as_ref()
        .expect("structured content");
    let Value::Object(map) = structured else {
        panic!("structured content should be an object: {structured:?}");
    };
    let echo_value = map
        .get("echo")
        .and_then(Value::as_str)
        .expect("echo payload present");
    assert_eq!(echo_value, "ECHOING: ping");
    let env_value = map
        .get("env")
        .and_then(Value::as_str)
        .expect("env snapshot inserted");
    assert_eq!(env_value, expected_env_value);

    wait_for_event(&fixture.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    server.verify().await;

    match http_server_child.try_wait() {
        Ok(Some(_)) => {}
        Ok(None) => {
            let _ = http_server_child.kill().await;
        }
        Err(error) => {
            eprintln!("failed to check streamable http server status: {error}");
            let _ = http_server_child.kill().await;
        }
    }
    if let Err(error) = http_server_child.wait().await {
        eprintln!("failed to await streamable http server shutdown: {error}");
    }

    Ok(())
}

async fn wait_for_streamable_http_server(
    server_child: &mut Child,
    address: &str,
    timeout: Duration,
) -> anyhow::Result<()> {
    let deadline = Instant::now() + timeout;

    loop {
        if let Some(status) = server_child.try_wait()? {
            return Err(anyhow::anyhow!(
                "streamable HTTP server exited early with status {status}"
            ));
        }

        let remaining = deadline.saturating_duration_since(Instant::now());

        if remaining.is_zero() {
            return Err(anyhow::anyhow!(
                "timed out waiting for streamable HTTP server at {address}: deadline reached"
            ));
        }

        match tokio::time::timeout(remaining, TcpStream::connect(address)).await {
            Ok(Ok(_)) => return Ok(()),
            Ok(Err(error)) => {
                if Instant::now() >= deadline {
                    return Err(anyhow::anyhow!(
                        "timed out waiting for streamable HTTP server at {address}: {error}"
                    ));
                }
            }
            Err(_) => {
                return Err(anyhow::anyhow!(
                    "timed out waiting for streamable HTTP server at {address}: connect call timed out"
                ));
            }
        }

        sleep(Duration::from_millis(50)).await;
    }
}
@@ -28,7 +28,7 @@ async fn summarize_context_three_requests_and_instructions() -> anyhow::Result<(

    let sse1 = sse(vec![ev_assistant_message("m1", "Done"), ev_completed("r1")]);

    responses::mount_sse_once_match(&server, any(), sse1).await;
    responses::mount_sse_once(&server, any(), sse1).await;

    let notify_dir = TempDir::new()?;
    // write a notify script that touches a file next to it
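The `sse(...)` and `responses::mount_sse_once(...)` calls are repo test helpers. For orientation, a hedged sketch of what mounting a single SSE response with wiremock might look like (the function name and signature here are assumptions, not the actual helper):

```rust
use wiremock::matchers::any;
use wiremock::{Mock, MockServer, ResponseTemplate};

// `body` is assumed to be a pre-rendered `text/event-stream` payload.
async fn mount_sse_once(server: &MockServer, body: String) {
    Mock::given(any())
        .respond_with(ResponseTemplate::new(200).set_body_raw(body, "text/event-stream"))
        .expect(1) // the test should hit this mock exactly once
        .mount(server)
        .await;
}
```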
@@ -3,7 +3,7 @@
This document describes Codex’s experimental MCP interface: a JSON‑RPC API that runs over the Model Context Protocol (MCP) transport to control a local Codex engine.

- Status: experimental and subject to change without notice
- Server binary: `codex mcp-server` (or `codex-mcp-server`)
- Server binary: `codex mcp` (or `codex-mcp-server`)
- Transport: standard MCP over stdio (JSON‑RPC 2.0, line‑delimited)

## Overview
@@ -36,17 +36,15 @@ See code for full type definitions and exact shapes: `protocol/src/mcp_protocol.
Run Codex as an MCP server and connect an MCP client:

```bash
codex mcp-server | your_mcp_client
codex mcp | your_mcp_client
```

For a simple inspection UI, you can also try:

```bash
npx @modelcontextprotocol/inspector codex mcp-server
npx @modelcontextprotocol/inspector codex mcp
```

Use the separate `codex mcp` subcommand to manage configured MCP server launchers in `config.toml`.

## Conversations

Start a new session with optional overrides:
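The hunk cuts off before the document's request example. As a hedged illustration of the transport described above (the method and parameter names here are assumptions; the authoritative shapes live in `protocol/src/mcp_protocol.rs`), a client could write one line-delimited JSON-RPC 2.0 request per call:

```rust
use serde_json::json;

// Hedged sketch only: builds a request a client might write to the server's
// stdin. "newConversation" and the override keys are assumed names.
fn new_conversation_request() -> String {
    let request = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "newConversation",
        "params": { "model": "gpt-5", "cwd": "/tmp/project" }
    });
    // The transport is line-delimited, so terminate with a newline.
    format!("{request}\n")
}
```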
@@ -40,14 +40,12 @@ tokio = { workspace = true, features = [
] }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
opentelemetry-appender-tracing = { workspace = true }
ts-rs = { workspace = true, features = [
    "uuid-impl",
    "serde-json-impl",
    "no-serde-warnings",
] }

[dev-dependencies]
assert_cmd = { workspace = true }
core_test_support = { workspace = true }
@@ -58,4 +56,3 @@ tempfile = { workspace = true }
uuid = { workspace = true }
walkdir = { workspace = true }
wiremock = { workspace = true }
mcp-types = { workspace = true }
@@ -2,18 +2,12 @@ use serde::Deserialize;
use serde::Serialize;
use ts_rs::TS;

/// Top-level events emitted on the Codex Exec thread stream.
/// Top-level events emitted on the Codex Exec conversation stream.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
#[serde(tag = "type")]
pub enum ThreadEvent {
    #[serde(rename = "thread.started")]
    ThreadStarted(ThreadStartedEvent),
    #[serde(rename = "turn.started")]
    TurnStarted(TurnStartedEvent),
    #[serde(rename = "turn.completed")]
    TurnCompleted(TurnCompletedEvent),
    #[serde(rename = "turn.failed")]
    TurnFailed(TurnFailedEvent),
pub enum ConversationEvent {
    #[serde(rename = "session.created")]
    SessionCreated(SessionCreatedEvent),
    #[serde(rename = "item.started")]
    ItemStarted(ItemStartedEvent),
    #[serde(rename = "item.updated")]
@@ -21,68 +15,47 @@ pub enum ThreadEvent {
    #[serde(rename = "item.completed")]
    ItemCompleted(ItemCompletedEvent),
    #[serde(rename = "error")]
    Error(ThreadErrorEvent),
    Error(ConversationErrorEvent),
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct ThreadStartedEvent {
    pub thread_id: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS, Default)]
pub struct TurnStartedEvent {}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct TurnCompletedEvent {
    pub usage: Usage,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct TurnFailedEvent {
    pub error: ThreadErrorEvent,
}

/// Minimal usage summary for a turn.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS, Default)]
pub struct Usage {
    pub input_tokens: u64,
    pub cached_input_tokens: u64,
    pub output_tokens: u64,
pub struct SessionCreatedEvent {
    pub session_id: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct ItemStartedEvent {
    pub item: ThreadItem,
    pub item: ConversationItem,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct ItemCompletedEvent {
    pub item: ThreadItem,
    pub item: ConversationItem,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct ItemUpdatedEvent {
    pub item: ThreadItem,
    pub item: ConversationItem,
}

/// Fatal error emitted by the stream.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct ThreadErrorEvent {
pub struct ConversationErrorEvent {
    pub message: String,
}

/// Canonical representation of a thread item and its domain-specific payload.
/// Canonical representation of a conversation item and its domain-specific payload.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct ThreadItem {
pub struct ConversationItem {
    pub id: String,
    #[serde(flatten)]
    pub details: ThreadItemDetails,
    pub details: ConversationItemDetails,
}

/// Typed payloads for each supported thread item type.
/// Typed payloads for each supported conversation item type.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
#[serde(tag = "item_type", rename_all = "snake_case")]
pub enum ThreadItemDetails {
pub enum ConversationItemDetails {
    AssistantMessage(AssistantMessageItem),
    Reasoning(ReasoningItem),
    CommandExecution(CommandExecutionItem),
@@ -93,7 +66,7 @@ pub enum ThreadItemDetails {
    Error(ErrorItem),
}

/// Session metadata.
/// Session conversation metadata.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct SessionItem {
    pub session_id: String,
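For reference, the serde attributes above fully determine the wire format: `tag = "type"` folds the variant's rename value into the object, and the newtype payload's fields sit alongside it. A self-contained sketch that mirrors just the `session.created` case:

```rust
use serde::Serialize;

// Minimal mirror of the types in the diff, to show the JSON shape.
#[derive(Serialize)]
#[serde(tag = "type")]
enum ConversationEvent {
    #[serde(rename = "session.created")]
    SessionCreated(SessionCreatedEvent),
}

#[derive(Serialize)]
struct SessionCreatedEvent {
    session_id: String,
}

fn main() {
    let ev = ConversationEvent::SessionCreated(SessionCreatedEvent {
        session_id: "67e55044-10b1-426f-9247-bb680e5fe0c8".into(),
    });
    // Prints: {"type":"session.created","session_id":"67e55044-..."}
    println!("{}", serde_json::to_string(&ev).unwrap());
}
```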
@@ -8,27 +8,21 @@ use crate::event_processor::handle_last_message;
use crate::exec_events::AssistantMessageItem;
use crate::exec_events::CommandExecutionItem;
use crate::exec_events::CommandExecutionStatus;
use crate::exec_events::ConversationErrorEvent;
use crate::exec_events::ConversationEvent;
use crate::exec_events::ConversationItem;
use crate::exec_events::ConversationItemDetails;
use crate::exec_events::FileChangeItem;
use crate::exec_events::FileUpdateChange;
use crate::exec_events::ItemCompletedEvent;
use crate::exec_events::ItemStartedEvent;
use crate::exec_events::ItemUpdatedEvent;
use crate::exec_events::McpToolCallItem;
use crate::exec_events::McpToolCallStatus;
use crate::exec_events::PatchApplyStatus;
use crate::exec_events::PatchChangeKind;
use crate::exec_events::ReasoningItem;
use crate::exec_events::ThreadErrorEvent;
use crate::exec_events::ThreadEvent;
use crate::exec_events::ThreadItem;
use crate::exec_events::ThreadItemDetails;
use crate::exec_events::ThreadStartedEvent;
use crate::exec_events::SessionCreatedEvent;
use crate::exec_events::TodoItem;
use crate::exec_events::TodoListItem;
use crate::exec_events::TurnCompletedEvent;
use crate::exec_events::TurnFailedEvent;
use crate::exec_events::TurnStartedEvent;
use crate::exec_events::Usage;
use codex_core::config::Config;
use codex_core::plan_tool::StepStatus;
use codex_core::plan_tool::UpdatePlanArgs;
@@ -39,13 +33,10 @@ use codex_core::protocol::EventMsg;
use codex_core::protocol::ExecCommandBeginEvent;
use codex_core::protocol::ExecCommandEndEvent;
use codex_core::protocol::FileChange;
use codex_core::protocol::McpToolCallBeginEvent;
use codex_core::protocol::McpToolCallEndEvent;
use codex_core::protocol::PatchApplyBeginEvent;
use codex_core::protocol::PatchApplyEndEvent;
use codex_core::protocol::SessionConfiguredEvent;
use codex_core::protocol::TaskCompleteEvent;
use codex_core::protocol::TaskStartedEvent;
use tracing::error;
use tracing::warn;

@@ -57,9 +48,6 @@ pub struct ExperimentalEventProcessorWithJsonOutput {
    running_patch_applies: HashMap<String, PatchApplyBeginEvent>,
    // Tracks the todo list for the current turn (at most one per turn).
    running_todo_list: Option<RunningTodoList>,
    last_total_token_usage: Option<codex_core::protocol::TokenUsage>,
    running_mcp_tool_calls: HashMap<String, RunningMcpToolCall>,
    last_critical_error: Option<ThreadErrorEvent>,
}

#[derive(Debug, Clone)]
@@ -74,13 +62,6 @@ struct RunningTodoList {
    items: Vec<TodoItem>,
}

#[derive(Debug, Clone)]
struct RunningMcpToolCall {
    server: String,
    tool: String,
    item_id: String,
}
impl ExperimentalEventProcessorWithJsonOutput {
    pub fn new(last_message_path: Option<PathBuf>) -> Self {
        Self {
@@ -89,42 +70,26 @@ impl ExperimentalEventProcessorWithJsonOutput {
            running_commands: HashMap::new(),
            running_patch_applies: HashMap::new(),
            running_todo_list: None,
            last_total_token_usage: None,
            running_mcp_tool_calls: HashMap::new(),
            last_critical_error: None,
        }
    }

    pub fn collect_thread_events(&mut self, event: &Event) -> Vec<ThreadEvent> {
    pub fn collect_conversation_events(&mut self, event: &Event) -> Vec<ConversationEvent> {
        match &event.msg {
            EventMsg::SessionConfigured(ev) => self.handle_session_configured(ev),
            EventMsg::AgentMessage(ev) => self.handle_agent_message(ev),
            EventMsg::AgentReasoning(ev) => self.handle_reasoning_event(ev),
            EventMsg::ExecCommandBegin(ev) => self.handle_exec_command_begin(ev),
            EventMsg::ExecCommandEnd(ev) => self.handle_exec_command_end(ev),
            EventMsg::McpToolCallBegin(ev) => self.handle_mcp_tool_call_begin(ev),
            EventMsg::McpToolCallEnd(ev) => self.handle_mcp_tool_call_end(ev),
            EventMsg::PatchApplyBegin(ev) => self.handle_patch_apply_begin(ev),
            EventMsg::PatchApplyEnd(ev) => self.handle_patch_apply_end(ev),
            EventMsg::TokenCount(ev) => {
                if let Some(info) = &ev.info {
                    self.last_total_token_usage = Some(info.total_token_usage.clone());
                }
                Vec::new()
            }
            EventMsg::TaskStarted(ev) => self.handle_task_started(ev),
            EventMsg::TaskComplete(_) => self.handle_task_complete(),
            EventMsg::Error(ev) => {
                let error = ThreadErrorEvent {
                    message: ev.message.clone(),
                };
                self.last_critical_error = Some(error.clone());
                vec![ThreadEvent::Error(error)]
            }
            EventMsg::StreamError(ev) => vec![ThreadEvent::Error(ThreadErrorEvent {
            EventMsg::Error(ev) => vec![ConversationEvent::Error(ConversationErrorEvent {
                message: ev.message.clone(),
            })],
            EventMsg::StreamError(ev) => vec![ConversationEvent::Error(ConversationErrorEvent {
                message: ev.message.clone(),
            })],
            EventMsg::PlanUpdate(ev) => self.handle_plan_update(ev),
            EventMsg::TaskComplete(_) => self.handle_task_complete(),
            _ => Vec::new(),
        }
    }
@@ -137,36 +102,43 @@ impl ExperimentalEventProcessorWithJsonOutput {
        )
    }
    fn handle_session_configured(&self, payload: &SessionConfiguredEvent) -> Vec<ThreadEvent> {
        vec![ThreadEvent::ThreadStarted(ThreadStartedEvent {
            thread_id: payload.session_id.to_string(),
    fn handle_session_configured(
        &self,
        payload: &SessionConfiguredEvent,
    ) -> Vec<ConversationEvent> {
        vec![ConversationEvent::SessionCreated(SessionCreatedEvent {
            session_id: payload.session_id.to_string(),
        })]
    }

    fn handle_agent_message(&self, payload: &AgentMessageEvent) -> Vec<ThreadEvent> {
        let item = ThreadItem {
    fn handle_agent_message(&self, payload: &AgentMessageEvent) -> Vec<ConversationEvent> {
        let item = ConversationItem {
            id: self.get_next_item_id(),

            details: ThreadItemDetails::AssistantMessage(AssistantMessageItem {
            details: ConversationItemDetails::AssistantMessage(AssistantMessageItem {
                text: payload.message.clone(),
            }),
        };

        vec![ThreadEvent::ItemCompleted(ItemCompletedEvent { item })]
        vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
            item,
        })]
    }

    fn handle_reasoning_event(&self, ev: &AgentReasoningEvent) -> Vec<ThreadEvent> {
        let item = ThreadItem {
    fn handle_reasoning_event(&self, ev: &AgentReasoningEvent) -> Vec<ConversationEvent> {
        let item = ConversationItem {
            id: self.get_next_item_id(),

            details: ThreadItemDetails::Reasoning(ReasoningItem {
            details: ConversationItemDetails::Reasoning(ReasoningItem {
                text: ev.text.clone(),
            }),
        };

        vec![ThreadEvent::ItemCompleted(ItemCompletedEvent { item })]
        vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
            item,
        })]
    }
    fn handle_exec_command_begin(&mut self, ev: &ExecCommandBeginEvent) -> Vec<ThreadEvent> {
    fn handle_exec_command_begin(&mut self, ev: &ExecCommandBeginEvent) -> Vec<ConversationEvent> {
        let item_id = self.get_next_item_id();

        let command_string = match shlex::try_join(ev.command.iter().map(String::as_str)) {
@@ -188,9 +160,9 @@ impl ExperimentalEventProcessorWithJsonOutput {
            },
        );

        let item = ThreadItem {
        let item = ConversationItem {
            id: item_id,
            details: ThreadItemDetails::CommandExecution(CommandExecutionItem {
            details: ConversationItemDetails::CommandExecution(CommandExecutionItem {
                command: command_string,
                aggregated_output: String::new(),
                exit_code: None,
@@ -198,70 +170,10 @@ impl ExperimentalEventProcessorWithJsonOutput {
            }),
        };

        vec![ThreadEvent::ItemStarted(ItemStartedEvent { item })]
        vec![ConversationEvent::ItemStarted(ItemStartedEvent { item })]
    }

    fn handle_mcp_tool_call_begin(&mut self, ev: &McpToolCallBeginEvent) -> Vec<ThreadEvent> {
        let item_id = self.get_next_item_id();
        let server = ev.invocation.server.clone();
        let tool = ev.invocation.tool.clone();

        self.running_mcp_tool_calls.insert(
            ev.call_id.clone(),
            RunningMcpToolCall {
                server: server.clone(),
                tool: tool.clone(),
                item_id: item_id.clone(),
            },
        );

        let item = ThreadItem {
            id: item_id,
            details: ThreadItemDetails::McpToolCall(McpToolCallItem {
                server,
                tool,
                status: McpToolCallStatus::InProgress,
            }),
        };

        vec![ThreadEvent::ItemStarted(ItemStartedEvent { item })]
    }

    fn handle_mcp_tool_call_end(&mut self, ev: &McpToolCallEndEvent) -> Vec<ThreadEvent> {
        let status = if ev.is_success() {
            McpToolCallStatus::Completed
        } else {
            McpToolCallStatus::Failed
        };

        let (server, tool, item_id) = match self.running_mcp_tool_calls.remove(&ev.call_id) {
            Some(running) => (running.server, running.tool, running.item_id),
            None => {
                warn!(
                    call_id = ev.call_id,
                    "Received McpToolCallEnd without begin; synthesizing new item"
                );
                (
                    ev.invocation.server.clone(),
                    ev.invocation.tool.clone(),
                    self.get_next_item_id(),
                )
            }
        };

        let item = ThreadItem {
            id: item_id,
            details: ThreadItemDetails::McpToolCall(McpToolCallItem {
                server,
                tool,
                status,
            }),
        };

        vec![ThreadEvent::ItemCompleted(ItemCompletedEvent { item })]
    }

    fn handle_patch_apply_begin(&mut self, ev: &PatchApplyBeginEvent) -> Vec<ThreadEvent> {
    fn handle_patch_apply_begin(&mut self, ev: &PatchApplyBeginEvent) -> Vec<ConversationEvent> {
        self.running_patch_applies
            .insert(ev.call_id.clone(), ev.clone());
@@ -276,17 +188,17 @@ impl ExperimentalEventProcessorWithJsonOutput {
        }
    }

    fn handle_patch_apply_end(&mut self, ev: &PatchApplyEndEvent) -> Vec<ThreadEvent> {
    fn handle_patch_apply_end(&mut self, ev: &PatchApplyEndEvent) -> Vec<ConversationEvent> {
        if let Some(running_patch_apply) = self.running_patch_applies.remove(&ev.call_id) {
            let status = if ev.success {
                PatchApplyStatus::Completed
            } else {
                PatchApplyStatus::Failed
            };
            let item = ThreadItem {
            let item = ConversationItem {
                id: self.get_next_item_id(),

                details: ThreadItemDetails::FileChange(FileChangeItem {
                details: ConversationItemDetails::FileChange(FileChangeItem {
                    changes: running_patch_apply
                        .changes
                        .iter()
@@ -299,13 +211,15 @@ impl ExperimentalEventProcessorWithJsonOutput {
                }),
            };

            return vec![ThreadEvent::ItemCompleted(ItemCompletedEvent { item })];
            return vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
                item,
            })];
        }

        Vec::new()
    }

    fn handle_exec_command_end(&mut self, ev: &ExecCommandEndEvent) -> Vec<ThreadEvent> {
    fn handle_exec_command_end(&mut self, ev: &ExecCommandEndEvent) -> Vec<ConversationEvent> {
        let Some(RunningCommand { command, item_id }) = self.running_commands.remove(&ev.call_id)
        else {
            warn!(
@@ -319,10 +233,10 @@ impl ExperimentalEventProcessorWithJsonOutput {
        } else {
            CommandExecutionStatus::Failed
        };
        let item = ThreadItem {
        let item = ConversationItem {
            id: item_id,

            details: ThreadItemDetails::CommandExecution(CommandExecutionItem {
            details: ConversationItemDetails::CommandExecution(CommandExecutionItem {
                command,
                aggregated_output: ev.aggregated_output.clone(),
                exit_code: Some(ev.exit_code),
@@ -330,7 +244,9 @@ impl ExperimentalEventProcessorWithJsonOutput {
            }),
        };

        vec![ThreadEvent::ItemCompleted(ItemCompletedEvent { item })]
        vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
            item,
        })]
    }

    fn todo_items_from_plan(&self, args: &UpdatePlanArgs) -> Vec<TodoItem> {
@@ -343,16 +259,16 @@ impl ExperimentalEventProcessorWithJsonOutput {
            .collect()
    }

    fn handle_plan_update(&mut self, args: &UpdatePlanArgs) -> Vec<ThreadEvent> {
    fn handle_plan_update(&mut self, args: &UpdatePlanArgs) -> Vec<ConversationEvent> {
        let items = self.todo_items_from_plan(args);

        if let Some(running) = &mut self.running_todo_list {
            running.items = items.clone();
            let item = ThreadItem {
            let item = ConversationItem {
                id: running.item_id.clone(),
                details: ThreadItemDetails::TodoList(TodoListItem { items }),
                details: ConversationItemDetails::TodoList(TodoListItem { items }),
            };
            return vec![ThreadEvent::ItemUpdated(ItemUpdatedEvent { item })];
            return vec![ConversationEvent::ItemUpdated(ItemUpdatedEvent { item })];
        }

        let item_id = self.get_next_item_id();
@@ -360,48 +276,26 @@ impl ExperimentalEventProcessorWithJsonOutput {
            item_id: item_id.clone(),
            items: items.clone(),
        });
        let item = ThreadItem {
        let item = ConversationItem {
            id: item_id,
            details: ThreadItemDetails::TodoList(TodoListItem { items }),
            details: ConversationItemDetails::TodoList(TodoListItem { items }),
        };
        vec![ThreadEvent::ItemStarted(ItemStartedEvent { item })]
        vec![ConversationEvent::ItemStarted(ItemStartedEvent { item })]
    }

    fn handle_task_started(&mut self, _: &TaskStartedEvent) -> Vec<ThreadEvent> {
        self.last_critical_error = None;
        vec![ThreadEvent::TurnStarted(TurnStartedEvent {})]
    }

    fn handle_task_complete(&mut self) -> Vec<ThreadEvent> {
        let usage = if let Some(u) = &self.last_total_token_usage {
            Usage {
                input_tokens: u.input_tokens,
                cached_input_tokens: u.cached_input_tokens,
                output_tokens: u.output_tokens,
            }
        } else {
            Usage::default()
        };

        let mut items = Vec::new();

    fn handle_task_complete(&mut self) -> Vec<ConversationEvent> {
        if let Some(running) = self.running_todo_list.take() {
            let item = ThreadItem {
            let item = ConversationItem {
                id: running.item_id,
                details: ThreadItemDetails::TodoList(TodoListItem {
                details: ConversationItemDetails::TodoList(TodoListItem {
                    items: running.items,
                }),
            };
            items.push(ThreadEvent::ItemCompleted(ItemCompletedEvent { item }));
            return vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
                item,
            })];
        }

        if let Some(error) = self.last_critical_error.take() {
            items.push(ThreadEvent::TurnFailed(TurnFailedEvent { error }));
        } else {
            items.push(ThreadEvent::TurnCompleted(TurnCompletedEvent { usage }));
        }

        items
        Vec::new()
    }
}
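A recurring pattern in this diff: Begin/End protocol events are correlated through a `HashMap` keyed by `call_id`, so the End handler can reuse the item id minted at Begin time and fall back to a fresh id when no Begin was seen. A stripped-down sketch of that bookkeeping (type and method names are illustrative, not the repo's):

```rust
use std::collections::HashMap;

#[derive(Default)]
struct Correlator {
    next_id: u64,
    in_flight: HashMap<String, String>, // call_id -> item_id
}

impl Correlator {
    fn mint(&mut self) -> String {
        let id = format!("item_{}", self.next_id);
        self.next_id += 1;
        id
    }

    fn begin(&mut self, call_id: &str) -> String {
        let item_id = self.mint();
        self.in_flight.insert(call_id.to_string(), item_id.clone());
        item_id
    }

    fn end(&mut self, call_id: &str) -> String {
        match self.in_flight.remove(call_id) {
            Some(item_id) => item_id,
            // End without Begin: synthesize a fresh item id, mirroring the
            // "synthesizing new item" branch in the diff.
            None => self.mint(),
        }
    }
}
```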
@@ -414,7 +308,7 @@ impl EventProcessor for ExperimentalEventProcessorWithJsonOutput {
    }

    fn process_event(&mut self, event: Event) -> CodexStatus {
        let aggregated = self.collect_thread_events(&event);
        let aggregated = self.collect_conversation_events(&event);
        for conv_event in aggregated {
            match serde_json::to_string(&conv_event) {
                Ok(line) => {
@@ -5,6 +5,10 @@ pub mod event_processor_with_json_output;
pub mod exec_events;
pub mod experimental_event_processor_with_json_output;

use std::io::IsTerminal;
use std::io::Read;
use std::path::PathBuf;

pub use cli::Cli;
use codex_core::AuthManager;
use codex_core::BUILT_IN_OSS_MODEL_PROVIDER_ID;
@@ -22,30 +26,20 @@ use codex_core::protocol::TaskCompleteEvent;
use codex_ollama::DEFAULT_OSS_MODEL;
use codex_protocol::config_types::SandboxMode;
use event_processor_with_human_output::EventProcessorWithHumanOutput;
use event_processor_with_json_output::EventProcessorWithJsonOutput;
use experimental_event_processor_with_json_output::ExperimentalEventProcessorWithJsonOutput;
use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge;
use serde_json::Value;
use std::io::IsTerminal;
use std::io::Read;
use std::path::PathBuf;
use tracing::debug;
use tracing::error;
use tracing::info;
use tracing_subscriber::EnvFilter;
use tracing_subscriber::prelude::*;

use crate::cli::Command as ExecCommand;
use crate::event_processor::CodexStatus;
use crate::event_processor::EventProcessor;
use codex_core::default_client::set_default_originator;
use crate::event_processor_with_json_output::EventProcessorWithJsonOutput;
use codex_core::find_conversation_path_by_id_str;

pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
    if let Err(err) = set_default_originator("codex_exec") {
        tracing::warn!(?err, "Failed to set codex exec originator override {err:?}");
    }

    let Cli {
        command,
        images,
@@ -120,18 +114,19 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
        ),
    };

    // Build fmt layer (existing logging) to compose with OTEL layer.
    // TODO(mbolin): Take a more thoughtful approach to logging.
    let default_level = "error";

    // Build env_filter separately and attach via with_filter.
    let env_filter = EnvFilter::try_from_default_env()
        .or_else(|_| EnvFilter::try_new(default_level))
        .unwrap_or_else(|_| EnvFilter::new(default_level));

    let fmt_layer = tracing_subscriber::fmt::layer()
    let _ = tracing_subscriber::fmt()
        // Fallback to the `default_level` log filter if the environment
        // variable is not set _or_ contains an invalid value
        .with_env_filter(
            EnvFilter::try_from_default_env()
                .or_else(|_| EnvFilter::try_new(default_level))
                .unwrap_or_else(|_| EnvFilter::new(default_level)),
        )
        .with_ansi(stderr_with_ansi)
        .with_writer(std::io::stderr)
        .with_filter(env_filter);
        .try_init();

    let sandbox_mode = if full_auto {
        Some(SandboxMode::WorkspaceWrite)
@@ -187,31 +182,6 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
    };

    let config = Config::load_with_cli_overrides(cli_kv_overrides, overrides)?;

    let otel = codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION"));

    #[allow(clippy::print_stderr)]
    let otel = match otel {
        Ok(otel) => otel,
        Err(e) => {
            eprintln!("Could not create otel exporter: {e}");
            std::process::exit(1);
        }
    };

    if let Some(provider) = otel.as_ref() {
        let otel_layer = OpenTelemetryTracingBridge::new(&provider.logger).with_filter(
            tracing_subscriber::filter::filter_fn(codex_core::otel_init::codex_export_filter),
        );

        let _ = tracing_subscriber::registry()
            .with(fmt_layer)
            .with(otel_layer)
            .try_init();
    } else {
        let _ = tracing_subscriber::registry().with(fmt_layer).try_init();
    }

    let mut event_processor: Box<dyn EventProcessor> = match (json_mode, experimental_json) {
        (_, true) => Box::new(ExperimentalEventProcessorWithJsonOutput::new(
            last_message_file.clone(),
@@ -361,13 +331,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
    info!("Sent prompt with event ID: {initial_prompt_task_id}");

    // Run the loop until the task is complete.
    // Track whether a fatal error was reported by the server so we can
    // exit with a non-zero status for automation-friendly signaling.
    let mut error_seen = false;
    while let Some(event) = rx.recv().await {
        if matches!(event.msg, EventMsg::Error(_)) {
            error_seen = true;
        }
        let shutdown: CodexStatus = event_processor.process_event(event);
        match shutdown {
            CodexStatus::Running => continue,
@@ -379,9 +343,6 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
            }
        }
    }
    if error_seen {
        std::process::exit(1);
    }

    Ok(())
}
@@ -1,41 +1,30 @@
use codex_core::protocol::AgentMessageEvent;
use codex_core::protocol::AgentReasoningEvent;
use codex_core::protocol::ErrorEvent;
use codex_core::protocol::Event;
use codex_core::protocol::EventMsg;
use codex_core::protocol::ExecCommandBeginEvent;
use codex_core::protocol::ExecCommandEndEvent;
use codex_core::protocol::FileChange;
use codex_core::protocol::McpInvocation;
use codex_core::protocol::McpToolCallBeginEvent;
use codex_core::protocol::McpToolCallEndEvent;
use codex_core::protocol::PatchApplyBeginEvent;
use codex_core::protocol::PatchApplyEndEvent;
use codex_core::protocol::SessionConfiguredEvent;
use codex_exec::exec_events::AssistantMessageItem;
use codex_exec::exec_events::CommandExecutionItem;
use codex_exec::exec_events::CommandExecutionStatus;
use codex_exec::exec_events::ConversationErrorEvent;
use codex_exec::exec_events::ConversationEvent;
use codex_exec::exec_events::ConversationItem;
use codex_exec::exec_events::ConversationItemDetails;
use codex_exec::exec_events::ItemCompletedEvent;
use codex_exec::exec_events::ItemStartedEvent;
use codex_exec::exec_events::ItemUpdatedEvent;
use codex_exec::exec_events::McpToolCallItem;
use codex_exec::exec_events::McpToolCallStatus;
use codex_exec::exec_events::PatchApplyStatus;
use codex_exec::exec_events::PatchChangeKind;
use codex_exec::exec_events::ReasoningItem;
use codex_exec::exec_events::ThreadErrorEvent;
use codex_exec::exec_events::ThreadEvent;
use codex_exec::exec_events::ThreadItem;
use codex_exec::exec_events::ThreadItemDetails;
use codex_exec::exec_events::ThreadStartedEvent;
use codex_exec::exec_events::SessionCreatedEvent;
use codex_exec::exec_events::TodoItem as ExecTodoItem;
use codex_exec::exec_events::TodoListItem as ExecTodoListItem;
use codex_exec::exec_events::TurnCompletedEvent;
use codex_exec::exec_events::TurnFailedEvent;
use codex_exec::exec_events::TurnStartedEvent;
use codex_exec::exec_events::Usage;
use codex_exec::experimental_event_processor_with_json_output::ExperimentalEventProcessorWithJsonOutput;
use mcp_types::CallToolResult;
use pretty_assertions::assert_eq;
use std::path::PathBuf;
use std::time::Duration;
@@ -48,7 +37,7 @@ fn event(id: &str, msg: EventMsg) -> Event {
}

#[test]
fn session_configured_produces_thread_started_event() {
fn session_configured_produces_session_created_event() {
    let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);
    let session_id = codex_protocol::mcp_protocol::ConversationId::from_string(
        "67e55044-10b1-426f-9247-bb680e5fe0c8",
@@ -67,28 +56,15 @@ fn session_configured_produces_thread_started_event() {
        rollout_path,
        }),
    );
    let out = ep.collect_thread_events(&ev);
    let out = ep.collect_conversation_events(&ev);
    assert_eq!(
        out,
        vec![ThreadEvent::ThreadStarted(ThreadStartedEvent {
            thread_id: "67e55044-10b1-426f-9247-bb680e5fe0c8".to_string(),
        vec![ConversationEvent::SessionCreated(SessionCreatedEvent {
            session_id: "67e55044-10b1-426f-9247-bb680e5fe0c8".to_string(),
        })]
    );
}
|
||||
fn task_started_produces_turn_started_event() {
|
||||
let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);
|
||||
let out = ep.collect_thread_events(&event(
|
||||
"t1",
|
||||
EventMsg::TaskStarted(codex_core::protocol::TaskStartedEvent {
|
||||
model_context_window: Some(32_000),
|
||||
}),
|
||||
));
|
||||
|
||||
assert_eq!(out, vec![ThreadEvent::TurnStarted(TurnStartedEvent {})]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn plan_update_emits_todo_list_started_updated_and_completed() {
|
||||
use codex_core::plan_tool::PlanItemArg;
|
||||
@@ -114,13 +90,13 @@ fn plan_update_emits_todo_list_started_updated_and_completed() {
|
||||
],
|
||||
}),
|
||||
);
|
||||
let out_first = ep.collect_thread_events(&first);
|
||||
let out_first = ep.collect_conversation_events(&first);
|
||||
assert_eq!(
|
||||
out_first,
|
||||
vec![ThreadEvent::ItemStarted(ItemStartedEvent {
|
||||
item: ThreadItem {
|
||||
vec![ConversationEvent::ItemStarted(ItemStartedEvent {
|
||||
item: ConversationItem {
|
||||
id: "item_0".to_string(),
|
||||
details: ThreadItemDetails::TodoList(ExecTodoListItem {
|
||||
details: ConversationItemDetails::TodoList(ExecTodoListItem {
|
||||
items: vec![
|
||||
ExecTodoItem {
|
||||
text: "step one".to_string(),
|
||||
@@ -153,13 +129,13 @@ fn plan_update_emits_todo_list_started_updated_and_completed() {
|
||||
],
|
||||
}),
|
||||
);
|
||||
let out_second = ep.collect_thread_events(&second);
|
||||
let out_second = ep.collect_conversation_events(&second);
|
||||
assert_eq!(
|
||||
out_second,
|
||||
vec![ThreadEvent::ItemUpdated(ItemUpdatedEvent {
|
||||
item: ThreadItem {
|
||||
vec![ConversationEvent::ItemUpdated(ItemUpdatedEvent {
|
||||
item: ConversationItem {
|
||||
id: "item_0".to_string(),
|
||||
details: ThreadItemDetails::TodoList(ExecTodoListItem {
|
||||
details: ConversationItemDetails::TodoList(ExecTodoListItem {
|
||||
items: vec![
|
||||
ExecTodoItem {
|
||||
text: "step one".to_string(),
|
||||
@@ -182,131 +158,23 @@ fn plan_update_emits_todo_list_started_updated_and_completed() {
|
||||
last_agent_message: None,
|
||||
}),
|
||||
);
|
||||
let out_complete = ep.collect_thread_events(&complete);
|
||||
let out_complete = ep.collect_conversation_events(&complete);
|
||||
assert_eq!(
|
||||
out_complete,
|
||||
vec![
|
||||
ThreadEvent::ItemCompleted(ItemCompletedEvent {
|
||||
item: ThreadItem {
|
||||
id: "item_0".to_string(),
|
||||
details: ThreadItemDetails::TodoList(ExecTodoListItem {
|
||||
items: vec![
|
||||
ExecTodoItem {
|
||||
text: "step one".to_string(),
|
||||
completed: true
|
||||
},
|
||||
ExecTodoItem {
|
||||
text: "step two".to_string(),
|
||||
completed: false
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
}),
|
||||
ThreadEvent::TurnCompleted(TurnCompletedEvent {
|
||||
usage: Usage::default(),
|
||||
}),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mcp_tool_call_begin_and_end_emit_item_events() {
|
||||
let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);
|
||||
let invocation = McpInvocation {
|
||||
server: "server_a".to_string(),
|
||||
tool: "tool_x".to_string(),
|
||||
arguments: None,
|
||||
};
|
||||
|
||||
let begin = event(
|
||||
"m1",
|
||||
EventMsg::McpToolCallBegin(McpToolCallBeginEvent {
|
||||
call_id: "call-1".to_string(),
|
||||
invocation: invocation.clone(),
|
||||
}),
|
||||
);
|
||||
let begin_events = ep.collect_thread_events(&begin);
|
||||
assert_eq!(
|
||||
begin_events,
|
||||
vec![ThreadEvent::ItemStarted(ItemStartedEvent {
|
||||
item: ThreadItem {
|
||||
vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
|
||||
item: ConversationItem {
|
||||
id: "item_0".to_string(),
|
||||
details: ThreadItemDetails::McpToolCall(McpToolCallItem {
|
||||
server: "server_a".to_string(),
|
||||
tool: "tool_x".to_string(),
|
||||
status: McpToolCallStatus::InProgress,
|
||||
}),
|
||||
},
|
||||
})]
|
||||
);
|
||||
|
||||
let end = event(
|
||||
"m2",
|
||||
EventMsg::McpToolCallEnd(McpToolCallEndEvent {
|
||||
call_id: "call-1".to_string(),
|
||||
invocation,
|
||||
duration: Duration::from_secs(1),
|
||||
result: Ok(CallToolResult {
|
||||
content: Vec::new(),
|
||||
is_error: None,
|
||||
structured_content: None,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
let end_events = ep.collect_thread_events(&end);
|
||||
assert_eq!(
|
||||
end_events,
|
||||
vec![ThreadEvent::ItemCompleted(ItemCompletedEvent {
|
||||
item: ThreadItem {
|
||||
id: "item_0".to_string(),
|
||||
details: ThreadItemDetails::McpToolCall(McpToolCallItem {
|
||||
server: "server_a".to_string(),
|
||||
tool: "tool_x".to_string(),
|
||||
status: McpToolCallStatus::Completed,
|
||||
}),
|
||||
},
|
||||
})]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mcp_tool_call_failure_sets_failed_status() {
|
||||
let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);
|
||||
let invocation = McpInvocation {
|
||||
server: "server_b".to_string(),
|
||||
tool: "tool_y".to_string(),
|
||||
arguments: None,
|
||||
};
|
||||
|
||||
let begin = event(
|
||||
"m3",
|
||||
EventMsg::McpToolCallBegin(McpToolCallBeginEvent {
|
||||
call_id: "call-2".to_string(),
|
||||
invocation: invocation.clone(),
|
||||
}),
|
||||
);
|
||||
ep.collect_thread_events(&begin);
|
||||
|
||||
let end = event(
|
||||
"m4",
|
||||
EventMsg::McpToolCallEnd(McpToolCallEndEvent {
|
||||
call_id: "call-2".to_string(),
|
||||
invocation,
|
||||
duration: Duration::from_millis(5),
|
||||
result: Err("tool exploded".to_string()),
|
||||
}),
|
||||
);
|
||||
let events = ep.collect_thread_events(&end);
|
||||
assert_eq!(
|
||||
events,
|
||||
vec![ThreadEvent::ItemCompleted(ItemCompletedEvent {
|
||||
item: ThreadItem {
|
||||
id: "item_0".to_string(),
|
||||
details: ThreadItemDetails::McpToolCall(McpToolCallItem {
|
||||
server: "server_b".to_string(),
|
||||
tool: "tool_y".to_string(),
|
||||
status: McpToolCallStatus::Failed,
|
||||
details: ConversationItemDetails::TodoList(ExecTodoListItem {
|
||||
items: vec![
|
||||
ExecTodoItem {
|
||||
text: "step one".to_string(),
|
||||
completed: true
|
||||
},
|
||||
ExecTodoItem {
|
||||
text: "step two".to_string(),
|
||||
completed: false
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
})]
|
||||
@@ -332,14 +200,14 @@ fn plan_update_after_complete_starts_new_todo_list_with_new_id() {
            }],
        }),
    );
    let _ = ep.collect_thread_events(&start);
    let _ = ep.collect_conversation_events(&start);
    let complete = event(
        "t2",
        EventMsg::TaskComplete(codex_core::protocol::TaskCompleteEvent {
            last_agent_message: None,
        }),
    );
    let _ = ep.collect_thread_events(&complete);
    let _ = ep.collect_conversation_events(&complete);

    // Second turn: a new todo list should have a new id
    let start_again = event(
@@ -352,10 +220,10 @@ fn plan_update_after_complete_starts_new_todo_list_with_new_id() {
            }],
        }),
    );
    let out = ep.collect_thread_events(&start_again);
    let out = ep.collect_conversation_events(&start_again);

    match &out[0] {
        ThreadEvent::ItemStarted(ItemStartedEvent { item }) => {
        ConversationEvent::ItemStarted(ItemStartedEvent { item }) => {
            assert_eq!(&item.id, "item_1");
        }
        other => panic!("unexpected event: {other:?}"),
@@ -371,13 +239,13 @@ fn agent_reasoning_produces_item_completed_reasoning() {
            text: "thinking...".to_string(),
        }),
    );
    let out = ep.collect_thread_events(&ev);
    let out = ep.collect_conversation_events(&ev);
    assert_eq!(
        out,
        vec![ThreadEvent::ItemCompleted(ItemCompletedEvent {
            item: ThreadItem {
        vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
            item: ConversationItem {
                id: "item_0".to_string(),
                details: ThreadItemDetails::Reasoning(ReasoningItem {
                details: ConversationItemDetails::Reasoning(ReasoningItem {
                    text: "thinking...".to_string(),
                }),
            },
@@ -394,13 +262,13 @@ fn agent_message_produces_item_completed_assistant_message() {
            message: "hello".to_string(),
        }),
    );
    let out = ep.collect_thread_events(&ev);
    let out = ep.collect_conversation_events(&ev);
    assert_eq!(
        out,
        vec![ThreadEvent::ItemCompleted(ItemCompletedEvent {
            item: ThreadItem {
        vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
            item: ConversationItem {
                id: "item_0".to_string(),
                details: ThreadItemDetails::AssistantMessage(AssistantMessageItem {
                details: ConversationItemDetails::AssistantMessage(AssistantMessageItem {
                    text: "hello".to_string(),
                }),
            },
@@ -411,7 +279,7 @@ fn agent_message_produces_item_completed_assistant_message() {
#[test]
fn error_event_produces_error() {
    let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);
    let out = ep.collect_thread_events(&event(
    let out = ep.collect_conversation_events(&event(
        "e1",
        EventMsg::Error(codex_core::protocol::ErrorEvent {
            message: "boom".to_string(),
@@ -419,7 +287,7 @@ fn error_event_produces_error() {
    ));
    assert_eq!(
        out,
        vec![ThreadEvent::Error(ThreadErrorEvent {
        vec![ConversationEvent::Error(ConversationErrorEvent {
            message: "boom".to_string(),
        })]
    );
@@ -428,7 +296,7 @@ fn error_event_produces_error() {
#[test]
fn stream_error_event_produces_error() {
    let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);
    let out = ep.collect_thread_events(&event(
    let out = ep.collect_conversation_events(&event(
        "e1",
        EventMsg::StreamError(codex_core::protocol::StreamErrorEvent {
            message: "retrying".to_string(),
@@ -436,45 +304,12 @@ fn stream_error_event_produces_error() {
    ));
    assert_eq!(
        out,
        vec![ThreadEvent::Error(ThreadErrorEvent {
        vec![ConversationEvent::Error(ConversationErrorEvent {
            message: "retrying".to_string(),
        })]
    );
}

#[test]
fn error_followed_by_task_complete_produces_turn_failed() {
    let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);

    let error_event = event(
        "e1",
        EventMsg::Error(ErrorEvent {
            message: "boom".to_string(),
        }),
    );
    assert_eq!(
        ep.collect_thread_events(&error_event),
        vec![ThreadEvent::Error(ThreadErrorEvent {
            message: "boom".to_string(),
        })]
    );

    let complete_event = event(
        "e2",
        EventMsg::TaskComplete(codex_core::protocol::TaskCompleteEvent {
            last_agent_message: None,
        }),
    );
    assert_eq!(
        ep.collect_thread_events(&complete_event),
        vec![ThreadEvent::TurnFailed(TurnFailedEvent {
            error: ThreadErrorEvent {
                message: "boom".to_string(),
            },
        })]
    );
}

#[test]
fn exec_command_end_success_produces_completed_command_item() {
    let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);
@@ -489,13 +324,13 @@ fn exec_command_end_success_produces_completed_command_item() {
            parsed_cmd: Vec::new(),
        }),
    );
    let out_begin = ep.collect_thread_events(&begin);
    let out_begin = ep.collect_conversation_events(&begin);
    assert_eq!(
        out_begin,
        vec![ThreadEvent::ItemStarted(ItemStartedEvent {
            item: ThreadItem {
        vec![ConversationEvent::ItemStarted(ItemStartedEvent {
            item: ConversationItem {
                id: "item_0".to_string(),
                details: ThreadItemDetails::CommandExecution(CommandExecutionItem {
                details: ConversationItemDetails::CommandExecution(CommandExecutionItem {
                    command: "bash -lc 'echo hi'".to_string(),
                    aggregated_output: String::new(),
                    exit_code: None,
@@ -518,13 +353,13 @@ fn exec_command_end_success_produces_completed_command_item() {
            formatted_output: String::new(),
        }),
    );
    let out_ok = ep.collect_thread_events(&end_ok);
    let out_ok = ep.collect_conversation_events(&end_ok);
    assert_eq!(
        out_ok,
        vec![ThreadEvent::ItemCompleted(ItemCompletedEvent {
            item: ThreadItem {
        vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
            item: ConversationItem {
                id: "item_0".to_string(),
                details: ThreadItemDetails::CommandExecution(CommandExecutionItem {
                details: ConversationItemDetails::CommandExecution(CommandExecutionItem {
                    command: "bash -lc 'echo hi'".to_string(),
                    aggregated_output: "hi\n".to_string(),
                    exit_code: Some(0),
@@ -550,11 +385,11 @@ fn exec_command_end_failure_produces_failed_command_item() {
        }),
    );
    assert_eq!(
        ep.collect_thread_events(&begin),
        vec![ThreadEvent::ItemStarted(ItemStartedEvent {
            item: ThreadItem {
        ep.collect_conversation_events(&begin),
        vec![ConversationEvent::ItemStarted(ItemStartedEvent {
            item: ConversationItem {
                id: "item_0".to_string(),
                details: ThreadItemDetails::CommandExecution(CommandExecutionItem {
                details: ConversationItemDetails::CommandExecution(CommandExecutionItem {
                    command: "sh -c 'exit 1'".to_string(),
                    aggregated_output: String::new(),
                    exit_code: None,
@@ -577,13 +412,13 @@ fn exec_command_end_failure_produces_failed_command_item() {
            formatted_output: String::new(),
        }),
    );
    let out_fail = ep.collect_thread_events(&end_fail);
    let out_fail = ep.collect_conversation_events(&end_fail);
    assert_eq!(
        out_fail,
        vec![ThreadEvent::ItemCompleted(ItemCompletedEvent {
            item: ThreadItem {
        vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
            item: ConversationItem {
                id: "item_0".to_string(),
                details: ThreadItemDetails::CommandExecution(CommandExecutionItem {
                details: ConversationItemDetails::CommandExecution(CommandExecutionItem {
                    command: "sh -c 'exit 1'".to_string(),
                    aggregated_output: String::new(),
                    exit_code: Some(1),
@@ -598,7 +433,7 @@ fn exec_command_end_failure_produces_failed_command_item() {
fn exec_command_end_without_begin_is_ignored() {
    let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);

    // End event arrives without a prior Begin; should produce no thread events.
    // End event arrives without a prior Begin; should produce no conversation events.
    let end_only = event(
        "c1",
        EventMsg::ExecCommandEnd(ExecCommandEndEvent {
@@ -611,7 +446,7 @@ fn exec_command_end_without_begin_is_ignored() {
            formatted_output: String::new(),
        }),
    );
    let out = ep.collect_thread_events(&end_only);
    let out = ep.collect_conversation_events(&end_only);
    assert!(out.is_empty());
}

@@ -650,7 +485,7 @@ fn patch_apply_success_produces_item_completed_patchapply() {
            changes: changes.clone(),
        }),
    );
    let out_begin = ep.collect_thread_events(&begin);
    let out_begin = ep.collect_conversation_events(&begin);
    assert!(out_begin.is_empty());

    // End (success) -> item.completed (item_0)
@@ -663,15 +498,15 @@ fn patch_apply_success_produces_item_completed_patchapply() {
            success: true,
        }),
    );
    let out_end = ep.collect_thread_events(&end);
    let out_end = ep.collect_conversation_events(&end);
    assert_eq!(out_end.len(), 1);

    // Validate structure without relying on HashMap iteration order
    match &out_end[0] {
        ThreadEvent::ItemCompleted(ItemCompletedEvent { item }) => {
        ConversationEvent::ItemCompleted(ItemCompletedEvent { item }) => {
            assert_eq!(&item.id, "item_0");
            match &item.details {
                ThreadItemDetails::FileChange(file_update) => {
                ConversationItemDetails::FileChange(file_update) => {
                    assert_eq!(file_update.status, PatchApplyStatus::Completed);

                    let mut actual: Vec<(String, PatchChangeKind)> = file_update
@@ -719,7 +554,7 @@ fn patch_apply_failure_produces_item_completed_patchapply_failed() {
            changes: changes.clone(),
        }),
    );
    assert!(ep.collect_thread_events(&begin).is_empty());
    assert!(ep.collect_conversation_events(&begin).is_empty());

    // End (failure) -> item.completed (item_0) with Failed status
    let end = event(
@@ -731,14 +566,14 @@ fn patch_apply_failure_produces_item_completed_patchapply_failed() {
            success: false,
        }),
    );
    let out_end = ep.collect_thread_events(&end);
    let out_end = ep.collect_conversation_events(&end);
    assert_eq!(out_end.len(), 1);

    match &out_end[0] {
        ThreadEvent::ItemCompleted(ItemCompletedEvent { item }) => {
        ConversationEvent::ItemCompleted(ItemCompletedEvent { item }) => {
            assert_eq!(&item.id, "item_0");
            match &item.details {
                ThreadItemDetails::FileChange(file_update) => {
                ConversationItemDetails::FileChange(file_update) => {
                    assert_eq!(file_update.status, PatchApplyStatus::Failed);
                    assert_eq!(file_update.changes.len(), 1);
                    assert_eq!(file_update.changes[0].path, "file.txt".to_string());
@@ -750,49 +585,3 @@ fn patch_apply_failure_produces_item_completed_patchapply_failed() {
        other => panic!("unexpected event: {other:?}"),
    }
}

#[test]
fn task_complete_produces_turn_completed_with_usage() {
    let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);

    // First, feed a TokenCount event with known totals.
    let usage = codex_core::protocol::TokenUsage {
        input_tokens: 1200,
        cached_input_tokens: 200,
        output_tokens: 345,
        reasoning_output_tokens: 0,
        total_tokens: 0,
    };
    let info = codex_core::protocol::TokenUsageInfo {
        total_token_usage: usage.clone(),
        last_token_usage: usage,
        model_context_window: None,
    };
    let token_count_event = event(
        "e1",
        EventMsg::TokenCount(codex_core::protocol::TokenCountEvent {
            info: Some(info),
            rate_limits: None,
        }),
    );
    assert!(ep.collect_thread_events(&token_count_event).is_empty());

    // Then TaskComplete should produce turn.completed with the captured usage.
    let complete_event = event(
        "e2",
        EventMsg::TaskComplete(codex_core::protocol::TaskCompleteEvent {
            last_agent_message: Some("done".to_string()),
        }),
    );
    let out = ep.collect_thread_events(&complete_event);
    assert_eq!(
        out,
        vec![ThreadEvent::TurnCompleted(TurnCompletedEvent {
            usage: Usage {
                input_tokens: 1200,
                cached_input_tokens: 200,
                output_tokens: 345,
            },
        })]
    );
}

@@ -3,4 +3,3 @@ mod apply_patch;
mod output_schema;
mod resume;
mod sandbox;
mod server_error_exit;
Some files were not shown because too many files have changed in this diff.