Mirror of https://github.com/openai/codex.git (synced 2026-02-03 23:43:39 +00:00)

Compare commits: worktree-c... → remote-tas...

21 Commits
| SHA1 |
|---|
| 065fa50f10 |
| 25ab9f5e10 |
| f5ab495189 |
| 4923df37ea |
| 8858ed1090 |
| f0491f4826 |
| e1d6531103 |
| 5fa64b7ae1 |
| e20e4edbab |
| 16ac10f9d3 |
| 3d12b46b18 |
| 36803606a0 |
| 21ef6be571 |
| acb706b553 |
| 35dec89d8a |
| d1cf46b09f |
| e17d794a4e |
| 83dfb43dbd |
| e5d31d5ccc |
| 9be247e41e |
| d2fcf4314e |
.github/dotslash-config.json (vendored, 28 changes)
@@ -27,34 +27,6 @@
          "path": "codex.exe"
        }
      }
    },
    "codex-responses-api-proxy": {
      "platforms": {
        "macos-aarch64": {
          "regex": "^codex-responses-api-proxy-aarch64-apple-darwin\\.zst$",
          "path": "codex-responses-api-proxy"
        },
        "macos-x86_64": {
          "regex": "^codex-responses-api-proxy-x86_64-apple-darwin\\.zst$",
          "path": "codex-responses-api-proxy"
        },
        "linux-x86_64": {
          "regex": "^codex-responses-api-proxy-x86_64-unknown-linux-musl\\.zst$",
          "path": "codex-responses-api-proxy"
        },
        "linux-aarch64": {
          "regex": "^codex-responses-api-proxy-aarch64-unknown-linux-musl\\.zst$",
          "path": "codex-responses-api-proxy"
        },
        "windows-x86_64": {
          "regex": "^codex-responses-api-proxy-x86_64-pc-windows-msvc\\.exe\\.zst$",
          "path": "codex-responses-api-proxy.exe"
        },
        "windows-aarch64": {
          "regex": "^codex-responses-api-proxy-aarch64-pc-windows-msvc\\.exe\\.zst$",
          "path": "codex-responses-api-proxy.exe"
        }
      }
    }
  }
}
.github/workflows/ci.yml (vendored, 7 changes)
@@ -1,7 +1,7 @@
name: ci

on:
  pull_request: {}
  pull_request: { branches: [main] }
  push: { branches: [main] }

jobs:
@@ -31,7 +31,6 @@ jobs:
      - uses: facebook/install-dotslash@v2

      - name: Stage npm package
        id: stage_npm_package
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
@@ -41,13 +40,13 @@
          python3 ./codex-cli/scripts/build_npm_package.py \
            --release-version "$CODEX_VERSION" \
            --pack-output "$PACK_OUTPUT"
          echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
          echo "PACK_OUTPUT=$PACK_OUTPUT" >> "$GITHUB_ENV"

      - name: Upload staged npm package artifact
        uses: actions/upload-artifact@v4
        with:
          name: codex-npm-staging
          path: ${{ steps.stage_npm_package.outputs.pack_output }}
          path: ${{ env.PACK_OUTPUT }}

      - name: Ensure root README.md contains only ASCII and certain Unicode code points
        run: ./scripts/asciicheck.py README.md
.github/workflows/rust-release.yml (vendored, 34 changes)
@@ -97,7 +97,7 @@ jobs:
          sudo apt install -y musl-tools pkg-config

      - name: Cargo build
        run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy
        run: cargo build --target ${{ matrix.target }} --release --bin codex

      - name: Stage artifacts
        shell: bash
@@ -107,10 +107,8 @@

          if [[ "${{ matrix.runner }}" == windows* ]]; then
            cp target/${{ matrix.target }}/release/codex.exe "$dest/codex-${{ matrix.target }}.exe"
            cp target/${{ matrix.target }}/release/codex-responses-api-proxy.exe "$dest/codex-responses-api-proxy-${{ matrix.target }}.exe"
          else
            cp target/${{ matrix.target }}/release/codex "$dest/codex-${{ matrix.target }}"
            cp target/${{ matrix.target }}/release/codex-responses-api-proxy "$dest/codex-responses-api-proxy-${{ matrix.target }}"
          fi

      - if: ${{ matrix.runner == 'windows-11-arm' }}
@@ -218,30 +216,17 @@

      # build_npm_package.py requires DotSlash when staging releases.
      - uses: facebook/install-dotslash@v2
      - name: Stage codex CLI npm package
      - name: Stage npm package
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -euo pipefail
          TMP_DIR="${RUNNER_TEMP}/npm-stage"
          ./codex-cli/scripts/build_npm_package.py \
            --package codex \
            --release-version "${{ steps.release_name.outputs.name }}" \
            --staging-dir "${TMP_DIR}" \
            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"

      - name: Stage responses API proxy npm package
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -euo pipefail
          TMP_DIR="${RUNNER_TEMP}/npm-stage-responses"
          ./codex-cli/scripts/build_npm_package.py \
            --package codex-responses-api-proxy \
            --release-version "${{ steps.release_name.outputs.name }}" \
            --staging-dir "${TMP_DIR}" \
            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-responses-api-proxy-npm-${{ steps.release_name.outputs.name }}.tgz"

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v2
        with:
@@ -284,7 +269,7 @@
      - name: Update npm
        run: npm install -g npm@latest

      - name: Download npm tarballs from release
      - name: Download npm tarball from release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
@@ -296,10 +281,6 @@
            --repo "${GITHUB_REPOSITORY}" \
            --pattern "codex-npm-${version}.tgz" \
            --dir dist/npm
          gh release download "$tag" \
            --repo "${GITHUB_REPOSITORY}" \
            --pattern "codex-responses-api-proxy-npm-${version}.tgz" \
            --dir dist/npm

      # No NODE_AUTH_TOKEN needed because we use OIDC.
      - name: Publish to npm
@@ -313,14 +294,7 @@
            tag_args+=(--tag "${NPM_TAG}")
          fi

          tarballs=(
            "codex-npm-${VERSION}.tgz"
            "codex-responses-api-proxy-npm-${VERSION}.tgz"
          )

          for tarball in "${tarballs[@]}"; do
            npm publish "${GITHUB_WORKSPACE}/dist/npm/${tarball}" "${tag_args[@]}"
          done
          npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${VERSION}.tgz" "${tag_args[@]}"

  update-branch:
    name: Update latest-alpha-cli branch
@@ -1,3 +1,4 @@
<h1 align="center">OpenAI Codex CLI</h1>

<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install codex</code></p>

@@ -101,3 +102,4 @@ Codex CLI supports a rich set of configuration options, with preferences stored
## License

This repository is licensed under the [Apache-2.0 License](LICENSE).

@@ -208,7 +208,7 @@ The hardening mechanism Codex uses depends on your OS:
| Requirement | Details |
| --------------------------- | --------------------------------------------------------------- |
| Operating systems | macOS 12+, Ubuntu 20.04+/Debian 10+, or Windows 11 **via WSL2** |
| Node.js | **16 or newer** (Node 20 LTS recommended) |
| Node.js | **22 or newer** (LTS recommended) |
| Git (optional, recommended) | 2.23+ for built-in PR helpers |
| RAM | 4-GB minimum (8-GB recommended) |

@@ -513,7 +513,7 @@ Codex runs model-generated commands in a sandbox. If a proposed command or file
<details>
<summary>Does it work on Windows?</summary>

Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex is regularly tested on macOS and Linux with Node 20+, and also supports Node 16.
Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex has been tested on macOS and Linux with Node 22.

</details>
@@ -1,7 +1,6 @@
#!/usr/bin/env node
// Unified entry point for the Codex CLI.

import { spawn } from "node:child_process";
import { existsSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";
@@ -69,6 +68,7 @@ const binaryPath = path.join(archRoot, "codex", codexBinaryName);
// executing. This allows us to forward those signals to the child process
// and guarantees that when either the child terminates or the parent
// receives a fatal signal, both processes exit in a predictable manner.
const { spawn } = await import("child_process");

function getUpdatedPath(newDirs) {
  const pathSep = process.platform === "win32" ? ";" : ":";
codex-cli/package-lock.json (generated, 2 changes)
@@ -11,7 +11,7 @@
      "codex": "bin/codex.js"
    },
    "engines": {
      "node": ">=16"
      "node": ">=20"
    }
  }
}

@@ -7,7 +7,7 @@
  },
  "type": "module",
  "engines": {
    "node": ">=16"
    "node": ">=20"
  },
  "files": [
    "bin",
@@ -13,7 +13,6 @@ from pathlib import Path
SCRIPT_DIR = Path(__file__).resolve().parent
CODEX_CLI_ROOT = SCRIPT_DIR.parent
REPO_ROOT = CODEX_CLI_ROOT.parent
RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm"
GITHUB_REPO = "openai/codex"

# The docs are not clear on what the expected value/format of
@@ -24,12 +23,6 @@ WORKFLOW_NAME = ".github/workflows/rust-release.yml"

def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
    parser.add_argument(
        "--package",
        choices=("codex", "codex-responses-api-proxy"),
        default="codex",
        help="Which npm package to stage (default: codex).",
    )
    parser.add_argument(
        "--version",
        help="Version number to write to package.json inside the staged package.",
@@ -70,7 +63,6 @@ def parse_args() -> argparse.Namespace:
def main() -> int:
    args = parse_args()

    package = args.package
    version = args.version
    release_version = args.release_version
    if release_version:
@@ -84,7 +76,7 @@ def main() -> int:
    staging_dir, created_temp = prepare_staging_dir(args.staging_dir)

    try:
        stage_sources(staging_dir, version, package)
        stage_sources(staging_dir, version)

        workflow_url = args.workflow_url
        resolved_head_sha: str | None = None
@@ -108,23 +100,16 @@ def main() -> int:
        if not workflow_url:
            raise RuntimeError("Unable to determine workflow URL for native binaries.")

        install_native_binaries(staging_dir, workflow_url, package)
        install_native_binaries(staging_dir, workflow_url)

        if release_version:
            staging_dir_str = str(staging_dir)
            if package == "codex":
                print(
                    f"Staged version {version} for release in {staging_dir_str}\n\n"
                    "Verify the CLI:\n"
                    f"  node {staging_dir_str}/bin/codex.js --version\n"
                    f"  node {staging_dir_str}/bin/codex.js --help\n\n"
                )
            else:
                print(
                    f"Staged version {version} for release in {staging_dir_str}\n\n"
                    "Verify the responses API proxy:\n"
                    f"  node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n"
                )
            print(
                f"Staged version {version} for release in {staging_dir_str}\n\n"
                "Verify the CLI:\n"
                f"  node {staging_dir_str}/bin/codex.js --version\n"
                f"  node {staging_dir_str}/bin/codex.js --help\n\n"
            )
        else:
            print(f"Staged package in {staging_dir}")

@@ -151,34 +136,20 @@ def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
    return temp_dir, True


def stage_sources(staging_dir: Path, version: str, package: str) -> None:
def stage_sources(staging_dir: Path, version: str) -> None:
    bin_dir = staging_dir / "bin"
    bin_dir.mkdir(parents=True, exist_ok=True)

    if package == "codex":
        shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
        rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
        if rg_manifest.exists():
            shutil.copy2(rg_manifest, bin_dir / "rg")
    shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
    rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
    if rg_manifest.exists():
        shutil.copy2(rg_manifest, bin_dir / "rg")

        readme_src = REPO_ROOT / "README.md"
        if readme_src.exists():
            shutil.copy2(readme_src, staging_dir / "README.md")
    readme_src = REPO_ROOT / "README.md"
    if readme_src.exists():
        shutil.copy2(readme_src, staging_dir / "README.md")

        package_json_path = CODEX_CLI_ROOT / "package.json"
    elif package == "codex-responses-api-proxy":
        launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js"
        shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js")

        readme_src = RESPONSES_API_PROXY_NPM_ROOT / "README.md"
        if readme_src.exists():
            shutil.copy2(readme_src, staging_dir / "README.md")

        package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json"
    else:
        raise RuntimeError(f"Unknown package '{package}'.")

    with open(package_json_path, "r", encoding="utf-8") as fh:
    with open(CODEX_CLI_ROOT / "package.json", "r", encoding="utf-8") as fh:
        package_json = json.load(fh)
    package_json["version"] = version

@@ -187,19 +158,10 @@ def stage_sources(staging_dir: Path, version: str, package: str) -> None:
        out.write("\n")


def install_native_binaries(staging_dir: Path, workflow_url: str, package: str) -> None:
    package_components = {
        "codex": ["codex", "rg"],
        "codex-responses-api-proxy": ["codex-responses-api-proxy"],
    }

    components = package_components.get(package)
    if components is None:
        raise RuntimeError(f"Unknown package '{package}'.")

    cmd = ["./scripts/install_native_deps.py", "--workflow-url", workflow_url]
    for component in components:
        cmd.extend(["--component", component])
def install_native_binaries(staging_dir: Path, workflow_url: str | None) -> None:
    cmd = ["./scripts/install_native_deps.py"]
    if workflow_url:
        cmd.extend(["--workflow-url", workflow_url])
    cmd.append(str(staging_dir))
    subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT)
@@ -9,7 +9,6 @@ import subprocess
import tarfile
import tempfile
import zipfile
from dataclasses import dataclass
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from typing import Iterable, Sequence
@@ -21,7 +20,7 @@ CODEX_CLI_ROOT = SCRIPT_DIR.parent
DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351"  # rust-v0.40.0
VENDOR_DIR_NAME = "vendor"
RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
BINARY_TARGETS = (
CODEX_TARGETS = (
    "x86_64-unknown-linux-musl",
    "aarch64-unknown-linux-musl",
    "x86_64-apple-darwin",
@@ -30,27 +29,6 @@ BINARY_TARGETS = (
    "aarch64-pc-windows-msvc",
)


@dataclass(frozen=True)
class BinaryComponent:
    artifact_prefix: str  # matches the artifact filename prefix (e.g. codex-<target>.zst)
    dest_dir: str  # directory under vendor/<target>/ where the binary is installed
    binary_basename: str  # executable name inside dest_dir (before optional .exe)


BINARY_COMPONENTS = {
    "codex": BinaryComponent(
        artifact_prefix="codex",
        dest_dir="codex",
        binary_basename="codex",
    ),
    "codex-responses-api-proxy": BinaryComponent(
        artifact_prefix="codex-responses-api-proxy",
        dest_dir="codex-responses-api-proxy",
        binary_basename="codex-responses-api-proxy",
    ),
}

RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
    ("x86_64-unknown-linux-musl", "linux-x86_64"),
    ("aarch64-unknown-linux-musl", "linux-aarch64"),
@@ -72,16 +50,6 @@ def parse_args() -> argparse.Namespace:
            "known good run when omitted."
        ),
    )
    parser.add_argument(
        "--component",
        dest="components",
        action="append",
        choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
        help=(
            "Limit installation to the specified components."
            " May be repeated. Defaults to 'codex' and 'rg'."
        ),
    )
    parser.add_argument(
        "root",
        nargs="?",
@@ -101,28 +69,18 @@ def main() -> int:
    vendor_dir = codex_cli_root / VENDOR_DIR_NAME
    vendor_dir.mkdir(parents=True, exist_ok=True)

    components = args.components or ["codex", "rg"]

    workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
    if not workflow_url:
        workflow_url = DEFAULT_WORKFLOW_URL

    workflow_id = workflow_url.rstrip("/").split("/")[-1]
    print(f"Downloading native artifacts from workflow {workflow_id}...")

    with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
        artifacts_dir = Path(artifacts_dir_str)
        _download_artifacts(workflow_id, artifacts_dir)
        install_binary_components(
            artifacts_dir,
            vendor_dir,
            BINARY_TARGETS,
            [name for name in components if name in BINARY_COMPONENTS],
        )
        install_codex_binaries(artifacts_dir, vendor_dir, CODEX_TARGETS)

    if "rg" in components:
        print("Fetching ripgrep binaries...")
        fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
    fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)

    print(f"Installed native dependencies into {vendor_dir}")
    return 0
@@ -166,8 +124,6 @@ def fetch_rg(
    results: dict[str, Path] = {}
    max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1)))

    print("Installing ripgrep binaries for targets: " + ", ".join(targets))

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_map = {
            executor.submit(
@@ -184,7 +140,6 @@ def fetch_rg(
        for future in as_completed(future_map):
            target = future_map[future]
            results[target] = future.result()
            print(f"  installed ripgrep for {target}")

    return [results[target] for target in targets]

@@ -203,60 +158,40 @@ def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
    subprocess.check_call(cmd)


def install_binary_components(
    artifacts_dir: Path,
    vendor_dir: Path,
    targets: Iterable[str],
    component_names: Sequence[str],
) -> None:
    selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS]
    if not selected_components:
        return

def install_codex_binaries(
    artifacts_dir: Path, vendor_dir: Path, targets: Iterable[str]
) -> list[Path]:
    targets = list(targets)
    if not targets:
        return
        return []

    for component in selected_components:
        print(
            f"Installing {component.binary_basename} binaries for targets: "
            + ", ".join(targets)
        )
        max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = {
                executor.submit(
                    _install_single_binary,
                    artifacts_dir,
                    vendor_dir,
                    target,
                    component,
                ): target
                for target in targets
            }
            for future in as_completed(futures):
                installed_path = future.result()
                print(f"  installed {installed_path}")
    results: dict[str, Path] = {}
    max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_map = {
            executor.submit(_install_single_codex_binary, artifacts_dir, vendor_dir, target): target
            for target in targets
        }

        for future in as_completed(future_map):
            target = future_map[future]
            results[target] = future.result()

    return [results[target] for target in targets]


def _install_single_binary(
    artifacts_dir: Path,
    vendor_dir: Path,
    target: str,
    component: BinaryComponent,
) -> Path:
def _install_single_codex_binary(artifacts_dir: Path, vendor_dir: Path, target: str) -> Path:
    artifact_subdir = artifacts_dir / target
    archive_name = _archive_name_for_target(component.artifact_prefix, target)
    archive_name = _archive_name_for_target(target)
    archive_path = artifact_subdir / archive_name
    if not archive_path.exists():
        raise FileNotFoundError(f"Expected artifact not found: {archive_path}")

    dest_dir = vendor_dir / target / component.dest_dir
    dest_dir = vendor_dir / target / "codex"
    dest_dir.mkdir(parents=True, exist_ok=True)

    binary_name = (
        f"{component.binary_basename}.exe" if "windows" in target else component.binary_basename
    )
    binary_name = "codex.exe" if "windows" in target else "codex"
    dest = dest_dir / binary_name
    dest.unlink(missing_ok=True)
    extract_archive(archive_path, "zst", None, dest)
@@ -265,10 +200,10 @@ def _install_single_binary(
    return dest


def _archive_name_for_target(artifact_prefix: str, target: str) -> str:
def _archive_name_for_target(target: str) -> str:
    if "windows" in target:
        return f"{artifact_prefix}-{target}.exe.zst"
    return f"{artifact_prefix}-{target}.zst"
        return f"codex-{target}.exe.zst"
    return f"codex-{target}.zst"


def _fetch_single_rg(
codex-rs/Cargo.lock (generated, 969 changes)
File diff suppressed because it is too large
@@ -1,8 +1,12 @@
[workspace]
members = [
    "backend-client",
    "ansi-escape",
    "apply-patch",
    "arg0",
    "codex-backend-openapi-models",
    "cloud-tasks",
    "cloud-tasks-client",
    "cli",
    "common",
    "core",
@@ -16,12 +20,12 @@ members = [
    "mcp-server",
    "mcp-types",
    "ollama",
    "process-hardening",
    "protocol",
    "protocol-ts",
    "rmcp-client",
    "responses-api-proxy",
    "tui",
    "git-apply",
    "utils/readiness",
]
resolver = "2"
@@ -50,10 +54,10 @@ codex-login = { path = "login" }
codex-mcp-client = { path = "mcp-client" }
codex-mcp-server = { path = "mcp-server" }
codex-ollama = { path = "ollama" }
codex-process-hardening = { path = "process-hardening" }
codex-protocol = { path = "protocol" }
codex-protocol-ts = { path = "protocol-ts" }
codex-rmcp-client = { path = "rmcp-client" }
codex-protocol-ts = { path = "protocol-ts" }
codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-tui = { path = "tui" }
codex-utils-readiness = { path = "utils/readiness" }
core_test_support = { path = "core/tests/common" }
@@ -84,8 +88,8 @@ dirs = "6"
dotenvy = "0.15.7"
env-flags = "0.1.1"
env_logger = "0.11.5"
escargot = "0.5"
eventsource-stream = "0.2.3"
escargot = "0.5"
futures = "0.3"
icu_decimal = "2.0.0"
icu_locale_core = "2.0.0"
@@ -4,18 +4,18 @@ We provide Codex CLI as a standalone, native executable to ensure a zero-depende

## Installing Codex

Today, the easiest way to install Codex is via `npm`:
Today, the easiest way to install Codex is via `npm`, though we plan to publish Codex to other package managers soon.

```shell
npm i -g @openai/codex
npm i -g @openai/codex@native
codex
```

You can also install via Homebrew (`brew install codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).
You can also download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).

## What's new in the Rust CLI

The Rust implementation is now the maintained Codex CLI and serves as the default experience. It includes a number of features that the legacy TypeScript CLI never supported.
While we are [working to close the gap between the TypeScript and Rust implementations of Codex CLI](https://github.com/openai/codex/issues/1262), note that the Rust CLI has a number of features that the TypeScript CLI does not!

### Config
codex-rs/backend-client/Cargo.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "codex-backend-client"
version = "0.0.0"
edition = "2024"
publish = false

[lib]
path = "src/lib.rs"

[dependencies]
anyhow = "1"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
tokio = { version = "1", features = ["macros", "rt"] }
codex-backend-openapi-models = { path = "../codex-backend-openapi-models" }
codex-rs/backend-client/src/client.rs (new file, 242 lines)
@@ -0,0 +1,242 @@
use crate::types::CodeTaskDetailsResponse;
use crate::types::PaginatedListTaskListItem;
use crate::types::TurnAttemptsSiblingTurnsResponse;
use anyhow::Result;
use reqwest::header::AUTHORIZATION;
use reqwest::header::CONTENT_TYPE;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;
use reqwest::header::USER_AGENT;
use serde::de::DeserializeOwned;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PathStyle {
    CodexApi,   // /api/codex/...
    ChatGptApi, // /wham/...
}

impl PathStyle {
    pub fn from_base_url(base_url: &str) -> Self {
        if base_url.contains("/backend-api") {
            PathStyle::ChatGptApi
        } else {
            PathStyle::CodexApi
        }
    }
}

#[derive(Clone, Debug)]
pub struct Client {
    base_url: String,
    http: reqwest::Client,
    bearer_token: Option<String>,
    user_agent: Option<HeaderValue>,
    chatgpt_account_id: Option<String>,
    path_style: PathStyle,
}

impl Client {
    pub fn new(base_url: impl Into<String>) -> Result<Self> {
        let mut base_url = base_url.into();
        // Normalize common ChatGPT hostnames to include /backend-api so we hit the WHAM paths.
        // Also trim trailing slashes for consistent URL building.
        while base_url.ends_with('/') {
            base_url.pop();
        }
        if (base_url.starts_with("https://chatgpt.com")
            || base_url.starts_with("https://chat.openai.com"))
            && !base_url.contains("/backend-api")
        {
            base_url = format!("{base_url}/backend-api");
        }
        let http = reqwest::Client::builder().build()?;
        let path_style = PathStyle::from_base_url(&base_url);
        Ok(Self {
            base_url,
            http,
            bearer_token: None,
            user_agent: None,
            chatgpt_account_id: None,
            path_style,
        })
    }

    pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
        self.bearer_token = Some(token.into());
        self
    }

    pub fn with_user_agent(mut self, ua: impl Into<String>) -> Self {
        if let Ok(hv) = HeaderValue::from_str(&ua.into()) {
            self.user_agent = Some(hv);
        }
        self
    }

    pub fn with_chatgpt_account_id(mut self, account_id: impl Into<String>) -> Self {
        self.chatgpt_account_id = Some(account_id.into());
        self
    }

    pub fn with_path_style(mut self, style: PathStyle) -> Self {
        self.path_style = style;
        self
    }

    fn headers(&self) -> HeaderMap {
        let mut h = HeaderMap::new();
        if let Some(ua) = &self.user_agent {
            h.insert(USER_AGENT, ua.clone());
        } else {
            h.insert(USER_AGENT, HeaderValue::from_static("codex-cli"));
        }
        if let Some(token) = &self.bearer_token {
            let value = format!("Bearer {token}");
            if let Ok(hv) = HeaderValue::from_str(&value) {
                h.insert(AUTHORIZATION, hv);
            }
        }
        if let Some(acc) = &self.chatgpt_account_id
            && let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
            && let Ok(hv) = HeaderValue::from_str(acc)
        {
            h.insert(name, hv);
        }
        h
    }

    async fn exec_request(
        &self,
        req: reqwest::RequestBuilder,
        method: &str,
        url: &str,
    ) -> Result<(String, String)> {
        let res = req.send().await?;
        let status = res.status();
        let ct = res
            .headers()
            .get(CONTENT_TYPE)
            .and_then(|v| v.to_str().ok())
            .unwrap_or("")
            .to_string();
        let body = res.text().await.unwrap_or_default();
        if !status.is_success() {
            anyhow::bail!("{method} {url} failed: {status}; content-type={ct}; body={body}");
        }
        Ok((body, ct))
    }

    fn decode_json<T: DeserializeOwned>(&self, url: &str, ct: &str, body: &str) -> Result<T> {
        match serde_json::from_str::<T>(body) {
            Ok(v) => Ok(v),
            Err(e) => {
                anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}");
            }
        }
    }

    pub async fn list_tasks(
        &self,
        limit: Option<i32>,
        task_filter: Option<&str>,
        environment_id: Option<&str>,
    ) -> Result<PaginatedListTaskListItem> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!("{}/api/codex/tasks/list", self.base_url),
            PathStyle::ChatGptApi => format!("{}/wham/tasks/list", self.base_url),
        };
        let req = self.http.get(&url).headers(self.headers());
        let req = if let Some(lim) = limit {
            req.query(&[("limit", lim)])
        } else {
            req
        };
        let req = if let Some(tf) = task_filter {
            req.query(&[("task_filter", tf)])
        } else {
            req
        };
        let req = if let Some(id) = environment_id {
            req.query(&[("environment_id", id)])
        } else {
            req
        };
        let (body, ct) = self.exec_request(req, "GET", &url).await?;
        self.decode_json::<PaginatedListTaskListItem>(&url, &ct, &body)
    }

    pub async fn get_task_details(&self, task_id: &str) -> Result<CodeTaskDetailsResponse> {
        let (parsed, _body, _ct) = self.get_task_details_with_body(task_id).await?;
        Ok(parsed)
    }

    pub async fn get_task_details_with_body(
        &self,
        task_id: &str,
    ) -> Result<(CodeTaskDetailsResponse, String, String)> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!("{}/api/codex/tasks/{}", self.base_url, task_id),
            PathStyle::ChatGptApi => format!("{}/wham/tasks/{}", self.base_url, task_id),
        };
        let req = self.http.get(&url).headers(self.headers());
        let (body, ct) = self.exec_request(req, "GET", &url).await?;
        let parsed: CodeTaskDetailsResponse = self.decode_json(&url, &ct, &body)?;
        Ok((parsed, body, ct))
    }

    pub async fn list_sibling_turns(
        &self,
        task_id: &str,
        turn_id: &str,
    ) -> Result<TurnAttemptsSiblingTurnsResponse> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!(
                "{}/api/codex/tasks/{}/turns/{}/sibling_turns",
                self.base_url, task_id, turn_id
            ),
            PathStyle::ChatGptApi => format!(
                "{}/wham/tasks/{}/turns/{}/sibling_turns",
                self.base_url, task_id, turn_id
            ),
        };
        let req = self.http.get(&url).headers(self.headers());
        let (body, ct) = self.exec_request(req, "GET", &url).await?;
        self.decode_json::<TurnAttemptsSiblingTurnsResponse>(&url, &ct, &body)
    }

    /// Create a new task (user turn) by POSTing to the appropriate backend path
    /// based on `path_style`. Returns the created task id.
    pub async fn create_task(&self, request_body: serde_json::Value) -> Result<String> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!("{}/api/codex/tasks", self.base_url),
            PathStyle::ChatGptApi => format!("{}/wham/tasks", self.base_url),
        };
        let req = self
            .http
            .post(&url)
            .headers(self.headers())
            .header(CONTENT_TYPE, HeaderValue::from_static("application/json"))
            .json(&request_body);
        let (body, ct) = self.exec_request(req, "POST", &url).await?;
        // Extract id from JSON: prefer `task.id`; fallback to top-level `id` when present.
        match serde_json::from_str::<serde_json::Value>(&body) {
            Ok(v) => {
                if let Some(id) = v
                    .get("task")
                    .and_then(|t| t.get("id"))
                    .and_then(|s| s.as_str())
                {
                    Ok(id.to_string())
                } else if let Some(id) = v.get("id").and_then(|s| s.as_str()) {
                    Ok(id.to_string())
                } else {
                    anyhow::bail!(
                        "POST {url} succeeded but no task id found; content-type={ct}; body={body}"
                    );
                }
            }
            Err(e) => anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}"),
        }
    }
}
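Taken together, the builder methods in this new file support call sites like the following — a minimal sketch, not code from this change-set. The token and account id are placeholder values, and it assumes the generated `PaginatedListTaskListItem` model derives `Debug`:

```rust
use codex_backend_client::Client;

// current_thread flavor matches the crate's tokio features ("macros", "rt").
#[tokio::main(flavor = "current_thread")]
async fn main() -> anyhow::Result<()> {
    // The base URL is normalized to https://chatgpt.com/backend-api, which
    // makes PathStyle::from_base_url select the ChatGptApi (/wham/...) paths.
    let client = Client::new("https://chatgpt.com")?
        .with_bearer_token("<oauth-access-token>") // placeholder
        .with_user_agent("codex-cli")
        .with_chatgpt_account_id("<account-id>"); // placeholder

    // Issues GET {base}/wham/tasks/list?limit=10 with the auth headers built
    // by Client::headers().
    let tasks = client.list_tasks(Some(10), None, None).await?;
    println!("{tasks:?}");
    Ok(())
}
```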
codex-rs/backend-client/src/lib.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
mod client;
pub mod types;

pub use client::Client;
pub use types::CodeTaskDetailsResponse;
pub use types::CodeTaskDetailsResponseExt;
pub use types::PaginatedListTaskListItem;
pub use types::TaskListItem;
pub use types::TurnAttemptsSiblingTurnsResponse;
codex-rs/backend-client/src/types.rs (new file, 141 lines)
@@ -0,0 +1,141 @@
pub use codex_backend_openapi_models::models::CodeTaskDetailsResponse;
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
pub use codex_backend_openapi_models::models::TaskListItem;

use serde::Deserialize;
use serde_json::Value;

/// Extension helpers on generated types.
pub trait CodeTaskDetailsResponseExt {
    /// Attempt to extract a unified diff string from `current_diff_task_turn`.
    fn unified_diff(&self) -> Option<String>;
    /// Extract assistant text output messages (no diff) from current turns.
    fn assistant_text_messages(&self) -> Vec<String>;
    /// Extract the user's prompt text from the current user turn, when present.
    fn user_text_prompt(&self) -> Option<String>;
    /// Extract an assistant error message (if the turn failed and provided one).
    fn assistant_error_message(&self) -> Option<String>;
}

impl CodeTaskDetailsResponseExt for CodeTaskDetailsResponse {
    fn unified_diff(&self) -> Option<String> {
        // `current_diff_task_turn` is an object; look for `output_items`.
        // Prefer explicit diff turn; fallback to assistant turn if needed.
        let candidates: [&Option<std::collections::HashMap<String, Value>>; 2] =
            [&self.current_diff_task_turn, &self.current_assistant_turn];

        for map in candidates {
            let items = map
                .as_ref()
                .and_then(|m| m.get("output_items"))
                .and_then(|v| v.as_array());
            if let Some(items) = items {
                for item in items {
                    match item.get("type").and_then(Value::as_str) {
                        Some("output_diff") => {
                            if let Some(s) = item.get("diff").and_then(Value::as_str) {
                                return Some(s.to_string());
                            }
                        }
                        Some("pr") => {
                            if let Some(s) = item
                                .get("output_diff")
                                .and_then(|od| od.get("diff"))
                                .and_then(Value::as_str)
                            {
                                return Some(s.to_string());
                            }
                        }
                        _ => {}
                    }
                }
            }
        }
        None
    }

    fn assistant_text_messages(&self) -> Vec<String> {
        let mut out = Vec::new();
        let candidates: [&Option<std::collections::HashMap<String, Value>>; 2] =
            [&self.current_diff_task_turn, &self.current_assistant_turn];
        for map in candidates {
            let items = map
                .as_ref()
                .and_then(|m| m.get("output_items"))
                .and_then(|v| v.as_array());
            if let Some(items) = items {
                for item in items {
                    if item.get("type").and_then(Value::as_str) == Some("message")
                        && let Some(content) = item.get("content").and_then(Value::as_array)
                    {
                        for part in content {
                            if part.get("content_type").and_then(Value::as_str) == Some("text")
                                && let Some(txt) = part.get("text").and_then(Value::as_str)
                            {
                                out.push(txt.to_string());
                            }
                        }
                    }
                }
            }
        }
        out
    }

    fn user_text_prompt(&self) -> Option<String> {
        use serde_json::Value;
        let map = self.current_user_turn.as_ref()?;
        let items = map.get("input_items").and_then(Value::as_array)?;
        let mut parts: Vec<String> = Vec::new();
        for item in items {
            if item.get("type").and_then(Value::as_str) == Some("message") {
                // optional role filter (prefer user)
                let is_user = item
                    .get("role")
                    .and_then(Value::as_str)
                    .map(|r| r.eq_ignore_ascii_case("user"))
                    .unwrap_or(true);
                if !is_user {
                    continue;
                }
                if let Some(content) = item.get("content").and_then(Value::as_array) {
                    for c in content {
                        if c.get("content_type").and_then(Value::as_str) == Some("text")
                            && let Some(txt) = c.get("text").and_then(Value::as_str)
                        {
                            parts.push(txt.to_string());
                        }
                    }
                }
            }
        }
        if parts.is_empty() {
            None
        } else {
            Some(parts.join("\n\n"))
        }
    }

    fn assistant_error_message(&self) -> Option<String> {
        let map = self.current_assistant_turn.as_ref()?;
        let err = map.get("error")?.as_object()?;
        let message = err.get("message").and_then(Value::as_str).unwrap_or("");
        let code = err.get("code").and_then(Value::as_str).unwrap_or("");
        if message.is_empty() && code.is_empty() {
            None
        } else if message.is_empty() {
            Some(code.to_string())
        } else if code.is_empty() {
            Some(message.to_string())
        } else {
            Some(format!("{code}: {message}"))
        }
    }
}

// Removed unused helpers `single_file_paths` and `extract_file_paths_list` to reduce
// surface area; reintroduce as needed near call sites.

#[derive(Clone, Debug, Deserialize)]
pub struct TurnAttemptsSiblingTurnsResponse {
    #[serde(default)]
    pub sibling_turns: Vec<std::collections::HashMap<String, Value>>,
}
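A short sketch of how the extension trait is meant to be consumed alongside the client — illustrative only, with a placeholder task id:

```rust
use codex_backend_client::{Client, CodeTaskDetailsResponseExt};

async fn show_task(client: &Client, task_id: &str) -> anyhow::Result<()> {
    let details = client.get_task_details(task_id).await?;
    if let Some(diff) = details.unified_diff() {
        // A unified diff recovered from the turn's output_items, in a form
        // suitable for handing to `git apply`.
        println!("{diff}");
    } else if let Some(err) = details.assistant_error_message() {
        eprintln!("turn failed: {err}");
    } else {
        for msg in details.assistant_text_messages() {
            println!("{msg}");
        }
    }
    Ok(())
}
```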
@@ -14,6 +14,7 @@ codex-core = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["full"] }
codex-git-apply = { path = "../git-apply" }

[dev-dependencies]
tempfile = { workspace = true }
@@ -56,46 +56,24 @@ pub async fn apply_diff_from_task(
}

async fn apply_diff(diff: &str, cwd: Option<PathBuf>) -> anyhow::Result<()> {
    let mut cmd = tokio::process::Command::new("git");
    if let Some(cwd) = cwd {
        cmd.current_dir(cwd);
    }
    let toplevel_output = cmd
        .args(vec!["rev-parse", "--show-toplevel"])
        .output()
        .await?;

    if !toplevel_output.status.success() {
        anyhow::bail!("apply must be run from a git repository.");
    }

    let repo_root = String::from_utf8(toplevel_output.stdout)?
        .trim()
        .to_string();

    let mut git_apply_cmd = tokio::process::Command::new("git")
        .args(vec!["apply", "--3way"])
        .current_dir(&repo_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;

    if let Some(mut stdin) = git_apply_cmd.stdin.take() {
        tokio::io::AsyncWriteExt::write_all(&mut stdin, diff.as_bytes()).await?;
        drop(stdin);
    }

    let output = git_apply_cmd.wait_with_output().await?;

    if !output.status.success() {
    let cwd = cwd.unwrap_or(std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()));
    let req = codex_git_apply::ApplyGitRequest {
        cwd,
        diff: diff.to_string(),
        revert: false,
        preflight: false,
    };
    let res = codex_git_apply::apply_git_patch(&req)?;
    if res.exit_code != 0 {
        anyhow::bail!(
            "Git apply failed with status {}: {}",
            output.status,
            String::from_utf8_lossy(&output.stderr)
            "Git apply failed (applied={}, skipped={}, conflicts={})\nstdout:\n{}\nstderr:\n{}",
            res.applied_paths.len(),
            res.skipped_paths.len(),
            res.conflicted_paths.len(),
            res.stdout,
            res.stderr
        );
    }

    println!("Successfully applied diff");
    Ok(())
}
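For reference, the new code path boils down to this usage of `codex-git-apply` — a minimal sketch assuming the field semantics suggested by the names (`revert` undoes a patch, `preflight` is a dry run), which the diff itself does not spell out:

```rust
use std::path::PathBuf;

use codex_git_apply::{apply_git_patch, ApplyGitRequest};

fn apply(diff: &str, repo: PathBuf) -> anyhow::Result<()> {
    let req = ApplyGitRequest {
        cwd: repo,
        diff: diff.to_string(),
        revert: false,    // apply rather than undo (assumed semantics)
        preflight: false, // write changes instead of only checking (assumed)
    };
    let res = apply_git_patch(&req)?;
    if res.exit_code != 0 {
        anyhow::bail!("apply failed; conflicted paths: {:?}", res.conflicted_paths);
    }
    println!("applied {} path(s)", res.applied_paths.len());
    Ok(())
}
```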
@@ -44,6 +44,6 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
    } else {
        let status = response.status();
        let body = response.text().await.unwrap_or_default();
        anyhow::bail!("Request failed with status {}: {}", status, body)
        anyhow::bail!("Request failed with status {status}: {body}")
    }
}
@@ -25,10 +25,11 @@ codex-core = { workspace = true }
codex-exec = { workspace = true }
codex-login = { workspace = true }
codex-mcp-server = { workspace = true }
codex-process-hardening = { workspace = true }
codex-protocol = { workspace = true }
codex-protocol-ts = { workspace = true }
codex-responses-api-proxy = { workspace = true }
codex-tui = { workspace = true }
codex-cloud-tasks = { path = "../cloud-tasks" }
ctor = { workspace = true }
owo-colors = { workspace = true }
serde_json = { workspace = true }
@@ -43,6 +44,15 @@ tokio = { workspace = true, features = [
tracing = { workspace = true }
tracing-subscriber = { workspace = true }

[target.'cfg(target_os = "linux")'.dependencies]
libc = { workspace = true }

[target.'cfg(target_os = "android")'.dependencies]
libc = { workspace = true }

[target.'cfg(target_os = "macos")'.dependencies]
libc = { workspace = true }

[dev-dependencies]
assert_cmd = { workspace = true }
predicates = { workspace = true }
@@ -1,3 +1,4 @@
use anyhow::Context;
use clap::CommandFactory;
use clap::Parser;
use clap_complete::Shell;
@@ -12,8 +13,10 @@ use codex_cli::login::run_login_with_api_key;
use codex_cli::login::run_login_with_chatgpt;
use codex_cli::login::run_logout;
use codex_cli::proto;
use codex_cloud_tasks::Cli as CloudTasksCli;
use codex_common::CliConfigOverrides;
use codex_exec::Cli as ExecCli;
use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
use codex_tui::AppExitInfo;
use codex_tui::Cli as TuiCli;
use owo_colors::OwoColorize;
@@ -21,6 +24,7 @@ use std::path::PathBuf;
use supports_color::Stream;

mod mcp_cmd;
mod pre_main_hardening;

use crate::mcp_cmd::McpCli;
use crate::proto::ProtoCli;
@@ -85,6 +89,14 @@ enum Subcommand {
    /// Internal: generate TypeScript protocol bindings.
    #[clap(hide = true)]
    GenerateTs(GenerateTsCommand),

    /// Browse and apply tasks from the cloud.
    #[clap(name = "cloud", alias = "cloud-tasks")]
    Cloud(CloudTasksCli),

    /// Internal: run the responses API proxy.
    #[clap(hide = true)]
    ResponsesApiProxy(ResponsesApiProxyArgs),
}

#[derive(Debug, Parser)]
@@ -181,7 +193,7 @@ fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec<Stri
    } else {
        resume_cmd
    };
    lines.push(format!("To continue this session, run {command}"));
    lines.push(format!("To continue this session, run {command}."));
}

lines
@@ -206,7 +218,14 @@ fn pre_main_hardening() {
    };

    if secure_mode == "1" {
        codex_process_hardening::pre_main_hardening();
        #[cfg(any(target_os = "linux", target_os = "android"))]
        crate::pre_main_hardening::pre_main_hardening_linux();

        #[cfg(target_os = "macos")]
        crate::pre_main_hardening::pre_main_hardening_macos();

        #[cfg(windows)]
        crate::pre_main_hardening::pre_main_hardening_windows();
    }

    // Always clear this env var so child processes don't inherit it.
@@ -299,6 +318,13 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
        Some(Subcommand::Completion(completion_cli)) => {
            print_completion(completion_cli);
        }
        Some(Subcommand::Cloud(mut cloud_cli)) => {
            prepend_config_flags(
                &mut cloud_cli.config_overrides,
                root_config_overrides.clone(),
            );
            codex_cloud_tasks::run_main(cloud_cli, codex_linux_sandbox_exe).await?;
        }
        Some(Subcommand::Debug(debug_args)) => match debug_args.cmd {
            DebugCommand::Seatbelt(mut seatbelt_cli) => {
                prepend_config_flags(
@@ -333,6 +359,11 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
        Some(Subcommand::GenerateTs(gen_cli)) => {
            codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
        }
        Some(Subcommand::ResponsesApiProxy(args)) => {
            tokio::task::spawn_blocking(move || codex_responses_api_proxy::run_main(args))
                .await
                .context("responses-api-proxy blocking task panicked")??;
        }
    }

    Ok(())
@@ -481,7 +512,7 @@ mod tests {
        lines,
        vec![
            "Token usage: total=2 input=0 output=2".to_string(),
            "To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000"
            "To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000."
                .to_string(),
        ]
    );
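The `ResponsesApiProxy` arm above uses a pattern worth noting: a synchronous entrypoint is moved onto Tokio's blocking thread pool so it cannot stall the async runtime, and the double `?` unwraps first the `JoinError` (a panic in the task) and then the entrypoint's own `Result`. A minimal standalone sketch, with `run_main` standing in for any blocking function:

```rust
use anyhow::Context;

// Stand-in for a blocking entrypoint such as codex_responses_api_proxy::run_main.
fn run_main() -> anyhow::Result<()> {
    Ok(())
}

async fn run_blocking_subcommand() -> anyhow::Result<()> {
    tokio::task::spawn_blocking(run_main)
        .await // yields Result<anyhow::Result<()>, JoinError>
        .context("blocking task panicked")??;
    Ok(())
}
```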
@@ -1,3 +1,4 @@
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::path::PathBuf;

@@ -12,7 +13,6 @@ use codex_core::config::find_codex_home;
use codex_core::config::load_global_mcp_servers;
use codex_core::config::write_global_mcp_servers;
use codex_core::config_types::McpServerConfig;
use codex_core::config_types::McpServerTransportConfig;

/// [experimental] Launch Codex as an MCP server or manage configured MCP servers.
///
@@ -145,11 +145,9 @@ fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<(
    .with_context(|| format!("failed to load MCP servers from {}", codex_home.display()))?;

let new_entry = McpServerConfig {
    transport: McpServerTransportConfig::Stdio {
        command: command_bin,
        args: command_args,
        env: env_map,
    },
    command: command_bin,
    args: command_args,
    env: env_map,
    startup_timeout_sec: None,
    tool_timeout_sec: None,
};
@@ -203,25 +201,16 @@ fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Resul
let json_entries: Vec<_> = entries
    .into_iter()
    .map(|(name, cfg)| {
        let transport = match &cfg.transport {
            McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
                "type": "stdio",
                "command": command,
                "args": args,
                "env": env,
            }),
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                serde_json::json!({
                    "type": "streamable_http",
                    "url": url,
                    "bearer_token": bearer_token,
                })
            }
        };

        let env = cfg.env.as_ref().map(|env| {
            env.iter()
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect::<BTreeMap<_, _>>()
        });
        serde_json::json!({
            "name": name,
            "transport": transport,
            "command": cfg.command,
            "args": cfg.args,
            "env": env,
            "startup_timeout_sec": cfg
                .startup_timeout_sec
                .map(|timeout| timeout.as_secs_f64()),
@@ -241,111 +230,62 @@ fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Resul
        return Ok(());
    }

    let mut stdio_rows: Vec<[String; 4]> = Vec::new();
    let mut http_rows: Vec<[String; 3]> = Vec::new();

    let mut rows: Vec<[String; 4]> = Vec::new();
    for (name, cfg) in entries {
        match &cfg.transport {
            McpServerTransportConfig::Stdio { command, args, env } => {
                let args_display = if args.is_empty() {
                    "-".to_string()
                } else {
                    args.join(" ")
                };
                let env_display = match env.as_ref() {
                    None => "-".to_string(),
                    Some(map) if map.is_empty() => "-".to_string(),
                    Some(map) => {
                        let mut pairs: Vec<_> = map.iter().collect();
                        pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                        pairs
                            .into_iter()
                            .map(|(k, v)| format!("{k}={v}"))
                            .collect::<Vec<_>>()
                            .join(", ")
                    }
                };
                stdio_rows.push([name.clone(), command.clone(), args_display, env_display]);
            }
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                let has_bearer = if bearer_token.is_some() {
                    "True"
                } else {
                    "False"
                };
                http_rows.push([name.clone(), url.clone(), has_bearer.into()]);
        let args = if cfg.args.is_empty() {
            "-".to_string()
        } else {
            cfg.args.join(" ")
        };

        let env = match cfg.env.as_ref() {
            None => "-".to_string(),
            Some(map) if map.is_empty() => "-".to_string(),
            Some(map) => {
                let mut pairs: Vec<_> = map.iter().collect();
                pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                pairs
                    .into_iter()
                    .map(|(k, v)| format!("{k}={v}"))
                    .collect::<Vec<_>>()
                    .join(", ")
            }
        };

        rows.push([name.clone(), cfg.command.clone(), args, env]);
    }

    let mut widths = ["Name".len(), "Command".len(), "Args".len(), "Env".len()];
    for row in &rows {
        for (i, cell) in row.iter().enumerate() {
            widths[i] = widths[i].max(cell.len());
        }
    }

    if !stdio_rows.is_empty() {
        let mut widths = ["Name".len(), "Command".len(), "Args".len(), "Env".len()];
        for row in &stdio_rows {
            for (i, cell) in row.iter().enumerate() {
                widths[i] = widths[i].max(cell.len());
            }
        }
        println!(
            "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
            "Name",
            "Command",
            "Args",
            "Env",
            name_w = widths[0],
            cmd_w = widths[1],
            args_w = widths[2],
            env_w = widths[3],
        );

        for row in rows {
            println!(
                "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
                "Name",
                "Command",
                "Args",
                "Env",
                row[0],
                row[1],
                row[2],
                row[3],
                name_w = widths[0],
                cmd_w = widths[1],
                args_w = widths[2],
                env_w = widths[3],
            );

        for row in &stdio_rows {
            println!(
                "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
                row[0],
                row[1],
                row[2],
                row[3],
                name_w = widths[0],
                cmd_w = widths[1],
                args_w = widths[2],
                env_w = widths[3],
            );
        }
    }

    if !stdio_rows.is_empty() && !http_rows.is_empty() {
        println!();
    }

    if !http_rows.is_empty() {
        let mut widths = ["Name".len(), "Url".len(), "Has Bearer Token".len()];
        for row in &http_rows {
            for (i, cell) in row.iter().enumerate() {
                widths[i] = widths[i].max(cell.len());
            }
        }

        println!(
            "{:<name_w$} {:<url_w$} {:<token_w$}",
            "Name",
            "Url",
            "Has Bearer Token",
            name_w = widths[0],
            url_w = widths[1],
            token_w = widths[2],
        );

        for row in &http_rows {
            println!(
                "{:<name_w$} {:<url_w$} {:<token_w$}",
                row[0],
                row[1],
                row[2],
                name_w = widths[0],
                url_w = widths[1],
                token_w = widths[2],
            );
        }
    }

    Ok(())
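Both table renderers above lean on one idiom: measure each column's maximum width (starting from the header lengths), then left-align cells with `{:<width$}`. Extracted as a standalone sketch, generic over the column count:

```rust
// Prints a plain-text table with columns padded to their widest cell.
fn print_table<const N: usize>(headers: [&str; N], rows: &[[String; N]]) {
    // Start widths at the header lengths, then widen per cell.
    let mut widths: Vec<usize> = headers.iter().map(|h| h.len()).collect();
    for row in rows {
        for (i, cell) in row.iter().enumerate() {
            widths[i] = widths[i].max(cell.len());
        }
    }
    let print_row = |cells: &[&str]| {
        let line = cells
            .iter()
            .zip(&widths)
            .map(|(cell, &w)| format!("{cell:<w$}"))
            .collect::<Vec<_>>()
            .join(" ");
        println!("{}", line.trim_end());
    };
    print_row(&headers);
    for row in rows {
        let cells: Vec<&str> = row.iter().map(String::as_str).collect();
        print_row(&cells);
    }
}
```

Calling `print_table(["Name", "Url", "Has Bearer Token"], &rows)` reproduces the HTTP-server table layout shown in the removed code.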
@@ -361,22 +301,16 @@ fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<(
    };

    if get_args.json {
        let transport = match &server.transport {
            McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
                "type": "stdio",
                "command": command,
                "args": args,
                "env": env,
            }),
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => serde_json::json!({
                "type": "streamable_http",
                "url": url,
                "bearer_token": bearer_token,
            }),
        };
        let env = server.env.as_ref().map(|env| {
            env.iter()
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect::<BTreeMap<_, _>>()
        });
        let output = serde_json::to_string_pretty(&serde_json::json!({
            "name": get_args.name,
            "transport": transport,
            "command": server.command,
            "args": server.args,
            "env": env,
            "startup_timeout_sec": server
                .startup_timeout_sec
                .map(|timeout| timeout.as_secs_f64()),
@@ -389,38 +323,27 @@ fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<(
    }

    println!("{}", get_args.name);
    match &server.transport {
        McpServerTransportConfig::Stdio { command, args, env } => {
            println!("  transport: stdio");
            println!("  command: {command}");
            let args_display = if args.is_empty() {
                "-".to_string()
            } else {
                args.join(" ")
            };
            println!("  args: {args_display}");
            let env_display = match env.as_ref() {
                None => "-".to_string(),
                Some(map) if map.is_empty() => "-".to_string(),
                Some(map) => {
                    let mut pairs: Vec<_> = map.iter().collect();
                    pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                    pairs
                        .into_iter()
                        .map(|(k, v)| format!("{k}={v}"))
                        .collect::<Vec<_>>()
                        .join(", ")
                }
            };
            println!("  env: {env_display}");
    println!("  command: {}", server.command);
    let args = if server.args.is_empty() {
        "-".to_string()
    } else {
        server.args.join(" ")
    };
    println!("  args: {args}");
    let env_display = match server.env.as_ref() {
        None => "-".to_string(),
        Some(map) if map.is_empty() => "-".to_string(),
        Some(map) => {
            let mut pairs: Vec<_> = map.iter().collect();
            pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
            pairs
                .into_iter()
                .map(|(k, v)| format!("{k}={v}"))
                .collect::<Vec<_>>()
                .join(", ")
        }
        McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
            println!("  transport: streamable_http");
            println!("  url: {url}");
            let bearer = bearer_token.as_deref().unwrap_or("-");
            println!("  bearer_token: {bearer}");
        }
    }
    };
    println!("  env: {env_display}");
    if let Some(timeout) = server.startup_timeout_sec {
        println!("  startup_timeout_sec: {}", timeout.as_secs_f64());
    }
@@ -1,19 +1,3 @@
/// This is designed to be called pre-main() (using `#[ctor::ctor]`) to perform
/// various process hardening steps, such as:
/// - disabling core dumps
/// - disabling ptrace attach on Linux and macOS
/// - removing dangerous environment variables such as LD_PRELOAD and DYLD_*
pub fn pre_main_hardening() {
    #[cfg(any(target_os = "linux", target_os = "android"))]
    pre_main_hardening_linux();

    #[cfg(target_os = "macos")]
    pre_main_hardening_macos();

    #[cfg(windows)]
    pre_main_hardening_windows();
}

#[cfg(any(target_os = "linux", target_os = "android"))]
const PRCTL_FAILED_EXIT_CODE: i32 = 5;
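The Linux helper body falls outside this hunk. As illustration only, a minimal sketch of the steps the doc comment names, assuming the `libc` crate; this is not the crate's actual implementation:

// Hypothetical sketch of the Linux hardening steps listed above.
#[cfg(any(target_os = "linux", target_os = "android"))]
fn pre_main_hardening_linux_sketch() {
    // Disable core dumps by zeroing the RLIMIT_CORE soft and hard limits.
    let no_core = libc::rlimit { rlim_cur: 0, rlim_max: 0 };
    // SAFETY: `no_core` is a valid, initialized rlimit struct.
    if unsafe { libc::setrlimit(libc::RLIMIT_CORE, &no_core) } != 0 {
        std::process::exit(PRCTL_FAILED_EXIT_CODE);
    }
    // Block ptrace attach by marking the process non-dumpable.
    // SAFETY: prctl with PR_SET_DUMPABLE takes a plain integer argument.
    if unsafe { libc::prctl(libc::PR_SET_DUMPABLE, 0) } != 0 {
        std::process::exit(PRCTL_FAILED_EXIT_CODE);
    }
    // Strip loader-injection variables before any dynamic loading happens.
    // SAFETY: called pre-main, before any other threads exist.
    unsafe { std::env::remove_var("LD_PRELOAD") };
}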
@@ -2,7 +2,6 @@ use std::path::Path;

use anyhow::Result;
use codex_core::config::load_global_mcp_servers;
use codex_core::config_types::McpServerTransportConfig;
use predicates::str::contains;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
@@ -27,14 +26,9 @@ fn add_and_remove_server_updates_global_config() -> Result<()> {
    let servers = load_global_mcp_servers(codex_home.path())?;
    assert_eq!(servers.len(), 1);
    let docs = servers.get("docs").expect("server should exist");
    match &docs.transport {
        McpServerTransportConfig::Stdio { command, args, env } => {
            assert_eq!(command, "echo");
            assert_eq!(args, &vec!["hello".to_string()]);
            assert!(env.is_none());
        }
        other => panic!("unexpected transport: {other:?}"),
    }
    assert_eq!(docs.command, "echo");
    assert_eq!(docs.args, vec!["hello".to_string()]);
    assert!(docs.env.is_none());

    let mut remove_cmd = codex_command(codex_home.path())?;
    remove_cmd
@@ -82,10 +76,7 @@ fn add_with_env_preserves_key_order_and_values() -> Result<()> {

    let servers = load_global_mcp_servers(codex_home.path())?;
    let envy = servers.get("envy").expect("server should exist");
    let env = match &envy.transport {
        McpServerTransportConfig::Stdio { env: Some(env), .. } => env,
        other => panic!("unexpected transport: {other:?}"),
    };
    let env = envy.env.as_ref().expect("env should be present");

    assert_eq!(env.len(), 2);
    assert_eq!(env.get("FOO"), Some(&"bar".to_string()));
@@ -4,7 +4,6 @@ use anyhow::Result;
use predicates::str::contains;
use pretty_assertions::assert_eq;
use serde_json::Value as JsonValue;
use serde_json::json;
use tempfile::TempDir;

fn codex_command(codex_home: &Path) -> Result<assert_cmd::Command> {
@@ -59,35 +58,38 @@ fn list_and_get_render_expected_output() -> Result<()> {
    assert!(json_output.status.success());
    let stdout = String::from_utf8(json_output.stdout)?;
    let parsed: JsonValue = serde_json::from_str(&stdout)?;
    let array = parsed.as_array().expect("expected array");
    assert_eq!(array.len(), 1);
    let entry = &array[0];
    assert_eq!(entry.get("name"), Some(&JsonValue::String("docs".into())));
    assert_eq!(
        parsed,
        json!([
            {
                "name": "docs",
                "transport": {
                    "type": "stdio",
                    "command": "docs-server",
                    "args": [
                        "--port",
                        "4000"
                    ],
                    "env": {
                        "TOKEN": "secret"
                    }
                },
                "startup_timeout_sec": null,
                "tool_timeout_sec": null
            }
        ]
    )
        entry.get("command"),
        Some(&JsonValue::String("docs-server".into()))
    );

    let args = entry
        .get("args")
        .and_then(|v| v.as_array())
        .expect("args array");
    assert_eq!(
        args,
        &vec![
            JsonValue::String("--port".into()),
            JsonValue::String("4000".into())
        ]
    );

    let env = entry
        .get("env")
        .and_then(|v| v.as_object())
        .expect("env map");
    assert_eq!(env.get("TOKEN"), Some(&JsonValue::String("secret".into())));

    let mut get_cmd = codex_command(codex_home.path())?;
    let get_output = get_cmd.args(["mcp", "get", "docs"]).output()?;
    assert!(get_output.status.success());
    let stdout = String::from_utf8(get_output.stdout)?;
    assert!(stdout.contains("docs"));
    assert!(stdout.contains("transport: stdio"));
    assert!(stdout.contains("command: docs-server"));
    assert!(stdout.contains("args: --port 4000"));
    assert!(stdout.contains("env: TOKEN=secret"));
codex-rs/cloud-tasks-client/Cargo.toml (new file, 30 lines)
@@ -0,0 +1,30 @@
[package]
name = "codex-cloud-tasks-client"
version = { workspace = true }
edition = "2024"

[lib]
name = "codex_cloud_tasks_client"
path = "src/lib.rs"

[lints]
workspace = true

[features]
default = ["online"]
online = ["dep:reqwest", "dep:tokio", "dep:codex-backend-client"]
mock = []

[dependencies]
anyhow = "1"
async-trait = "0.1"
chrono = { version = "0.4", features = ["serde"] }
diffy = "0.4.2"
reqwest = { version = "0.12", features = ["json"], optional = true }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "2.0.12"
tokio = { version = "1", features = ["macros", "rt-multi-thread"], optional = true }
codex-backend-client = { path = "../backend-client", optional = true }
codex-git-apply = { path = "../git-apply" }
dirs = { workspace = true }
codex-rs/cloud-tasks-client/src/api.rs (new file, 188 lines)
@@ -0,0 +1,188 @@
use chrono::DateTime;
use chrono::Utc;
use serde::Deserialize;
use serde::Serialize;

pub type Result<T> = std::result::Result<T, CloudTaskError>;

#[derive(Debug, thiserror::Error)]
pub enum CloudTaskError {
    #[error("unimplemented: {0}")]
    Unimplemented(&'static str),
    #[error("http error: {0}")]
    Http(String),
    #[error("io error: {0}")]
    Io(String),
    #[error("{0}")]
    Msg(String),
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct TaskId(pub String);

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum TaskStatus {
    Pending,
    Ready,
    Applied,
    Error,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct TaskSummary {
    pub id: TaskId,
    pub title: String,
    pub status: TaskStatus,
    pub updated_at: DateTime<Utc>,
    /// Backend environment identifier (when available)
    pub environment_id: Option<String>,
    /// Human-friendly environment label (when available)
    pub environment_label: Option<String>,
    pub summary: DiffSummary,
    /// True when the backend reports this task as a code review.
    #[serde(default)]
    pub is_review: bool,
    /// Number of assistant attempts (best-of-N), when reported by the backend.
    #[serde(default)]
    pub attempt_total: Option<usize>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub enum AttemptStatus {
    Pending,
    InProgress,
    Completed,
    Failed,
    Cancelled,
    #[default]
    Unknown,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TurnAttempt {
    pub turn_id: String,
    pub attempt_placement: Option<i64>,
    pub created_at: Option<DateTime<Utc>>,
    pub status: AttemptStatus,
    pub diff: Option<String>,
    pub messages: Vec<String>,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ApplyStatus {
    Success,
    Partial,
    Error,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ApplyOutcome {
    pub applied: bool,
    pub status: ApplyStatus,
    pub message: String,
    #[serde(default)]
    pub skipped_paths: Vec<String>,
    #[serde(default)]
    pub conflict_paths: Vec<String>,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CreatedTask {
    pub id: TaskId,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum AttachmentKind {
    File,
    Image,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentReference {
    pub sediment_id: String,
    pub asset_pointer: String,
    pub path: Option<String>,
    pub display_name: Option<String>,
    pub kind: AttachmentKind,
    pub size_bytes: Option<u64>,
    pub width: Option<u32>,
    pub height: Option<u32>,
}

#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct FileServiceConfig {
    pub base_url: String,
    pub bearer_token: Option<String>,
    pub chatgpt_account_id: Option<String>,
    pub user_agent: Option<String>,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct DiffSummary {
    pub files_changed: usize,
    pub lines_added: usize,
    pub lines_removed: usize,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TaskText {
    pub prompt: Option<String>,
    pub messages: Vec<String>,
    pub turn_id: Option<String>,
    pub sibling_turn_ids: Vec<String>,
    pub attempt_placement: Option<i64>,
    pub attempt_status: AttemptStatus,
}

impl Default for TaskText {
    fn default() -> Self {
        Self {
            prompt: None,
            messages: Vec::new(),
            turn_id: None,
            sibling_turn_ids: Vec::new(),
            attempt_placement: None,
            attempt_status: AttemptStatus::Unknown,
        }
    }
}

#[async_trait::async_trait]
pub trait CloudBackend: Send + Sync {
    async fn list_tasks(&self, env: Option<&str>) -> Result<Vec<TaskSummary>>;
    async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>>;
    /// Return assistant output messages (no diff) when available.
    async fn get_task_messages(&self, id: TaskId) -> Result<Vec<String>>;
    /// Return the creating prompt and assistant messages (when available).
    async fn get_task_text(&self, id: TaskId) -> Result<TaskText>;
    /// Return any sibling attempts (best-of-N) for the given assistant turn.
    async fn list_sibling_attempts(
        &self,
        task: TaskId,
        turn_id: String,
    ) -> Result<Vec<TurnAttempt>>;
    /// Dry-run apply (preflight) that validates whether the patch would apply cleanly.
    /// Never modifies the working tree. When `diff_override` is supplied, the provided diff is
    /// used instead of re-fetching the task details so callers can apply alternate attempts.
    async fn apply_task_preflight(
        &self,
        id: TaskId,
        diff_override: Option<String>,
    ) -> Result<ApplyOutcome>;
    async fn apply_task(&self, id: TaskId, diff_override: Option<String>) -> Result<ApplyOutcome>;
    async fn create_task(
        &self,
        env_id: &str,
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        attachments: &[AttachmentReference],
    ) -> Result<CreatedTask>;

    fn file_service_config(&self) -> Option<FileServiceConfig> {
        None
    }
}
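The `CloudBackend` trait above is object-safe, so UI code can hold a `&dyn CloudBackend` and swap the HTTP client for the mock in tests. A minimal caller sketch; `print_ready_tasks` is illustrative, not part of the crate:

use codex_cloud_tasks_client::{CloudBackend, Result, TaskStatus};

// Hypothetical helper: list one environment's tasks and print those ready to apply.
async fn print_ready_tasks(backend: &dyn CloudBackend, env: Option<&str>) -> Result<()> {
    for task in backend.list_tasks(env).await? {
        if task.status == TaskStatus::Ready {
            println!(
                "{} {} (+{} -{})",
                task.id.0, task.title, task.summary.lines_added, task.summary.lines_removed
            );
        }
    }
    Ok(())
}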
codex-rs/cloud-tasks-client/src/http.rs (new file, 849 lines)
@@ -0,0 +1,849 @@
use crate::ApplyOutcome;
use crate::ApplyStatus;
use crate::AttemptStatus;
use crate::CloudBackend;
use crate::CloudTaskError;
use crate::DiffSummary;
use crate::Result;
use crate::TaskId;
use crate::TaskStatus;
use crate::TaskSummary;
use crate::TurnAttempt;
use crate::api::TaskText;
use chrono::DateTime;
use chrono::Utc;

use serde_json::Value;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;

use codex_backend_client as backend;
use codex_backend_client::CodeTaskDetailsResponseExt;

#[derive(Clone)]
pub struct HttpClient {
    pub base_url: String,
    backend: backend::Client,
    bearer_token: Option<String>,
    chatgpt_account_id: Option<String>,
    user_agent: Option<String>,
}

impl HttpClient {
    pub fn new(base_url: impl Into<String>) -> anyhow::Result<Self> {
        let base_url = base_url.into();
        let backend = backend::Client::new(base_url.clone())?;
        Ok(Self {
            base_url,
            backend,
            bearer_token: None,
            chatgpt_account_id: None,
            user_agent: None,
        })
    }

    pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
        let token = token.into();
        self.backend = self.backend.clone().with_bearer_token(token.clone());
        self.bearer_token = Some(token);
        self
    }

    pub fn with_user_agent(mut self, ua: impl Into<String>) -> Self {
        let ua = ua.into();
        self.backend = self.backend.clone().with_user_agent(ua.clone());
        self.user_agent = Some(ua);
        self
    }

    pub fn with_chatgpt_account_id(mut self, account_id: impl Into<String>) -> Self {
        let account_id = account_id.into();
        self.backend = self
            .backend
            .clone()
            .with_chatgpt_account_id(account_id.clone());
        self.chatgpt_account_id = Some(account_id);
        self
    }
}
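// Construction follows a consuming-builder pattern: each `with_*` setter
// updates both the wrapped backend client and the copy kept for
// `file_service_config`. An illustrative call site; the URL and token below
// are placeholders, not real values:
//
//     let client = HttpClient::new("https://example.invalid/backend-api")?
//         .with_bearer_token("<access token>")
//         .with_chatgpt_account_id("<account id>")
//         .with_user_agent("codex-cloud-tasks");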
fn is_unified_diff(diff: &str) -> bool {
    let t = diff.trim_start();
    if t.starts_with("diff --git ") {
        return true;
    }
    let has_dash_headers = diff.contains("\n--- ") && diff.contains("\n+++ ");
    let has_hunk = diff.contains("\n@@ ") || diff.starts_with("@@ ");
    has_dash_headers && has_hunk
}

fn tail(s: &str, max: usize) -> String {
    if s.len() <= max {
        s.to_string()
    } else {
        s[s.len() - max..].to_string()
    }
}
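// `is_unified_diff` is a cheap heuristic: a `diff --git` prefix is accepted
// immediately; otherwise both `---`/`+++` headers and an `@@` hunk marker
// must be present. A test sketch pinning both branches (sample patches
// invented for illustration):
#[test]
fn detects_unified_diffs_sketch() {
    assert!(is_unified_diff(
        "diff --git a/f b/f\n--- a/f\n+++ b/f\n@@ -1 +1 @@\n-a\n+b\n"
    ));
    assert!(!is_unified_diff("*** Begin Patch\n*** End Patch"));
}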
#[async_trait::async_trait]
impl CloudBackend for HttpClient {
    async fn list_tasks(&self, env: Option<&str>) -> Result<Vec<TaskSummary>> {
        let resp = self
            .backend
            .list_tasks(Some(20), Some("current"), env)
            .await
            .map_err(|e| CloudTaskError::Http(format!("list_tasks failed: {e}")))?;

        let tasks: Vec<TaskSummary> = resp
            .items
            .into_iter()
            .map(map_task_list_item_to_summary)
            .collect();
        // Debug log for env filtering visibility
        append_error_log(&format!(
            "http.list_tasks: env={} items={}",
            env.unwrap_or("<all>"),
            tasks.len()
        ));
        Ok(tasks)
    }

    async fn get_task_diff(&self, _id: TaskId) -> Result<Option<String>> {
        let id = _id.0;
        let (details, body, ct) = self
            .backend
            .get_task_details_with_body(&id)
            .await
            .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
        if let Some(diff) = details.unified_diff() {
            return Ok(Some(diff));
        }
        // No diff yet (pending or non-diff task).
        // Keep variables bound for potential future logging.
        let _ = (body, ct);
        Ok(None)
    }

    async fn get_task_messages(&self, _id: TaskId) -> Result<Vec<String>> {
        let id = _id.0;
        let (details, body, ct) = self
            .backend
            .get_task_details_with_body(&id)
            .await
            .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
        let mut msgs = details.assistant_text_messages();
        if msgs.is_empty() {
            msgs.extend(extract_assistant_messages_from_body(&body));
        }
        if !msgs.is_empty() {
            return Ok(msgs);
        }
        if let Some(err) = details.assistant_error_message() {
            return Ok(vec![format!("Task failed: {err}")]);
        }
        // No assistant messages found; return a debuggable error with context for logging.
        let url = if self.base_url.contains("/backend-api") {
            format!("{}/wham/tasks/{}", self.base_url, id)
        } else {
            format!("{}/api/codex/tasks/{}", self.base_url, id)
        };
        Err(CloudTaskError::Http(format!(
            "No assistant text messages in response. GET {url}; content-type={ct}; body={body}"
        )))
    }

    async fn get_task_text(&self, _id: TaskId) -> Result<TaskText> {
        let id = _id.0;
        let (details, body, _ct) = self
            .backend
            .get_task_details_with_body(&id)
            .await
            .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
        let prompt = details.user_text_prompt();
        let mut messages = details.assistant_text_messages();
        if messages.is_empty() {
            messages.extend(extract_assistant_messages_from_body(&body));
        }
        let turn_map = details.current_assistant_turn.as_ref();
        let turn_id = turn_map
            .and_then(|m| m.get("id"))
            .and_then(Value::as_str)
            .map(str::to_string);
        let sibling_turn_ids = turn_map
            .and_then(|m| m.get("sibling_turn_ids"))
            .and_then(Value::as_array)
            .map(|arr| {
                arr.iter()
                    .filter_map(Value::as_str)
                    .map(str::to_string)
                    .collect()
            })
            .unwrap_or_default();
        let attempt_placement = turn_map
            .and_then(|m| m.get("attempt_placement"))
            .and_then(Value::as_i64);
        let attempt_status = attempt_status_from_str(
            turn_map
                .and_then(|m| m.get("turn_status"))
                .and_then(Value::as_str),
        );
        Ok(TaskText {
            prompt,
            messages,
            turn_id,
            sibling_turn_ids,
            attempt_placement,
            attempt_status,
        })
    }

    async fn list_sibling_attempts(
        &self,
        task: TaskId,
        turn_id: String,
    ) -> Result<Vec<TurnAttempt>> {
        let resp = self
            .backend
            .list_sibling_turns(&task.0, &turn_id)
            .await
            .map_err(|e| CloudTaskError::Http(format!("list_sibling_turns failed: {e}")))?;

        let mut attempts: Vec<TurnAttempt> = resp
            .sibling_turns
            .iter()
            .filter_map(turn_attempt_from_map)
            .collect();
        attempts.sort_by(compare_attempts);
        Ok(attempts)
    }
    async fn apply_task(&self, _id: TaskId, diff_override: Option<String>) -> Result<ApplyOutcome> {
        let id = _id.0;
        self.apply_with_diff(id, diff_override, false).await
    }

    async fn apply_task_preflight(
        &self,
        _id: TaskId,
        diff_override: Option<String>,
    ) -> Result<ApplyOutcome> {
        let id = _id.0;
        self.apply_with_diff(id, diff_override, true).await
    }

    async fn create_task(
        &self,
        env_id: &str,
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        attachments: &[crate::AttachmentReference],
    ) -> Result<crate::CreatedTask> {
        // Build request payload patterned after VSCode/newtask.rs
        let mut input_items: Vec<serde_json::Value> = Vec::new();
        input_items.push(serde_json::json!({
            "type": "message",
            "role": "user",
            "content": [{ "content_type": "text", "text": prompt }]
        }));

        for attachment in attachments {
            match attachment.kind {
                crate::AttachmentKind::Image => {
                    if let (Some(width), Some(height), Some(size_bytes)) =
                        (attachment.width, attachment.height, attachment.size_bytes)
                    {
                        input_items.push(serde_json::json!({
                            "type": "image_asset_pointer",
                            "asset_pointer": attachment.asset_pointer,
                            "width": width,
                            "height": height,
                            "size_bytes": size_bytes,
                        }));
                        continue;
                    }
                    // Fallback to container when metadata is missing
                }
                crate::AttachmentKind::File => {}
            }

            let default_path = attachment
                .path
                .clone()
                .or_else(|| attachment.display_name.clone())
                .unwrap_or_else(|| attachment.sediment_id.clone());

            let file_entry = serde_json::json!({
                "type": "file",
                "sediment_id": attachment.sediment_id,
                "path": default_path.clone(),
            });

            let mut container = serde_json::json!({
                "type": "container_file",
                "file_ids": [file_entry],
                "body": "",
            });
            container["path"] = serde_json::Value::String(default_path);
            input_items.push(container);
        }

        if let Ok(diff) = std::env::var("CODEX_STARTING_DIFF")
            && !diff.is_empty()
        {
            input_items.push(serde_json::json!({
                "type": "pre_apply_patch",
                "output_diff": { "diff": diff }
            }));
        }

        let request_body = serde_json::json!({
            "new_task": {
                "environment_id": env_id,
                "branch": git_ref,
                "run_environment_in_qa_mode": qa_mode,
            },
            "input_items": input_items,
        });

        // Use the underlying backend client to post with proper headers
        match self.backend.create_task(request_body).await {
            Ok(id) => {
                append_error_log(&format!(
                    "new_task: created id={id} env={} prompt_chars={} attachments={}",
                    env_id,
                    prompt.chars().count(),
                    attachments.len()
                ));
                Ok(crate::CreatedTask { id: TaskId(id) })
            }
            Err(e) => {
                append_error_log(&format!(
                    "new_task: create failed env={} prompt_chars={} attachments={}: {}",
                    env_id,
                    prompt.chars().count(),
                    attachments.len(),
                    e
                ));
                Err(CloudTaskError::Http(format!("create_task failed: {e}")))
            }
        }
    }

    fn file_service_config(&self) -> Option<crate::FileServiceConfig> {
        Some(crate::FileServiceConfig {
            base_url: self.base_url.clone(),
            bearer_token: self.bearer_token.clone(),
            chatgpt_account_id: self.chatgpt_account_id.clone(),
            user_agent: self.user_agent.clone(),
        })
    }
}
/// Best-effort extraction of assistant text messages from a raw `get_task_details` body.
/// Falls back to worklog messages when structured turns are not present.
impl HttpClient {
    async fn apply_with_diff(
        &self,
        id: String,
        diff_override: Option<String>,
        preflight: bool,
    ) -> Result<ApplyOutcome> {
        let diff = match diff_override {
            Some(diff) => diff,
            None => {
                let details =
                    self.backend.get_task_details(&id).await.map_err(|e| {
                        CloudTaskError::Http(format!("get_task_details failed: {e}"))
                    })?;
                details.unified_diff().ok_or_else(|| {
                    CloudTaskError::Msg(format!("No diff available for task {id}"))
                })?
            }
        };

        if !is_unified_diff(&diff) {
            let summary = summarize_patch_for_logging(&diff);
            let mode = if preflight { "preflight" } else { "apply" };
            append_error_log(&format!(
                "apply_error: id={id} mode={mode} format=non-unified; {summary}"
            ));
            return Ok(ApplyOutcome {
                applied: false,
                status: ApplyStatus::Error,
                message: "Expected unified git diff; backend returned an incompatible format."
                    .to_string(),
                skipped_paths: Vec::new(),
                conflict_paths: Vec::new(),
            });
        }

        let req = codex_git_apply::ApplyGitRequest {
            cwd: std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()),
            diff: diff.clone(),
            revert: false,
            preflight,
        };
        let r = codex_git_apply::apply_git_patch(&req)
            .map_err(|e| CloudTaskError::Io(format!("git apply failed to run: {e}")))?;

        let status = if r.exit_code == 0 {
            ApplyStatus::Success
        } else if !r.applied_paths.is_empty() || !r.conflicted_paths.is_empty() {
            ApplyStatus::Partial
        } else {
            ApplyStatus::Error
        };
        let applied = matches!(status, ApplyStatus::Success) && !preflight;

        let message = if preflight {
            match status {
                ApplyStatus::Success => format!("Preflight passed for task {id} (applies cleanly)"),
                ApplyStatus::Partial => format!(
                    "Preflight: patch does not fully apply for task {id} (applied={}, skipped={}, conflicts={})",
                    r.applied_paths.len(),
                    r.skipped_paths.len(),
                    r.conflicted_paths.len()
                ),
                ApplyStatus::Error => format!(
                    "Preflight failed for task {id} (applied={}, skipped={}, conflicts={})",
                    r.applied_paths.len(),
                    r.skipped_paths.len(),
                    r.conflicted_paths.len()
                ),
            }
        } else {
            match status {
                ApplyStatus::Success => format!(
                    "Applied task {id} locally ({} files)",
                    r.applied_paths.len()
                ),
                ApplyStatus::Partial => format!(
                    "Apply partially succeeded for task {id} (applied={}, skipped={}, conflicts={})",
                    r.applied_paths.len(),
                    r.skipped_paths.len(),
                    r.conflicted_paths.len()
                ),
                ApplyStatus::Error => format!(
                    "Apply failed for task {id} (applied={}, skipped={}, conflicts={})",
                    r.applied_paths.len(),
                    r.skipped_paths.len(),
                    r.conflicted_paths.len()
                ),
            }
        };

        if matches!(status, ApplyStatus::Partial | ApplyStatus::Error)
            || (preflight && !matches!(status, ApplyStatus::Success))
        {
            let mut log = String::new();
            let summary = summarize_patch_for_logging(&diff);
            let mode = if preflight { "preflight" } else { "apply" };
            use std::fmt::Write as _;
            let _ = writeln!(
                &mut log,
                "apply_result: mode={} id={} status={:?} applied={} skipped={} conflicts={} cmd={}",
                mode,
                id,
                status,
                r.applied_paths.len(),
                r.skipped_paths.len(),
                r.conflicted_paths.len(),
                r.cmd_for_log
            );
            let _ = writeln!(
                &mut log,
                "stdout_tail=
{}
stderr_tail=
{}",
                tail(&r.stdout, 2000),
                tail(&r.stderr, 2000)
            );
            let _ = writeln!(&mut log, "{summary}");
            let _ = writeln!(
                &mut log,
                "----- PATCH BEGIN -----
{diff}
----- PATCH END -----"
            );
            append_error_log(&log);
        }

        Ok(ApplyOutcome {
            applied,
            status,
            message,
            skipped_paths: r.skipped_paths,
            conflict_paths: r.conflicted_paths,
        })
    }
}
fn extract_assistant_messages_from_body(body: &str) -> Vec<String> {
    let mut msgs = Vec::new();
    if let Ok(full) = serde_json::from_str::<serde_json::Value>(body)
        && let Some(arr) = full
            .get("current_assistant_turn")
            .and_then(|v| v.get("worklog"))
            .and_then(|v| v.get("messages"))
            .and_then(|v| v.as_array())
    {
        for m in arr {
            let is_assistant = m
                .get("author")
                .and_then(|a| a.get("role"))
                .and_then(|r| r.as_str())
                == Some("assistant");
            if !is_assistant {
                continue;
            }
            if let Some(parts) = m
                .get("content")
                .and_then(|c| c.get("parts"))
                .and_then(|p| p.as_array())
            {
                for p in parts {
                    if let Some(s) = p.as_str() {
                        if !s.is_empty() {
                            msgs.push(s.to_string());
                        }
                        continue;
                    }
                    if let Some(obj) = p.as_object()
                        && obj.get("content_type").and_then(|t| t.as_str()) == Some("text")
                        && let Some(txt) = obj.get("text").and_then(|t| t.as_str())
                    {
                        msgs.push(txt.to_string());
                    }
                }
            }
        }
    }
    msgs
}
fn turn_attempt_from_map(turn: &HashMap<String, Value>) -> Option<TurnAttempt> {
    let turn_id = turn.get("id").and_then(Value::as_str)?.to_string();
    let attempt_placement = turn.get("attempt_placement").and_then(Value::as_i64);
    let created_at = parse_timestamp_value(turn.get("created_at"));
    let status = attempt_status_from_str(turn.get("turn_status").and_then(Value::as_str));
    let diff = extract_diff_from_turn(turn);
    let messages = extract_assistant_messages_from_turn(turn);
    Some(TurnAttempt {
        turn_id,
        attempt_placement,
        created_at,
        status,
        diff,
        messages,
    })
}

fn compare_attempts(a: &TurnAttempt, b: &TurnAttempt) -> Ordering {
    match (a.attempt_placement, b.attempt_placement) {
        (Some(lhs), Some(rhs)) => lhs.cmp(&rhs),
        (Some(_), None) => Ordering::Less,
        (None, Some(_)) => Ordering::Greater,
        (None, None) => match (a.created_at, b.created_at) {
            (Some(lhs), Some(rhs)) => lhs.cmp(&rhs),
            (Some(_), None) => Ordering::Less,
            (None, Some(_)) => Ordering::Greater,
            (None, None) => a.turn_id.cmp(&b.turn_id),
        },
    }
}
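// `compare_attempts` orders by backend-reported placement first, then by
// creation time, then by turn id; at each level a present value sorts before
// a missing one. A test sketch pinning the first rule (field values invented):
#[test]
fn placed_attempts_sort_before_unplaced_ones_sketch() {
    let placed = TurnAttempt {
        turn_id: "b".to_string(),
        attempt_placement: Some(0),
        created_at: None,
        status: AttemptStatus::Completed,
        diff: None,
        messages: Vec::new(),
    };
    let unplaced = TurnAttempt {
        turn_id: "a".to_string(),
        attempt_placement: None,
        ..placed.clone()
    };
    assert_eq!(compare_attempts(&placed, &unplaced), Ordering::Less);
}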
fn extract_diff_from_turn(turn: &HashMap<String, Value>) -> Option<String> {
    let items = turn.get("output_items").and_then(Value::as_array)?;
    for item in items {
        match item.get("type").and_then(Value::as_str) {
            Some("output_diff") => {
                if let Some(diff) = item.get("diff").and_then(Value::as_str)
                    && !diff.is_empty()
                {
                    return Some(diff.to_string());
                }
            }
            Some("pr") => {
                if let Some(diff) = item
                    .get("output_diff")
                    .and_then(Value::as_object)
                    .and_then(|od| od.get("diff"))
                    .and_then(Value::as_str)
                    && !diff.is_empty()
                {
                    return Some(diff.to_string());
                }
            }
            _ => {}
        }
    }
    None
}

fn extract_assistant_messages_from_turn(turn: &HashMap<String, Value>) -> Vec<String> {
    let mut msgs = Vec::new();
    if let Some(items) = turn.get("output_items").and_then(Value::as_array) {
        for item in items {
            if item.get("type").and_then(Value::as_str) != Some("message") {
                continue;
            }
            if let Some(content) = item.get("content").and_then(Value::as_array) {
                for part in content {
                    if part.get("content_type").and_then(Value::as_str) == Some("text")
                        && let Some(txt) = part.get("text").and_then(Value::as_str)
                        && !txt.is_empty()
                    {
                        msgs.push(txt.to_string());
                    }
                }
            }
        }
    }
    if msgs.is_empty()
        && let Some(err) = turn.get("error").and_then(Value::as_object)
    {
        let message = err.get("message").and_then(Value::as_str).unwrap_or("");
        let code = err.get("code").and_then(Value::as_str).unwrap_or("");
        if !message.is_empty() || !code.is_empty() {
            let text = if !code.is_empty() && !message.is_empty() {
                format!("{code}: {message}")
            } else if !code.is_empty() {
                code.to_string()
            } else {
                message.to_string()
            };
            msgs.push(format!("Task failed: {text}"));
        }
    }
    msgs
}
fn parse_timestamp_value(v: Option<&Value>) -> Option<DateTime<Utc>> {
    let raw = v?.as_f64()?;
    let secs = raw as i64;
    let nanos = ((raw - secs as f64) * 1_000_000_000.0) as u32;
    Some(DateTime::<Utc>::from(
        std::time::UNIX_EPOCH + std::time::Duration::new(secs.max(0) as u64, nanos),
    ))
}

fn attempt_status_from_str(s: Option<&str>) -> AttemptStatus {
    match s.unwrap_or("") {
        "pending" => AttemptStatus::Pending,
        "in_progress" => AttemptStatus::InProgress,
        "completed" => AttemptStatus::Completed,
        "failed" => AttemptStatus::Failed,
        "cancelled" => AttemptStatus::Cancelled,
        _ => AttemptStatus::Unknown,
    }
}
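// The timestamp helper splits fractional epoch seconds into whole seconds
// plus nanoseconds, clamping negative values to the epoch. A worked example
// (the input value is invented):
#[test]
fn parses_fractional_epoch_seconds_sketch() {
    // 1_700_000_000.25 s since the epoch -> 1_700_000_000 s + 250_000_000 ns.
    let v = serde_json::json!(1_700_000_000.25);
    let ts = parse_timestamp_value(Some(&v)).expect("numeric timestamps parse");
    assert_eq!(ts.timestamp(), 1_700_000_000);
    assert_eq!(ts.timestamp_subsec_nanos(), 250_000_000);
}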
fn map_task_list_item_to_summary(src: backend::TaskListItem) -> TaskSummary {
    fn env_label_from_status_display(v: Option<&HashMap<String, Value>>) -> Option<String> {
        let obj = v?;
        let raw = obj.get("environment_label")?;
        if let Some(s) = raw.as_str() {
            if s.trim().is_empty() {
                return None;
            }
            return Some(s.to_string());
        }
        if let Some(o) = raw.as_object() {
            // Best-effort support for rich shapes: { text: "..." } or { plain_text: "..." }
            if let Some(s) = o.get("text").and_then(Value::as_str)
                && !s.trim().is_empty()
            {
                return Some(s.to_string());
            }
            if let Some(s) = o.get("plain_text").and_then(Value::as_str)
                && !s.trim().is_empty()
            {
                return Some(s.to_string());
            }
            // Fallback: compact JSON for debugging
            if let Ok(s) = serde_json::to_string(o)
                && !s.is_empty()
            {
                return Some(s);
            }
        }
        None
    }

    // Best-effort parse of diff_stats (when present in latest_turn_status_display)
    fn diff_summary_from_status_display(v: Option<&HashMap<String, Value>>) -> DiffSummary {
        let mut out = DiffSummary::default();
        let Some(map) = v else { return out };
        let latest = map
            .get("latest_turn_status_display")
            .and_then(Value::as_object);
        let Some(latest) = latest else { return out };
        if let Some(ds) = latest.get("diff_stats").and_then(Value::as_object) {
            if let Some(n) = ds.get("files_modified").and_then(Value::as_i64) {
                out.files_changed = n.max(0) as usize;
            }
            if let Some(n) = ds.get("lines_added").and_then(Value::as_i64) {
                out.lines_added = n.max(0) as usize;
            }
            if let Some(n) = ds.get("lines_removed").and_then(Value::as_i64) {
                out.lines_removed = n.max(0) as usize;
            }
        }
        out
    }

    fn attempt_total_from_status_display(v: Option<&HashMap<String, Value>>) -> Option<usize> {
        let map = v?;
        let latest = map
            .get("latest_turn_status_display")
            .and_then(Value::as_object)?;
        let siblings = latest.get("sibling_turn_ids").and_then(Value::as_array)?;
        Some(siblings.len().saturating_add(1))
    }

    TaskSummary {
        id: TaskId(src.id),
        title: src.title,
        status: map_status(src.task_status_display.as_ref()),
        updated_at: parse_updated_at(src.updated_at.as_ref()),
        environment_id: None,
        environment_label: env_label_from_status_display(src.task_status_display.as_ref()),
        summary: diff_summary_from_status_display(src.task_status_display.as_ref()),
        is_review: src
            .pull_requests
            .as_ref()
            .is_some_and(|prs| !prs.is_empty()),
        attempt_total: attempt_total_from_status_display(src.task_status_display.as_ref()),
    }
}

fn map_status(v: Option<&HashMap<String, Value>>) -> TaskStatus {
    if let Some(val) = v {
        // Prefer nested latest_turn_status_display.turn_status when present.
        if let Some(turn) = val
            .get("latest_turn_status_display")
            .and_then(Value::as_object)
            && let Some(s) = turn.get("turn_status").and_then(Value::as_str)
        {
            return match s {
                "failed" => TaskStatus::Error,
                "completed" => TaskStatus::Ready,
                "in_progress" => TaskStatus::Pending,
                "pending" => TaskStatus::Pending,
                "cancelled" => TaskStatus::Error,
                _ => TaskStatus::Pending,
            };
        }
        // Legacy or alternative flat state.
        if let Some(state) = val.get("state").and_then(Value::as_str) {
            return match state {
                "pending" => TaskStatus::Pending,
                "ready" => TaskStatus::Ready,
                "applied" => TaskStatus::Applied,
                "error" => TaskStatus::Error,
                _ => TaskStatus::Pending,
            };
        }
    }
    TaskStatus::Pending
}
fn parse_updated_at(ts: Option<&f64>) -> DateTime<Utc> {
    if let Some(v) = ts {
        // Value is seconds since epoch with fractional part.
        let secs = *v as i64;
        let nanos = ((*v - secs as f64) * 1_000_000_000.0) as u32;
        return DateTime::<Utc>::from(
            std::time::UNIX_EPOCH + std::time::Duration::new(secs.max(0) as u64, nanos),
        );
    }
    Utc::now()
}

/// Return a compact one-line classification of the patch plus a short head snippet
/// to aid debugging when apply fails.
fn summarize_patch_for_logging(patch: &str) -> String {
    let trimmed = patch.trim_start();
    let kind = if trimmed.starts_with("*** Begin Patch") {
        "codex-patch"
    } else if trimmed.starts_with("diff --git ") || trimmed.contains("\n*** End Patch\n") {
        // In some cases providers nest a codex patch inside another format; detect both.
        "git-diff"
    } else if trimmed.starts_with("@@ ") || trimmed.contains("\n@@ ") {
        "unified-diff"
    } else {
        "unknown"
    };
    let lines = patch.lines().count();
    let chars = patch.len();
    let cwd = std::env::current_dir()
        .ok()
        .map(|p| p.display().to_string())
        .unwrap_or_else(|| "<unknown>".to_string());
    // Grab the first up-to-20 non-empty lines for context.
    let head: String = patch.lines().take(20).collect::<Vec<&str>>().join("\n");
    // Make sure we don't explode logs with huge content.
    let head_trunc = if head.len() > 800 {
        format!("{}…", &head[..800])
    } else {
        head
    };
    format!(
        "patch_summary: kind={kind} lines={lines} chars={chars} cwd={cwd} ; head=\n{head_trunc}"
    )
}

fn append_error_log(message: &str) {
    let timestamp = Utc::now().to_rfc3339();

    if let Some(path) = log_file_path()
        && write_log_line(&path, &timestamp, message)
    {
        return;
    }

    let fallback = Path::new("error.log");
    let _ = write_log_line(fallback, &timestamp, message);
}

fn log_file_path() -> Option<PathBuf> {
    let mut codex_home = codex_home_dir()?;
    codex_home.push("log");
    std::fs::create_dir_all(&codex_home).ok()?;
    Some(codex_home.join("codex-cloud-tasks.log"))
}

fn codex_home_dir() -> Option<PathBuf> {
    if let Ok(val) = std::env::var("CODEX_HOME")
        && !val.is_empty()
    {
        let path = PathBuf::from(val);
        return path.canonicalize().ok().or(Some(path));
    }
    dirs::home_dir().map(|mut home| {
        home.push(".codex");
        home
    })
}

fn write_log_line(path: &Path, timestamp: &str, message: &str) -> bool {
    let mut opts = std::fs::OpenOptions::new();
    opts.create(true).append(true);
    #[cfg(unix)]
    {
        use std::os::unix::fs::OpenOptionsExt;
        opts.mode(0o600);
    }

    match opts.open(path) {
        Ok(mut file) => {
            use std::io::Write as _;
            writeln!(file, "[{timestamp}] {message}").is_ok()
        }
        Err(_) => false,
    }
}
codex-rs/cloud-tasks-client/src/lib.rs (new file, 32 lines)
@@ -0,0 +1,32 @@
mod api;

pub use api::ApplyOutcome;
pub use api::ApplyStatus;
pub use api::AttachmentKind;
pub use api::AttachmentReference;
pub use api::AttemptStatus;
pub use api::CloudBackend;
pub use api::CloudTaskError;
pub use api::CreatedTask;
pub use api::DiffSummary;
pub use api::FileServiceConfig;
pub use api::Result;
pub use api::TaskId;
pub use api::TaskStatus;
pub use api::TaskSummary;
pub use api::TaskText;
pub use api::TurnAttempt;

#[cfg(feature = "mock")]
mod mock;

#[cfg(feature = "online")]
mod http;

#[cfg(feature = "mock")]
pub use mock::MockClient;

#[cfg(feature = "online")]
pub use http::HttpClient;

// Reusable apply engine now lives in the shared crate `codex-git-apply`.
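Since `MockClient` and `HttpClient` both implement `CloudBackend`, a downstream binary can pick a backend at runtime when both features are enabled. A sketch; the `offline` flag and selector function are hypothetical:

use codex_cloud_tasks_client::{CloudBackend, HttpClient, MockClient};

// Hypothetical selector: fall back to the canned mock backend when offline.
fn make_backend(offline: bool, base_url: &str) -> anyhow::Result<Box<dyn CloudBackend>> {
    if offline {
        Ok(Box::new(MockClient))
    } else {
        Ok(Box::new(HttpClient::new(base_url)?))
    }
}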
codex-rs/cloud-tasks-client/src/mock.rs (new file, 180 lines)
@@ -0,0 +1,180 @@
use crate::ApplyOutcome;
use crate::AttemptStatus;
use crate::CloudBackend;
use crate::DiffSummary;
use crate::Result;
use crate::TaskId;
use crate::TaskStatus;
use crate::TaskSummary;
use crate::TurnAttempt;
use crate::api::TaskText;
use chrono::Utc;

#[derive(Clone, Default)]
pub struct MockClient;

#[async_trait::async_trait]
impl CloudBackend for MockClient {
    async fn list_tasks(&self, _env: Option<&str>) -> Result<Vec<TaskSummary>> {
        // Slightly vary content by env to aid tests that rely on the mock
        let rows = match _env {
            Some("env-A") => vec![("T-2000", "A: First", TaskStatus::Ready)],
            Some("env-B") => vec![
                ("T-3000", "B: One", TaskStatus::Ready),
                ("T-3001", "B: Two", TaskStatus::Pending),
            ],
            _ => vec![
                ("T-1000", "Update README formatting", TaskStatus::Ready),
                ("T-1001", "Fix clippy warnings in core", TaskStatus::Pending),
                ("T-1002", "Add contributing guide", TaskStatus::Ready),
            ],
        };
        let environment_id = _env.map(str::to_string);
        let environment_label = match _env {
            Some("env-A") => Some("Env A".to_string()),
            Some("env-B") => Some("Env B".to_string()),
            Some(other) => Some(other.to_string()),
            None => Some("Global".to_string()),
        };
        let mut out = Vec::new();
        for (id_str, title, status) in rows {
            let id = TaskId(id_str.to_string());
            let diff = mock_diff_for(&id);
            let (a, d) = count_from_unified(&diff);
            out.push(TaskSummary {
                id,
                title: title.to_string(),
                status,
                updated_at: Utc::now(),
                environment_id: environment_id.clone(),
                environment_label: environment_label.clone(),
                summary: DiffSummary {
                    files_changed: 1,
                    lines_added: a,
                    lines_removed: d,
                },
                is_review: false,
                attempt_total: Some(if id_str == "T-1000" { 2 } else { 1 }),
            });
        }
        Ok(out)
    }

    async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>> {
        Ok(Some(mock_diff_for(&id)))
    }

    async fn get_task_messages(&self, _id: TaskId) -> Result<Vec<String>> {
        Ok(vec![
            "Mock assistant output: this task contains no diff.".to_string(),
        ])
    }

    async fn get_task_text(&self, _id: TaskId) -> Result<TaskText> {
        Ok(TaskText {
            prompt: Some("Why is there no diff?".to_string()),
            messages: vec!["Mock assistant output: this task contains no diff.".to_string()],
            turn_id: Some("mock-turn".to_string()),
            sibling_turn_ids: Vec::new(),
            attempt_placement: Some(0),
            attempt_status: AttemptStatus::Completed,
        })
    }

    async fn apply_task(&self, id: TaskId, _diff_override: Option<String>) -> Result<ApplyOutcome> {
        Ok(ApplyOutcome {
            applied: true,
            status: crate::ApplyStatus::Success,
            message: format!("Applied task {} locally (mock)", id.0),
            skipped_paths: Vec::new(),
            conflict_paths: Vec::new(),
        })
    }

    async fn apply_task_preflight(
        &self,
        id: TaskId,
        _diff_override: Option<String>,
    ) -> Result<ApplyOutcome> {
        Ok(ApplyOutcome {
            applied: false,
            status: crate::ApplyStatus::Success,
            message: format!("Preflight passed for task {} (mock)", id.0),
            skipped_paths: Vec::new(),
            conflict_paths: Vec::new(),
        })
    }

    async fn list_sibling_attempts(
        &self,
        task: TaskId,
        _turn_id: String,
    ) -> Result<Vec<TurnAttempt>> {
        if task.0 == "T-1000" {
            return Ok(vec![TurnAttempt {
                turn_id: "T-1000-attempt-2".to_string(),
                attempt_placement: Some(1),
                created_at: Some(Utc::now()),
                status: AttemptStatus::Completed,
                diff: Some(mock_diff_for(&task)),
                messages: vec!["Mock alternate attempt".to_string()],
            }]);
        }
        Ok(Vec::new())
    }

    async fn create_task(
        &self,
        env_id: &str,
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        attachments: &[crate::AttachmentReference],
    ) -> Result<crate::CreatedTask> {
        let _ = (env_id, prompt, git_ref, qa_mode, attachments);
        let id = format!("task_local_{}", chrono::Utc::now().timestamp_millis());
        Ok(crate::CreatedTask { id: TaskId(id) })
    }
}
fn mock_diff_for(id: &TaskId) -> String {
    match id.0.as_str() {
        "T-1000" => {
            "diff --git a/README.md b/README.md\nindex 000000..111111 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,2 +1,3 @@\n Intro\n-Hello\n+Hello, world!\n+Task: T-1000\n".to_string()
        }
        "T-1001" => {
            "diff --git a/core/src/lib.rs b/core/src/lib.rs\nindex 000000..111111 100644\n--- a/core/src/lib.rs\n+++ b/core/src/lib.rs\n@@ -1,2 +1,1 @@\n-use foo;\n use bar;\n".to_string()
        }
        _ => {
            "diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md\nindex 000000..111111 100644\n--- /dev/null\n+++ b/CONTRIBUTING.md\n@@ -0,0 +1,3 @@\n+## Contributing\n+Please open PRs.\n+Thanks!\n".to_string()
        }
    }
}

fn count_from_unified(diff: &str) -> (usize, usize) {
    if let Ok(patch) = diffy::Patch::from_str(diff) {
        patch
            .hunks()
            .iter()
            .flat_map(diffy::Hunk::lines)
            .fold((0, 0), |(a, d), l| match l {
                diffy::Line::Insert(_) => (a + 1, d),
                diffy::Line::Delete(_) => (a, d + 1),
                _ => (a, d),
            })
    } else {
        let mut a = 0;
        let mut d = 0;
        for l in diff.lines() {
            if l.starts_with("+++") || l.starts_with("---") || l.starts_with("@@") {
                continue;
            }
            match l.as_bytes().first() {
                Some(b'+') => a += 1,
                Some(b'-') => d += 1,
                _ => {}
            }
        }
        (a, d)
    }
}
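`count_from_unified` prefers `diffy`'s parser and falls back to counting `+`/`-` prefixed lines by hand; both paths agree on the canned diffs. A test sketch against the T-1000 mock diff above:

#[test]
fn t1000_mock_diff_counts_two_insertions_one_deletion() {
    let diff = mock_diff_for(&TaskId("T-1000".to_string()));
    assert_eq!(count_from_unified(&diff), (2, 1));
}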
codex-rs/cloud-tasks/Cargo.toml (new file, 54 lines)
@@ -0,0 +1,54 @@
[package]
name = "codex-cloud-tasks"
version = { workspace = true }
edition = "2024"

[lib]
name = "codex_cloud_tasks"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
anyhow = "1"
clap = { version = "4", features = ["derive"] }
codex-common = { path = "../common", features = ["cli"] }
tokio = { version = "1", features = ["fs", "macros", "rt-multi-thread"] }
tracing = { version = "0.1.41", features = ["log"] }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
codex-cloud-tasks-client = { path = "../cloud-tasks-client", features = ["mock", "online"] }
ratatui = { version = "0.29.0" }
crossterm = { version = "0.28.1", features = ["event-stream"] }
tokio-stream = "0.1.17"
chrono = { version = "0.4", features = ["serde"] }
codex-login = { path = "../login" }
codex-core = { path = "../core" }
codex-backend-client = { path = "../backend-client" }
throbber-widgets-tui = "0.8.0"
base64 = "0.22"
serde_json = "1"
reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1", features = ["derive"] }
unicode-width = "0.1"
codex-tui = { path = "../tui" }
codex-file-search = { path = "../file-search" }
mime_guess = "2"
url = "2"
image = { workspace = true }

[dev-dependencies]
async-trait = "0.1"
tempfile = "3"

[[bin]]
name = "conncheck"
path = "src/bin/conncheck.rs"

[[bin]]
name = "newtask"
path = "src/bin/newtask.rs"

[[bin]]
name = "envcheck"
path = "src/bin/envcheck.rs"
codex-rs/cloud-tasks/src/app.rs (new file, 474 lines)
@@ -0,0 +1,474 @@
use std::time::Duration;

// Environment filter data models for the TUI
#[derive(Clone, Debug, Default)]
pub struct EnvironmentRow {
    pub id: String,
    pub label: Option<String>,
    pub is_pinned: bool,
    pub repo_hints: Option<String>, // e.g., "openai/codex"
    pub default_branch: Option<String>,
}

#[derive(Clone, Debug, Default)]
pub struct EnvModalState {
    pub query: String,
    pub selected: usize,
}

#[derive(Clone, Debug, Copy, PartialEq, Eq)]
pub enum ApplyResultLevel {
    Success,
    Partial,
    Error,
}

#[derive(Clone, Debug)]
pub struct ApplyModalState {
    pub task_id: TaskId,
    pub title: String,
    pub result_message: Option<String>,
    pub result_level: Option<ApplyResultLevel>,
    pub skipped_paths: Vec<String>,
    pub conflict_paths: Vec<String>,
    pub diff_override: Option<String>,
}

use crate::scrollable_diff::ScrollableDiff;
use codex_cloud_tasks_client::CloudBackend;
use codex_cloud_tasks_client::TaskId;
use codex_cloud_tasks_client::TaskSummary;
use throbber_widgets_tui::ThrobberState;

#[derive(Default)]
pub struct App {
    pub tasks: Vec<TaskSummary>,
    pub selected: usize,
    pub status: String,
    pub diff_overlay: Option<DiffOverlay>,
    pub throbber: ThrobberState,
    pub refresh_inflight: bool,
    pub details_inflight: bool,
    // Environment filter state
    pub env_filter: Option<String>,
    pub env_modal: Option<EnvModalState>,
    pub apply_modal: Option<ApplyModalState>,
    pub environments: Vec<EnvironmentRow>,
    pub env_last_loaded: Option<std::time::Instant>,
    pub env_loading: bool,
    pub env_error: Option<String>,
    // New Task page
    pub new_task: Option<crate::new_task::NewTaskPage>,
    // Apply preflight spinner state
    pub apply_preflight_inflight: bool,
    // Apply action spinner state
    pub apply_inflight: bool,
    // Background enrichment coordination
    pub list_generation: u64,
    pub in_flight: std::collections::HashSet<String>,
    // Background enrichment caches were planned; currently unused.
}

impl App {
    pub fn new() -> Self {
        Self {
            tasks: Vec::new(),
            selected: 0,
            status: "Press r to refresh".to_string(),
            diff_overlay: None,
            throbber: ThrobberState::default(),
            refresh_inflight: false,
            details_inflight: false,
            env_filter: None,
            env_modal: None,
            apply_modal: None,
            environments: Vec::new(),
            env_last_loaded: None,
            env_loading: false,
            env_error: None,
            new_task: None,
            apply_preflight_inflight: false,
            apply_inflight: false,
            list_generation: 0,
            in_flight: std::collections::HashSet::new(),
        }
    }

    pub fn next(&mut self) {
        if self.tasks.is_empty() {
            return;
        }
        self.selected = (self.selected + 1).min(self.tasks.len().saturating_sub(1));
    }

    pub fn prev(&mut self) {
        if self.tasks.is_empty() {
            return;
        }
        if self.selected > 0 {
            self.selected -= 1;
        }
    }
}
pub async fn load_tasks(
    backend: &dyn CloudBackend,
    env: Option<&str>,
) -> anyhow::Result<Vec<TaskSummary>> {
    // In later milestones, add a small debounce, spinner, and error display.
    let tasks = tokio::time::timeout(Duration::from_secs(5), backend.list_tasks(env)).await??;
    // Hide review-only tasks from the main list.
    let filtered: Vec<TaskSummary> = tasks.into_iter().filter(|t| !t.is_review).collect();
    Ok(filtered)
}
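// An illustrative caller for `load_tasks`, assuming a tokio test runtime and
// the mock backend exposed by the client crate (enabled in Cargo.toml above):
#[tokio::test]
async fn load_tasks_hides_review_only_entries_sketch() {
    let backend = codex_cloud_tasks_client::MockClient;
    let tasks = load_tasks(&backend, Some("env-A"))
        .await
        .expect("mock list succeeds");
    assert!(tasks.iter().all(|t| !t.is_review));
}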
pub struct DiffOverlay {
    pub title: String,
    pub task_id: TaskId,
    pub sd: ScrollableDiff,
    pub base_can_apply: bool,
    pub diff_lines: Vec<String>,
    pub text_lines: Vec<String>,
    pub prompt: Option<String>,
    pub attempts: Vec<AttemptView>,
    pub selected_attempt: usize,
    pub current_view: DetailView,
    pub base_turn_id: Option<String>,
    pub sibling_turn_ids: Vec<String>,
    pub attempt_total_hint: Option<usize>,
}

#[derive(Clone, Debug, Default)]
pub struct AttemptView {
    pub turn_id: Option<String>,
    pub status: codex_cloud_tasks_client::AttemptStatus,
    pub attempt_placement: Option<i64>,
    pub diff_lines: Vec<String>,
    pub text_lines: Vec<String>,
    pub prompt: Option<String>,
    pub diff_raw: Option<String>,
}

impl AttemptView {
    pub fn has_diff(&self) -> bool {
        !self.diff_lines.is_empty()
    }

    pub fn has_text(&self) -> bool {
        !self.text_lines.is_empty() || self.prompt.is_some()
    }
}

impl DiffOverlay {
    pub fn new(task_id: TaskId, title: String, attempt_total_hint: Option<usize>) -> Self {
        let mut sd = ScrollableDiff::new();
        sd.set_content(Vec::new());
        Self {
            title,
            task_id,
            sd,
            base_can_apply: false,
            diff_lines: Vec::new(),
            text_lines: Vec::new(),
            prompt: None,
            attempts: vec![AttemptView::default()],
            selected_attempt: 0,
            current_view: DetailView::Prompt,
            base_turn_id: None,
            sibling_turn_ids: Vec::new(),
            attempt_total_hint,
        }
    }

    pub fn current_attempt(&self) -> Option<&AttemptView> {
        self.attempts.get(self.selected_attempt)
    }

    pub fn base_attempt_mut(&mut self) -> &mut AttemptView {
        if self.attempts.is_empty() {
            self.attempts.push(AttemptView::default());
        }
        &mut self.attempts[0]
    }

    pub fn set_view(&mut self, view: DetailView) {
        self.current_view = view;
        self.apply_selection_to_fields();
    }

    pub fn expected_attempts(&self) -> Option<usize> {
        self.attempt_total_hint.or({
            if self.attempts.is_empty() {
                None
            } else {
                Some(self.attempts.len())
            }
        })
    }

    pub fn attempt_count(&self) -> usize {
        self.attempts.len()
    }

    pub fn attempt_display_total(&self) -> usize {
        self.expected_attempts()
            .unwrap_or_else(|| self.attempts.len().max(1))
    }

    pub fn step_attempt(&mut self, delta: isize) -> bool {
        let total = self.attempts.len();
        if total <= 1 {
            return false;
        }
        let total_isize = total as isize;
        let current = self.selected_attempt as isize;
        let mut next = current + delta;
        next = ((next % total_isize) + total_isize) % total_isize;
        let next = next as usize;
        self.selected_attempt = next;
        self.apply_selection_to_fields();
        true
    }
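    // `step_attempt` wraps in both directions via the double-modulo idiom:
    // with three attempts, ((0 - 1) % 3 + 3) % 3 == 2, so stepping back from
    // the first attempt selects the last. A usage sketch (values invented):
    //
    //     let mut overlay = DiffOverlay::new(TaskId("T-1000".to_string()), "demo".to_string(), None);
    //     overlay.attempts = vec![AttemptView::default(); 3];
    //     assert!(overlay.step_attempt(-1));
    //     assert_eq!(overlay.selected_attempt, 2);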
    pub fn current_can_apply(&self) -> bool {
        matches!(self.current_view, DetailView::Diff)
            && self
                .current_attempt()
                .and_then(|attempt| attempt.diff_raw.as_ref())
                .map(|diff| !diff.is_empty())
                .unwrap_or(false)
    }

    pub fn apply_selection_to_fields(&mut self) {
        let (diff_lines, text_lines, prompt) = if let Some(attempt) = self.current_attempt() {
            (
                attempt.diff_lines.clone(),
                attempt.text_lines.clone(),
                attempt.prompt.clone(),
            )
        } else {
            self.diff_lines.clear();
            self.text_lines.clear();
            self.prompt = None;
            self.sd.set_content(vec!["<loading attempt>".to_string()]);
            return;
        };

        self.diff_lines = diff_lines.clone();
        self.text_lines = text_lines.clone();
        self.prompt = prompt;

        match self.current_view {
            DetailView::Diff => {
                if diff_lines.is_empty() {
                    self.sd.set_content(vec!["<no diff available>".to_string()]);
                } else {
                    self.sd.set_content(diff_lines);
                }
            }
            DetailView::Prompt => {
                if text_lines.is_empty() {
                    self.sd.set_content(vec!["<no output>".to_string()]);
                } else {
                    self.sd.set_content(text_lines);
                }
            }
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DetailView {
    Diff,
    Prompt,
}
/// Internal app events delivered from background tasks.
|
||||
/// These let the UI event loop remain responsive and keep the spinner animating.
|
||||
#[derive(Debug)]
|
||||
pub enum AppEvent {
|
||||
TasksLoaded {
|
||||
env: Option<String>,
|
||||
result: anyhow::Result<Vec<TaskSummary>>,
|
||||
},
|
||||
// Background diff summary events were planned; removed for now to keep code minimal.
|
||||
/// Autodetection of a likely environment id finished
|
||||
EnvironmentAutodetected(anyhow::Result<crate::env_detect::AutodetectSelection>),
|
||||
/// Background completion of environment list fetch
|
||||
EnvironmentsLoaded(anyhow::Result<Vec<EnvironmentRow>>),
|
||||
DetailsDiffLoaded {
|
||||
id: TaskId,
|
||||
title: String,
|
||||
diff: String,
|
||||
},
|
||||
DetailsMessagesLoaded {
|
||||
id: TaskId,
|
||||
title: String,
|
||||
messages: Vec<String>,
|
||||
prompt: Option<String>,
|
||||
turn_id: Option<String>,
|
||||
sibling_turn_ids: Vec<String>,
|
||||
attempt_placement: Option<i64>,
|
||||
attempt_status: codex_cloud_tasks_client::AttemptStatus,
|
||||
},
|
||||
DetailsFailed {
|
||||
id: TaskId,
|
||||
title: String,
|
||||
error: String,
|
||||
},
|
||||
AttemptsLoaded {
|
||||
id: TaskId,
|
||||
attempts: Vec<codex_cloud_tasks_client::TurnAttempt>,
|
||||
},
|
||||
/// Background completion of new task submission
|
||||
NewTaskSubmitted(Result<codex_cloud_tasks_client::CreatedTask, String>),
|
||||
/// Background completion of apply preflight when opening modal or on demand
|
||||
ApplyPreflightFinished {
|
||||
id: TaskId,
|
||||
title: String,
|
||||
message: String,
|
||||
level: ApplyResultLevel,
|
||||
skipped: Vec<String>,
|
||||
conflicts: Vec<String>,
|
||||
},
|
||||
/// Background completion of apply action (actual patch application)
|
||||
ApplyFinished {
|
||||
id: TaskId,
|
||||
result: std::result::Result<codex_cloud_tasks_client::ApplyOutcome, String>,
|
||||
},
|
||||
}
|
||||
|
||||
// Convenience aliases; currently unused.
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use chrono::Utc;
|
||||
|
||||
struct FakeBackend {
|
||||
// maps env key to titles
|
||||
by_env: std::collections::HashMap<Option<String>, Vec<&'static str>>,
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl codex_cloud_tasks_client::CloudBackend for FakeBackend {
|
||||
async fn list_tasks(
|
||||
&self,
|
||||
env: Option<&str>,
|
||||
) -> codex_cloud_tasks_client::Result<Vec<TaskSummary>> {
|
||||
let key = env.map(str::to_string);
|
||||
let titles = self
|
||||
.by_env
|
||||
.get(&key)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| vec!["default-a", "default-b"]);
|
||||
let mut out = Vec::new();
|
||||
for (i, t) in titles.into_iter().enumerate() {
|
||||
out.push(TaskSummary {
|
||||
id: TaskId(format!("T-{i}")),
|
||||
title: t.to_string(),
|
||||
status: codex_cloud_tasks_client::TaskStatus::Ready,
|
||||
updated_at: Utc::now(),
|
||||
environment_id: env.map(str::to_string),
|
||||
environment_label: None,
|
||||
summary: codex_cloud_tasks_client::DiffSummary::default(),
|
||||
is_review: false,
|
||||
attempt_total: Some(1),
|
||||
});
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
async fn get_task_diff(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
) -> codex_cloud_tasks_client::Result<Option<String>> {
|
||||
Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
|
||||
"not used in test",
|
||||
))
|
||||
}
|
||||
|
||||
async fn get_task_messages(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
) -> codex_cloud_tasks_client::Result<Vec<String>> {
|
||||
Ok(vec![])
|
||||
}
|
||||
async fn get_task_text(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::TaskText> {
|
||||
Ok(codex_cloud_tasks_client::TaskText {
|
||||
prompt: Some("Example prompt".to_string()),
|
||||
messages: Vec::new(),
|
||||
turn_id: Some("fake-turn".to_string()),
|
||||
sibling_turn_ids: Vec::new(),
|
||||
attempt_placement: Some(0),
|
||||
attempt_status: codex_cloud_tasks_client::AttemptStatus::Completed,
|
||||
})
|
||||
}
|
||||
|
||||
async fn list_sibling_attempts(
|
||||
&self,
|
||||
_task: TaskId,
|
||||
_turn_id: String,
|
||||
) -> codex_cloud_tasks_client::Result<Vec<codex_cloud_tasks_client::TurnAttempt>> {
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn apply_task(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
_diff_override: Option<String>,
|
||||
) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::ApplyOutcome> {
|
||||
Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
|
||||
"not used in test",
|
||||
))
|
||||
}
|
||||
|
||||
async fn apply_task_preflight(
|
||||
&self,
|
||||
_id: TaskId,
|
||||
_diff_override: Option<String>,
|
||||
) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::ApplyOutcome> {
|
||||
Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
|
||||
"not used in test",
|
||||
))
|
||||
}
|
||||
|
||||
async fn create_task(
|
||||
&self,
|
||||
_env_id: &str,
|
||||
_prompt: &str,
|
||||
_git_ref: &str,
|
||||
_qa_mode: bool,
|
||||
_attachments: &[codex_cloud_tasks_client::AttachmentReference],
|
||||
) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::CreatedTask> {
|
||||
Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
|
||||
"not used in test",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn load_tasks_uses_env_parameter() {
|
||||
// Arrange: env-specific task titles
|
||||
let mut by_env = std::collections::HashMap::new();
|
||||
by_env.insert(None, vec!["root-1", "root-2"]);
|
||||
by_env.insert(Some("env-A".to_string()), vec!["A-1"]);
|
||||
by_env.insert(Some("env-B".to_string()), vec!["B-1", "B-2", "B-3"]);
|
||||
let backend = FakeBackend { by_env };
|
||||
|
||||
// Act + Assert
|
||||
let root = load_tasks(&backend, None).await.unwrap();
|
||||
assert_eq!(root.len(), 2);
|
||||
assert_eq!(root[0].title, "root-1");
|
||||
|
||||
let a = load_tasks(&backend, Some("env-A")).await.unwrap();
|
||||
assert_eq!(a.len(), 1);
|
||||
assert_eq!(a[0].title, "A-1");
|
||||
|
||||
let b = load_tasks(&backend, Some("env-B")).await.unwrap();
|
||||
assert_eq!(b.len(), 3);
|
||||
assert_eq!(b[2].title, "B-3");
|
||||
}
|
||||
}
|
||||
226
codex-rs/cloud-tasks/src/attachments/mod.rs
Normal file
@@ -0,0 +1,226 @@
pub mod upload;

pub use upload::AttachmentAssetPointer;
pub use upload::AttachmentId;
pub use upload::AttachmentUploadError;
pub use upload::AttachmentUploadMode;
pub use upload::AttachmentUploadProgress;
pub use upload::AttachmentUploadState;
pub use upload::AttachmentUploadUpdate;
pub use upload::AttachmentUploader;
pub use upload::HttpConfig as AttachmentUploadHttpConfig;
pub use upload::pointer_id_from_value;

use serde::Deserialize;
use serde::Serialize;

const MAX_SUGGESTIONS: usize = 5;

/// The type of attachment included alongside a composer submission.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum AttachmentKind {
    File,
    Image,
}

/// Metadata describing a file or asset attached via an `@` mention.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ComposerAttachment {
    pub kind: AttachmentKind,
    pub label: String,
    pub path: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fs_path: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub start_line: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub end_line: Option<u32>,
    #[serde(skip, default)]
    pub id: AttachmentId,
    #[serde(skip_serializing, skip_deserializing)]
    pub upload: AttachmentUploadState,
}

impl ComposerAttachment {
    pub fn from_suggestion(id: AttachmentId, suggestion: &MentionSuggestion) -> Self {
        Self {
            kind: AttachmentKind::File,
            label: suggestion.label.clone(),
            path: suggestion.path.clone(),
            fs_path: suggestion.fs_path.clone(),
            start_line: suggestion.start_line,
            end_line: suggestion.end_line,
            id,
            upload: AttachmentUploadState::default(),
        }
    }
}

/// UI state for the active `@` mention query inside the composer.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MentionQueryState {
    pub current: Option<MentionToken>,
}

impl MentionQueryState {
    /// Returns true when the stored token changed.
    pub fn update_from(&mut self, token: Option<String>) -> bool {
        let next = token.map(MentionToken::from_query);
        if next != self.current {
            self.current = next;
            return true;
        }
        false
    }
}

/// Represents an `@` mention currently under the user's cursor.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MentionToken {
    /// Query string without the leading `@`.
    pub query: String,
    /// Raw token including the `@` prefix.
    pub raw: String,
}

impl MentionToken {
    pub(crate) fn from_query(query: String) -> Self {
        let raw = format!("@{query}");
        Self { query, raw }
    }
}

/// A suggested file (or range within a file) that matches the active `@` token.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MentionSuggestion {
    pub label: String,
    pub path: String,
    pub fs_path: Option<String>,
    pub start_line: Option<u32>,
    pub end_line: Option<u32>,
}

impl MentionSuggestion {
    pub fn new(label: impl Into<String>, path: impl Into<String>) -> Self {
        Self {
            label: label.into(),
            path: path.into(),
            fs_path: None,
            start_line: None,
            end_line: None,
        }
    }
}

/// Tracks suggestion list + selection for the mention picker overlay.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MentionPickerState {
    suggestions: Vec<MentionSuggestion>,
    selected: usize,
}

impl MentionPickerState {
    pub fn clear(&mut self) -> bool {
        if self.suggestions.is_empty() {
            return false;
        }
        self.suggestions.clear();
        self.selected = 0;
        true
    }

    pub fn move_selection(&mut self, delta: isize) {
        if self.suggestions.is_empty() {
            return;
        }
        let len = self.suggestions.len() as isize;
        let mut idx = self.selected as isize + delta;
        if idx < 0 {
            idx = len - 1;
        }
        if idx >= len {
            idx = 0;
        }
        self.selected = idx as usize;
    }
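
    // Wrap behavior of `move_selection`: with 2 suggestions, stepping +1 from
    // index 1 wraps to 0, and stepping -1 from index 0 wraps to len - 1 == 1.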

    pub fn selected_index(&self) -> usize {
        self.selected.min(self.suggestions.len().saturating_sub(1))
    }

    pub fn current(&self) -> Option<&MentionSuggestion> {
        self.suggestions.get(self.selected_index())
    }

    pub fn render_height(&self) -> u16 {
        let rows = self.suggestions.len().clamp(1, MAX_SUGGESTIONS) as u16;
        // Add borders + padding space.
        rows.saturating_add(2)
    }

    pub fn items(&self) -> &[MentionSuggestion] {
        &self.suggestions
    }

    pub fn set_suggestions(&mut self, suggestions: Vec<MentionSuggestion>) -> bool {
        let mut trimmed = suggestions;
        if trimmed.len() > MAX_SUGGESTIONS {
            trimmed.truncate(MAX_SUGGESTIONS);
        }
        if trimmed == self.suggestions {
            return false;
        }
        self.suggestions = trimmed;
        self.selected = 0;
        true
    }
}

#[cfg(test)]
mod tests {
    use super::AttachmentUploadState;
    use super::*;

    #[test]
    fn compose_attachment_from_suggestion_copies_fields() {
        let mut suggestion = MentionSuggestion::new("src/main.rs", "src/main.rs");
        suggestion.fs_path = Some("/repo/src/main.rs".to_string());
        suggestion.start_line = Some(10);
        suggestion.end_line = Some(20);
        let att = ComposerAttachment::from_suggestion(AttachmentId::new(42), &suggestion);
        assert_eq!(att.label, "src/main.rs");
        assert_eq!(att.path, "src/main.rs");
        assert_eq!(att.fs_path.as_deref(), Some("/repo/src/main.rs"));
        assert_eq!(att.start_line, Some(10));
        assert_eq!(att.end_line, Some(20));
        assert!(matches!(att.upload, AttachmentUploadState::NotStarted));
        assert_eq!(att.id.raw(), 42);
    }

    #[test]
    fn move_selection_wraps() {
        let _token = MentionToken::from_query("foo".to_string());
        let mut picker = MentionPickerState::default();
        assert!(picker.set_suggestions(vec![
            MentionSuggestion::new("src/foo.rs", "src/foo.rs"),
            MentionSuggestion::new("src/main.rs", "src/main.rs"),
        ]));
        picker.move_selection(1);
        assert_eq!(
            picker.selected_index(),
            1.min(picker.items().len().saturating_sub(1))
        );
        picker.move_selection(-1);
        assert_eq!(picker.selected_index(), 0);
    }

    #[test]
    fn refresh_none_clears_suggestions() {
        let _token = MentionToken::from_query("bar".to_string());
        let mut picker = MentionPickerState::default();
        assert!(picker.set_suggestions(vec![MentionSuggestion::new("docs/bar.md", "docs/bar.md")]));
        assert!(picker.clear());
        assert!(picker.items().is_empty());
    }
}
605
codex-rs/cloud-tasks/src/attachments/upload.rs
Normal file
@@ -0,0 +1,605 @@
use std::collections::HashMap;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;

use crate::util::append_error_log;
use chrono::Local;
use mime_guess::MimeGuess;
use reqwest::Client;
use serde::Deserialize;
use serde::Serialize;
use tokio::sync::mpsc;
use tokio::sync::mpsc::UnboundedReceiver;
use tokio::sync::mpsc::UnboundedSender;
use tracing::debug;
use tracing::warn;
use url::Url;

const UPLOAD_USE_CASE: &str = "codex";

/// Stable identifier assigned to each staged attachment.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Serialize, Deserialize)]
#[serde(transparent)]
pub struct AttachmentId(pub u64);

impl AttachmentId {
    pub const fn new(raw: u64) -> Self {
        Self(raw)
    }

    pub const fn raw(self) -> u64 {
        self.0
    }
}

/// Represents the lifecycle of an attachment upload initiated after an `@` mention.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum AttachmentUploadState {
    NotStarted,
    Uploading(AttachmentUploadProgress),
    Uploaded(AttachmentUploadSuccess),
    Failed(AttachmentUploadError),
}

impl Default for AttachmentUploadState {
    fn default() -> Self {
        Self::NotStarted
    }
}

impl AttachmentUploadState {
    pub fn is_pending(&self) -> bool {
        matches!(self, Self::NotStarted | Self::Uploading(_))
    }

    pub fn is_uploaded(&self) -> bool {
        matches!(self, Self::Uploaded(_))
    }

    pub fn is_failed(&self) -> bool {
        matches!(self, Self::Failed(_))
    }
}

/// Progress for uploads where the total size is known.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentUploadProgress {
    pub uploaded_bytes: u64,
    pub total_bytes: Option<u64>,
}

impl AttachmentUploadProgress {
    pub fn new(uploaded_bytes: u64, total_bytes: Option<u64>) -> Self {
        Self {
            uploaded_bytes,
            total_bytes,
        }
    }
}

/// Successful upload metadata containing the remote pointer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentUploadSuccess {
    pub asset_pointer: AttachmentAssetPointer,
    pub display_name: String,
}

impl AttachmentUploadSuccess {
    pub fn new(asset_pointer: AttachmentAssetPointer, display_name: impl Into<String>) -> Self {
        Self {
            asset_pointer,
            display_name: display_name.into(),
        }
    }
}

/// Describes the remote asset pointer returned by the file service.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentAssetPointer {
    pub kind: AttachmentPointerKind,
    pub value: String,
}

impl AttachmentAssetPointer {
    pub fn new(kind: AttachmentPointerKind, value: impl Into<String>) -> Self {
        Self {
            kind,
            value: value.into(),
        }
    }
}

/// High-level pointer type so we can support both single file and container uploads.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AttachmentPointerKind {
    File,
    Image,
    #[allow(dead_code)]
    Container,
}

impl fmt::Display for AttachmentPointerKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::File => write!(f, "file"),
            Self::Image => write!(f, "image"),
            Self::Container => write!(f, "container"),
        }
    }
}

/// Captures a user-visible error when uploading an attachment fails.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AttachmentUploadError {
    pub message: String,
}

impl AttachmentUploadError {
    pub fn new(message: impl Into<String>) -> Self {
        Self {
            message: message.into(),
        }
    }
}

impl fmt::Display for AttachmentUploadError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.message)
    }
}

/// Internal update emitted by the background uploader task.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum AttachmentUploadUpdate {
    Started {
        id: AttachmentId,
        total_bytes: Option<u64>,
    },
    Finished {
        id: AttachmentId,
        result: Result<AttachmentUploadSuccess, AttachmentUploadError>,
    },
}

/// Configuration for attachment uploads.
#[derive(Clone, Debug)]
pub enum AttachmentUploadMode {
    Disabled,
    #[cfg_attr(not(test), allow(dead_code))]
    ImmediateSuccess,
    Http(HttpConfig),
}

#[derive(Clone, Debug)]
pub struct HttpConfig {
    pub base_url: String,
    pub bearer_token: Option<String>,
    pub chatgpt_account_id: Option<String>,
    pub user_agent: Option<String>,
}

impl HttpConfig {
    fn trimmed_base(&self) -> String {
        self.base_url.trim_end_matches('/').to_string()
    }
}

#[derive(Clone)]
enum AttachmentUploadBackend {
    Disabled,
    ImmediateSuccess,
    Http(Arc<AttachmentUploadHttp>),
}

#[derive(Clone)]
struct AttachmentUploadHttp {
    client: Client,
    base_url: String,
    bearer_token: Option<String>,
    chatgpt_account_id: Option<String>,
    user_agent: Option<String>,
}

impl AttachmentUploadHttp {
    fn apply_default_headers(&self, builder: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
        let mut b = builder;
        if let Some(token) = &self.bearer_token {
            b = b.bearer_auth(token);
        }
        if let Some(acc) = &self.chatgpt_account_id {
            b = b.header("ChatGPT-Account-Id", acc);
        }
        if let Some(ua) = &self.user_agent {
            b = b.header(reqwest::header::USER_AGENT, ua.clone());
        }
        b
    }
}

/// Bookkeeping for in-flight attachment uploads, providing polling APIs for the UI thread.
pub struct AttachmentUploader {
    update_tx: UnboundedSender<AttachmentUploadUpdate>,
    update_rx: UnboundedReceiver<AttachmentUploadUpdate>,
    inflight: HashMap<AttachmentId, Arc<AtomicBool>>,
    backend: AttachmentUploadBackend,
}

impl AttachmentUploader {
    pub fn new(mode: AttachmentUploadMode) -> Self {
        let (tx, rx) = mpsc::unbounded_channel();
        let backend = match mode {
            AttachmentUploadMode::Disabled => AttachmentUploadBackend::Disabled,
            AttachmentUploadMode::ImmediateSuccess => AttachmentUploadBackend::ImmediateSuccess,
            AttachmentUploadMode::Http(cfg) => match Client::builder().build() {
                Ok(client) => AttachmentUploadBackend::Http(Arc::new(AttachmentUploadHttp {
                    client,
                    base_url: cfg.trimmed_base(),
                    bearer_token: cfg.bearer_token,
                    chatgpt_account_id: cfg.chatgpt_account_id,
                    user_agent: cfg.user_agent,
                })),
                Err(err) => {
                    warn!("attachment_upload.http_client_init_failed: {err}");
                    AttachmentUploadBackend::Disabled
                }
            },
        };
        Self {
            update_tx: tx,
            update_rx: rx,
            inflight: HashMap::new(),
            backend,
        }
    }

    pub fn start_upload(
        &mut self,
        id: AttachmentId,
        display_name: impl Into<String>,
        fs_path: PathBuf,
    ) -> Result<(), AttachmentUploadError> {
        if self.inflight.contains_key(&id) {
            return Err(AttachmentUploadError::new("upload already queued"));
        }
        if let AttachmentUploadBackend::Disabled = &self.backend {
            return Err(AttachmentUploadError::new(
                "file uploads are not available in this environment",
            ));
        }

        if !is_supported_image(&fs_path) {
            return Err(AttachmentUploadError::new(
                "only image files can be uploaded",
            ));
        }

        let cancel_token = Arc::new(AtomicBool::new(false));
        self.inflight.insert(id, cancel_token.clone());
        let tx = self.update_tx.clone();
        let backend = self.backend.clone();
        let path_clone = fs_path.clone();
        let label = display_name.into();
        tokio::spawn(async move {
            let metadata = tokio::fs::metadata(&fs_path).await.ok();
            let total_bytes = metadata.as_ref().map(std::fs::Metadata::len);
            let _ = tx.send(AttachmentUploadUpdate::Started { id, total_bytes });

            if cancel_token.load(Ordering::Relaxed) {
                let _ = tx.send(AttachmentUploadUpdate::Finished {
                    id,
                    result: Err(AttachmentUploadError::new("upload canceled")),
                });
                return;
            }

            let result = match backend {
                AttachmentUploadBackend::Disabled => Err(AttachmentUploadError::new(
                    "file uploads are not available in this environment",
                )),
                AttachmentUploadBackend::ImmediateSuccess => {
                    let pointer = AttachmentAssetPointer::new(
                        AttachmentPointerKind::File,
                        format!("file-service://mock-{}", id.raw()),
                    );
                    Ok(AttachmentUploadSuccess::new(pointer, label.clone()))
                }
                AttachmentUploadBackend::Http(http) => {
                    perform_http_upload(
                        http,
                        &path_clone,
                        &label,
                        total_bytes,
                        cancel_token.clone(),
                    )
                    .await
                }
            };

            let _ = tx.send(AttachmentUploadUpdate::Finished { id, result });
        });
        Ok(())
    }

    #[cfg_attr(not(test), allow(dead_code))]
    pub fn cancel_all(&mut self) {
        for cancel in self.inflight.values() {
            cancel.store(true, Ordering::Relaxed);
        }
    }

    pub fn poll(&mut self) -> Vec<AttachmentUploadUpdate> {
        let mut out = Vec::new();
        while let Ok(update) = self.update_rx.try_recv() {
            if let AttachmentUploadUpdate::Finished { id, .. } = &update {
                self.inflight.remove(id);
            }
            out.push(update);
        }
        out
    }
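
    // Typical caller loop (illustrative sketch, not part of this crate): drain
    // once per UI tick and fold each update into the matching attachment row:
    //   for update in uploader.poll() {
    //       match update {
    //           AttachmentUploadUpdate::Started { id, total_bytes } => { /* show spinner */ }
    //           AttachmentUploadUpdate::Finished { id, result } => { /* record outcome */ }
    //       }
    //   }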
}

impl Default for AttachmentUploader {
    fn default() -> Self {
        Self::new(AttachmentUploadMode::Disabled)
    }
}

async fn perform_http_upload(
    http: Arc<AttachmentUploadHttp>,
    fs_path: &Path,
    display_label: &str,
    total_bytes: Option<u64>,
    cancel_token: Arc<AtomicBool>,
) -> Result<AttachmentUploadSuccess, AttachmentUploadError> {
    let file_bytes = tokio::fs::read(fs_path)
        .await
        .map_err(|e| AttachmentUploadError::new(format!("failed to read file: {e}")))?;

    if cancel_token.load(Ordering::Relaxed) {
        return Err(AttachmentUploadError::new("upload canceled"));
    }

    let file_name = fs_path
        .file_name()
        .and_then(|s| s.to_str())
        .map(std::string::ToString::to_string)
        .unwrap_or_else(|| display_label.to_string());

    let create_url = format!("{}/files", http.base_url);
    let body = CreateFileRequest {
        file_name: &file_name,
        file_size: total_bytes.unwrap_or(file_bytes.len() as u64),
        use_case: UPLOAD_USE_CASE,
        timezone_offset_min: (Local::now().offset().utc_minus_local() / 60),
        reset_rate_limits: false,
    };

    let create_resp = http
        .apply_default_headers(http.client.post(&create_url))
        .json(&body)
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("file create failed: {e}")))?;
    if !create_resp.status().is_success() {
        let status = create_resp.status();
        let text = create_resp.text().await.unwrap_or_default();
        return Err(AttachmentUploadError::new(format!(
            "file create request failed status={status} body={text}"
        )));
    }
    let created: CreateFileResponse = create_resp
        .json()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("decode file create response: {e}")))?;

    if cancel_token.load(Ordering::Relaxed) {
        return Err(AttachmentUploadError::new("upload canceled"));
    }

    let upload_url = resolve_upload_url(&created.upload_url)
        .ok_or_else(|| AttachmentUploadError::new("invalid upload url"))?;

    let mime = infer_image_mime(fs_path)
        .ok_or_else(|| AttachmentUploadError::new("only image files can be uploaded"))?;
    let mut azure_req = http.client.put(&upload_url);
    azure_req = azure_req
        .header("x-ms-blob-type", "BlockBlob")
        .header("x-ms-version", "2020-04-08");

    azure_req = azure_req
        .header(reqwest::header::CONTENT_TYPE, mime.as_str())
        .header("x-ms-blob-content-type", mime.as_str());

    let azure_resp = azure_req
        .body(file_bytes)
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("blob upload failed: {e}")))?;

    if !(200..300).contains(&azure_resp.status().as_u16()) {
        let status = azure_resp.status();
        let text = azure_resp.text().await.unwrap_or_default();
        return Err(AttachmentUploadError::new(format!(
            "blob upload failed status={status} body={text}"
        )));
    }

    if cancel_token.load(Ordering::Relaxed) {
        return Err(AttachmentUploadError::new("upload canceled"));
    }

    // Finalization must succeed so the pointer can be used; surface any failure
    // to the caller after logging for easier debugging.
    if let Err(err) = finalize_upload(http.clone(), &created.file_id, &file_name).await {
        let reason = err.message.clone();
        warn!(
            "mention.attachment.upload.finalize_failed file_id={} reason={reason}",
            created.file_id
        );
        append_error_log(format!(
            "mention.attachment.upload.finalize_failed file_id={} reason={reason}",
            created.file_id
        ));
        return Err(err);
    }

    let pointer = asset_pointer_from_id(&created.file_id);
    debug!(
        "mention.attachment.upload.success file_id={} pointer={}",
        created.file_id, pointer
    );
    let pointer_kind = AttachmentPointerKind::Image;

    Ok(AttachmentUploadSuccess::new(
        AttachmentAssetPointer::new(pointer_kind, pointer),
        display_label,
    ))
}

fn asset_pointer_from_id(file_id: &str) -> String {
    if file_id.starts_with("file_") {
        format!("sediment://{file_id}")
    } else {
        format!("file-service://{file_id}")
    }
}
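
// Examples for `asset_pointer_from_id`: "file_123" maps to "sediment://file_123",
// while any other id such as "abc-456" maps to "file-service://abc-456".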

pub fn pointer_id_from_value(pointer: &str) -> Option<String> {
    pointer
        .strip_prefix("file-service://")
        .or_else(|| pointer.strip_prefix("sediment://"))
        .map(str::to_string)
        .or_else(|| (!pointer.is_empty()).then(|| pointer.to_string()))
}
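
// `pointer_id_from_value` goes the other way: "file-service://abc" and
// "sediment://abc" both yield Some("abc"), an unprefixed non-empty value passes
// through unchanged, and an empty string yields None.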

async fn finalize_upload(
    http: Arc<AttachmentUploadHttp>,
    file_id: &str,
    file_name: &str,
) -> Result<(), AttachmentUploadError> {
    let finalize_url = format!("{}/files/process_upload_stream", http.base_url);
    let body = FinalizeUploadRequest {
        file_id,
        use_case: UPLOAD_USE_CASE,
        index_for_retrieval: false,
        file_name,
    };
    let finalize_resp = http
        .apply_default_headers(http.client.post(&finalize_url))
        .json(&body)
        .send()
        .await
        .map_err(|e| AttachmentUploadError::new(format!("finalize upload failed: {e}")))?;
    if !finalize_resp.status().is_success() {
        let status = finalize_resp.status();
        let text = finalize_resp.text().await.unwrap_or_default();
        return Err(AttachmentUploadError::new(format!(
            "finalize upload failed status={status} body={text}"
        )));
    }
    Ok(())
}

fn resolve_upload_url(url: &str) -> Option<String> {
    let parsed = Url::parse(url).ok()?;
    if !parsed.as_str().to_lowercase().contains("estuary") {
        return Some(parsed.into());
    }
    parsed
        .query_pairs()
        .find(|(k, _)| k == "upload_url")
        .map(|(_, v)| v.into_owned())
}
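
// Non-"estuary" URLs are returned unchanged; estuary-style URLs carry the real
// blob destination percent-encoded in their `upload_url` query parameter, which
// `query_pairs()` decodes. Hypothetical example:
//   "https://estuary.example/upload?upload_url=https%3A%2F%2Fblob.example%2Fx"
// resolves to "https://blob.example/x".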

#[derive(Serialize)]
struct CreateFileRequest<'a> {
    file_name: &'a str,
    file_size: u64,
    use_case: &'a str,
    timezone_offset_min: i32,
    reset_rate_limits: bool,
}

#[derive(Serialize)]
struct FinalizeUploadRequest<'a> {
    file_id: &'a str,
    use_case: &'a str,
    index_for_retrieval: bool,
    file_name: &'a str,
}

#[derive(Deserialize)]
struct CreateFileResponse {
    file_id: String,
    upload_url: String,
}

fn is_supported_image(path: &Path) -> bool {
    infer_image_mime(path).is_some()
}

fn infer_image_mime(path: &Path) -> Option<String> {
    let guess = MimeGuess::from_path(path)
        .first_raw()
        .map(std::string::ToString::to_string);
    if let Some(m) = guess {
        if m.starts_with("image/") {
            return Some(m);
        }
    }

    let ext = path
        .extension()
        .and_then(|ext| ext.to_str())
        .map(|ext| ext.trim().to_ascii_lowercase())?;

    let mime = match ext.as_str() {
        "png" => "image/png",
        "jpg" | "jpeg" => "image/jpeg",
        "gif" => "image/gif",
        "webp" => "image/webp",
        "bmp" => "image/bmp",
        "svg" => "image/svg+xml",
        "heic" => "image/heic",
        "heif" => "image/heif",
        _ => return None,
    };

    Some(mime.to_string())
}
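
// The extension fallback above is case-insensitive thanks to `to_ascii_lowercase`,
// so e.g. "photo.HEIC" still resolves to "image/heic" even when `mime_guess`
// returns no match for the path.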

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;

    #[test]
    fn infer_image_mime_accepts_common_extensions() {
        let cases = [
            ("foo.png", Some("image/png")),
            ("bar.JPG", Some("image/jpeg")),
            ("baz.jpeg", Some("image/jpeg")),
            ("img.gif", Some("image/gif")),
            ("slide.WEBP", Some("image/webp")),
            ("art.bmp", Some("image/bmp")),
            ("vector.svg", Some("image/svg+xml")),
            ("photo.heic", Some("image/heic")),
            ("photo.heif", Some("image/heif")),
        ];

        for (path, expected) in cases {
            let actual = infer_image_mime(Path::new(path));
            assert_eq!(actual.as_deref(), expected, "case {path}");
        }
    }

    #[test]
    fn infer_image_mime_rejects_unknown_extension() {
        assert!(infer_image_mime(Path::new("doc.txt")).is_none());
    }
}
106
codex-rs/cloud-tasks/src/bin/conncheck.rs
Normal file
@@ -0,0 +1,106 @@
use codex_backend_client::Client as BackendClient;
use codex_cloud_tasks::util::extract_chatgpt_account_id;
use codex_cloud_tasks::util::normalize_base_url;
use codex_cloud_tasks::util::set_user_agent_suffix;
use codex_core::config::find_codex_home;
use codex_core::default_client::get_codex_user_agent;
use codex_login::AuthManager;
use std::time::Duration;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Base URL (default to ChatGPT backend API) and normalize to canonical form
    let raw_base = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
        .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
    let base_url = normalize_base_url(&raw_base);
    println!("base_url: {base_url}");
    let path_style = if base_url.contains("/backend-api") {
        "wham"
    } else {
        "codex-api"
    };
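    // Illustrative mapping: the default "https://chatgpt.com/backend-api" base
    // resolves to "wham", while a hypothetical base without "/backend-api"
    // (e.g. "https://codex.internal.example") resolves to "codex-api".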
    println!("path_style: {path_style}");

    // Locate CODEX_HOME and try to load ChatGPT auth
    let codex_home = match find_codex_home() {
        Ok(p) => {
            println!("codex_home: {}", p.display());
            Some(p)
        }
        Err(e) => {
            println!("codex_home: <not found> ({e})");
            None
        }
    };

    // Build backend client with UA
    set_user_agent_suffix("codex_cloud_tasks_conncheck");
    let ua = get_codex_user_agent();
    let mut client = BackendClient::new(base_url.clone())?.with_user_agent(ua);

    // Attach bearer token if available from ChatGPT auth
    let mut have_auth = false;
    if let Some(home) = codex_home {
        let authm = AuthManager::new(home);
        if let Some(auth) = authm.auth() {
            match auth.get_token().await {
                Ok(token) if !token.is_empty() => {
                    have_auth = true;
                    println!("auth: ChatGPT token present ({} chars)", token.len());
                    // Add Authorization header
                    client = client.with_bearer_token(&token);

                    // Attempt to extract ChatGPT account id from the JWT and set header.
                    if let Some(account_id) = extract_chatgpt_account_id(&token) {
                        println!("auth: ChatGPT-Account-Id: {account_id}");
                        client = client.with_chatgpt_account_id(account_id);
                    } else if let Some(acc) = auth.get_account_id() {
                        // Fallback: some older auth.jsons persist account_id
                        println!("auth: ChatGPT-Account-Id (from auth.json): {acc}");
                        client = client.with_chatgpt_account_id(acc);
                    }
                }
                Ok(_) => {
                    println!("auth: ChatGPT token empty");
                }
                Err(e) => {
                    println!("auth: failed to load ChatGPT token: {e}");
                }
            }
        } else {
            println!("auth: no ChatGPT auth.json");
        }
    }

    if !have_auth {
        println!("note: Online endpoints typically require ChatGPT sign-in. Run: `codex login`");
    }

    // Attempt the /list call with a short timeout to avoid hanging
    match path_style {
        "wham" => println!("request: GET /wham/tasks/list?limit=5&task_filter=current"),
        _ => println!("request: GET /api/codex/tasks/list?limit=5&task_filter=current"),
    }
    let fut = client.list_tasks(Some(5), Some("current"), None);
    let res = tokio::time::timeout(Duration::from_secs(30), fut).await;
    match res {
        Err(_) => {
            println!("error: request timed out after 30s");
            std::process::exit(2);
        }
        Ok(Err(e)) => {
            // backend-client includes HTTP status and body in errors.
            println!("error: {e}");
            std::process::exit(1);
        }
        Ok(Ok(list)) => {
            println!("ok: received {} tasks", list.items.len());
            for item in list.items.iter().take(5) {
                println!("- {} — {}", item.id, item.title);
            }
            // Keep output concise; omit full JSON payload to stay readable.
        }
    }

    Ok(())
}
45
codex-rs/cloud-tasks/src/bin/detailcheck.rs
Normal file
@@ -0,0 +1,45 @@
use codex_backend_client::Client as BackendClient;
use codex_cloud_tasks::util::set_user_agent_suffix;
use codex_core::config::find_codex_home;
use codex_core::default_client::get_codex_user_agent;
use codex_login::AuthManager;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
        .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
    set_user_agent_suffix("codex_cloud_tasks_detailcheck");
    let ua = get_codex_user_agent();
    let mut client = BackendClient::new(base_url)?.with_user_agent(ua);

    if let Ok(home) = find_codex_home() {
        let am = AuthManager::new(home);
        if let Some(auth) = am.auth()
            && let Ok(tok) = auth.get_token().await
        {
            client = client.with_bearer_token(tok);
        }
    }

    let list = client.list_tasks(Some(5), Some("current"), None).await?;
    println!("items: {}", list.items.len());
    for item in list.items.iter().take(5) {
        println!("item: {} {}", item.id, item.title);
        let (details, body, ct) = client.get_task_details_with_body(&item.id).await?;
        let diff = codex_backend_client::CodeTaskDetailsResponseExt::unified_diff(&details);
        match diff {
            Some(d) => println!(
                "unified diff len={} sample=\n{}",
                d.len(),
                &d.lines().take(10).collect::<Vec<_>>().join("\n")
            ),
            None => {
                println!(
                    "no unified diff found; ct={ct}; body sample=\n{}",
                    &body.chars().take(5000).collect::<String>()
                );
            }
        }
    }
    Ok(())
}
136
codex-rs/cloud-tasks/src/bin/envcheck.rs
Normal file
@@ -0,0 +1,136 @@
use base64::Engine;
use clap::Parser;
use codex_cloud_tasks::util::set_user_agent_suffix;
use codex_core::config::find_codex_home;
use codex_core::default_client::get_codex_user_agent;
use codex_login::AuthManager;
use reqwest::header::AUTHORIZATION;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;

#[derive(Debug, Parser)]
#[command(version, about = "Resolve Codex environment id (debug helper)")]
struct Args {
    /// Optional override for environment id; if present we just echo it.
    #[arg(long = "env-id")]
    environment_id: Option<String>,
    /// Optional label to select a matching environment (case-insensitive exact match).
    #[arg(long = "env-label")]
    environment_label: Option<String>,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let args = Args::parse();

    // Base URL (default to ChatGPT backend API) with normalization
    let mut base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
        .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
    while base_url.ends_with('/') {
        base_url.pop();
    }
    if (base_url.starts_with("https://chatgpt.com")
        || base_url.starts_with("https://chat.openai.com"))
        && !base_url.contains("/backend-api")
    {
        base_url = format!("{base_url}/backend-api");
    }
    println!("base_url: {base_url}");
    println!(
        "path_style: {}",
        if base_url.contains("/backend-api") {
            "wham"
        } else {
            "codex-api"
        }
    );

    // Build headers: UA + ChatGPT auth if available
    set_user_agent_suffix("codex_cloud_tasks_envcheck");
    let ua = get_codex_user_agent();
    let mut headers = HeaderMap::new();
    headers.insert(
        reqwest::header::USER_AGENT,
        HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
    );

    // Locate CODEX_HOME and try to load ChatGPT auth
    if let Ok(home) = find_codex_home() {
        println!("codex_home: {}", home.display());
        let authm = AuthManager::new(home);
        if let Some(auth) = authm.auth() {
            match auth.get_token().await {
                Ok(token) if !token.is_empty() => {
                    println!("auth: ChatGPT token present ({} chars)", token.len());
                    let value = format!("Bearer {token}");
                    if let Ok(hv) = HeaderValue::from_str(&value) {
                        headers.insert(AUTHORIZATION, hv);
                    }
                    if let Some(account_id) = auth
                        .get_account_id()
                        .or_else(|| extract_chatgpt_account_id(&token))
                    {
                        println!("auth: ChatGPT-Account-Id: {account_id}");
                        if let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
                            && let Ok(hv) = HeaderValue::from_str(&account_id)
                        {
                            headers.insert(name, hv);
                        }
                    }
                }
                Ok(_) => println!("auth: ChatGPT token empty"),
                Err(e) => println!("auth: failed to load ChatGPT token: {e}"),
            }
        } else {
            println!("auth: no ChatGPT auth.json");
        }
    } else {
        println!("codex_home: <not found>");
    }

    // If user supplied an environment id, just echo it and exit.
    if let Some(id) = args.environment_id {
        println!("env: provided env-id={id}");
        return Ok(());
    }

    // Auto-detect environment id using shared env_detect
    match codex_cloud_tasks::env_detect::autodetect_environment_id(
        &base_url,
        &headers,
        args.environment_label,
    )
    .await
    {
        Ok(sel) => {
            println!(
                "env: selected environment_id={} label={}",
                sel.id,
                sel.label.unwrap_or_else(|| "<none>".to_string())
            );
            Ok(())
        }
        Err(e) => {
            println!("env: failed: {e}");
            std::process::exit(2)
        }
    }
}

fn extract_chatgpt_account_id(token: &str) -> Option<String> {
    // JWT: header.payload.signature
    let mut parts = token.split('.');
    let (_h, payload_b64, _s) = match (parts.next(), parts.next(), parts.next()) {
        (Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
        _ => return None,
    };
    let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD
        .decode(payload_b64)
        .ok()?;
    let v: serde_json::Value = serde_json::from_slice(&payload_bytes).ok()?;
    v.get("https://api.openai.com/auth")
        .and_then(|auth| auth.get("chatgpt_account_id"))
        .and_then(|id| id.as_str())
        .map(str::to_string)
}
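
// Claim shape this reads (illustrative payload; real tokens carry more claims):
//   { "https://api.openai.com/auth": { "chatgpt_account_id": "acc_123" } }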
206
codex-rs/cloud-tasks/src/bin/newtask.rs
Normal file
@@ -0,0 +1,206 @@
use base64::Engine;
use clap::Parser;
use codex_cloud_tasks::util::set_user_agent_suffix;
use codex_core::config::find_codex_home;
use codex_core::default_client::get_codex_user_agent;
use codex_login::AuthManager;
use reqwest::header::AUTHORIZATION;
use reqwest::header::CONTENT_TYPE;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;

#[derive(Debug, Parser)]
#[command(version, about = "Create a new Codex cloud task (debug helper)")]
struct Args {
    /// Optional override for environment id; if absent we auto-detect.
    #[arg(long = "env-id")]
    environment_id: Option<String>,
    /// Optional label match for environment selection (case-insensitive, exact match).
    #[arg(long = "env-label")]
    environment_label: Option<String>,
    /// Branch or ref to use (e.g., main)
    #[arg(long = "ref", default_value = "main")]
    git_ref: String,
    /// Run environment in QA (ask) mode
    #[arg(long = "qa-mode", default_value_t = false)]
    qa_mode: bool,
    /// Task prompt text
    #[arg(required = true)]
    prompt: Vec<String>,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let args = Args::parse();
    let prompt = args.prompt.join(" ");

    // Base URL (default to ChatGPT backend API)
    let mut base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL")
        .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string());
    while base_url.ends_with('/') {
        base_url.pop();
    }
    if (base_url.starts_with("https://chatgpt.com")
        || base_url.starts_with("https://chat.openai.com"))
        && !base_url.contains("/backend-api")
    {
        base_url = format!("{base_url}/backend-api");
    }
    println!("base_url: {base_url}");
    let is_wham = base_url.contains("/backend-api");
    println!("path_style: {}", if is_wham { "wham" } else { "codex-api" });

    // Build headers: UA + ChatGPT auth if available
    set_user_agent_suffix("codex_cloud_tasks_newtask");
    let ua = get_codex_user_agent();
    let mut headers = HeaderMap::new();
    headers.insert(
        reqwest::header::USER_AGENT,
        HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
    );
    let mut have_auth = false;
    // Locate CODEX_HOME and try to load ChatGPT auth
    if let Ok(home) = find_codex_home() {
        let authm = AuthManager::new(home);
        if let Some(auth) = authm.auth() {
            match auth.get_token().await {
                Ok(token) if !token.is_empty() => {
                    have_auth = true;
                    println!("auth: ChatGPT token present ({} chars)", token.len());
                    let value = format!("Bearer {token}");
                    if let Ok(hv) = HeaderValue::from_str(&value) {
                        headers.insert(AUTHORIZATION, hv);
                    }
                    if let Some(account_id) = auth
                        .get_account_id()
                        .or_else(|| extract_chatgpt_account_id(&token))
                    {
                        println!("auth: ChatGPT-Account-Id: {account_id}");
                        if let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
                            && let Ok(hv) = HeaderValue::from_str(&account_id)
                        {
                            headers.insert(name, hv);
                        }
                    }
                }
                Ok(_) => println!("auth: ChatGPT token empty"),
                Err(e) => println!("auth: failed to load ChatGPT token: {e}"),
            }
        } else {
            println!("auth: no ChatGPT auth.json");
        }
    }
    if !have_auth {
        println!("note: Online endpoints typically require ChatGPT sign-in. Run: `codex login`");
    }

    // Determine environment id: prefer flag, then by-repo lookup, then full list.
    let env_id = if let Some(id) = args.environment_id.clone() {
        println!("env: using provided env-id={id}");
        id
    } else {
        match codex_cloud_tasks::env_detect::autodetect_environment_id(
            &base_url,
            &headers,
            args.environment_label.clone(),
        )
        .await
        {
            Ok(sel) => sel.id,
            Err(e) => {
                println!("env: failed to auto-detect environment: {e}");
                std::process::exit(2);
            }
        }
    };
    println!("env: selected environment_id={env_id}");

    // Build request payload patterned after VSCode: POST /wham/tasks
    let url = if is_wham {
        format!("{base_url}/wham/tasks")
    } else {
        format!("{base_url}/api/codex/tasks")
    };
    println!(
        "request: POST {}",
        url.strip_prefix(&base_url).unwrap_or(&url)
    );

    // input_items
    let mut input_items: Vec<serde_json::Value> = Vec::new();
    input_items.push(serde_json::json!({
        "type": "message",
        "role": "user",
        "content": [{ "content_type": "text", "text": prompt }]
    }));

    // Optional: starting diff via env var for quick testing
    if let Ok(diff) = std::env::var("CODEX_STARTING_DIFF")
        && !diff.is_empty()
    {
        input_items.push(serde_json::json!({
            "type": "pre_apply_patch",
            "output_diff": { "diff": diff }
        }));
    }

    let request_body = serde_json::json!({
        "new_task": {
            "environment_id": env_id,
            "branch": args.git_ref,
            "run_environment_in_qa_mode": args.qa_mode,
        },
        "input_items": input_items,
    });

    let http = reqwest::Client::builder().build()?;
    let res = http
        .post(&url)
        .headers(headers)
        .header(CONTENT_TYPE, HeaderValue::from_static("application/json"))
        .json(&request_body)
        .send()
        .await?;

    let status = res.status();
    let ct = res
        .headers()
        .get(CONTENT_TYPE)
        .and_then(|v| v.to_str().ok())
        .unwrap_or("")
        .to_string();
    let body = res.text().await.unwrap_or_default();
    println!("status: {status}");
    println!("content-type: {ct}");
    match serde_json::from_str::<serde_json::Value>(&body) {
        Ok(v) => println!(
            "response (pretty JSON):\n{}",
            serde_json::to_string_pretty(&v).unwrap_or(body)
        ),
        Err(_) => println!("response (raw):\n{body}"),
    }

    if !status.is_success() {
        // Exit non-zero on failure
        std::process::exit(1);
    }
    Ok(())
}

fn extract_chatgpt_account_id(token: &str) -> Option<String> {
    // JWT: header.payload.signature
    let mut parts = token.split('.');
    let (_h, payload_b64, _s) = match (parts.next(), parts.next(), parts.next()) {
        (Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
        _ => return None,
    };
    let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD
        .decode(payload_b64)
        .ok()?;
    let v: serde_json::Value = serde_json::from_slice(&payload_bytes).ok()?;
    v.get("https://api.openai.com/auth")
        .and_then(|auth| auth.get("chatgpt_account_id"))
        .and_then(|id| id.as_str())
        .map(str::to_string)
}
9
codex-rs/cloud-tasks/src/cli.rs
Normal file
@@ -0,0 +1,9 @@
use clap::Parser;
use codex_common::CliConfigOverrides;

#[derive(Parser, Debug, Default)]
#[command(version)]
pub struct Cli {
    #[clap(skip)]
    pub config_overrides: CliConfigOverrides,
}
405
codex-rs/cloud-tasks/src/env_detect.rs
Normal file
@@ -0,0 +1,405 @@
use reqwest::header::CONTENT_TYPE;
use reqwest::header::HeaderMap;
use std::collections::HashMap;
use tracing::info;
use tracing::warn;

#[derive(Debug, Clone, serde::Deserialize)]
struct CodeEnvironment {
    id: String,
    #[serde(default)]
    label: Option<String>,
    #[serde(default)]
    is_pinned: Option<bool>,
    #[serde(default)]
    task_count: Option<i64>,
    #[serde(default)]
    repo_map: Option<HashMap<String, GitRepository>>,
}

#[derive(Debug, Clone, serde::Deserialize)]
struct GitRepository {
    #[serde(default)]
    repository_full_name: Option<String>,
    #[serde(default)]
    default_branch: Option<String>,
}

#[derive(Debug, Clone)]
pub struct AutodetectSelection {
    pub id: String,
    pub label: Option<String>,
    pub default_branch: Option<String>,
}

fn clean_branch(branch: Option<&str>) -> Option<String> {
    branch
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .map(std::string::ToString::to_string)
}

fn default_branch_from_env(env: &CodeEnvironment, repo_hint: Option<&str>) -> Option<String> {
    let repo_map = env.repo_map.as_ref()?;
    if let Some(hint) = repo_hint {
        if let Some(repo) = repo_map
            .values()
            .find(|repo| repo.repository_full_name.as_deref() == Some(hint))
            && let Some(branch) = clean_branch(repo.default_branch.as_deref())
        {
            return Some(branch);
        }
        if let Some(repo) = repo_map.get(hint)
            && let Some(branch) = clean_branch(repo.default_branch.as_deref())
        {
            return Some(branch);
        }
    }
    repo_map
        .values()
        .find_map(|repo| clean_branch(repo.default_branch.as_deref()))
}
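
// Branch resolution order in `default_branch_from_env`: a repo whose
// `repository_full_name` equals the hint, then the hint used as a map key,
// then the first repo in the map with a non-empty default branch.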
|
||||
|
||||
fn merge_environment_row(
|
||||
map: &mut HashMap<String, crate::app::EnvironmentRow>,
|
||||
env: &CodeEnvironment,
|
||||
repo_hint: Option<&str>,
|
||||
) {
|
||||
let default_branch = default_branch_from_env(env, repo_hint);
|
||||
let repo_hint_owned = repo_hint.map(str::to_string);
|
||||
let entry = map
|
||||
.entry(env.id.clone())
|
||||
.or_insert_with(|| crate::app::EnvironmentRow {
|
||||
id: env.id.clone(),
|
||||
label: env.label.clone(),
|
||||
is_pinned: env.is_pinned.unwrap_or(false),
|
||||
repo_hints: repo_hint_owned.clone(),
|
||||
default_branch: default_branch.clone(),
|
||||
});
|
||||
if entry.label.is_none() {
|
||||
entry.label = env.label.clone();
|
||||
}
|
||||
entry.is_pinned = entry.is_pinned || env.is_pinned.unwrap_or(false);
|
||||
if entry.repo_hints.is_none() {
|
||||
entry.repo_hints = repo_hint_owned;
|
||||
}
|
||||
if let Some(branch) = default_branch {
|
||||
entry.default_branch = Some(branch);
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn autodetect_environment_id(
|
||||
base_url: &str,
|
||||
headers: &HeaderMap,
|
||||
desired_label: Option<String>,
|
||||
) -> anyhow::Result<AutodetectSelection> {
|
||||
// 1) Try repo-specific environments based on local git origins (GitHub only, like VSCode)
|
||||
let origins = get_git_origins();
|
||||
crate::append_error_log(format!("env: git origins: {origins:?}"));
|
||||
let mut by_repo_envs: Vec<CodeEnvironment> = Vec::new();
|
||||
for origin in &origins {
|
||||
if let Some((owner, repo)) = parse_owner_repo(origin) {
|
||||
let url = if base_url.contains("/backend-api") {
|
||||
format!(
|
||||
"{}/wham/environments/by-repo/{}/{}/{}",
|
||||
base_url, "github", owner, repo
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"{}/api/codex/environments/by-repo/{}/{}/{}",
|
||||
base_url, "github", owner, repo
|
||||
)
|
||||
};
|
||||
crate::append_error_log(format!("env: GET {url}"));
|
||||
match get_json::<Vec<CodeEnvironment>>(&url, headers).await {
|
||||
Ok(mut list) => {
|
||||
crate::append_error_log(format!(
|
||||
"env: by-repo returned {} env(s) for {owner}/{repo}",
|
||||
list.len(),
|
||||
));
|
||||
by_repo_envs.append(&mut list);
|
||||
}
|
||||
Err(e) => crate::append_error_log(format!(
|
||||
"env: by-repo fetch failed for {owner}/{repo}: {e}"
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(env) = pick_environment_row(&by_repo_envs, desired_label.as_deref()) {
|
||||
return Ok(AutodetectSelection {
|
||||
id: env.id.clone(),
|
||||
label: env.label.as_deref().map(str::to_owned),
|
||||
default_branch: default_branch_from_env(&env, None),
|
||||
});
|
||||
}
|
||||
|
||||
// 2) Fallback to the full list
|
||||
let list_url = if base_url.contains("/backend-api") {
|
||||
format!("{base_url}/wham/environments")
|
||||
} else {
|
||||
format!("{base_url}/api/codex/environments")
|
||||
};
|
||||
crate::append_error_log(format!("env: GET {list_url}"));
|
||||
// Fetch and log the full environments JSON for debugging
|
||||
let http = reqwest::Client::builder().build()?;
|
||||
let res = http.get(&list_url).headers(headers.clone()).send().await?;
|
||||
let status = res.status();
|
||||
let ct = res
|
||||
.headers()
|
||||
.get(CONTENT_TYPE)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let body = res.text().await.unwrap_or_default();
|
||||
crate::append_error_log(format!("env: status={status} content-type={ct}"));
|
||||
match serde_json::from_str::<serde_json::Value>(&body) {
|
||||
Ok(v) => {
|
||||
let pretty = serde_json::to_string_pretty(&v).unwrap_or(body.clone());
|
||||
crate::append_error_log(format!("env: /environments JSON (pretty):\n{pretty}"));
|
||||
}
|
||||
Err(_) => crate::append_error_log(format!("env: /environments (raw):\n{body}")),
|
||||
}
|
||||
if !status.is_success() {
|
||||
anyhow::bail!("GET {list_url} failed: {status}; content-type={ct}; body={body}");
|
||||
}
|
||||
let all_envs: Vec<CodeEnvironment> = serde_json::from_str(&body).map_err(|e| {
|
||||
anyhow::anyhow!("Decode error for {list_url}: {e}; content-type={ct}; body={body}")
|
||||
})?;
|
||||
if let Some(env) = pick_environment_row(&all_envs, desired_label.as_deref()) {
|
||||
return Ok(AutodetectSelection {
|
||||
id: env.id.clone(),
|
||||
label: env.label.as_deref().map(str::to_owned),
|
||||
default_branch: default_branch_from_env(&env, None),
|
||||
});
|
||||
}
|
||||
anyhow::bail!("no environments available")
|
||||
}
|
||||
|
||||
fn pick_environment_row(
|
||||
envs: &[CodeEnvironment],
|
||||
desired_label: Option<&str>,
|
||||
) -> Option<CodeEnvironment> {
|
||||
if envs.is_empty() {
|
||||
return None;
|
||||
}
|
||||
if let Some(label) = desired_label {
|
||||
let lc = label.to_lowercase();
|
||||
if let Some(e) = envs
|
||||
.iter()
|
||||
.find(|e| e.label.as_deref().unwrap_or("").to_lowercase() == lc)
|
||||
{
|
||||
crate::append_error_log(format!("env: matched by label: {label} -> {}", e.id));
|
||||
return Some(e.clone());
|
||||
}
|
||||
}
|
||||
if envs.len() == 1 {
|
||||
crate::append_error_log("env: single environment available; selecting it");
|
||||
return Some(envs[0].clone());
|
||||
}
|
||||
if let Some(e) = envs.iter().find(|e| e.is_pinned.unwrap_or(false)) {
|
||||
crate::append_error_log(format!("env: selecting pinned environment: {}", e.id));
|
||||
return Some(e.clone());
|
||||
}
|
||||
// Highest task_count as heuristic
|
||||
if let Some(e) = envs
|
||||
.iter()
|
||||
.max_by_key(|e| e.task_count.unwrap_or(0))
|
||||
.or_else(|| envs.first())
|
||||
{
|
||||
crate::append_error_log(format!("env: selecting by task_count/first: {}", e.id));
|
||||
return Some(e.clone());
|
||||
}
|
||||
None
|
||||
}
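
Aside: the selection heuristic above is order-sensitive — exact label match, then a sole candidate, then the pinned environment, then highest task_count. A self-contained sketch (not part of the diff; `Env` is a simplified stand-in for `CodeEnvironment`, field names assumed) of the same ordering:

#[derive(Clone, Debug)]
struct Env {
    id: &'static str,
    label: Option<&'static str>,
    is_pinned: Option<bool>,
    task_count: Option<u64>,
}

fn pick(envs: &[Env], desired_label: Option<&str>) -> Option<Env> {
    if envs.is_empty() {
        return None;
    }
    // 1) Case-insensitive label match wins outright.
    if let Some(label) = desired_label {
        let lc = label.to_lowercase();
        if let Some(e) = envs.iter().find(|e| e.label.unwrap_or("").to_lowercase() == lc) {
            return Some(e.clone());
        }
    }
    // 2) A single candidate needs no tie-breaking.
    if envs.len() == 1 {
        return Some(envs[0].clone());
    }
    // 3) Prefer a pinned environment, 4) else the busiest one.
    if let Some(e) = envs.iter().find(|e| e.is_pinned.unwrap_or(false)) {
        return Some(e.clone());
    }
    envs.iter().max_by_key(|e| e.task_count.unwrap_or(0)).cloned()
}

fn main() {
    let envs = [
        Env { id: "a", label: Some("Staging"), is_pinned: None, task_count: Some(3) },
        Env { id: "b", label: Some("Prod"), is_pinned: Some(true), task_count: Some(1) },
    ];
    // A label match beats pinning; without a label, the pinned env wins.
    assert_eq!(pick(&envs, Some("staging")).unwrap().id, "a");
    assert_eq!(pick(&envs, None).unwrap().id, "b");
}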

async fn get_json<T: serde::de::DeserializeOwned>(
    url: &str,
    headers: &HeaderMap,
) -> anyhow::Result<T> {
    let http = reqwest::Client::builder().build()?;
    let res = http.get(url).headers(headers.clone()).send().await?;
    let status = res.status();
    let ct = res
        .headers()
        .get(CONTENT_TYPE)
        .and_then(|v| v.to_str().ok())
        .unwrap_or("")
        .to_string();
    let body = res.text().await.unwrap_or_default();
    crate::append_error_log(format!("env: status={status} content-type={ct}"));
    if !status.is_success() {
        anyhow::bail!("GET {url} failed: {status}; content-type={ct}; body={body}");
    }
    let parsed = serde_json::from_str::<T>(&body).map_err(|e| {
        anyhow::anyhow!("Decode error for {url}: {e}; content-type={ct}; body={body}")
    })?;
    Ok(parsed)
}

fn get_git_origins() -> Vec<String> {
    // Prefer: git config --get-regexp remote\..*\.url
    let out = std::process::Command::new("git")
        .args(["config", "--get-regexp", "remote\\..*\\.url"])
        .output();
    if let Ok(ok) = out
        && ok.status.success()
    {
        let s = String::from_utf8_lossy(&ok.stdout);
        let mut urls = Vec::new();
        for line in s.lines() {
            if let Some((_, url)) = line.split_once(' ') {
                urls.push(url.trim().to_string());
            }
        }
        if !urls.is_empty() {
            return uniq(urls);
        }
    }
    // Fallback: git remote -v
    let out = std::process::Command::new("git")
        .args(["remote", "-v"])
        .output();
    if let Ok(ok) = out
        && ok.status.success()
    {
        let s = String::from_utf8_lossy(&ok.stdout);
        let mut urls = Vec::new();
        for line in s.lines() {
            let parts: Vec<&str> = line.split_whitespace().collect();
            if parts.len() >= 2 {
                urls.push(parts[1].to_string());
            }
        }
        if !urls.is_empty() {
            return uniq(urls);
        }
    }
    Vec::new()
}

fn uniq(mut v: Vec<String>) -> Vec<String> {
    v.sort();
    v.dedup();
    v
}

fn parse_owner_repo(url: &str) -> Option<(String, String)> {
    // Normalize common prefixes and handle multiple SSH/HTTPS variants.
    let mut s = url.trim().to_string();
    // Drop protocol scheme for ssh URLs
    if let Some(rest) = s.strip_prefix("ssh://") {
        s = rest.to_string();
    }
    // Accept any user before @github.com (e.g., git@, org-123@)
    if let Some(idx) = s.find("@github.com:") {
        let rest = &s[idx + "@github.com:".len()..];
        let rest = rest.trim_start_matches('/').trim_end_matches(".git");
        let mut parts = rest.splitn(2, '/');
        let owner = parts.next()?.to_string();
        let repo = parts.next()?.to_string();
        crate::append_error_log(format!("env: parsed SSH GitHub origin => {owner}/{repo}"));
        return Some((owner, repo));
    }
    // HTTPS or git protocol
    for prefix in [
        "https://github.com/",
        "http://github.com/",
        "git://github.com/",
        "github.com/",
    ] {
        if let Some(rest) = s.strip_prefix(prefix) {
            let rest = rest.trim_start_matches('/').trim_end_matches(".git");
            let mut parts = rest.splitn(2, '/');
            let owner = parts.next()?.to_string();
            let repo = parts.next()?.to_string();
            crate::append_error_log(format!("env: parsed HTTP GitHub origin => {owner}/{repo}"));
            return Some((owner, repo));
        }
    }
    None
}
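
Aside: an illustrative test (not part of the diff) pinning down the origin formats `parse_owner_repo` accepts; it assumes the function is in scope in this module:

#[test]
fn parses_common_github_origins() {
    for origin in [
        "git@github.com:openai/codex.git",
        "org-123@github.com:openai/codex.git", // non-`git` SSH users are accepted
        "https://github.com/openai/codex.git",
        "git://github.com/openai/codex",
    ] {
        assert_eq!(
            parse_owner_repo(origin),
            Some(("openai".to_string(), "codex".to_string())),
            "failed for {origin}"
        );
    }
    // Non-GitHub remotes yield no owner/repo pair.
    assert_eq!(parse_owner_repo("https://gitlab.com/openai/codex.git"), None);
}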

/// List environments for the current repo(s) with a fallback to the global list.
/// Returns a de-duplicated, sorted set suitable for the TUI modal.
pub async fn list_environments(
    base_url: &str,
    headers: &HeaderMap,
) -> anyhow::Result<Vec<crate::app::EnvironmentRow>> {
    let mut map: HashMap<String, crate::app::EnvironmentRow> = HashMap::new();

    // 1) By-repo lookup for each parsed GitHub origin.
    let origins = get_git_origins();
    for origin in &origins {
        if let Some((owner, repo)) = parse_owner_repo(origin) {
            let url = if base_url.contains("/backend-api") {
                format!(
                    "{}/wham/environments/by-repo/{}/{}/{}",
                    base_url, "github", owner, repo
                )
            } else {
                format!(
                    "{}/api/codex/environments/by-repo/{}/{}/{}",
                    base_url, "github", owner, repo
                )
            };
            match get_json::<Vec<CodeEnvironment>>(&url, headers).await {
                Ok(list) => {
                    info!("env_tui: by-repo {}:{} -> {} envs", owner, repo, list.len());
                    for env in list {
                        let repo_hint = format!("{owner}/{repo}");
                        merge_environment_row(&mut map, &env, Some(repo_hint.as_str()));
                    }
                }
                Err(e) => {
                    warn!(
                        "env_tui: by-repo fetch failed for {}/{}: {}",
                        owner, repo, e
                    );
                }
            }
        }
    }

    // 2) Fall back to the full list; on error, return what we have, if any.
    let list_url = if base_url.contains("/backend-api") {
        format!("{base_url}/wham/environments")
    } else {
        format!("{base_url}/api/codex/environments")
    };
    match get_json::<Vec<CodeEnvironment>>(&list_url, headers).await {
        Ok(list) => {
            info!("env_tui: global list -> {} envs", list.len());
            for env in list {
                merge_environment_row(&mut map, &env, None);
            }
        }
        Err(e) => {
            if map.is_empty() {
                return Err(e);
            } else {
                warn!(
                    "env_tui: global list failed; using by-repo results only: {}",
                    e
                );
            }
        }
    }

    let mut rows: Vec<crate::app::EnvironmentRow> = map.into_values().collect();
    rows.sort_by(|a, b| {
        // Pinned first.
        let p = b.is_pinned.cmp(&a.is_pinned);
        if p != std::cmp::Ordering::Equal {
            return p;
        }
        // Then label (case-insensitive), then id.
        let al = a.label.as_deref().unwrap_or("").to_lowercase();
        let bl = b.label.as_deref().unwrap_or("").to_lowercase();
        let l = al.cmp(&bl);
        if l != std::cmp::Ordering::Equal {
            return l;
        }
        a.id.cmp(&b.id)
    });
    Ok(rows)
}
1885
codex-rs/cloud-tasks/src/lib.rs
Normal file
File diff suppressed because it is too large

1298
codex-rs/cloud-tasks/src/new_task.rs
Normal file
File diff suppressed because it is too large
176
codex-rs/cloud-tasks/src/scrollable_diff.rs
Normal file
@@ -0,0 +1,176 @@
use unicode_width::UnicodeWidthChar;
use unicode_width::UnicodeWidthStr;

/// Scroll position and geometry for a vertical scroll view.
#[derive(Clone, Copy, Debug, Default)]
pub struct ScrollViewState {
    pub scroll: u16,
    pub viewport_h: u16,
    pub content_h: u16,
}

impl ScrollViewState {
    pub fn clamp(&mut self) {
        let max_scroll = self.content_h.saturating_sub(self.viewport_h);
        if self.scroll > max_scroll {
            self.scroll = max_scroll;
        }
    }
}

/// A simple, local scrollable view for diffs or message text.
///
/// Owns raw lines, caches wrapped lines for a given width, and maintains
/// a small scroll state that is clamped whenever geometry shrinks.
#[derive(Clone, Debug, Default)]
pub struct ScrollableDiff {
    raw: Vec<String>,
    wrapped: Vec<String>,
    wrapped_src_idx: Vec<usize>,
    wrap_cols: Option<u16>,
    pub state: ScrollViewState,
}

impl ScrollableDiff {
    pub fn new() -> Self {
        Self::default()
    }

    /// Replace the raw content lines. Does not rewrap immediately; call `set_width` next.
    pub fn set_content(&mut self, lines: Vec<String>) {
        self.raw = lines;
        self.wrapped.clear();
        self.wrapped_src_idx.clear();
        self.state.content_h = 0;
        // Force rewrap on next set_width even if width is unchanged
        self.wrap_cols = None;
    }

    /// Set the wrap width. If changed, rebuild wrapped lines and clamp scroll.
    pub fn set_width(&mut self, width: u16) {
        if self.wrap_cols == Some(width) {
            return;
        }
        self.wrap_cols = Some(width);
        self.rewrap(width);
        self.state.clamp();
    }

    /// Update viewport height and clamp scroll if needed.
    pub fn set_viewport(&mut self, height: u16) {
        self.state.viewport_h = height;
        self.state.clamp();
    }

    /// Return the cached wrapped lines. Call `set_width` first when area changes.
    pub fn wrapped_lines(&self) -> &[String] {
        &self.wrapped
    }

    pub fn wrapped_src_indices(&self) -> &[usize] {
        &self.wrapped_src_idx
    }

    pub fn raw_line_at(&self, idx: usize) -> &str {
        self.raw.get(idx).map(String::as_str).unwrap_or("")
    }

    /// Scroll by a signed delta; clamps to content.
    pub fn scroll_by(&mut self, delta: i16) {
        let s = self.state.scroll as i32 + delta as i32;
        self.state.scroll = s.clamp(0, self.max_scroll() as i32) as u16;
    }

    /// Page by a signed delta; typically viewport_h - 1.
    pub fn page_by(&mut self, delta: i16) {
        self.scroll_by(delta);
    }

    pub fn to_top(&mut self) {
        self.state.scroll = 0;
    }

    pub fn to_bottom(&mut self) {
        self.state.scroll = self.max_scroll();
    }

    /// Optional percent scrolled; None when not enough geometry is known.
    pub fn percent_scrolled(&self) -> Option<u8> {
        if self.state.content_h == 0 || self.state.viewport_h == 0 {
            return None;
        }
        if self.state.content_h <= self.state.viewport_h {
            return None;
        }
        let visible_bottom = self.state.scroll.saturating_add(self.state.viewport_h) as f32;
        let pct = (visible_bottom / self.state.content_h as f32 * 100.0).round();
        Some(pct.clamp(0.0, 100.0) as u8)
    }

    fn max_scroll(&self) -> u16 {
        self.state.content_h.saturating_sub(self.state.viewport_h)
    }

    fn rewrap(&mut self, width: u16) {
        if width == 0 {
            self.wrapped = self.raw.clone();
            self.state.content_h = self.wrapped.len() as u16;
            return;
        }
        let max_cols = width as usize;
        let mut out: Vec<String> = Vec::new();
        let mut out_idx: Vec<usize> = Vec::new();
        for (raw_idx, raw) in self.raw.iter().enumerate() {
            // Normalize tabs for width accounting (MVP: 4 spaces).
            let raw = raw.replace('\t', "    ");
            if raw.is_empty() {
                out.push(String::new());
                out_idx.push(raw_idx);
                continue;
            }
            let mut line = String::new();
            let mut line_cols = 0usize;
            let mut last_soft_idx: Option<usize> = None; // last whitespace or punctuation break
            for (_i, ch) in raw.char_indices() {
                if ch == '\n' {
                    out.push(std::mem::take(&mut line));
                    out_idx.push(raw_idx);
                    line_cols = 0;
                    last_soft_idx = None;
                    continue;
                }
                let w = UnicodeWidthChar::width(ch).unwrap_or(0);
                if line_cols.saturating_add(w) > max_cols {
                    if let Some(split) = last_soft_idx {
                        let (prefix, rest) = line.split_at(split);
                        out.push(prefix.trim_end().to_string());
                        out_idx.push(raw_idx);
                        line = rest.trim_start().to_string();
                        last_soft_idx = None;
                        // retry add current ch now that line may be shorter
                    } else if !line.is_empty() {
                        out.push(std::mem::take(&mut line));
                        out_idx.push(raw_idx);
                    }
                }
                if ch.is_whitespace()
                    || matches!(
                        ch,
                        ',' | ';' | '.' | ':' | ')' | ']' | '}' | '|' | '/' | '?' | '!' | '-' | '_'
                    )
                {
                    last_soft_idx = Some(line.len());
                }
                line.push(ch);
                line_cols = UnicodeWidthStr::width(line.as_str());
            }
            if !line.is_empty() {
                out.push(line);
                out_idx.push(raw_idx);
            }
        }
        self.wrapped = out;
        self.wrapped_src_idx = out_idx;
        self.state.content_h = self.wrapped.len() as u16;
    }
}
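
Aside: a typical call sequence for the view above (sketch, not part of the diff): feed content once, then re-apply geometry on every resize before reading the wrapped lines:

fn render_example(sd: &mut ScrollableDiff) {
    sd.set_content(vec![
        "+ added line".to_string(),
        "- removed line with a fairly long tail that will wrap".to_string(),
    ]);
    sd.set_width(24);    // rewraps only when the width actually changes
    sd.set_viewport(10); // clamps scroll when the viewport shrinks
    sd.scroll_by(3);     // clamped to the wrapped content height
    for (line, src) in sd.wrapped_lines().iter().zip(sd.wrapped_src_indices()) {
        // `src` maps each wrapped row back to its raw line index.
        println!("{src:>3} {line}");
    }
}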
1155
codex-rs/cloud-tasks/src/ui.rs
Normal file
File diff suppressed because it is too large

207
codex-rs/cloud-tasks/src/util.rs
Normal file
@@ -0,0 +1,207 @@
use base64::Engine as _;
use chrono::Utc;
use reqwest::header::HeaderMap;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;

pub fn set_user_agent_suffix(suffix: &str) {
    if let Ok(mut guard) = codex_core::default_client::USER_AGENT_SUFFIX.lock() {
        guard.replace(suffix.to_string());
    }
}

pub fn append_error_log(message: impl AsRef<str>) {
    let message = message.as_ref();
    let timestamp = Utc::now().to_rfc3339();

    if let Some(path) = log_file_path()
        && write_log_line(&path, &timestamp, message)
    {
        return;
    }

    let fallback = Path::new("error.log");
    let _ = write_log_line(fallback, &timestamp, message);
}

/// Normalize the configured base URL to a canonical form used by the backend client.
/// - trims trailing '/'
/// - appends '/backend-api' for ChatGPT hosts when missing
pub fn normalize_base_url(input: &str) -> String {
    let mut base_url = input.to_string();
    while base_url.ends_with('/') {
        base_url.pop();
    }
    if (base_url.starts_with("https://chatgpt.com")
        || base_url.starts_with("https://chat.openai.com"))
        && !base_url.contains("/backend-api")
    {
        base_url = format!("{base_url}/backend-api");
    }
    base_url
}
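
Aside: illustrative assertions (not part of the diff) for the normalization rules above:

#[test]
fn normalizes_chatgpt_base_urls() {
    // ChatGPT hosts gain the backend-api segment once.
    assert_eq!(
        normalize_base_url("https://chatgpt.com/"),
        "https://chatgpt.com/backend-api"
    );
    assert_eq!(
        normalize_base_url("https://chatgpt.com/backend-api"),
        "https://chatgpt.com/backend-api"
    );
    // Other hosts pass through, minus any trailing slashes.
    assert_eq!(normalize_base_url("https://example.com///"), "https://example.com");
}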

fn log_file_path() -> Option<PathBuf> {
    let mut log_dir = codex_core::config::find_codex_home().ok()?;
    log_dir.push("log");
    std::fs::create_dir_all(&log_dir).ok()?;
    Some(log_dir.join("codex-cloud-tasks.log"))
}

fn write_log_line(path: &Path, timestamp: &str, message: &str) -> bool {
    let mut opts = std::fs::OpenOptions::new();
    opts.create(true).append(true);
    #[cfg(unix)]
    {
        use std::os::unix::fs::OpenOptionsExt;
        opts.mode(0o600);
    }

    match opts.open(path) {
        Ok(mut file) => {
            use std::io::Write as _;
            writeln!(file, "[{timestamp}] {message}").is_ok()
        }
        Err(_) => false,
    }
}

/// Extract the ChatGPT account id from a JWT token, when present.
pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
    let mut parts = token.split('.');
    let (_h, payload_b64, _s) = match (parts.next(), parts.next(), parts.next()) {
        (Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
        _ => return None,
    };
    let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD
        .decode(payload_b64)
        .ok()?;
    let v: serde_json::Value = serde_json::from_slice(&payload_bytes).ok()?;
    v.get("https://api.openai.com/auth")
        .and_then(|auth| auth.get("chatgpt_account_id"))
        .and_then(|id| id.as_str())
        .map(str::to_string)
}
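
Aside: a round-trip sketch (not part of the diff): build an unsigned three-part token with the claim layout the helper expects and read the account id back; the id value here is made up:

#[test]
fn extracts_account_id_from_jwt_payload() {
    use base64::Engine as _;
    let payload = serde_json::json!({
        "https://api.openai.com/auth": { "chatgpt_account_id": "acct-123" }
    });
    let b64 = base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(payload.to_string());
    // Header and signature contents are irrelevant; only the payload is decoded.
    let token = format!("h.{b64}.s");
    assert_eq!(extract_chatgpt_account_id(&token), Some("acct-123".to_string()));
}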

pub fn switch_to_branch(branch: &str) -> Result<(), String> {
    let branch = branch.trim();
    if branch.is_empty() {
        return Err("default branch name is empty".to_string());
    }

    if let Ok(current) = current_branch()
        && current == branch
    {
        append_error_log(format!("git.switch: already on branch {branch}"));
        return Ok(());
    }

    append_error_log(format!("git.switch: switching to branch {branch}"));
    match ensure_success(&["checkout", branch]) {
        Ok(()) => Ok(()),
        Err(err) => {
            append_error_log(format!("git.switch: checkout {branch} failed: {err}"));
            if ensure_success(&["rev-parse", "--verify", branch]).is_ok() {
                return Err(err);
            }
            if let Err(fetch_err) = ensure_success(&["fetch", "origin", branch]) {
                append_error_log(format!(
                    "git.switch: fetch origin/{branch} failed: {fetch_err}"
                ));
                return Err(err);
            }
            let tracking = format!("origin/{branch}");
            ensure_success(&["checkout", "-b", branch, &tracking]).map_err(|create_err| {
                append_error_log(format!(
                    "git.switch: checkout -b {branch} {tracking} failed: {create_err}"
                ));
                create_err
            })
        }
    }
}
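
Aside: the recovery path above, flattened into one sketch (not part of the diff; logging elided, and it drops the subtlety that a failed checkout of a branch that already exists locally is returned as-is rather than retried):

fn switch_flow(branch: &str) -> Result<(), String> {
    // Already on the branch: nothing to do.
    if matches!(current_branch(), Ok(cur) if cur == branch) {
        return Ok(());
    }
    // Plain checkout succeeds when the branch is resolvable locally.
    if ensure_success(&["checkout", branch]).is_ok() {
        return Ok(());
    }
    // Otherwise fetch the remote branch and create a tracking checkout.
    ensure_success(&["fetch", "origin", branch])?;
    let tracking = format!("origin/{branch}");
    ensure_success(&["checkout", "-b", branch, &tracking])
}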

fn current_branch() -> Result<String, String> {
    let output = run_git(&["rev-parse", "--abbrev-ref", "HEAD"])?;
    if !output.status.success() {
        return Err(format!(
            "git rev-parse --abbrev-ref failed: {}",
            format_command_failure(output, &["rev-parse", "--abbrev-ref", "HEAD"])
        ));
    }
    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}

fn ensure_success(args: &[&str]) -> Result<(), String> {
    let output = run_git(args)?;
    if output.status.success() {
        return Ok(());
    }
    Err(format_command_failure(output, args))
}

fn run_git(args: &[&str]) -> Result<std::process::Output, String> {
    Command::new("git")
        .args(args)
        .output()
        .map_err(|e| format!("failed to launch git {}: {e}", join_args(args)))
}

fn format_command_failure(output: std::process::Output, args: &[&str]) -> String {
    let stdout = String::from_utf8_lossy(&output.stdout);
    let stderr = String::from_utf8_lossy(&output.stderr);
    format!(
        "git {} exited with status {}. stdout: {} stderr: {}",
        join_args(args),
        output
            .status
            .code()
            .map(|c| c.to_string())
            .unwrap_or_else(|| "<signal>".to_string()),
        stdout.trim(),
        stderr.trim()
    )
}

fn join_args(args: &[&str]) -> String {
    args.join(" ")
}

/// Build headers for ChatGPT-backed requests: `User-Agent`, optional `Authorization`,
/// and optional `ChatGPT-Account-Id`.
pub async fn build_chatgpt_headers() -> HeaderMap {
    use reqwest::header::AUTHORIZATION;
    use reqwest::header::HeaderName;
    use reqwest::header::HeaderValue;
    use reqwest::header::USER_AGENT;

    set_user_agent_suffix("codex_cloud_tasks_tui");
    let ua = codex_core::default_client::get_codex_user_agent();
    let mut headers = HeaderMap::new();
    headers.insert(
        USER_AGENT,
        HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
    );
    if let Ok(home) = codex_core::config::find_codex_home() {
        let am = codex_login::AuthManager::new(home);
        if let Some(auth) = am.auth()
            && let Ok(tok) = auth.get_token().await
            && !tok.is_empty()
        {
            let v = format!("Bearer {tok}");
            if let Ok(hv) = HeaderValue::from_str(&v) {
                headers.insert(AUTHORIZATION, hv);
            }
            if let Some(acc) = auth
                .get_account_id()
                .or_else(|| extract_chatgpt_account_id(&tok))
                && let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
                && let Ok(hv) = HeaderValue::from_str(&acc)
            {
                headers.insert(name, hv);
            }
        }
    }
    headers
}
22
codex-rs/cloud-tasks/tests/env_filter.rs
Normal file
@@ -0,0 +1,22 @@
use codex_cloud_tasks_client::CloudBackend;
use codex_cloud_tasks_client::MockClient;

#[tokio::test]
async fn mock_backend_varies_by_env() {
    let client = MockClient;

    let root = CloudBackend::list_tasks(&client, None).await.unwrap();
    assert!(root.iter().any(|t| t.title.contains("Update README")));

    let a = CloudBackend::list_tasks(&client, Some("env-A"))
        .await
        .unwrap();
    assert_eq!(a.len(), 1);
    assert_eq!(a[0].title, "A: First");

    let b = CloudBackend::list_tasks(&client, Some("env-B"))
        .await
        .unwrap();
    assert_eq!(b.len(), 2);
    assert!(b[0].title.starts_with("B: "));
}
18
codex-rs/codex-backend-openapi-models/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
[package]
name = "codex-backend-openapi-models"
version = { workspace = true }
edition = "2024"

[lib]
name = "codex_backend_openapi_models"
path = "src/lib.rs"

# Important: generated code often violates our workspace lints.
# Allow unwrap/expect in this crate so the workspace builds cleanly
# after models are regenerated.
# Lint overrides are applied in src/lib.rs via crate attributes

[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
uuid = { version = "1", features = ["serde"] }
6
codex-rs/codex-backend-openapi-models/src/lib.rs
Normal file
@@ -0,0 +1,6 @@
#![allow(clippy::unwrap_used, clippy::expect_used)]

// Re-export generated OpenAPI models.
// The regen script populates `src/models/*.rs` and writes `src/models/mod.rs`.
// This module intentionally contains no hand-written types.
pub mod models;
@@ -0,0 +1,42 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct CodeTaskDetailsResponse {
    #[serde(rename = "task")]
    pub task: Box<models::TaskResponse>,
    #[serde(rename = "current_user_turn", skip_serializing_if = "Option::is_none")]
    pub current_user_turn: Option<std::collections::HashMap<String, serde_json::Value>>,
    #[serde(
        rename = "current_assistant_turn",
        skip_serializing_if = "Option::is_none"
    )]
    pub current_assistant_turn: Option<std::collections::HashMap<String, serde_json::Value>>,
    #[serde(
        rename = "current_diff_task_turn",
        skip_serializing_if = "Option::is_none"
    )]
    pub current_diff_task_turn: Option<std::collections::HashMap<String, serde_json::Value>>,
}

impl CodeTaskDetailsResponse {
    pub fn new(task: models::TaskResponse) -> CodeTaskDetailsResponse {
        CodeTaskDetailsResponse {
            task: Box::new(task),
            current_user_turn: None,
            current_assistant_turn: None,
            current_diff_task_turn: None,
        }
    }
}
@@ -0,0 +1,40 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExternalPullRequestResponse {
    #[serde(rename = "id")]
    pub id: String,
    #[serde(rename = "assistant_turn_id")]
    pub assistant_turn_id: String,
    #[serde(rename = "pull_request")]
    pub pull_request: Box<models::GitPullRequest>,
    #[serde(rename = "codex_updated_sha", skip_serializing_if = "Option::is_none")]
    pub codex_updated_sha: Option<String>,
}

impl ExternalPullRequestResponse {
    pub fn new(
        id: String,
        assistant_turn_id: String,
        pull_request: models::GitPullRequest,
    ) -> ExternalPullRequestResponse {
        ExternalPullRequestResponse {
            id,
            assistant_turn_id,
            pull_request: Box::new(pull_request),
            codex_updated_sha: None,
        }
    }
}
@@ -0,0 +1,77 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct GitPullRequest {
    #[serde(rename = "number")]
    pub number: i32,
    #[serde(rename = "url")]
    pub url: String,
    #[serde(rename = "state")]
    pub state: String,
    #[serde(rename = "merged")]
    pub merged: bool,
    #[serde(rename = "mergeable")]
    pub mergeable: bool,
    #[serde(rename = "draft", skip_serializing_if = "Option::is_none")]
    pub draft: Option<bool>,
    #[serde(rename = "title", skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(rename = "body", skip_serializing_if = "Option::is_none")]
    pub body: Option<String>,
    #[serde(rename = "base", skip_serializing_if = "Option::is_none")]
    pub base: Option<String>,
    #[serde(rename = "head", skip_serializing_if = "Option::is_none")]
    pub head: Option<String>,
    #[serde(rename = "base_sha", skip_serializing_if = "Option::is_none")]
    pub base_sha: Option<String>,
    #[serde(rename = "head_sha", skip_serializing_if = "Option::is_none")]
    pub head_sha: Option<String>,
    #[serde(rename = "merge_commit_sha", skip_serializing_if = "Option::is_none")]
    pub merge_commit_sha: Option<String>,
    #[serde(rename = "comments", skip_serializing_if = "Option::is_none")]
    pub comments: Option<serde_json::Value>,
    #[serde(rename = "diff", skip_serializing_if = "Option::is_none")]
    pub diff: Option<serde_json::Value>,
    #[serde(rename = "user", skip_serializing_if = "Option::is_none")]
    pub user: Option<serde_json::Value>,
}

impl GitPullRequest {
    pub fn new(
        number: i32,
        url: String,
        state: String,
        merged: bool,
        mergeable: bool,
    ) -> GitPullRequest {
        GitPullRequest {
            number,
            url,
            state,
            merged,
            mergeable,
            draft: None,
            title: None,
            body: None,
            base: None,
            head: None,
            base_sha: None,
            head_sha: None,
            merge_commit_sha: None,
            comments: None,
            diff: None,
            user: None,
        }
    }
}
22
codex-rs/codex-backend-openapi-models/src/models/mod.rs
Normal file
@@ -0,0 +1,22 @@
// Curated minimal export list for current workspace usage.
// NOTE: This file was previously auto-generated by the OpenAPI generator.
// Currently we export only the types referenced by the workspace;
// the process for this will change.

pub mod code_task_details_response;
pub use self::code_task_details_response::CodeTaskDetailsResponse;

pub mod task_response;
pub use self::task_response::TaskResponse;

pub mod external_pull_request_response;
pub use self::external_pull_request_response::ExternalPullRequestResponse;

pub mod git_pull_request;
pub use self::git_pull_request::GitPullRequest;

pub mod task_list_item;
pub use self::task_list_item::TaskListItem;

pub mod paginated_list_task_list_item_;
pub use self::paginated_list_task_list_item_::PaginatedListTaskListItem;
@@ -0,0 +1,30 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct PaginatedListTaskListItem {
    #[serde(rename = "items")]
    pub items: Vec<models::TaskListItem>,
    #[serde(rename = "cursor", skip_serializing_if = "Option::is_none")]
    pub cursor: Option<String>,
}

impl PaginatedListTaskListItem {
    pub fn new(items: Vec<models::TaskListItem>) -> PaginatedListTaskListItem {
        PaginatedListTaskListItem {
            items,
            cursor: None,
        }
    }
}
@@ -0,0 +1,63 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct TaskListItem {
    #[serde(rename = "id")]
    pub id: String,
    #[serde(rename = "title")]
    pub title: String,
    #[serde(
        rename = "has_generated_title",
        skip_serializing_if = "Option::is_none"
    )]
    pub has_generated_title: Option<bool>,
    #[serde(rename = "updated_at", skip_serializing_if = "Option::is_none")]
    pub updated_at: Option<f64>,
    #[serde(rename = "created_at", skip_serializing_if = "Option::is_none")]
    pub created_at: Option<f64>,
    #[serde(
        rename = "task_status_display",
        skip_serializing_if = "Option::is_none"
    )]
    pub task_status_display: Option<std::collections::HashMap<String, serde_json::Value>>,
    #[serde(rename = "archived")]
    pub archived: bool,
    #[serde(rename = "has_unread_turn")]
    pub has_unread_turn: bool,
    #[serde(rename = "pull_requests", skip_serializing_if = "Option::is_none")]
    pub pull_requests: Option<Vec<models::ExternalPullRequestResponse>>,
}

impl TaskListItem {
    pub fn new(
        id: String,
        title: String,
        has_generated_title: Option<bool>,
        archived: bool,
        has_unread_turn: bool,
    ) -> TaskListItem {
        TaskListItem {
            id,
            title,
            has_generated_title,
            updated_at: None,
            created_at: None,
            task_status_display: None,
            archived,
            has_unread_turn,
            pull_requests: None,
        }
    }
}
@@ -0,0 +1,62 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct TaskResponse {
    #[serde(rename = "id")]
    pub id: String,
    #[serde(rename = "created_at", skip_serializing_if = "Option::is_none")]
    pub created_at: Option<f64>,
    #[serde(rename = "title")]
    pub title: String,
    #[serde(
        rename = "has_generated_title",
        skip_serializing_if = "Option::is_none"
    )]
    pub has_generated_title: Option<bool>,
    #[serde(rename = "current_turn_id", skip_serializing_if = "Option::is_none")]
    pub current_turn_id: Option<String>,
    #[serde(rename = "has_unread_turn", skip_serializing_if = "Option::is_none")]
    pub has_unread_turn: Option<bool>,
    #[serde(
        rename = "denormalized_metadata",
        skip_serializing_if = "Option::is_none"
    )]
    pub denormalized_metadata: Option<std::collections::HashMap<String, serde_json::Value>>,
    #[serde(rename = "archived")]
    pub archived: bool,
    #[serde(rename = "external_pull_requests")]
    pub external_pull_requests: Vec<models::ExternalPullRequestResponse>,
}

impl TaskResponse {
    pub fn new(
        id: String,
        title: String,
        archived: bool,
        external_pull_requests: Vec<models::ExternalPullRequestResponse>,
    ) -> TaskResponse {
        TaskResponse {
            id,
            created_at: None,
            title,
            has_generated_title: None,
            current_turn_id: None,
            has_unread_turn: None,
            denormalized_metadata: None,
            archived,
            external_pull_requests,
        }
    }
}
@@ -29,7 +29,7 @@ const PRESETS: &[ModelPreset] = &[
        label: "gpt-5-codex medium",
        description: "",
        model: "gpt-5-codex",
        effort: Some(ReasoningEffort::Medium),
        effort: None,
    },
    ModelPreset {
        id: "gpt-5-codex-high",
@@ -559,6 +559,10 @@ fn parse_rate_limit_snapshot(headers: &HeaderMap) -> Option<RateLimitSnapshot> {
        "x-codex-secondary-reset-after-seconds",
    );

    if primary.is_none() && secondary.is_none() {
        return None;
    }

    Some(RateLimitSnapshot { primary, secondary })
}
@@ -68,7 +68,6 @@ use crate::exec_command::ExecSessionManager;
use crate::exec_command::WRITE_STDIN_TOOL_NAME;
use crate::exec_command::WriteStdinParams;
use crate::exec_env::create_env;
use crate::git_worktree::WorktreeHandle;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::mcp_tool_call::handle_mcp_tool_call;
use crate::model_family::find_family_for_model;
@@ -110,7 +109,6 @@ use crate::protocol::TokenCountEvent;
use crate::protocol::TokenUsage;
use crate::protocol::TurnDiffEvent;
use crate::protocol::WebSearchBeginEvent;
use crate::protocol::WorktreeRemovedEvent;
use crate::rollout::RolloutRecorder;
use crate::rollout::RolloutRecorderParams;
use crate::safety::SafetyCheck;
@@ -127,7 +125,6 @@ use crate::unified_exec::UnifiedExecSessionManager;
use crate::user_instructions::UserInstructions;
use crate::user_notification::UserNotification;
use crate::util::backoff;
use anyhow::Context;
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
use codex_protocol::custom_prompts::CustomPrompt;
@@ -425,14 +422,6 @@ impl Session {
        }
    }

    // Prepare the per-session working directory. When git worktrees are enabled
    // we create (or reuse) a linked checkout under `cwd/codex/<conversation>`.
    let (effective_cwd, worktree_handle_opt, worktree_path_opt, worktree_error_event) =
        maybe_initialize_worktree(&cwd, &conversation_id, config.enable_git_worktree).await;
    if let Some(event) = worktree_error_event {
        post_session_configured_error_events.push(event);
    }

    // Now that the conversation id is final (may have been updated by resume),
    // construct the model client.
    let client = ModelClient::new(
@@ -459,7 +448,7 @@ impl Session {
        approval_policy,
        sandbox_policy,
        shell_environment_policy: config.shell_environment_policy.clone(),
        cwd: effective_cwd.clone(),
        cwd,
        is_review_mode: false,
        final_output_json_schema: None,
    };
@@ -469,7 +458,6 @@ impl Session {
        unified_exec_manager: UnifiedExecSessionManager::default(),
        notifier: notify,
        rollout: Mutex::new(Some(rollout_recorder)),
        worktree: Mutex::new(worktree_handle_opt),
        codex_linux_sandbox_exe: config.codex_linux_sandbox_exe.clone(),
        user_shell: default_shell,
        show_raw_agent_reasoning: config.show_raw_agent_reasoning,
@@ -500,7 +488,6 @@ impl Session {
            history_entry_count,
            initial_messages,
            rollout_path,
            worktree_path: worktree_path_opt,
        }),
    })
    .chain(post_session_configured_error_events.into_iter());
@@ -745,33 +732,6 @@ impl Session {
        state.history_snapshot()
    }

    async fn send_error<S: Into<String>>(&self, sub_id: &str, message: S) {
        let event = Event {
            id: sub_id.to_string(),
            msg: EventMsg::Error(ErrorEvent {
                message: message.into(),
            }),
        };
        self.send_event(event).await;
    }

    async fn remove_worktree(&self) -> anyhow::Result<Option<PathBuf>> {
        let handle_opt = {
            let mut guard = self.services.worktree.lock().await;
            guard.take()
        };
        if let Some(handle) = handle_opt {
            let path = handle.path().to_path_buf();
            handle
                .remove()
                .await
                .with_context(|| format!("failed to remove git worktree `{}`", path.display()))?;
            Ok(Some(path))
        } else {
            Ok(None)
        }
    }

    async fn update_token_usage_info(
        &self,
        sub_id: &str,
|
    }
}

async fn maybe_initialize_worktree(
    base_cwd: &Path,
    conversation_id: &ConversationId,
    enable_git_worktree: bool,
) -> (
    PathBuf,
    Option<WorktreeHandle>,
    Option<PathBuf>,
    Option<Event>,
) {
    if !enable_git_worktree {
        return (base_cwd.to_path_buf(), None, None, None);
    }

    match WorktreeHandle::create(base_cwd, conversation_id).await {
        Ok(handle) => {
            let path = handle.path().to_path_buf();
            (path.clone(), Some(handle), Some(path), None)
        }
        Err(e) => {
            let message = format!("Failed to create git worktree: {e:#}");
            error!("{message}");
            (
                base_cwd.to_path_buf(),
                None,
                None,
                Some(Event {
                    id: INITIAL_SUBMIT_ID.to_owned(),
                    msg: EventMsg::Error(ErrorEvent { message }),
                }),
            )
        }
    }
}

impl Drop for Session {
    fn drop(&mut self) {
        self.interrupt_task_sync();
@@ -1502,24 +1427,6 @@ async fn submission_loop(
        )
        .await;
    }
    Op::RemoveWorktree => match sess.remove_worktree().await {
        Ok(Some(path)) => {
            let event = Event {
                id: sub.id.clone(),
                msg: EventMsg::WorktreeRemoved(WorktreeRemovedEvent { path }),
            };
            sess.send_event(event).await;
        }
        Ok(None) => {
            sess.send_error(&sub.id, "No git worktree is active for this session")
                .await;
        }
        Err(e) => {
            error!("failed to remove git worktree: {e:#}");
            sess.send_error(&sub.id, format!("Failed to remove git worktree: {e:#}"))
                .await;
        }
    },
    _ => {
        // Ignore unknown ops; enum is non_exhaustive to allow extensions.
@@ -3509,7 +3416,6 @@ mod tests {
        unified_exec_manager: UnifiedExecSessionManager::default(),
        notifier: UserNotifier::default(),
        rollout: Mutex::new(None),
        worktree: Mutex::new(None),
        codex_linux_sandbox_exe: None,
        user_shell: shell::Shell::Unknown,
        show_raw_agent_reasoning: config.show_raw_agent_reasoning,
@@ -3577,7 +3483,6 @@ mod tests {
        unified_exec_manager: UnifiedExecSessionManager::default(),
        notifier: UserNotifier::default(),
        rollout: Mutex::new(None),
        worktree: Mutex::new(None),
        codex_linux_sandbox_exe: None,
        user_shell: shell::Shell::Unknown,
        show_raw_agent_reasoning: config.show_raw_agent_reasoning,
@@ -1,7 +1,6 @@
use crate::config_profile::ConfigProfile;
use crate::config_types::History;
use crate::config_types::McpServerConfig;
use crate::config_types::McpServerTransportConfig;
use crate::config_types::Notifications;
use crate::config_types::ReasoningSummaryFormat;
use crate::config_types::SandboxWorkspaceWrite;
@@ -191,8 +190,6 @@ pub struct Config {

    /// Include the `view_image` tool that lets the agent attach a local image path to context.
    pub include_view_image_tool: bool,
    /// When true, sessions run inside a linked git worktree under `cwd/codex/<conversation>`.
    pub enable_git_worktree: bool,

    /// The active profile name used to derive this `Config` (if any).
    pub active_profile: Option<String>,
@@ -317,37 +314,27 @@ pub fn write_global_mcp_servers(
    for (name, config) in servers {
        let mut entry = TomlTable::new();
        entry.set_implicit(false);
        match &config.transport {
            McpServerTransportConfig::Stdio { command, args, env } => {
                entry["command"] = toml_edit::value(command.clone());
        entry["command"] = toml_edit::value(config.command.clone());

                if !args.is_empty() {
                    let mut args_array = TomlArray::new();
                    for arg in args {
                        args_array.push(arg.clone());
                    }
                    entry["args"] = TomlItem::Value(args_array.into());
                }
        if !config.args.is_empty() {
            let mut args = TomlArray::new();
            for arg in &config.args {
                args.push(arg.clone());
            }
            entry["args"] = TomlItem::Value(args.into());
        }

                if let Some(env) = env
                    && !env.is_empty()
                {
                    let mut env_table = TomlTable::new();
                    env_table.set_implicit(false);
                    let mut pairs: Vec<_> = env.iter().collect();
                    pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                    for (key, value) in pairs {
                        env_table.insert(key, toml_edit::value(value.clone()));
                    }
                    entry["env"] = TomlItem::Table(env_table);
                }
            }
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                entry["url"] = toml_edit::value(url.clone());
                if let Some(token) = bearer_token {
                    entry["bearer_token"] = toml_edit::value(token.clone());
                }
        if let Some(env) = &config.env
            && !env.is_empty()
        {
            let mut env_table = TomlTable::new();
            env_table.set_implicit(false);
            let mut pairs: Vec<_> = env.iter().collect();
            pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
            for (key, value) in pairs {
                env_table.insert(key, toml_edit::value(value.clone()));
            }
            entry["env"] = TomlItem::Table(env_table);
        }

        if let Some(timeout) = config.startup_timeout_sec {
@@ -428,7 +415,7 @@ fn set_project_trusted_inner(doc: &mut DocumentMut, project_path: &Path) -> anyh
            .get_mut(project_key.as_str())
            .and_then(|i| i.as_table_mut())
        else {
            return Err(anyhow::anyhow!("project table missing for {}", project_key));
            return Err(anyhow::anyhow!("project table missing for {project_key}"));
        };
        proj_tbl.set_implicit(false);
        proj_tbl["trust_level"] = toml_edit::value("trusted");
@@ -691,9 +678,6 @@ pub struct ConfigToml {
    /// Defaults to `false`.
    pub show_raw_agent_reasoning: Option<bool>,

    /// Enable per-session git worktree checkouts.
    pub enable_git_worktree: Option<bool>,

    pub model_reasoning_effort: Option<ReasoningEffort>,
    pub model_reasoning_summary: Option<ReasoningSummary>,
    /// Optional verbosity control for GPT-5 models (Responses API `text.verbosity`).
@@ -864,7 +848,6 @@ pub struct ConfigOverrides {
    pub include_view_image_tool: Option<bool>,
    pub show_raw_agent_reasoning: Option<bool>,
    pub tools_web_search_request: Option<bool>,
    pub enable_git_worktree: Option<bool>,
}

impl Config {
@@ -893,7 +876,6 @@ impl Config {
            include_view_image_tool,
            show_raw_agent_reasoning,
            tools_web_search_request: override_tools_web_search_request,
            enable_git_worktree: override_enable_git_worktree,
        } = overrides;

        let active_profile_name = config_profile_key
@@ -967,10 +949,6 @@ impl Config {
            .or(cfg.tools.as_ref().and_then(|t| t.view_image))
            .unwrap_or(true);

        let enable_git_worktree = override_enable_git_worktree
            .or(cfg.enable_git_worktree)
            .unwrap_or(false);

        let model = model
            .or(config_profile.model)
            .or(cfg.model)
@@ -1072,7 +1050,6 @@ impl Config {
                .unwrap_or(false),
            use_experimental_use_rmcp_client: cfg.experimental_use_rmcp_client.unwrap_or(false),
            include_view_image_tool,
            enable_git_worktree,
            active_profile: active_profile_name,
            disable_paste_burst: cfg.disable_paste_burst.unwrap_or(false),
            tui_notifications: cfg
@@ -1317,11 +1294,9 @@ exclude_slash_tmp = true
        servers.insert(
            "docs".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::Stdio {
                    command: "echo".to_string(),
                    args: vec!["hello".to_string()],
                    env: None,
                },
                command: "echo".to_string(),
                args: vec!["hello".to_string()],
                env: None,
                startup_timeout_sec: Some(Duration::from_secs(3)),
                tool_timeout_sec: Some(Duration::from_secs(5)),
            },
@@ -1332,14 +1307,8 @@ exclude_slash_tmp = true
        let loaded = load_global_mcp_servers(codex_home.path())?;
        assert_eq!(loaded.len(), 1);
        let docs = loaded.get("docs").expect("docs entry");
        match &docs.transport {
            McpServerTransportConfig::Stdio { command, args, env } => {
                assert_eq!(command, "echo");
                assert_eq!(args, &vec!["hello".to_string()]);
                assert!(env.is_none());
            }
            other => panic!("unexpected transport {other:?}"),
        }
        assert_eq!(docs.command, "echo");
        assert_eq!(docs.args, vec!["hello".to_string()]);
        assert_eq!(docs.startup_timeout_sec, Some(Duration::from_secs(3)));
        assert_eq!(docs.tool_timeout_sec, Some(Duration::from_secs(5)));
@@ -1373,134 +1342,6 @@ startup_timeout_ms = 2500
        Ok(())
    }

    #[test]
    fn write_global_mcp_servers_serializes_env_sorted() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;

        let servers = BTreeMap::from([(
            "docs".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::Stdio {
                    command: "docs-server".to_string(),
                    args: vec!["--verbose".to_string()],
                    env: Some(HashMap::from([
                        ("ZIG_VAR".to_string(), "3".to_string()),
                        ("ALPHA_VAR".to_string(), "1".to_string()),
                    ])),
                },
                startup_timeout_sec: None,
                tool_timeout_sec: None,
            },
        )]);

        write_global_mcp_servers(codex_home.path(), &servers)?;

        let config_path = codex_home.path().join(CONFIG_TOML_FILE);
        let serialized = std::fs::read_to_string(&config_path)?;
        assert_eq!(
            serialized,
            r#"[mcp_servers.docs]
command = "docs-server"
args = ["--verbose"]

[mcp_servers.docs.env]
ALPHA_VAR = "1"
ZIG_VAR = "3"
"#
        );

        let loaded = load_global_mcp_servers(codex_home.path())?;
        let docs = loaded.get("docs").expect("docs entry");
        match &docs.transport {
            McpServerTransportConfig::Stdio { command, args, env } => {
                assert_eq!(command, "docs-server");
                assert_eq!(args, &vec!["--verbose".to_string()]);
                let env = env
                    .as_ref()
                    .expect("env should be preserved for stdio transport");
                assert_eq!(env.get("ALPHA_VAR"), Some(&"1".to_string()));
                assert_eq!(env.get("ZIG_VAR"), Some(&"3".to_string()));
            }
            other => panic!("unexpected transport {other:?}"),
        }

        Ok(())
    }

    #[test]
    fn write_global_mcp_servers_serializes_streamable_http() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;

        let mut servers = BTreeMap::from([(
            "docs".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url: "https://example.com/mcp".to_string(),
                    bearer_token: Some("secret-token".to_string()),
                },
                startup_timeout_sec: Some(Duration::from_secs(2)),
                tool_timeout_sec: None,
            },
        )]);

        write_global_mcp_servers(codex_home.path(), &servers)?;

        let config_path = codex_home.path().join(CONFIG_TOML_FILE);
        let serialized = std::fs::read_to_string(&config_path)?;
        assert_eq!(
            serialized,
            r#"[mcp_servers.docs]
url = "https://example.com/mcp"
bearer_token = "secret-token"
startup_timeout_sec = 2.0
"#
        );

        let loaded = load_global_mcp_servers(codex_home.path())?;
        let docs = loaded.get("docs").expect("docs entry");
        match &docs.transport {
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                assert_eq!(url, "https://example.com/mcp");
                assert_eq!(bearer_token.as_deref(), Some("secret-token"));
            }
            other => panic!("unexpected transport {other:?}"),
        }
        assert_eq!(docs.startup_timeout_sec, Some(Duration::from_secs(2)));

        servers.insert(
            "docs".to_string(),
            McpServerConfig {
                transport: McpServerTransportConfig::StreamableHttp {
                    url: "https://example.com/mcp".to_string(),
                    bearer_token: None,
                },
                startup_timeout_sec: None,
                tool_timeout_sec: None,
            },
        );
        write_global_mcp_servers(codex_home.path(), &servers)?;

        let serialized = std::fs::read_to_string(&config_path)?;
        assert_eq!(
            serialized,
            r#"[mcp_servers.docs]
url = "https://example.com/mcp"
"#
        );

        let loaded = load_global_mcp_servers(codex_home.path())?;
        let docs = loaded.get("docs").expect("docs entry");
        match &docs.transport {
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                assert_eq!(url, "https://example.com/mcp");
                assert!(bearer_token.is_none());
            }
            other => panic!("unexpected transport {other:?}"),
        }

        Ok(())
    }

    #[tokio::test]
    async fn persist_model_selection_updates_defaults() -> anyhow::Result<()> {
        let codex_home = TempDir::new()?;
@@ -1818,7 +1659,6 @@ model_verbosity = "high"
            use_experimental_unified_exec_tool: false,
            use_experimental_use_rmcp_client: false,
            include_view_image_tool: true,
            enable_git_worktree: false,
            active_profile: Some("o3".to_string()),
            disable_paste_burst: false,
            tui_notifications: Default::default(),
@@ -1878,7 +1718,6 @@ model_verbosity = "high"
            use_experimental_unified_exec_tool: false,
            use_experimental_use_rmcp_client: false,
            include_view_image_tool: true,
            enable_git_worktree: false,
            active_profile: Some("gpt3".to_string()),
            disable_paste_burst: false,
            tui_notifications: Default::default(),
@@ -1953,7 +1792,6 @@ model_verbosity = "high"
            use_experimental_unified_exec_tool: false,
            use_experimental_use_rmcp_client: false,
            include_view_image_tool: true,
            enable_git_worktree: false,
            active_profile: Some("zdr".to_string()),
            disable_paste_burst: false,
            tui_notifications: Default::default(),
@@ -2014,7 +1852,6 @@ model_verbosity = "high"
            use_experimental_unified_exec_tool: false,
            use_experimental_use_rmcp_client: false,
            include_view_image_tool: true,
            enable_git_worktree: false,
            active_profile: Some("gpt5".to_string()),
            disable_paste_burst: false,
            tui_notifications: Default::default(),
@@ -3,20 +3,25 @@
// Note this file should generally be restricted to simple struct/enum
// definitions that do not contain business logic.

use serde::Deserializer;
use std::collections::HashMap;
use std::path::PathBuf;
use std::time::Duration;
use wildmatch::WildMatchPattern;

use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde::de::Error as SerdeError;

#[derive(Serialize, Debug, Clone, PartialEq)]
pub struct McpServerConfig {
    #[serde(flatten)]
    pub transport: McpServerTransportConfig,
    pub command: String,

    #[serde(default)]
    pub args: Vec<String>,

    #[serde(default)]
    pub env: Option<HashMap<String, String>>,

    /// Startup timeout in seconds for initializing MCP server & initially listing tools.
    #[serde(
@@ -38,15 +43,11 @@ impl<'de> Deserialize<'de> for McpServerConfig {
    {
        #[derive(Deserialize)]
        struct RawMcpServerConfig {
            command: Option<String>,
            command: String,
            #[serde(default)]
            args: Option<Vec<String>>,
            args: Vec<String>,
            #[serde(default)]
            env: Option<HashMap<String, String>>,

            url: Option<String>,
            bearer_token: Option<String>,

            #[serde(default)]
            startup_timeout_sec: Option<f64>,
            #[serde(default)]
@@ -66,81 +67,16 @@ impl<'de> Deserialize<'de> for McpServerConfig {
|
||||
(None, None) => None,
|
||||
};
|
||||
|
||||
fn throw_if_set<E, T>(transport: &str, field: &str, value: Option<&T>) -> Result<(), E>
|
||||
where
|
||||
E: SerdeError,
|
||||
{
|
||||
if value.is_none() {
|
||||
return Ok(());
|
||||
}
|
||||
Err(E::custom(format!(
|
||||
"{field} is not supported for {transport}",
|
||||
)))
|
||||
}
|
||||
|
||||
let transport = match raw {
|
||||
RawMcpServerConfig {
|
||||
command: Some(command),
|
||||
args,
|
||||
env,
|
||||
url,
|
||||
bearer_token,
|
||||
..
|
||||
} => {
|
||||
throw_if_set("stdio", "url", url.as_ref())?;
|
||||
throw_if_set("stdio", "bearer_token", bearer_token.as_ref())?;
|
||||
McpServerTransportConfig::Stdio {
|
||||
command,
|
||||
args: args.unwrap_or_default(),
|
||||
env,
|
||||
}
|
||||
}
|
||||
RawMcpServerConfig {
|
||||
url: Some(url),
|
||||
bearer_token,
|
||||
command,
|
||||
args,
|
||||
env,
|
||||
..
|
||||
} => {
|
||||
throw_if_set("streamable_http", "command", command.as_ref())?;
|
||||
throw_if_set("streamable_http", "args", args.as_ref())?;
|
||||
throw_if_set("streamable_http", "env", env.as_ref())?;
|
||||
McpServerTransportConfig::StreamableHttp { url, bearer_token }
|
||||
}
|
||||
_ => return Err(SerdeError::custom("invalid transport")),
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
transport,
|
||||
command: raw.command,
|
||||
args: raw.args,
|
||||
env: raw.env,
|
||||
startup_timeout_sec,
|
||||
tool_timeout_sec: raw.tool_timeout_sec,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
|
||||
#[serde(untagged, deny_unknown_fields, rename_all = "snake_case")]
|
||||
pub enum McpServerTransportConfig {
|
||||
/// https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#stdio
|
||||
Stdio {
|
||||
command: String,
|
||||
#[serde(default)]
|
||||
args: Vec<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
env: Option<HashMap<String, String>>,
|
||||
},
|
||||
/// https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http
|
||||
StreamableHttp {
|
||||
url: String,
|
||||
/// A plain text bearer token to use for authentication.
|
||||
/// This bearer token will be included in the HTTP request header as an `Authorization: Bearer <token>` header.
|
||||
/// This should be used with caution because it lives on disk in clear text.
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
bearer_token: Option<String>,
|
||||
},
|
||||
}
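Because the enum is untagged and flattened into McpServerConfig, the transport variant is inferred purely from which keys appear in the TOML table. A minimal sketch (assuming the toml crate, as the tests below do):

    // `url` selects StreamableHttp; `command` selects Stdio.
    let http: McpServerConfig =
        toml::from_str(r#"url = "https://example.com/mcp""#).expect("http transport");
    let stdio: McpServerConfig =
        toml::from_str(r#"command = "echo""#).expect("stdio transport");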

mod option_duration_secs {
    use serde::Deserialize;
    use serde::Deserializer;
@@ -367,139 +303,3 @@ pub enum ReasoningSummaryFormat {
    None,
    Experimental,
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn deserialize_stdio_command_server_config() {
        let cfg: McpServerConfig = toml::from_str(
            r#"
            command = "echo"
            "#,
        )
        .expect("should deserialize command config");

        assert_eq!(
            cfg.transport,
            McpServerTransportConfig::Stdio {
                command: "echo".to_string(),
                args: vec![],
                env: None
            }
        );
    }

    #[test]
    fn deserialize_stdio_command_server_config_with_args() {
        let cfg: McpServerConfig = toml::from_str(
            r#"
            command = "echo"
            args = ["hello", "world"]
            "#,
        )
        .expect("should deserialize command config");

        assert_eq!(
            cfg.transport,
            McpServerTransportConfig::Stdio {
                command: "echo".to_string(),
                args: vec!["hello".to_string(), "world".to_string()],
                env: None
            }
        );
    }

    #[test]
    fn deserialize_stdio_command_server_config_with_arg_with_args_and_env() {
        let cfg: McpServerConfig = toml::from_str(
            r#"
            command = "echo"
            args = ["hello", "world"]
            env = { "FOO" = "BAR" }
            "#,
        )
        .expect("should deserialize command config");

        assert_eq!(
            cfg.transport,
            McpServerTransportConfig::Stdio {
                command: "echo".to_string(),
                args: vec!["hello".to_string(), "world".to_string()],
                env: Some(HashMap::from([("FOO".to_string(), "BAR".to_string())]))
            }
        );
    }

    #[test]
    fn deserialize_streamable_http_server_config() {
        let cfg: McpServerConfig = toml::from_str(
            r#"
            url = "https://example.com/mcp"
            "#,
        )
        .expect("should deserialize http config");

        assert_eq!(
            cfg.transport,
            McpServerTransportConfig::StreamableHttp {
                url: "https://example.com/mcp".to_string(),
                bearer_token: None
            }
        );
    }

    #[test]
    fn deserialize_streamable_http_server_config_with_bearer_token() {
        let cfg: McpServerConfig = toml::from_str(
            r#"
            url = "https://example.com/mcp"
            bearer_token = "secret"
            "#,
        )
        .expect("should deserialize http config");

        assert_eq!(
            cfg.transport,
            McpServerTransportConfig::StreamableHttp {
                url: "https://example.com/mcp".to_string(),
                bearer_token: Some("secret".to_string())
            }
        );
    }

    #[test]
    fn deserialize_rejects_command_and_url() {
        toml::from_str::<McpServerConfig>(
            r#"
            command = "echo"
            url = "https://example.com"
            "#,
        )
        .expect_err("should reject command+url");
    }

    #[test]
    fn deserialize_rejects_env_for_http_transport() {
        toml::from_str::<McpServerConfig>(
            r#"
            url = "https://example.com"
            env = { "FOO" = "BAR" }
            "#,
        )
        .expect_err("should reject env for http transport");
    }

    #[test]
    fn deserialize_rejects_bearer_token_for_stdio_transport() {
        toml::from_str::<McpServerConfig>(
            r#"
            command = "echo"
            bearer_token = "secret"
            "#,
        )
        .expect_err("should reject bearer token for stdio transport");
    }
}

@@ -1,355 +0,0 @@
use std::fs as std_fs;
use std::path::Path;
use std::path::PathBuf;

use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use codex_protocol::mcp_protocol::ConversationId;
use tokio::fs;
use tokio::fs::OpenOptions;
use tokio::io::AsyncWriteExt;
use tokio::process::Command;
use tracing::info;
use tracing::warn;

/// Represents a linked git worktree managed by Codex.
///
/// The handle tracks whether Codex created the worktree for the current
/// conversation. It leaves the checkout in place until [`remove`] is invoked.
pub struct WorktreeHandle {
    repo_root: PathBuf,
    path: PathBuf,
}

impl WorktreeHandle {
    /// Create (or reuse) a worktree rooted at
    /// `<repo_root>/codex/worktree/<conversation_id>`.
    pub async fn create(repo_root: &Path, conversation_id: &ConversationId) -> Result<Self> {
        if !repo_root.exists() {
            return Err(anyhow!(
                "git worktree root `{}` does not exist",
                repo_root.display()
            ));
        }

        let repo_root = repo_root.to_path_buf();
        let codex_dir = repo_root.join("codex");
        let codex_worktree_dir = codex_dir.join("worktree");
        fs::create_dir_all(&codex_worktree_dir)
            .await
            .with_context(|| {
                format!(
                    "failed to create codex worktree directory at `{}`",
                    codex_worktree_dir.display()
                )
            })?;

        let path = codex_worktree_dir.join(conversation_id.to_string());
        let is_registered = worktree_registered(&repo_root, &path).await?;

        if is_registered {
            if path.exists() {
                if let Err(err) = ensure_codex_excluded(&repo_root).await {
                    warn!("failed to add codex worktree path to git exclude: {err:#}");
                }
                info!(
                    worktree = %path.display(),
                    "reusing existing git worktree for conversation"
                );
                return Ok(Self { repo_root, path });
            }

            warn!(
                worktree = %path.display(),
                "git worktree is registered but missing on disk; pruning stale entry"
            );
            run_git_command(&repo_root, ["worktree", "prune", "--expire", "now"])
                .await
                .with_context(|| {
                    format!(
                        "failed to prune git worktrees while recovering `{}`",
                        path.display()
                    )
                })?;

            if worktree_registered(&repo_root, &path).await? {
                return Err(anyhow!(
                    "git worktree `{}` is registered but missing on disk; run `git worktree prune --expire now` to remove the stale entry",
                    path.display()
                ));
            }

            info!(
                worktree = %path.display(),
                "recreating git worktree for conversation after pruning stale registration"
            );
        }

        if path.exists() {
            return Err(anyhow!(
                "git worktree path `{}` already exists but is not registered; remove it manually",
                path.display()
            ));
        }

        run_git_command(
            &repo_root,
            [
                "worktree",
                "add",
                "--detach",
                path.to_str().ok_or_else(|| {
                    anyhow!(
                        "failed to convert worktree path `{}` to UTF-8",
                        path.display()
                    )
                })?,
                "HEAD",
            ],
        )
        .await
        .with_context(|| format!("failed to create git worktree at `{}`", path.display()))?;

        if let Err(err) = ensure_codex_excluded(&repo_root).await {
            warn!("failed to add codex worktree path to git exclude: {err:#}");
        }

        info!(
            worktree = %path.display(),
            "created git worktree for conversation"
        );

        Ok(Self { repo_root, path })
    }

    /// Absolute path to the worktree checkout on disk.
    pub fn path(&self) -> &Path {
        &self.path
    }

    /// Remove the worktree and prune metadata from the repository.
    pub async fn remove(self) -> Result<()> {
        let path = self.path.clone();

        // `git worktree remove` fails if refs are missing or the checkout is dirty.
        // Use --force to ensure best effort removal; the user explicitly requested it.
        run_git_command(
            &self.repo_root,
            [
                "worktree",
                "remove",
                "--force",
                path.to_str().ok_or_else(|| {
                    anyhow!(
                        "failed to convert worktree path `{}` to UTF-8",
                        path.display()
                    )
                })?,
            ],
        )
        .await
        .with_context(|| format!("failed to remove git worktree `{}`", path.display()))?;

        // Prune dangling metadata so repeated sessions do not accumulate entries.
        if let Err(err) =
            run_git_command(&self.repo_root, ["worktree", "prune", "--expire", "now"]).await
        {
            warn!("failed to prune git worktrees: {err:#}");
        }

        Ok(())
    }
}

async fn worktree_registered(repo_root: &Path, target: &Path) -> Result<bool> {
    let output = run_git_command(repo_root, ["worktree", "list", "--porcelain"]).await?;
    let stdout = String::from_utf8(output.stdout)?;

    let target_canon = std_fs::canonicalize(target).unwrap_or_else(|_| target.to_path_buf());

    for line in stdout.lines() {
        if let Some(path) = line.strip_prefix("worktree ") {
            let candidate = Path::new(path);
            let candidate_canon =
                std_fs::canonicalize(candidate).unwrap_or_else(|_| candidate.to_path_buf());
            if candidate_canon == target_canon {
                return Ok(true);
            }
        }
    }
    Ok(false)
}

async fn run_git_command<'a>(
    repo_root: &Path,
    args: impl IntoIterator<Item = &'a str>,
) -> Result<std::process::Output> {
    let mut cmd = Command::new("git");
    cmd.args(args);
    cmd.current_dir(repo_root);
    let output = cmd
        .output()
        .await
        .with_context(|| format!("failed to execute git command in `{}`", repo_root.display()))?;

    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
        let status = output
            .status
            .code()
            .map(|c| c.to_string())
            .unwrap_or_else(|| "signal".to_string());
        return Err(anyhow!("git command exited with status {status}: {stderr}",));
    }

    Ok(output)
}

async fn ensure_codex_excluded(repo_root: &Path) -> Result<()> {
    const PATTERN: &str = "/codex/";

    let git_dir_out = run_git_command(repo_root, ["rev-parse", "--git-dir"]).await?;
    let git_dir_str = String::from_utf8(git_dir_out.stdout)?.trim().to_string();
    let git_dir_path = if Path::new(&git_dir_str).is_absolute() {
        PathBuf::from(&git_dir_str)
    } else {
        repo_root.join(&git_dir_str)
    };

    let info_dir = git_dir_path.join("info");
    fs::create_dir_all(&info_dir).await?;
    let exclude_path = info_dir.join("exclude");

    let existing_bytes = fs::read(&exclude_path).await.unwrap_or_default();
    let existing = String::from_utf8(existing_bytes).unwrap_or_default();
    if existing.lines().any(|line| line.trim() == PATTERN) {
        return Ok(());
    }

    let mut file = OpenOptions::new()
        .create(true)
        .append(true)
        .open(&exclude_path)
        .await?;

    if !existing.is_empty() && !existing.ends_with('\n') {
        file.write_all(b"\n").await?;
    }
    file.write_all(PATTERN.as_bytes()).await?;
    file.write_all(b"\n").await?;
    Ok(())
}
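The exclude helper above is idempotent; a short sketch of the expected behavior (hypothetical call site):

    // After any number of calls, `<git-dir>/info/exclude` contains the
    // `/codex/` pattern exactly once, so checkouts under `<repo_root>/codex/`
    // never show up as untracked files.
    ensure_codex_excluded(&repo_root).await?;
    ensure_codex_excluded(&repo_root).await?; // no duplicate line is appended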

#[cfg(test)]
mod tests {
    use super::*;

    use tempfile::TempDir;

    const GIT_ENV: [(&str, &str); 2] = [
        ("GIT_CONFIG_GLOBAL", "/dev/null"),
        ("GIT_CONFIG_NOSYSTEM", "1"),
    ];

    async fn init_repo() -> (TempDir, PathBuf) {
        let temp = TempDir::new().expect("tempdir");
        let repo_path = temp.path().join("repo");
        fs::create_dir_all(&repo_path)
            .await
            .expect("create repo dir");

        run_git_with_env(&repo_path, ["init"], &GIT_ENV)
            .await
            .expect("git init");
        run_git_with_env(&repo_path, ["config", "user.name", "Test User"], &GIT_ENV)
            .await
            .expect("config user.name");
        run_git_with_env(
            &repo_path,
            ["config", "user.email", "test@example.com"],
            &GIT_ENV,
        )
        .await
        .expect("config user.email");

        fs::write(repo_path.join("README.md"), b"hello world")
            .await
            .expect("write file");
        run_git_with_env(&repo_path, ["add", "README.md"], &GIT_ENV)
            .await
            .expect("git add");
        run_git_with_env(&repo_path, ["commit", "-m", "init"], &GIT_ENV)
            .await
            .expect("git commit");

        (temp, repo_path)
    }

    async fn run_git_with_env<'a>(
        cwd: &Path,
        args: impl IntoIterator<Item = &'a str>,
        envs: &[(&str, &str)],
    ) -> Result<()> {
        let mut cmd = Command::new("git");
        cmd.args(args);
        cmd.current_dir(cwd);
        for (key, value) in envs {
            cmd.env(key, value);
        }
        let status = cmd.status().await.context("failed to spawn git command")?;
        if !status.success() {
            return Err(anyhow!(
                "git command exited with status {status} (cwd: {})",
                cwd.display()
            ));
        }
        Ok(())
    }

    async fn is_registered(repo_root: &Path, path: &Path) -> bool {
        worktree_registered(repo_root, path).await.unwrap_or(false)
    }

    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    async fn creates_and_removes_worktree() {
        let (_temp, repo) = init_repo().await;
        let conversation_id = ConversationId::new();

        let handle = WorktreeHandle::create(&repo, &conversation_id)
            .await
            .expect("create worktree");
        let path = handle.path().to_path_buf();
        assert!(path.exists(), "worktree path should exist on disk");
        assert!(
            is_registered(&repo, &path).await,
            "worktree should be registered"
        );

        handle.remove().await.expect("remove worktree");
        assert!(
            !is_registered(&repo, &path).await,
            "worktree should be removed from registration"
        );
    }

    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    async fn reuses_existing_worktree() {
        let (_temp, repo) = init_repo().await;
        let conversation_id = ConversationId::new();

        let first = WorktreeHandle::create(&repo, &conversation_id)
            .await
            .expect("create worktree");
        let path = first.path().to_path_buf();
        drop(first);

        let second = WorktreeHandle::create(&repo, &conversation_id)
            .await
            .expect("reuse worktree");
        assert_eq!(path, second.path());
        assert!(is_registered(&repo, second.path()).await);

        second.remove().await.expect("remove worktree");
    }
}
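A minimal lifecycle sketch of the API removed above (hypothetical caller; do_work stands in for whatever runs inside the checkout):

    async fn run_in_worktree(repo_root: &Path, conversation_id: &ConversationId) -> Result<()> {
        // Creates `<repo_root>/codex/worktree/<conversation_id>`, or reuses it.
        let handle = WorktreeHandle::create(repo_root, conversation_id).await?;
        do_work(handle.path()).await?; // hypothetical
        // Forcibly removes the checkout and prunes worktree metadata.
        handle.remove().await?;
        Ok(())
    }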

89
codex-rs/core/src/internal_storage.rs
Normal file
@@ -0,0 +1,89 @@
use anyhow::Context;
use serde::Deserialize;
use serde::Serialize;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;

pub(crate) const INTERNAL_STORAGE_FILE: &str = "internal_storage.json";

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InternalStorage {
    #[serde(skip)]
    storage_path: PathBuf,
    #[serde(default = "default_gpt_5_codex_model_prompt_seen")]
    pub gpt_5_codex_model_prompt_seen: bool,
}

const fn default_gpt_5_codex_model_prompt_seen() -> bool {
    true
}

impl Default for InternalStorage {
    fn default() -> Self {
        Self {
            storage_path: PathBuf::new(),
            gpt_5_codex_model_prompt_seen: default_gpt_5_codex_model_prompt_seen(),
        }
    }
}

// TODO(jif) generalise all the file writers and build proper async channel inserters.
impl InternalStorage {
    pub fn load(codex_home: &Path) -> Self {
        let storage_path = codex_home.join(INTERNAL_STORAGE_FILE);

        match std::fs::read_to_string(&storage_path) {
            Ok(serialized) => match serde_json::from_str::<Self>(&serialized) {
                Ok(mut storage) => {
                    storage.storage_path = storage_path;
                    storage
                }
                Err(error) => {
                    tracing::warn!("failed to parse internal storage: {error:?}");
                    Self::empty(storage_path)
                }
            },
            Err(error) => {
                if error.kind() == ErrorKind::NotFound {
                    tracing::debug!(
                        "internal storage not found at {}; initializing defaults",
                        storage_path.display()
                    );
                } else {
                    tracing::warn!("failed to read internal storage: {error:?}");
                }
                Self::empty(storage_path)
            }
        }
    }

    fn empty(storage_path: PathBuf) -> Self {
        Self {
            storage_path,
            ..Default::default()
        }
    }

    pub async fn persist(&self) -> anyhow::Result<()> {
        let serialized = serde_json::to_string_pretty(self)?;

        if let Some(parent) = self.storage_path.parent() {
            tokio::fs::create_dir_all(parent).await.with_context(|| {
                format!(
                    "failed to create internal storage directory at {}",
                    parent.display()
                )
            })?;
        }

        tokio::fs::write(&self.storage_path, serialized)
            .await
            .with_context(|| {
                format!(
                    "failed to persist internal storage at {}",
                    self.storage_path.display()
                )
            })
    }
}
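A sketch of the intended call pattern for the new storage (hypothetical call site; show_model_prompt stands in for the UI hook):

    let mut storage = InternalStorage::load(&codex_home);
    if !storage.gpt_5_codex_model_prompt_seen {
        show_model_prompt(); // hypothetical
        storage.gpt_5_codex_model_prompt_seen = true;
        storage.persist().await?; // writes internal_storage.json under codex_home
    }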

@@ -29,7 +29,7 @@ mod exec_command;
pub mod exec_env;
mod flags;
pub mod git_info;
mod git_worktree;
pub mod internal_storage;
pub mod landlock;
mod mcp_connection_manager;
mod mcp_tool_call;

@@ -29,7 +29,6 @@ use tracing::info;
use tracing::warn;

use crate::config_types::McpServerConfig;
use crate::config_types::McpServerTransportConfig;

/// Delimiter used to separate the server name from the tool name in a fully
/// qualified tool name.
@@ -108,7 +107,7 @@ impl McpClientAdapter {
        params: mcp_types::InitializeRequestParams,
        startup_timeout: Duration,
    ) -> Result<Self> {
        info!(
        tracing::error!(
            "new_stdio_client use_rmcp_client: {use_rmcp_client} program: {program:?} args: {args:?} env: {env:?} params: {params:?} startup_timeout: {startup_timeout:?}"
        );
        if use_rmcp_client {
@@ -122,17 +121,6 @@ impl McpClientAdapter {
        }
    }

    async fn new_streamable_http_client(
        url: String,
        bearer_token: Option<String>,
        params: mcp_types::InitializeRequestParams,
        startup_timeout: Duration,
    ) -> Result<Self> {
        let client = Arc::new(RmcpClient::new_streamable_http_client(url, bearer_token)?);
        client.initialize(params, Some(startup_timeout)).await?;
        Ok(McpClientAdapter::Rmcp(client))
    }

    async fn list_tools(
        &self,
        params: Option<mcp_types::ListToolsRequestParams>,
@@ -188,6 +176,8 @@ impl McpConnectionManager {
            return Ok((Self::default(), ClientStartErrors::default()));
        }

        tracing::error!("new mcp_servers: {mcp_servers:?} use_rmcp_client: {use_rmcp_client}");

        // Launch all configured servers concurrently.
        let mut join_set = JoinSet::new();
        let mut errors = ClientStartErrors::new();
@@ -196,31 +186,22 @@ impl McpConnectionManager {
            // Validate server name before spawning
            if !is_valid_mcp_server_name(&server_name) {
                let error = anyhow::anyhow!(
                    "invalid server name '{}': must match pattern ^[a-zA-Z0-9_-]+$",
                    server_name
                    "invalid server name '{server_name}': must match pattern ^[a-zA-Z0-9_-]+$"
                );
                errors.insert(server_name, error);
                continue;
            }

            if matches!(
                cfg.transport,
                McpServerTransportConfig::StreamableHttp { .. }
            ) && !use_rmcp_client
            {
                info!(
                    "skipping MCP server `{}` configured with url because rmcp client is disabled",
                    server_name
                );
                continue;
            }

            let startup_timeout = cfg.startup_timeout_sec.unwrap_or(DEFAULT_STARTUP_TIMEOUT);
            let tool_timeout = cfg.tool_timeout_sec.unwrap_or(DEFAULT_TOOL_TIMEOUT);

            let use_rmcp_client_flag = use_rmcp_client;
            join_set.spawn(async move {
                let McpServerConfig { transport, .. } = cfg;
                let McpServerConfig {
                    command, args, env, ..
                } = cfg;
                let command_os: OsString = command.into();
                let args_os: Vec<OsString> = args.into_iter().map(Into::into).collect();
                let params = mcp_types::InitializeRequestParams {
                    capabilities: ClientCapabilities {
                        experimental: None,
@@ -242,30 +223,15 @@ impl McpConnectionManager {
                    protocol_version: mcp_types::MCP_SCHEMA_VERSION.to_owned(),
                };

                let client = match transport {
                    McpServerTransportConfig::Stdio { command, args, env } => {
                        let command_os: OsString = command.into();
                        let args_os: Vec<OsString> = args.into_iter().map(Into::into).collect();
                        McpClientAdapter::new_stdio_client(
                            use_rmcp_client_flag,
                            command_os,
                            args_os,
                            env,
                            params.clone(),
                            startup_timeout,
                        )
                        .await
                    }
                    McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                        McpClientAdapter::new_streamable_http_client(
                            url,
                            bearer_token,
                            params,
                            startup_timeout,
                        )
                        .await
                    }
                }
                let client = McpClientAdapter::new_stdio_client(
                    use_rmcp_client_flag,
                    command_os,
                    args_os,
                    env,
                    params,
                    startup_timeout,
                )
                .await
                .map(|c| (c, startup_timeout));

                ((server_name, tool_timeout), client)

@@ -70,7 +70,6 @@ pub(crate) fn should_persist_event_msg(ev: &EventMsg) -> bool {
        | EventMsg::ListCustomPromptsResponse(_)
        | EventMsg::PlanUpdate(_)
        | EventMsg::ShutdownComplete
        | EventMsg::ConversationPath(_)
        | EventMsg::WorktreeRemoved(_) => false,
        | EventMsg::ConversationPath(_) => false,
    }
}

@@ -89,15 +89,8 @@ pub fn assess_command_safety(
) -> SafetyCheck {
    // Some commands look dangerous. Even if they are run inside a sandbox,
    // unless the user has explicitly approved them, we should ask,
    // or reject if the approval_policy tells us not to ask.
    // regardless of the approval policy and sandbox policy.
    if command_might_be_dangerous(command) && !approved.contains(command) {
        if approval_policy == AskForApproval::Never {
            return SafetyCheck::Reject {
                reason: "dangerous command detected; rejected by user approval settings"
                    .to_string(),
            };
        }

        return SafetyCheck::AskUser;
    }

@@ -383,13 +376,7 @@ mod tests {
        request_escalated_privileges,
    );

    assert_eq!(
        safety_check,
        SafetyCheck::Reject {
            reason: "dangerous command detected; rejected by user approval settings"
                .to_string(),
        }
    );
    assert_eq!(safety_check, SafetyCheck::AskUser);
}

#[test]

@@ -1,6 +1,5 @@
use crate::RolloutRecorder;
use crate::exec_command::ExecSessionManager;
use crate::git_worktree::WorktreeHandle;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::unified_exec::UnifiedExecSessionManager;
use crate::user_notification::UserNotifier;
@@ -13,7 +12,6 @@ pub(crate) struct SessionServices {
    pub(crate) unified_exec_manager: UnifiedExecSessionManager,
    pub(crate) notifier: UserNotifier,
    pub(crate) rollout: Mutex<Option<RolloutRecorder>>,
    pub(crate) worktree: Mutex<Option<WorktreeHandle>>,
    pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
    pub(crate) user_shell: crate::shell::Shell,
    pub(crate) show_raw_agent_reasoning: bool,

@@ -361,7 +361,6 @@ async fn includes_conversation_id_and_model_headers_in_request() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn includes_base_instructions_override_in_request() {
    skip_if_no_network!();
    // Mock server
    let server = MockServer::start().await;

@@ -559,7 +558,6 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn includes_user_instructions_message_in_request() {
    skip_if_no_network!();
    let server = MockServer::start().await;

    let first = ResponseTemplate::new(200)
@@ -757,7 +755,6 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn token_count_includes_rate_limits_snapshot() {
    skip_if_no_network!();
    let server = MockServer::start().await;

    let sse_body = responses::sse(vec![responses::ev_completed_with_tokens("resp_rate", 123)]);
@@ -902,7 +899,6 @@ async fn token_count_includes_rate_limits_snapshot() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn usage_limit_error_emits_rate_limit_event() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));
    let server = MockServer::start().await;

    let response = ResponseTemplate::new(429)
@@ -982,7 +978,6 @@ async fn usage_limit_error_emits_rate_limit_event() -> anyhow::Result<()> {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn azure_overrides_assign_properties_used_for_responses_url() {
    skip_if_no_network!();
    let existing_env_var_with_random_value = if cfg!(windows) { "USERNAME" } else { "USER" };

    // Mock server
@@ -1059,7 +1054,6 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn env_var_overrides_loaded_auth() {
    skip_if_no_network!();
    let existing_env_var_with_random_value = if cfg!(windows) { "USERNAME" } else { "USER" };

    // Mock server

@@ -1,10 +1,7 @@
use std::collections::HashMap;
use std::net::TcpListener;
use std::time::Duration;

use codex_core::config_types::McpServerConfig;
use codex_core::config_types::McpServerTransportConfig;

use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
@@ -19,15 +16,10 @@ use core_test_support::wait_for_event;
use core_test_support::wait_for_event_with_timeout;
use escargot::CargoBuild;
use serde_json::Value;
use tokio::net::TcpStream;
use tokio::process::Child;
use tokio::process::Command;
use tokio::time::Instant;
use tokio::time::sleep;
use wiremock::matchers::any;

#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn stdio_server_round_trip() -> anyhow::Result<()> {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn rmcp_tool_call_round_trip() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
@@ -62,7 +54,7 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
    let expected_env_value = "propagated-env";
    let rmcp_test_server_bin = CargoBuild::new()
        .package("codex-rmcp-client")
        .bin("test_stdio_server")
        .bin("rmcp_test_server")
        .run()?
        .path()
        .to_string_lossy()
@@ -74,14 +66,12 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
    config.mcp_servers.insert(
        server_name.to_string(),
        McpServerConfig {
            transport: McpServerTransportConfig::Stdio {
                command: rmcp_test_server_bin.clone(),
                args: Vec::new(),
                env: Some(HashMap::from([(
                    "MCP_TEST_VALUE".to_string(),
                    expected_env_value.to_string(),
                )])),
            },
            command: rmcp_test_server_bin.clone(),
            args: Vec::new(),
            env: Some(HashMap::from([(
                "MCP_TEST_VALUE".to_string(),
                expected_env_value.to_string(),
            )])),
            startup_timeout_sec: Some(Duration::from_secs(10)),
            tool_timeout_sec: None,
        },
@@ -107,13 +97,18 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
    })
    .await?;

    eprintln!("waiting for mcp tool call begin event");
    let begin_event = wait_for_event_with_timeout(
        &fixture.codex,
        |ev| matches!(ev, EventMsg::McpToolCallBegin(_)),
        |ev| {
            eprintln!("ev: {ev:?}");
            matches!(ev, EventMsg::McpToolCallBegin(_))
        },
        Duration::from_secs(10),
    )
    .await;

    eprintln!("mcp tool call begin event: {begin_event:?}");
    let EventMsg::McpToolCallBegin(begin) = begin_event else {
        unreachable!("event guard guarantees McpToolCallBegin");
    };
@@ -124,6 +119,7 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
        matches!(ev, EventMsg::McpToolCallEnd(_))
    })
    .await;
    eprintln!("end_event: {end_event:?}");
    let EventMsg::McpToolCallEnd(end) = end_event else {
        unreachable!("event guard guarantees McpToolCallEnd");
    };
@@ -149,223 +145,18 @@ async fn stdio_server_round_trip() -> anyhow::Result<()> {
        .get("echo")
        .and_then(Value::as_str)
        .expect("echo payload present");
    assert_eq!(echo_value, "ECHOING: ping");
    assert_eq!(echo_value, "ping");
    let env_value = map
        .get("env")
        .and_then(Value::as_str)
        .expect("env snapshot inserted");
    assert_eq!(env_value, expected_env_value);

    wait_for_event(&fixture.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
    let task_complete_event =
        wait_for_event(&fixture.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
    eprintln!("task_complete_event: {task_complete_event:?}");

    server.verify().await;

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn streamable_http_tool_call_round_trip() -> anyhow::Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;

    let call_id = "call-456";
    let server_name = "rmcp_http";
    let tool_name = format!("{server_name}__echo");

    mount_sse_once(
        &server,
        any(),
        responses::sse(vec![
            serde_json::json!({
                "type": "response.created",
                "response": {"id": "resp-1"}
            }),
            responses::ev_function_call(call_id, &tool_name, "{\"message\":\"ping\"}"),
            responses::ev_completed("resp-1"),
        ]),
    )
    .await;
    mount_sse_once(
        &server,
        any(),
        responses::sse(vec![
            responses::ev_assistant_message(
                "msg-1",
                "rmcp streamable http echo tool completed successfully.",
            ),
            responses::ev_completed("resp-2"),
        ]),
    )
    .await;

    let expected_env_value = "propagated-env-http";
    let rmcp_http_server_bin = CargoBuild::new()
        .package("codex-rmcp-client")
        .bin("test_streamable_http_server")
        .run()?
        .path()
        .to_string_lossy()
        .into_owned();

    let listener = TcpListener::bind("127.0.0.1:0")?;
    let port = listener.local_addr()?.port();
    drop(listener);
    let bind_addr = format!("127.0.0.1:{port}");
    let server_url = format!("http://{bind_addr}/mcp");

    let mut http_server_child = Command::new(&rmcp_http_server_bin)
        .kill_on_drop(true)
        .env("MCP_STREAMABLE_HTTP_BIND_ADDR", &bind_addr)
        .env("MCP_TEST_VALUE", expected_env_value)
        .spawn()?;

    wait_for_streamable_http_server(&mut http_server_child, &bind_addr, Duration::from_secs(5))
        .await?;

    let fixture = test_codex()
        .with_config(move |config| {
            config.use_experimental_use_rmcp_client = true;
            config.mcp_servers.insert(
                server_name.to_string(),
                McpServerConfig {
                    transport: McpServerTransportConfig::StreamableHttp {
                        url: server_url,
                        bearer_token: None,
                    },
                    startup_timeout_sec: Some(Duration::from_secs(10)),
                    tool_timeout_sec: None,
                },
            );
        })
        .build(&server)
        .await?;
    let session_model = fixture.session_configured.model.clone();

    fixture
        .codex
        .submit(Op::UserTurn {
            items: vec![InputItem::Text {
                text: "call the rmcp streamable http echo tool".into(),
            }],
            final_output_json_schema: None,
            cwd: fixture.cwd.path().to_path_buf(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: session_model,
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;

    let begin_event = wait_for_event_with_timeout(
        &fixture.codex,
        |ev| matches!(ev, EventMsg::McpToolCallBegin(_)),
        Duration::from_secs(10),
    )
    .await;

    let EventMsg::McpToolCallBegin(begin) = begin_event else {
        unreachable!("event guard guarantees McpToolCallBegin");
    };
    assert_eq!(begin.invocation.server, server_name);
    assert_eq!(begin.invocation.tool, "echo");

    let end_event = wait_for_event(&fixture.codex, |ev| {
        matches!(ev, EventMsg::McpToolCallEnd(_))
    })
    .await;
    let EventMsg::McpToolCallEnd(end) = end_event else {
        unreachable!("event guard guarantees McpToolCallEnd");
    };

    let result = end
        .result
        .as_ref()
        .expect("rmcp echo tool should return success");
    assert_eq!(result.is_error, Some(false));
    assert!(
        result.content.is_empty(),
        "content should default to an empty array"
    );

    let structured = result
        .structured_content
        .as_ref()
        .expect("structured content");
    let Value::Object(map) = structured else {
        panic!("structured content should be an object: {structured:?}");
    };
    let echo_value = map
        .get("echo")
        .and_then(Value::as_str)
        .expect("echo payload present");
    assert_eq!(echo_value, "ECHOING: ping");
    let env_value = map
        .get("env")
        .and_then(Value::as_str)
        .expect("env snapshot inserted");
    assert_eq!(env_value, expected_env_value);

    wait_for_event(&fixture.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;

    server.verify().await;

    match http_server_child.try_wait() {
        Ok(Some(_)) => {}
        Ok(None) => {
            let _ = http_server_child.kill().await;
        }
        Err(error) => {
            eprintln!("failed to check streamable http server status: {error}");
            let _ = http_server_child.kill().await;
        }
    }
    if let Err(error) = http_server_child.wait().await {
        eprintln!("failed to await streamable http server shutdown: {error}");
    }

    Ok(())
}

async fn wait_for_streamable_http_server(
    server_child: &mut Child,
    address: &str,
    timeout: Duration,
) -> anyhow::Result<()> {
    let deadline = Instant::now() + timeout;

    loop {
        if let Some(status) = server_child.try_wait()? {
            return Err(anyhow::anyhow!(
                "streamable HTTP server exited early with status {status}"
            ));
        }

        let remaining = deadline.saturating_duration_since(Instant::now());

        if remaining.is_zero() {
            return Err(anyhow::anyhow!(
                "timed out waiting for streamable HTTP server at {address}: deadline reached"
            ));
        }

        match tokio::time::timeout(remaining, TcpStream::connect(address)).await {
            Ok(Ok(_)) => return Ok(()),
            Ok(Err(error)) => {
                if Instant::now() >= deadline {
                    return Err(anyhow::anyhow!(
                        "timed out waiting for streamable HTTP server at {address}: {error}"
                    ));
                }
            }
            Err(_) => {
                return Err(anyhow::anyhow!(
                    "timed out waiting for streamable HTTP server at {address}: connect call timed out"
                ));
            }
        }

        sleep(Duration::from_millis(50)).await;
    }
}

@@ -26,7 +26,6 @@ use codex_core::protocol::TurnAbortReason;
use codex_core::protocol::TurnDiffEvent;
use codex_core::protocol::WebSearchBeginEvent;
use codex_core::protocol::WebSearchEndEvent;
use codex_core::protocol::WorktreeRemovedEvent;
use codex_protocol::num_format::format_with_separators;
use owo_colors::OwoColorize;
use owo_colors::Style;
@@ -200,14 +199,6 @@ impl EventProcessor for EventProcessorWithHumanOutput {
                );
            }
        }
        EventMsg::WorktreeRemoved(WorktreeRemovedEvent { path }) => {
            ts_println!(
                self,
                "{} {}",
                "git worktree removed:".style(self.cyan),
                path.display()
            );
        }
        EventMsg::AgentMessageDelta(AgentMessageDeltaEvent { delta }) => {
            if !self.answer_started {
                ts_println!(self, "{}\n", "codex".style(self.italic).style(self.magenta));
@@ -534,7 +525,6 @@ impl EventProcessor for EventProcessorWithHumanOutput {
            history_entry_count: _,
            initial_messages: _,
            rollout_path: _,
            worktree_path,
        } = session_configured_event;

        ts_println!(
@@ -545,9 +535,6 @@ impl EventProcessor for EventProcessorWithHumanOutput {
        );

        ts_println!(self, "model: {}", model);
        if let Some(path) = worktree_path {
            ts_println!(self, "git worktree: {}", path.display());
        }
        println!();
    }
    EventMsg::PlanUpdate(plan_update_event) => {

@@ -8,10 +8,6 @@ use ts_rs::TS;
pub enum ConversationEvent {
    #[serde(rename = "session.created")]
    SessionCreated(SessionCreatedEvent),
    #[serde(rename = "turn.started")]
    TurnStarted(TurnStartedEvent),
    #[serde(rename = "turn.completed")]
    TurnCompleted(TurnCompletedEvent),
    #[serde(rename = "item.started")]
    ItemStarted(ItemStartedEvent),
    #[serde(rename = "item.updated")]
@@ -27,22 +23,6 @@ pub struct SessionCreatedEvent {
    pub session_id: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS, Default)]
pub struct TurnStartedEvent {}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct TurnCompletedEvent {
    pub usage: Usage,
}

/// Minimal usage summary for a turn.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS, Default)]
pub struct Usage {
    pub input_tokens: u64,
    pub cached_input_tokens: u64,
    pub output_tokens: u64,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)]
pub struct ItemStartedEvent {
    pub item: ConversationItem,

@@ -23,9 +23,6 @@ use crate::exec_events::ReasoningItem;
use crate::exec_events::SessionCreatedEvent;
use crate::exec_events::TodoItem;
use crate::exec_events::TodoListItem;
use crate::exec_events::TurnCompletedEvent;
use crate::exec_events::TurnStartedEvent;
use crate::exec_events::Usage;
use codex_core::config::Config;
use codex_core::plan_tool::StepStatus;
use codex_core::plan_tool::UpdatePlanArgs;
@@ -40,7 +37,6 @@ use codex_core::protocol::PatchApplyBeginEvent;
use codex_core::protocol::PatchApplyEndEvent;
use codex_core::protocol::SessionConfiguredEvent;
use codex_core::protocol::TaskCompleteEvent;
use codex_core::protocol::TaskStartedEvent;
use tracing::error;
use tracing::warn;

@@ -52,7 +48,6 @@ pub struct ExperimentalEventProcessorWithJsonOutput {
    running_patch_applies: HashMap<String, PatchApplyBeginEvent>,
    // Tracks the todo list for the current turn (at most one per turn).
    running_todo_list: Option<RunningTodoList>,
    last_total_token_usage: Option<codex_core::protocol::TokenUsage>,
}

#[derive(Debug, Clone)]
@@ -75,7 +70,6 @@ impl ExperimentalEventProcessorWithJsonOutput {
            running_commands: HashMap::new(),
            running_patch_applies: HashMap::new(),
            running_todo_list: None,
            last_total_token_usage: None,
        }
    }

@@ -88,14 +82,6 @@ impl ExperimentalEventProcessorWithJsonOutput {
            EventMsg::ExecCommandEnd(ev) => self.handle_exec_command_end(ev),
            EventMsg::PatchApplyBegin(ev) => self.handle_patch_apply_begin(ev),
            EventMsg::PatchApplyEnd(ev) => self.handle_patch_apply_end(ev),
            EventMsg::TokenCount(ev) => {
                if let Some(info) = &ev.info {
                    self.last_total_token_usage = Some(info.total_token_usage.clone());
                }
                Vec::new()
            }
            EventMsg::TaskStarted(ev) => self.handle_task_started(ev),
            EventMsg::TaskComplete(_) => self.handle_task_complete(),
            EventMsg::Error(ev) => vec![ConversationEvent::Error(ConversationErrorEvent {
                message: ev.message.clone(),
            })],
@@ -103,6 +89,7 @@ impl ExperimentalEventProcessorWithJsonOutput {
                message: ev.message.clone(),
            })],
            EventMsg::PlanUpdate(ev) => self.handle_plan_update(ev),
            EventMsg::TaskComplete(_) => self.handle_task_complete(),
            _ => Vec::new(),
        }
    }
@@ -296,23 +283,7 @@ impl ExperimentalEventProcessorWithJsonOutput {
        vec![ConversationEvent::ItemStarted(ItemStartedEvent { item })]
    }

    fn handle_task_started(&self, _: &TaskStartedEvent) -> Vec<ConversationEvent> {
        vec![ConversationEvent::TurnStarted(TurnStartedEvent {})]
    }

    fn handle_task_complete(&mut self) -> Vec<ConversationEvent> {
        let usage = if let Some(u) = &self.last_total_token_usage {
            Usage {
                input_tokens: u.input_tokens,
                cached_input_tokens: u.cached_input_tokens,
                output_tokens: u.output_tokens,
            }
        } else {
            Usage::default()
        };

        let mut items = Vec::new();

        if let Some(running) = self.running_todo_list.take() {
            let item = ConversationItem {
                id: running.item_id,
@@ -320,16 +291,11 @@ impl ExperimentalEventProcessorWithJsonOutput {
                    items: running.items,
                }),
            };
            items.push(ConversationEvent::ItemCompleted(ItemCompletedEvent {
            return vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
                item,
            }));
            })];
        }

        items.push(ConversationEvent::TurnCompleted(TurnCompletedEvent {
            usage,
        }));

        items
        Vec::new()
    }
}

@@ -171,7 +171,6 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
        include_view_image_tool: None,
        show_raw_agent_reasoning: oss.then_some(true),
        tools_web_search_request: None,
        enable_git_worktree: None,
    };
    // Parse `-c` overrides.
    let cli_kv_overrides = match config_overrides.parse_overrides() {
@@ -332,13 +331,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
    info!("Sent prompt with event ID: {initial_prompt_task_id}");

    // Run the loop until the task is complete.
    // Track whether a fatal error was reported by the server so we can
    // exit with a non-zero status for automation-friendly signaling.
    let mut error_seen = false;
    while let Some(event) = rx.recv().await {
        if matches!(event.msg, EventMsg::Error(_)) {
            error_seen = true;
        }
        let shutdown: CodexStatus = event_processor.process_event(event);
        match shutdown {
            CodexStatus::Running => continue,
@@ -350,9 +343,6 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
            }
        }
    }
    if error_seen {
        std::process::exit(1);
    }

    Ok(())
}

@@ -24,9 +24,6 @@ use codex_exec::exec_events::ReasoningItem;
use codex_exec::exec_events::SessionCreatedEvent;
use codex_exec::exec_events::TodoItem as ExecTodoItem;
use codex_exec::exec_events::TodoListItem as ExecTodoListItem;
use codex_exec::exec_events::TurnCompletedEvent;
use codex_exec::exec_events::TurnStartedEvent;
use codex_exec::exec_events::Usage;
use codex_exec::experimental_event_processor_with_json_output::ExperimentalEventProcessorWithJsonOutput;
use pretty_assertions::assert_eq;
use std::path::PathBuf;
@@ -57,7 +54,6 @@ fn session_configured_produces_session_created_event() {
            history_entry_count: 0,
            initial_messages: None,
            rollout_path,
            worktree_path: None,
        }),
    );
    let out = ep.collect_conversation_events(&ev);
@@ -69,22 +65,6 @@ fn session_configured_produces_session_created_event() {
    );
}

#[test]
fn task_started_produces_turn_started_event() {
    let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);
    let out = ep.collect_conversation_events(&event(
        "t1",
        EventMsg::TaskStarted(codex_core::protocol::TaskStartedEvent {
            model_context_window: Some(32_000),
        }),
    ));

    assert_eq!(
        out,
        vec![ConversationEvent::TurnStarted(TurnStartedEvent {})]
    );
}

#[test]
fn plan_update_emits_todo_list_started_updated_and_completed() {
    use codex_core::plan_tool::PlanItemArg;
@@ -181,28 +161,23 @@ fn plan_update_emits_todo_list_started_updated_and_completed() {
    let out_complete = ep.collect_conversation_events(&complete);
    assert_eq!(
        out_complete,
        vec![
            ConversationEvent::ItemCompleted(ItemCompletedEvent {
                item: ConversationItem {
                    id: "item_0".to_string(),
                    details: ConversationItemDetails::TodoList(ExecTodoListItem {
                        items: vec![
                            ExecTodoItem {
                                text: "step one".to_string(),
                                completed: true
                            },
                            ExecTodoItem {
                                text: "step two".to_string(),
                                completed: false
                            },
                        ],
                    }),
                },
            }),
            ConversationEvent::TurnCompleted(TurnCompletedEvent {
                usage: Usage::default(),
            }),
        ]
        vec![ConversationEvent::ItemCompleted(ItemCompletedEvent {
            item: ConversationItem {
                id: "item_0".to_string(),
                details: ConversationItemDetails::TodoList(ExecTodoListItem {
                    items: vec![
                        ExecTodoItem {
                            text: "step one".to_string(),
                            completed: true
                        },
                        ExecTodoItem {
                            text: "step two".to_string(),
                            completed: false
                        },
                    ],
                }),
            },
        })]
    );
}

@@ -610,52 +585,3 @@ fn patch_apply_failure_produces_item_completed_patchapply_failed() {
        other => panic!("unexpected event: {other:?}"),
    }
}

#[test]
fn task_complete_produces_turn_completed_with_usage() {
    let mut ep = ExperimentalEventProcessorWithJsonOutput::new(None);

    // First, feed a TokenCount event with known totals.
    let usage = codex_core::protocol::TokenUsage {
        input_tokens: 1200,
        cached_input_tokens: 200,
        output_tokens: 345,
        reasoning_output_tokens: 0,
        total_tokens: 0,
    };
    let info = codex_core::protocol::TokenUsageInfo {
        total_token_usage: usage.clone(),
        last_token_usage: usage,
        model_context_window: None,
    };
    let token_count_event = event(
        "e1",
        EventMsg::TokenCount(codex_core::protocol::TokenCountEvent {
            info: Some(info),
            rate_limits: None,
        }),
    );
    assert!(
        ep.collect_conversation_events(&token_count_event)
            .is_empty()
    );

    // Then TaskComplete should produce turn.completed with the captured usage.
    let complete_event = event(
        "e2",
        EventMsg::TaskComplete(codex_core::protocol::TaskCompleteEvent {
            last_agent_message: Some("done".to_string()),
        }),
    );
    let out = ep.collect_conversation_events(&complete_event);
    assert_eq!(
        out,
        vec![ConversationEvent::TurnCompleted(TurnCompletedEvent {
            usage: Usage {
                input_tokens: 1200,
                cached_input_tokens: 200,
                output_tokens: 345,
            },
        })]
    );
}

@@ -3,4 +3,3 @@ mod apply_patch;
mod output_schema;
mod resume;
mod sandbox;
mod server_error_exit;

@@ -1,34 +0,0 @@
#![cfg(not(target_os = "windows"))]
#![allow(clippy::expect_used, clippy::unwrap_used)]

use core_test_support::responses;
use core_test_support::test_codex_exec::test_codex_exec;
use wiremock::matchers::any;

/// Verify that when the server reports an error, `codex-exec` exits with a
/// non-zero status code so automation can detect failures.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn exits_non_zero_when_server_reports_error() -> anyhow::Result<()> {
    let test = test_codex_exec();

    // Mock a simple Responses API SSE stream that immediately reports a
    // `response.failed` event with an error message.
    let server = responses::start_mock_server().await;
    let body = responses::sse(vec![serde_json::json!({
        "type": "response.failed",
        "response": {
            "id": "resp_err_1",
            "error": {"code": "rate_limit_exceeded", "message": "synthetic server error"}
        }
    })]);
    responses::mount_sse_once(&server, any(), body).await;

    test.cmd_with_server(&server)
        .arg("--skip-git-repo-check")
        .arg("tell me something")
        .arg("--experimental-json")
        .assert()
        .code(1);

    Ok(())
}

17
codex-rs/git-apply/Cargo.toml
Normal file
@@ -0,0 +1,17 @@
[package]
name = "codex-git-apply"
version = { workspace = true }
edition = "2024"

[lib]
name = "codex_git_apply"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
once_cell = "1"
regex = "1"
tempfile = "3"

647
codex-rs/git-apply/src/lib.rs
Normal file
@@ -0,0 +1,647 @@
use once_cell::sync::Lazy;
use regex::Regex;
use std::ffi::OsStr;
use std::io;
use std::path::Path;
use std::path::PathBuf;

#[derive(Debug, Clone)]
pub struct ApplyGitRequest {
    pub cwd: PathBuf,
    pub diff: String,
    pub revert: bool,
    pub preflight: bool,
}

#[derive(Debug, Clone)]
pub struct ApplyGitResult {
    pub exit_code: i32,
    pub applied_paths: Vec<String>,
    pub skipped_paths: Vec<String>,
    pub conflicted_paths: Vec<String>,
    pub stdout: String,
    pub stderr: String,
    pub cmd_for_log: String,
}

pub fn apply_git_patch(req: &ApplyGitRequest) -> io::Result<ApplyGitResult> {
    let git_root = resolve_git_root(&req.cwd)?;

    // Write unified diff into a temporary file
    let (tmpdir, patch_path) = write_temp_patch(&req.diff)?;
    // Keep tmpdir alive until function end to ensure the file exists
    let _guard = tmpdir;

    if req.revert {
        // Stage WT paths first to avoid index mismatch on revert.
        stage_paths(&git_root, &req.diff)?;
    }

    // Build git args
    let mut args: Vec<String> = vec!["apply".into(), "--3way".into()];
    if req.revert {
        args.push("-R".into());
    }

    // Optional: additional git config via env knob (defaults OFF)
    let mut cfg_parts: Vec<String> = Vec::new();
    if let Ok(cfg) = std::env::var("CODEX_APPLY_GIT_CFG") {
        for pair in cfg.split(',') {
            let p = pair.trim();
            if p.is_empty() || !p.contains('=') {
                continue;
            }
            cfg_parts.push("-c".into());
            cfg_parts.push(p.to_string());
        }
    }

    args.push(patch_path.to_string_lossy().to_string());

    // Optional preflight: dry-run only; do not modify working tree
    if req.preflight {
        let mut check_args = vec!["apply".to_string(), "--check".to_string()];
        if req.revert {
            check_args.push("-R".to_string());
        }
        check_args.push(patch_path.to_string_lossy().to_string());
        let rendered = render_command_for_log(&git_root, &cfg_parts, &check_args);
        let (c_code, c_out, c_err) = run_git(&git_root, &cfg_parts, &check_args)?;
        let (mut applied_paths, mut skipped_paths, mut conflicted_paths) =
            parse_git_apply_output(&c_out, &c_err);
        applied_paths.sort();
        applied_paths.dedup();
        skipped_paths.sort();
        skipped_paths.dedup();
        conflicted_paths.sort();
        conflicted_paths.dedup();
        return Ok(ApplyGitResult {
            exit_code: c_code,
            applied_paths,
            skipped_paths,
            conflicted_paths,
            stdout: c_out,
            stderr: c_err,
            cmd_for_log: rendered,
        });
    }

    let cmd_for_log = render_command_for_log(&git_root, &cfg_parts, &args);
    let (code, stdout, stderr) = run_git(&git_root, &cfg_parts, &args)?;

    let (mut applied_paths, mut skipped_paths, mut conflicted_paths) =
        parse_git_apply_output(&stdout, &stderr);
    applied_paths.sort();
    applied_paths.dedup();
    skipped_paths.sort();
    skipped_paths.dedup();
    conflicted_paths.sort();
    conflicted_paths.dedup();

    Ok(ApplyGitResult {
        exit_code: code,
        applied_paths,
        skipped_paths,
        conflicted_paths,
        stdout,
        stderr,
        cmd_for_log,
    })
}
|
||||
|
||||
fn resolve_git_root(cwd: &Path) -> io::Result<PathBuf> {
    let out = std::process::Command::new("git")
        .arg("rev-parse")
        .arg("--show-toplevel")
        .current_dir(cwd)
        .output()?;
    let code = out.status.code().unwrap_or(-1);
    if code != 0 {
        return Err(io::Error::other(format!(
            "not a git repository (exit {}): {}",
            code,
            String::from_utf8_lossy(&out.stderr)
        )));
    }
    let root = String::from_utf8_lossy(&out.stdout).trim().to_string();
    Ok(PathBuf::from(root))
}

fn write_temp_patch(diff: &str) -> io::Result<(tempfile::TempDir, PathBuf)> {
    let dir = tempfile::tempdir()?;
    let path = dir.path().join("patch.diff");
    std::fs::write(&path, diff)?;
    Ok((dir, path))
}

fn run_git(cwd: &Path, git_cfg: &[String], args: &[String]) -> io::Result<(i32, String, String)> {
    let mut cmd = std::process::Command::new("git");
    for p in git_cfg {
        cmd.arg(p);
    }
    for a in args {
        cmd.arg(a);
    }
    let out = cmd.current_dir(cwd).output()?;
    let code = out.status.code().unwrap_or(-1);
    let stdout = String::from_utf8_lossy(&out.stdout).into_owned();
    let stderr = String::from_utf8_lossy(&out.stderr).into_owned();
    Ok((code, stdout, stderr))
}

fn quote_shell(s: &str) -> String {
    let simple = s
        .chars()
        .all(|c| c.is_ascii_alphanumeric() || "-_.:/@%+".contains(c));
    if simple {
        s.to_string()
    } else {
        format!("'{}'", s.replace('\'', "'\\''"))
    }
}

fn render_command_for_log(cwd: &Path, git_cfg: &[String], args: &[String]) -> String {
    let mut parts: Vec<String> = Vec::new();
    parts.push("git".to_string());
    for a in git_cfg {
        parts.push(quote_shell(a));
    }
    for a in args {
        parts.push(quote_shell(a));
    }
    format!(
        "(cd {} && {})",
        quote_shell(&cwd.display().to_string()),
        parts.join(" ")
    )
}

pub fn extract_paths_from_patch(diff_text: &str) -> Vec<String> {
    static RE: Lazy<Regex> = Lazy::new(|| {
        Regex::new(r"(?m)^diff --git a/(.*?) b/(.*)$")
            .unwrap_or_else(|e| panic!("invalid regex: {e}"))
    });
    let mut set = std::collections::BTreeSet::new();
    for caps in RE.captures_iter(diff_text) {
        if let Some(a) = caps.get(1).map(|m| m.as_str())
            && a != "/dev/null"
            && !a.trim().is_empty()
        {
            set.insert(a.to_string());
        }
        if let Some(b) = caps.get(2).map(|m| m.as_str())
            && b != "/dev/null"
            && !b.trim().is_empty()
        {
            set.insert(b.to_string());
        }
    }
    set.into_iter().collect()
}

pub fn stage_paths(git_root: &Path, diff: &str) -> io::Result<()> {
    let paths = extract_paths_from_patch(diff);
    let mut existing: Vec<String> = Vec::new();
    for p in paths {
        let joined = git_root.join(&p);
        if std::fs::symlink_metadata(&joined).is_ok() {
            existing.push(p);
        }
    }
    if existing.is_empty() {
        return Ok(());
    }
    let mut cmd = std::process::Command::new("git");
    cmd.arg("add");
    cmd.arg("--");
    for p in &existing {
        cmd.arg(OsStr::new(p));
    }
    let out = cmd.current_dir(git_root).output()?;
    let _code = out.status.code().unwrap_or(-1);
    // We do not hard fail staging; best-effort is OK. Return Ok even on non-zero.
    Ok(())
}

// ============ Parser ported from VS Code (TS) ============

pub fn parse_git_apply_output(
    stdout: &str,
    stderr: &str,
) -> (Vec<String>, Vec<String>, Vec<String>) {
    let combined = [stdout, stderr]
        .iter()
        .filter(|s| !s.is_empty())
        .cloned()
        .collect::<Vec<&str>>()
        .join("\n");

    let mut applied = std::collections::BTreeSet::new();
    let mut skipped = std::collections::BTreeSet::new();
    let mut conflicted = std::collections::BTreeSet::new();
    let mut last_seen_path: Option<String> = None;

    fn add(set: &mut std::collections::BTreeSet<String>, raw: &str) {
        let trimmed = raw.trim();
        if trimmed.is_empty() {
            return;
        }
        let first = trimmed.chars().next().unwrap_or('\0');
        let last = trimmed.chars().last().unwrap_or('\0');
        let unquoted = if (first == '"' || first == '\'') && last == first && trimmed.len() >= 2 {
            &trimmed[1..trimmed.len() - 1]
        } else {
            trimmed
        };
        if !unquoted.is_empty() {
            set.insert(unquoted.to_string());
        }
    }

    static APPLIED_CLEAN: Lazy<Regex> =
        Lazy::new(|| regex_ci("^Applied patch(?: to)?\\s+(?P<path>.+?)\\s+cleanly\\.?$"));
    static APPLIED_CONFLICTS: Lazy<Regex> =
        Lazy::new(|| regex_ci("^Applied patch(?: to)?\\s+(?P<path>.+?)\\s+with conflicts\\.?$"));
    static APPLYING_WITH_REJECTS: Lazy<Regex> = Lazy::new(|| {
        regex_ci("^Applying patch\\s+(?P<path>.+?)\\s+with\\s+\\d+\\s+rejects?\\.{0,3}$")
    });
    static CHECKING_PATCH: Lazy<Regex> =
        Lazy::new(|| regex_ci("^Checking patch\\s+(?P<path>.+?)\\.\\.\\.$"));
    static UNMERGED_LINE: Lazy<Regex> = Lazy::new(|| regex_ci("^U\\s+(?P<path>.+)$"));
    static PATCH_FAILED: Lazy<Regex> =
        Lazy::new(|| regex_ci("^error:\\s+patch failed:\\s+(?P<path>.+?)(?::\\d+)?(?:\\s|$)"));
    static DOES_NOT_APPLY: Lazy<Regex> =
        Lazy::new(|| regex_ci("^error:\\s+(?P<path>.+?):\\s+patch does not apply$"));
    static THREE_WAY_START: Lazy<Regex> = Lazy::new(|| {
        regex_ci("^(?:Performing three-way merge|Falling back to three-way merge)\\.\\.\\.$")
    });
    static THREE_WAY_FAILED: Lazy<Regex> =
        Lazy::new(|| regex_ci("^Failed to perform three-way merge\\.\\.\\.$"));
    static FALLBACK_DIRECT: Lazy<Regex> =
        Lazy::new(|| regex_ci("^Falling back to direct application\\.\\.\\.$"));
    static LACKS_BLOB: Lazy<Regex> = Lazy::new(|| {
        regex_ci(
            "^(?:error: )?repository lacks the necessary blob to (?:perform|fall back on) 3-?way merge\\.?$",
        )
    });
    static INDEX_MISMATCH: Lazy<Regex> =
        Lazy::new(|| regex_ci("^error:\\s+(?P<path>.+?):\\s+does not match index\\b"));
    static NOT_IN_INDEX: Lazy<Regex> =
        Lazy::new(|| regex_ci("^error:\\s+(?P<path>.+?):\\s+does not exist in index\\b"));
    static ALREADY_EXISTS_WT: Lazy<Regex> = Lazy::new(|| {
        regex_ci("^error:\\s+(?P<path>.+?)\\s+already exists in (?:the )?working directory\\b")
    });
    static FILE_EXISTS: Lazy<Regex> =
        Lazy::new(|| regex_ci("^error:\\s+patch failed:\\s+(?P<path>.+?)\\s+File exists"));
    static RENAMED_DELETED: Lazy<Regex> =
        Lazy::new(|| regex_ci("^error:\\s+path\\s+(?P<path>.+?)\\s+has been renamed\\/deleted"));
    static CANNOT_APPLY_BINARY: Lazy<Regex> = Lazy::new(|| {
        regex_ci(
            "^error:\\s+cannot apply binary patch to\\s+['\\\"]?(?P<path>.+?)['\\\"]?\\s+without full index line$",
        )
    });
    static BINARY_DOES_NOT_APPLY: Lazy<Regex> = Lazy::new(|| {
        regex_ci("^error:\\s+binary patch does not apply to\\s+['\\\"]?(?P<path>.+?)['\\\"]?$")
    });
    static BINARY_INCORRECT_RESULT: Lazy<Regex> = Lazy::new(|| {
        regex_ci(
            "^error:\\s+binary patch to\\s+['\\\"]?(?P<path>.+?)['\\\"]?\\s+creates incorrect result\\b",
        )
    });
    static CANNOT_READ_CURRENT: Lazy<Regex> = Lazy::new(|| {
        regex_ci("^error:\\s+cannot read the current contents of\\s+['\\\"]?(?P<path>.+?)['\\\"]?$")
    });
    static SKIPPED_PATCH: Lazy<Regex> =
        Lazy::new(|| regex_ci("^Skipped patch\\s+['\\\"]?(?P<path>.+?)['\\\"]\\.$"));
    static CANNOT_MERGE_BINARY_WARN: Lazy<Regex> = Lazy::new(|| {
        regex_ci(
            "^warning:\\s*Cannot merge binary files:\\s+(?P<path>.+?)\\s+\\(ours\\s+vs\\.\\s+theirs\\)",
        )
    });

    for raw_line in combined.lines() {
        let line = raw_line.trim();
        if line.is_empty() {
            continue;
        }

        // === "Checking patch <path>..." tracking ===
        if let Some(c) = CHECKING_PATCH.captures(line) {
            if let Some(m) = c.name("path") {
                last_seen_path = Some(m.as_str().to_string());
            }
            continue;
        }

        // === Status lines ===
        if let Some(c) = APPLIED_CLEAN.captures(line) {
            if let Some(m) = c.name("path") {
                add(&mut applied, m.as_str());
                let p = applied.iter().next_back().cloned();
                if let Some(p) = p {
                    conflicted.remove(&p);
                    skipped.remove(&p);
                    last_seen_path = Some(p);
                }
            }
            continue;
        }
        if let Some(c) = APPLIED_CONFLICTS.captures(line) {
            if let Some(m) = c.name("path") {
                add(&mut conflicted, m.as_str());
                let p = conflicted.iter().next_back().cloned();
                if let Some(p) = p {
                    applied.remove(&p);
                    skipped.remove(&p);
                    last_seen_path = Some(p);
                }
            }
            continue;
        }
        if let Some(c) = APPLYING_WITH_REJECTS.captures(line) {
            if let Some(m) = c.name("path") {
                add(&mut conflicted, m.as_str());
                let p = conflicted.iter().next_back().cloned();
                if let Some(p) = p {
                    applied.remove(&p);
                    skipped.remove(&p);
                    last_seen_path = Some(p);
                }
            }
            continue;
        }

        // === "U <path>" after conflicts ===
        if let Some(c) = UNMERGED_LINE.captures(line) {
            if let Some(m) = c.name("path") {
                add(&mut conflicted, m.as_str());
                let p = conflicted.iter().next_back().cloned();
                if let Some(p) = p {
                    applied.remove(&p);
                    skipped.remove(&p);
                    last_seen_path = Some(p);
                }
            }
            continue;
        }

        // === Early hints ===
        if PATCH_FAILED.is_match(line) || DOES_NOT_APPLY.is_match(line) {
            if let Some(c) = PATCH_FAILED
                .captures(line)
                .or_else(|| DOES_NOT_APPLY.captures(line))
                && let Some(m) = c.name("path")
            {
                add(&mut skipped, m.as_str());
                last_seen_path = Some(m.as_str().to_string());
            }
            continue;
        }

        // === Ignore narration ===
        if THREE_WAY_START.is_match(line) || FALLBACK_DIRECT.is_match(line) {
            continue;
        }

        // === 3-way failed entirely; attribute to last_seen_path ===
        if THREE_WAY_FAILED.is_match(line) || LACKS_BLOB.is_match(line) {
            if let Some(p) = last_seen_path.clone() {
                add(&mut skipped, &p);
                applied.remove(&p);
                conflicted.remove(&p);
            }
            continue;
        }

        // === Skips / I/O problems ===
        if let Some(c) = INDEX_MISMATCH
            .captures(line)
            .or_else(|| NOT_IN_INDEX.captures(line))
            .or_else(|| ALREADY_EXISTS_WT.captures(line))
            .or_else(|| FILE_EXISTS.captures(line))
            .or_else(|| RENAMED_DELETED.captures(line))
            .or_else(|| CANNOT_APPLY_BINARY.captures(line))
            .or_else(|| BINARY_DOES_NOT_APPLY.captures(line))
            .or_else(|| BINARY_INCORRECT_RESULT.captures(line))
            .or_else(|| CANNOT_READ_CURRENT.captures(line))
            .or_else(|| SKIPPED_PATCH.captures(line))
        {
            if let Some(m) = c.name("path") {
                add(&mut skipped, m.as_str());
                let p_now = skipped.iter().next_back().cloned();
                if let Some(p) = p_now {
                    applied.remove(&p);
                    conflicted.remove(&p);
                    last_seen_path = Some(p);
                }
            }
            continue;
        }

        // === Warnings that imply conflicts ===
        if let Some(c) = CANNOT_MERGE_BINARY_WARN.captures(line) {
            if let Some(m) = c.name("path") {
                add(&mut conflicted, m.as_str());
                let p = conflicted.iter().next_back().cloned();
                if let Some(p) = p {
                    applied.remove(&p);
                    skipped.remove(&p);
                    last_seen_path = Some(p);
                }
            }
            continue;
        }
    }

    // Final precedence: conflicts > applied > skipped
    for p in conflicted.iter() {
        applied.remove(p);
        skipped.remove(p);
    }
    for p in applied.iter() {
        skipped.remove(p);
    }

    (
        applied.into_iter().collect(),
        skipped.into_iter().collect(),
        conflicted.into_iter().collect(),
    )
}

fn regex_ci(pat: &str) -> Regex {
    Regex::new(&format!("(?i){pat}")).unwrap_or_else(|e| panic!("invalid regex: {e}"))
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;
    use std::sync::Mutex;
    use std::sync::OnceLock;

    fn env_lock() -> &'static Mutex<()> {
        static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
        LOCK.get_or_init(|| Mutex::new(()))
    }

    fn run(cwd: &Path, args: &[&str]) -> (i32, String, String) {
        let out = std::process::Command::new(args[0])
            .args(&args[1..])
            .current_dir(cwd)
            .output()
            .expect("spawn ok");
        (
            out.status.code().unwrap_or(-1),
            String::from_utf8_lossy(&out.stdout).into_owned(),
            String::from_utf8_lossy(&out.stderr).into_owned(),
        )
    }

    fn init_repo() -> tempfile::TempDir {
        let dir = tempfile::tempdir().expect("tempdir");
        let root = dir.path();
        // git init and minimal identity
        let _ = run(root, &["git", "init"]);
        let _ = run(root, &["git", "config", "user.email", "codex@example.com"]);
        let _ = run(root, &["git", "config", "user.name", "Codex"]);
        dir
    }

    #[test]
    fn apply_add_success() {
        let _g = env_lock().lock().unwrap();
        let repo = init_repo();
        let root = repo.path();

        let diff = "diff --git a/hello.txt b/hello.txt\nnew file mode 100644\n--- /dev/null\n+++ b/hello.txt\n@@ -0,0 +1,2 @@\n+hello\n+world\n";
        let req = ApplyGitRequest {
            cwd: root.to_path_buf(),
            diff: diff.to_string(),
            revert: false,
            preflight: false,
        };
        let r = apply_git_patch(&req).expect("run apply");
        assert_eq!(r.exit_code, 0, "exit code 0");
        // File exists now
        assert!(root.join("hello.txt").exists());
    }

    #[test]
    fn apply_modify_conflict() {
        let _g = env_lock().lock().unwrap();
        let repo = init_repo();
        let root = repo.path();
        // seed file and commit
        std::fs::write(root.join("file.txt"), "line1\nline2\nline3\n").unwrap();
        let _ = run(root, &["git", "add", "file.txt"]);
        let _ = run(root, &["git", "commit", "-m", "seed"]);
        // local edit (unstaged)
        std::fs::write(root.join("file.txt"), "line1\nlocal2\nline3\n").unwrap();
        // patch wants to change the same line differently
        let diff = "diff --git a/file.txt b/file.txt\n--- a/file.txt\n+++ b/file.txt\n@@ -1,3 +1,3 @@\n line1\n-line2\n+remote2\n line3\n";
        let req = ApplyGitRequest {
            cwd: root.to_path_buf(),
            diff: diff.to_string(),
            revert: false,
            preflight: false,
        };
        let r = apply_git_patch(&req).expect("run apply");
        assert_ne!(r.exit_code, 0, "non-zero exit on conflict");
    }

    #[test]
    fn apply_modify_skipped_missing_index() {
        let _g = env_lock().lock().unwrap();
        let repo = init_repo();
        let root = repo.path();
        // Try to modify a file that is not in the index
        let diff = "diff --git a/ghost.txt b/ghost.txt\n--- a/ghost.txt\n+++ b/ghost.txt\n@@ -1,1 +1,1 @@\n-old\n+new\n";
        let req = ApplyGitRequest {
            cwd: root.to_path_buf(),
            diff: diff.to_string(),
            revert: false,
            preflight: false,
        };
        let r = apply_git_patch(&req).expect("run apply");
        assert_ne!(r.exit_code, 0, "non-zero exit on missing index");
    }

    #[test]
    fn apply_then_revert_success() {
        let _g = env_lock().lock().unwrap();
        let repo = init_repo();
        let root = repo.path();
        // Seed file and commit original content
        std::fs::write(root.join("file.txt"), "orig\n").unwrap();
        let _ = run(root, &["git", "add", "file.txt"]);
        let _ = run(root, &["git", "commit", "-m", "seed"]);

        // Forward patch: orig -> ORIG
        let diff = "diff --git a/file.txt b/file.txt\n--- a/file.txt\n+++ b/file.txt\n@@ -1,1 +1,1 @@\n-orig\n+ORIG\n";
        let apply_req = ApplyGitRequest {
            cwd: root.to_path_buf(),
            diff: diff.to_string(),
            revert: false,
            preflight: false,
        };
        let res_apply = apply_git_patch(&apply_req).expect("apply ok");
        assert_eq!(res_apply.exit_code, 0, "forward apply succeeded");
        let after_apply = std::fs::read_to_string(root.join("file.txt")).unwrap();
        assert_eq!(after_apply, "ORIG\n");

        // Revert patch: ORIG -> orig (stage paths first; engine handles it)
        let revert_req = ApplyGitRequest {
            cwd: root.to_path_buf(),
            diff: diff.to_string(),
            revert: true,
            preflight: false,
        };
        let res_revert = apply_git_patch(&revert_req).expect("revert ok");
        assert_eq!(res_revert.exit_code, 0, "revert apply succeeded");
        let after_revert = std::fs::read_to_string(root.join("file.txt")).unwrap();
        assert_eq!(after_revert, "orig\n");
    }

    #[test]
    fn preflight_blocks_partial_changes() {
        let _g = env_lock().lock().unwrap();
        let repo = init_repo();
        let root = repo.path();
        // Build a multi-file diff: one valid add (ok.txt) and one invalid modify (ghost.txt)
        let diff = "diff --git a/ok.txt b/ok.txt\nnew file mode 100644\n--- /dev/null\n+++ b/ok.txt\n@@ -0,0 +1,2 @@\n+alpha\n+beta\n\n\
            diff --git a/ghost.txt b/ghost.txt\n--- a/ghost.txt\n+++ b/ghost.txt\n@@ -1,1 +1,1 @@\n-old\n+new\n";

        // 1) With preflight enabled, nothing should be changed (even though ok.txt could be added)
        let req1 = ApplyGitRequest {
            cwd: root.to_path_buf(),
            diff: diff.to_string(),
            revert: false,
            preflight: true,
        };
        let r1 = apply_git_patch(&req1).expect("preflight apply");
        assert_ne!(r1.exit_code, 0, "preflight reports failure");
        assert!(
            !root.join("ok.txt").exists(),
            "preflight must prevent adding ok.txt"
        );
        assert!(
            r1.cmd_for_log.contains("--check"),
            "preflight path recorded --check"
        );

        // 2) Without preflight, we should see no --check in the executed command
        let req2 = ApplyGitRequest {
            cwd: root.to_path_buf(),
            diff: diff.to_string(),
            revert: false,
            preflight: false,
        };
        let r2 = apply_git_patch(&req2).expect("direct apply");
        assert_ne!(r2.exit_code, 0, "apply is expected to fail overall");
        assert!(
            !r2.cmd_for_log.contains("--check"),
            "non-preflight path should not use --check"
        );
    }
}
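To make the new crate's surface concrete, here is a minimal sketch of how a caller might drive `apply_git_patch` with a preflight pass first. The calling binary and the `change.patch` file are hypothetical; only the types, fields, and semantics come from the diff above.

```rust
use codex_git_apply::ApplyGitRequest;
use codex_git_apply::apply_git_patch;

fn main() -> std::io::Result<()> {
    let diff = std::fs::read_to_string("change.patch")?; // hypothetical patch file
    let cwd = std::env::current_dir()?;

    // Dry run first: `preflight` maps to `git apply --check` and touches nothing.
    let check = apply_git_patch(&ApplyGitRequest {
        cwd: cwd.clone(),
        diff: diff.clone(),
        revert: false,
        preflight: true,
    })?;
    if check.exit_code != 0 {
        eprintln!("patch would not apply cleanly:\n{}", check.stderr);
        return Ok(());
    }

    // Real apply: `git apply --3way`, with per-path outcomes parsed from git's output.
    let result = apply_git_patch(&ApplyGitRequest {
        cwd,
        diff,
        revert: false,
        preflight: false,
    })?;
    println!("applied:    {:?}", result.applied_paths);
    println!("skipped:    {:?}", result.skipped_paths);
    println!("conflicted: {:?}", result.conflicted_paths);
    Ok(())
}
```
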
@@ -5,7 +5,6 @@ help:
    just -l

# `codex`
alias c := codex
codex *args:
    cargo run --bin codex -- "$@"

@@ -28,9 +27,6 @@ fmt:
fix *args:
    cargo clippy --fix --all-features --tests --allow-dirty "$@"

clippy:
    cargo clippy --all-features --tests "$@"

install:
    rustup show active-toolchain
    cargo fetch

@@ -274,8 +274,7 @@ impl McpClient {
                 err.error.code, err.error.message
             ))),
             other => Err(anyhow!(format!(
-                "unexpected message variant received in reply path: {:?}",
-                other
+                "unexpected message variant received in reply path: {other:?}"
             ))),
         }
     }

@@ -1273,7 +1273,6 @@ fn derive_config_from_params(
         include_view_image_tool: None,
         show_raw_agent_reasoning: None,
         tools_web_search_request: None,
-        enable_git_worktree: None,
     };

     let cli_overrides = cli_overrides

@@ -165,7 +165,6 @@ impl CodexToolCallParam {
         include_view_image_tool: None,
         show_raw_agent_reasoning: None,
         tools_web_search_request: None,
-        enable_git_worktree: None,
     };

     let cli_overrides = cli_overrides

@@ -278,7 +278,6 @@ async fn run_codex_tool_session_inner(
             | EventMsg::PlanUpdate(_)
             | EventMsg::TurnAborted(_)
             | EventMsg::ConversationPath(_)
-            | EventMsg::WorktreeRemoved(_)
             | EventMsg::UserMessage(_)
             | EventMsg::ShutdownComplete
             | EventMsg::EnteredReviewMode(_)

@@ -286,7 +286,6 @@ mod tests {
             history_entry_count: 1000,
             initial_messages: None,
             rollout_path: rollout_file.path().to_path_buf(),
-            worktree_path: None,
         }),
     };

@@ -322,7 +321,6 @@ mod tests {
         history_entry_count: 1000,
         initial_messages: None,
         rollout_path: rollout_file.path().to_path_buf(),
-        worktree_path: None,
     };
     let event = Event {
         id: "1".to_string(),

@@ -1,21 +0,0 @@
[package]
edition = "2024"
name = "codex-process-hardening"
version = { workspace = true }

[lib]
name = "codex_process_hardening"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
[target.'cfg(target_os = "linux")'.dependencies]
libc = { workspace = true }

[target.'cfg(target_os = "android")'.dependencies]
libc = { workspace = true }

[target.'cfg(target_os = "macos")'.dependencies]
libc = { workspace = true }
@@ -1,7 +0,0 @@
# codex-process-hardening

This crate provides `pre_main_hardening()`, which is designed to be called pre-`main()` (using `#[ctor::ctor]`) to perform various process hardening steps, such as

- disabling core dumps
- disabling ptrace attach on Linux and macOS
- removing dangerous environment variables such as `LD_PRELOAD` and `DYLD_*`
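For reference, the hook pattern that README describes is a `#[ctor::ctor]` function that runs before `main()`; the proxy's old `main.rs` (removed later in this diff) used exactly this shape:

```rust
// Runs before main() via the ctor crate, so hardening (no core dumps,
// no ptrace attach, scrubbed LD_PRELOAD/DYLD_* variables) is applied
// before any other code in the process executes.
#[ctor::ctor]
fn pre_main() {
    codex_process_hardening::pre_main_hardening();
}
```
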
@@ -66,7 +66,7 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
             .status()
             .with_context(|| format!("Failed to invoke Prettier at {}", prettier_bin.display()))?;
         if !status.success() {
-            return Err(anyhow!("Prettier failed with status {}", status));
+            return Err(anyhow!("Prettier failed with status {status}"));
         }
     }

@@ -174,9 +174,6 @@ pub enum Op {
     /// Request a code review from the agent.
     Review { review_request: ReviewRequest },

-    /// Remove the git worktree associated with the current session, if any.
-    RemoveWorktree,
-
     /// Request to shut down codex instance.
     Shutdown,
 }
@@ -522,9 +519,6 @@ pub enum EventMsg {

     /// Exited review mode with an optional final result to apply.
     ExitedReviewMode(ExitedReviewModeEvent),
-
-    /// Confirmation that a git worktree has been removed.
-    WorktreeRemoved(WorktreeRemovedEvent),
 }

 #[derive(Debug, Clone, Deserialize, Serialize, TS)]
@@ -1209,16 +1203,6 @@ pub struct SessionConfiguredEvent {
     pub initial_messages: Option<Vec<EventMsg>>,

     pub rollout_path: PathBuf,
-
-    /// When set, the session is running inside this git worktree checkout.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub worktree_path: Option<PathBuf>,
 }

-#[derive(Debug, Clone, Deserialize, Serialize, TS)]
-pub struct WorktreeRemovedEvent {
-    /// Filesystem path that was removed, relative to the host running Codex.
-    pub path: PathBuf,
-}
-
 /// User's decision in response to an ExecApprovalRequest.
@@ -1302,7 +1286,6 @@ mod tests {
             history_entry_count: 0,
             initial_messages: None,
             rollout_path: rollout_file.path().to_path_buf(),
-            worktree_path: None,
         }),
     };

@@ -8,7 +8,7 @@ name = "codex_responses_api_proxy"
 path = "src/lib.rs"

 [[bin]]
-name = "codex-responses-api-proxy"
+name = "responses-api-proxy"
 path = "src/main.rs"

 [lints]
@@ -17,11 +17,11 @@ workspace = true
 [dependencies]
 anyhow = { workspace = true }
 clap = { workspace = true, features = ["derive"] }
-codex-process-hardening = { workspace = true }
-ctor = { workspace = true }
+codex-arg0 = { workspace = true }
 libc = { workspace = true }
 reqwest = { workspace = true, features = ["blocking", "json", "rustls-tls"] }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true }
 tiny_http = { workspace = true }
+tokio = { workspace = true }
 zeroize = { workspace = true }

@@ -4,12 +4,12 @@ A strict HTTP proxy that only forwards `POST` requests to `/v1/responses` to the

 ## Expected Usage

-**IMPORTANT:** `codex-responses-api-proxy` is designed to be run by a privileged user with access to `OPENAI_API_KEY` so that an unprivileged user cannot inspect or tamper with the process. Though if `--http-shutdown` is specified, an unprivileged user _can_ make a `GET` request to `/shutdown` to shut down the server, as an unprivileged user could not send `SIGTERM` to kill the process.
+**IMPORTANT:** This is designed to be used with `CODEX_SECURE_MODE=1` so that an unprivileged user cannot inspect or tamper with this process. Though if `--http-shutdown` is specified, an unprivileged user _can_ shut down the server.

-A privileged user (i.e., `root` or a user with `sudo`) who has access to `OPENAI_API_KEY` would run the following to start the server, as `codex-responses-api-proxy` reads the auth token from `stdin`:
+A privileged user (i.e., `root` or a user with `sudo`) who has access to `OPENAI_API_KEY` would run the following to start the server:

 ```shell
-printenv OPENAI_API_KEY | codex-responses-api-proxy --http-shutdown --server-info /tmp/server-info.json
+printenv OPENAI_API_KEY | CODEX_SECURE_MODE=1 codex responses-api-proxy --http-shutdown --server-info /tmp/server-info.json
 ```

 A non-privileged user would then run Codex as follows, specifying the `model_provider` dynamically:
@@ -22,7 +22,7 @@ codex exec -c "model_providers.openai-proxy={ name = 'OpenAI Proxy', base_url =
 'Your prompt here'
 ```

-When the unprivileged user was finished, they could shut down the server using `curl` (since `kill -SIGTERM` is not an option):
+When the unprivileged user was finished, they could shut down the server using `curl` (since `kill -9` is not an option):

 ```shell
 curl --fail --silent --show-error "${PROXY_BASE_URL}/shutdown"
@@ -30,17 +30,17 @@ curl --fail --silent --show-error "${PROXY_BASE_URL}/shutdown"

 ## Behavior

-- Reads the API key from `stdin`. All callers should pipe the key in (for example, `printenv OPENAI_API_KEY | codex-responses-api-proxy`).
+- Reads the API key from `stdin`. All callers should pipe the key in (for example, `printenv OPENAI_API_KEY | codex responses-api-proxy`).
 - Formats the header value as `Bearer <key>` and attempts to `mlock(2)` the memory holding that header so it is not swapped to disk.
 - Listens on the provided port or an ephemeral port if `--port` is not specified.
 - Accepts exactly `POST /v1/responses` (no query string). The request body is forwarded to `https://api.openai.com/v1/responses` with `Authorization: Bearer <key>` set. All original request headers (except any incoming `Authorization`) are forwarded upstream. For other requests, it responds with `403`.
 - Optionally writes a single-line JSON file with server info, currently `{ "port": <u16> }`.
-- Optional `--http-shutdown` enables `GET /shutdown` to terminate the process with exit code 0. This allows one user (e.g., `root`) to start the proxy and another unprivileged user on the host to shut it down.
+- Optional `--http-shutdown` enables `GET /shutdown` to terminate the process with exit code 0. This allows one user (e.g., root) to start the proxy and another unprivileged user on the host to shut it down.

 ## CLI

 ```
-codex-responses-api-proxy [--port <PORT>] [--server-info <FILE>] [--http-shutdown]
+responses-api-proxy [--port <PORT>] [--server-info <FILE>] [--http-shutdown]
 ```

 - `--port <PORT>`: Port to bind on `127.0.0.1`. If omitted, an ephemeral port is chosen.
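A minimal sketch of the accept rule those "Behavior" bullets describe, written against `tiny_http` (which this crate's `Cargo.toml` above pulls in). The upstream forwarding is elided, and the internals here are an illustration rather than the proxy's actual code:

```rust
use tiny_http::{Method, Response, Server};

fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Bind an ephemeral port on 127.0.0.1, as when --port is omitted.
    let server = Server::http("127.0.0.1:0")?;
    for request in server.incoming_requests() {
        // Exactly `POST /v1/responses`, no query string; everything else gets 403.
        if *request.method() != Method::Post || request.url() != "/v1/responses" {
            let _ = request.respond(Response::empty(403));
            continue;
        }
        // ...forward body and headers upstream with `Authorization: Bearer <key>`,
        // then mirror the upstream status and content-type back to the client...
        let _ = request.respond(Response::empty(200));
    }
    Ok(())
}
```
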
@@ -51,19 +51,3 @@ codex-responses-api-proxy [--port <PORT>] [--server-info <FILE>] [--http-shutdow

 - Only `POST /v1/responses` is permitted. No query strings are allowed.
 - All request headers are forwarded to the upstream call (aside from overriding `Authorization`). Response status and content-type are mirrored from upstream.
-
-## Hardening Details
-
-Care is taken to restrict access/copying to the value of `OPENAI_API_KEY` retained in memory:
-
-- We leverage [`codex_process_hardening`](https://github.com/openai/codex/blob/main/codex-rs/process-hardening/README.md) so `codex-responses-api-proxy` is run with standard process-hardening techniques.
-- At startup, we allocate a `1024`-byte buffer on the stack and write `"Bearer "` as the first `7` bytes.
-- We then read from `stdin`, copying the contents into the buffer after `"Bearer "`.
-- After verifying the key matches `/^[a-zA-Z0-9_-]+$/` (and does not exceed the buffer), we create a `String` from that buffer (so the data is now on the heap).
-- We zero out the stack-allocated buffer using https://crates.io/crates/zeroize so it is not optimized away by the compiler.
-- We invoke `.leak()` on the `String` so we can treat its contents as a `&'static str`, as it will live for the rest of the process.
-- On UNIX, we `mlock(2)` the memory backing the `&'static str`.
-- When using the `&'static str` to build an HTTP request, we use `HeaderValue::from_static()` to avoid copying the `&str`.
-- We also invoke `.set_sensitive(true)` on the `HeaderValue`, which in theory indicates to other parts of the HTTP stack that the header should be treated with "special care" to avoid leakage:
-
-  https://github.com/hyperium/http/blob/439d1c50d71e3be3204b6c4a1bf2255ed78e1f93/src/header/value.rs#L346-L376
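Condensed into code, the flow those bullets describe looks roughly like this. Buffer handling is simplified and the function name is illustrative, so treat it as a sketch of the idea rather than the proxy's actual implementation:

```rust
use std::io::Read;
use zeroize::Zeroize;

fn read_bearer_header() -> anyhow::Result<&'static str> {
    // Stack buffer primed with the "Bearer " prefix (7 bytes).
    let mut buf = [0u8; 1024];
    buf[..7].copy_from_slice(b"Bearer ");
    let n = std::io::stdin().read(&mut buf[7..])?; // key arrives via stdin
    let end = 7 + n;
    // (validation against /^[a-zA-Z0-9_-]+$/ and newline trimming elided)

    // Copy to the heap, then wipe the stack copy; zeroize keeps the
    // compiler from optimizing the wipe away.
    let header = String::from_utf8(buf[..end].to_vec())?;
    buf.zeroize();

    // Leak to &'static str so it lives for the rest of the process, then
    // pin the backing memory so it cannot be swapped to disk.
    let header: &'static str = header.leak();
    #[cfg(unix)]
    unsafe {
        let _ = libc::mlock(header.as_ptr().cast(), header.len());
    }
    Ok(header)
}
```
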
@@ -1,13 +0,0 @@
# @openai/codex-responses-api-proxy

<p align="center"><code>npm i -g @openai/codex-responses-api-proxy</code> to install <code>codex-responses-api-proxy</code></p>

This package distributes the prebuilt [Codex Responses API proxy binary](https://github.com/openai/codex/tree/main/codex-rs/responses-api-proxy) for macOS, Linux, and Windows.

To see available options, run:

```
node ./bin/codex-responses-api-proxy.js --help
```

Refer to [`codex-rs/responses-api-proxy/README.md`](https://github.com/openai/codex/blob/main/codex-rs/responses-api-proxy/README.md) for detailed documentation.
@@ -1,97 +0,0 @@
#!/usr/bin/env node
// Entry point for the Codex responses API proxy binary.

import { spawn } from "node:child_process";
import path from "path";
import { fileURLToPath } from "url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

function determineTargetTriple(platform, arch) {
  switch (platform) {
    case "linux":
    case "android":
      if (arch === "x64") {
        return "x86_64-unknown-linux-musl";
      }
      if (arch === "arm64") {
        return "aarch64-unknown-linux-musl";
      }
      break;
    case "darwin":
      if (arch === "x64") {
        return "x86_64-apple-darwin";
      }
      if (arch === "arm64") {
        return "aarch64-apple-darwin";
      }
      break;
    case "win32":
      if (arch === "x64") {
        return "x86_64-pc-windows-msvc";
      }
      if (arch === "arm64") {
        return "aarch64-pc-windows-msvc";
      }
      break;
    default:
      break;
  }
  return null;
}

const targetTriple = determineTargetTriple(process.platform, process.arch);
if (!targetTriple) {
  throw new Error(
    `Unsupported platform: ${process.platform} (${process.arch})`,
  );
}

const vendorRoot = path.join(__dirname, "..", "vendor");
const archRoot = path.join(vendorRoot, targetTriple);
const binaryBaseName = "codex-responses-api-proxy";
const binaryPath = path.join(
  archRoot,
  binaryBaseName,
  process.platform === "win32" ? `${binaryBaseName}.exe` : binaryBaseName,
);

const child = spawn(binaryPath, process.argv.slice(2), {
  stdio: "inherit",
});

child.on("error", (err) => {
  console.error(err);
  process.exit(1);
});

const forwardSignal = (signal) => {
  if (!child.killed) {
    try {
      child.kill(signal);
    } catch {
      /* ignore */
    }
  }
};

["SIGINT", "SIGTERM", "SIGHUP"].forEach((sig) => {
  process.on(sig, () => forwardSignal(sig));
});

const childResult = await new Promise((resolve) => {
  child.on("exit", (code, signal) => {
    if (signal) {
      resolve({ type: "signal", signal });
    } else {
      resolve({ type: "code", exitCode: code ?? 1 });
    }
  });
});

if (childResult.type === "signal") {
  process.kill(process.pid, childResult.signal);
} else {
  process.exit(childResult.exitCode);
}
@@ -1,21 +0,0 @@
{
  "name": "@openai/codex-responses-api-proxy",
  "version": "0.0.0-dev",
  "license": "Apache-2.0",
  "bin": {
    "codex-responses-api-proxy": "bin/codex-responses-api-proxy.js"
  },
  "type": "module",
  "engines": {
    "node": ">=16"
  },
  "files": [
    "bin",
    "vendor"
  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/openai/codex.git",
    "directory": "codex-rs/responses-api-proxy/npm"
  }
}
@@ -6,7 +6,6 @@ use std::net::TcpListener;
 use std::path::Path;
 use std::path::PathBuf;
 use std::sync::Arc;
-use std::time::Duration;

 use anyhow::Context;
 use anyhow::Result;

@@ -63,8 +62,6 @@ pub fn run_main(args: Args) -> Result<()> {
         .map_err(|err| anyhow!("creating HTTP server: {err}"))?;
     let client = Arc::new(
         Client::builder()
-            // Disable reqwest's 30s default so long-lived response streams keep flowing.
-            .timeout(None::<Duration>)
             .build()
             .context("building reqwest client")?,
     );

@@ -1,12 +1,14 @@
+use anyhow::Context;
 use clap::Parser;
+use codex_arg0::arg0_dispatch_or_else;
 use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;

-#[ctor::ctor]
-fn pre_main() {
-    codex_process_hardening::pre_main_hardening();
-}
-
 pub fn main() -> anyhow::Result<()> {
-    let args = ResponsesApiProxyArgs::parse();
-    codex_responses_api_proxy::run_main(args)
+    arg0_dispatch_or_else(|_codex_linux_sandbox_exe| async move {
+        let args = ResponsesApiProxyArgs::parse();
+        tokio::task::spawn_blocking(move || codex_responses_api_proxy::run_main(args))
+            .await
+            .context("responses-api-proxy blocking task panicked")??;
+        Ok(())
+    })
 }

@@ -54,16 +54,9 @@
         ));
     }

-    if let Err(err) = validate_auth_header_bytes(&buf[AUTH_HEADER_PREFIX.len()..total]) {
-        buf.zeroize();
-        return Err(err);
-    }
-
     let header_str = match std::str::from_utf8(&buf[..total]) {
         Ok(value) => value,
         Err(err) => {
-            // In theory, validate_auth_header_bytes() should have caught
-            // any invalid UTF-8 sequences, but just in case...
             buf.zeroize();
             return Err(err).context("reading Authorization header from stdin as UTF-8");
         }
@@ -120,21 +113,6 @@ fn mlock_str(value: &str) {
 #[cfg(not(unix))]
 fn mlock_str(_value: &str) {}

-/// The key should match /^[A-Za-z0-9\-_]+$/. Ensure there is no funny business
-/// with NUL characters and whatnot.
-fn validate_auth_header_bytes(key_bytes: &[u8]) -> Result<()> {
-    if key_bytes
-        .iter()
-        .all(|byte| byte.is_ascii_alphanumeric() || matches!(byte, b'-' | b'_'))
-    {
-        return Ok(());
-    }
-
-    Err(anyhow!(
-        "OPENAI_API_KEY may only contain ASCII letters, numbers, '-' or '_'"
-    ))
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -180,7 +158,7 @@ mod tests {
         })
         .unwrap_err();
         let message = format!("{err:#}");
-        assert!(message.contains("OPENAI_API_KEY is too large to fit in the 512-byte buffer"));
+        assert!(message.contains("too large"));
     }

     #[test]
@@ -202,23 +180,6 @@ mod tests {
         .unwrap_err();

         let message = format!("{err:#}");
-        assert!(
-            message.contains("OPENAI_API_KEY may only contain ASCII letters, numbers, '-' or '_'")
-        );
-    }
-
-    #[test]
-    fn errors_on_invalid_characters() {
-        let err = read_auth_header_with(|buf| {
-            let data = b"sk-abc!23";
-            buf[..data.len()].copy_from_slice(data);
-            Ok(data.len())
-        })
-        .unwrap_err();
-
-        let message = format!("{err:#}");
-        assert!(
-            message.contains("OPENAI_API_KEY may only contain ASCII letters, numbers, '-' or '_'")
-        );
+        assert!(message.contains("UTF-8"));
     }
 }

@@ -16,15 +16,6 @@ rmcp = { version = "0.7.0", default-features = false, features = [
     "schemars",
     "server",
     "transport-child-process",
-    "transport-streamable-http-client-reqwest",
-    "transport-streamable-http-server",
 ] }
-axum = { version = "0.8", default-features = false, features = ["http1", "tokio"] }
-futures = { version = "0.3", default-features = false, features = ["std"] }
-reqwest = { version = "0.12", default-features = false, features = [
-    "json",
-    "stream",
-    "rustls-tls",
-] }
 serde = { version = "1", features = ["derive"] }
 serde_json = "1"

@@ -1,142 +0,0 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;

use rmcp::ErrorData as McpError;
use rmcp::ServiceExt;
use rmcp::handler::server::ServerHandler;
use rmcp::model::CallToolRequestParam;
use rmcp::model::CallToolResult;
use rmcp::model::JsonObject;
use rmcp::model::ListToolsResult;
use rmcp::model::PaginatedRequestParam;
use rmcp::model::ServerCapabilities;
use rmcp::model::ServerInfo;
use rmcp::model::Tool;
use serde::Deserialize;
use serde_json::json;
use tokio::task;

#[derive(Clone)]
struct TestToolServer {
    tools: Arc<Vec<Tool>>,
}
pub fn stdio() -> (tokio::io::Stdin, tokio::io::Stdout) {
    (tokio::io::stdin(), tokio::io::stdout())
}
impl TestToolServer {
    fn new() -> Self {
        let tools = vec![Self::echo_tool()];
        Self {
            tools: Arc::new(tools),
        }
    }

    fn echo_tool() -> Tool {
        #[expect(clippy::expect_used)]
        let schema: JsonObject = serde_json::from_value(json!({
            "type": "object",
            "properties": {
                "message": { "type": "string" },
                "env_var": { "type": "string" }
            },
            "required": ["message"],
            "additionalProperties": false
        }))
        .expect("echo tool schema should deserialize");

        Tool::new(
            Cow::Borrowed("echo"),
            Cow::Borrowed("Echo back the provided message and include environment data."),
            Arc::new(schema),
        )
    }
}

#[derive(Deserialize)]
struct EchoArgs {
    message: String,
    #[allow(dead_code)]
    env_var: Option<String>,
}

impl ServerHandler for TestToolServer {
    fn get_info(&self) -> ServerInfo {
        ServerInfo {
            capabilities: ServerCapabilities::builder()
                .enable_tools()
                .enable_tool_list_changed()
                .build(),
            ..ServerInfo::default()
        }
    }

    fn list_tools(
        &self,
        _request: Option<PaginatedRequestParam>,
        _context: rmcp::service::RequestContext<rmcp::service::RoleServer>,
    ) -> impl std::future::Future<Output = Result<ListToolsResult, McpError>> + Send + '_ {
        let tools = self.tools.clone();
        async move {
            Ok(ListToolsResult {
                tools: (*tools).clone(),
                next_cursor: None,
            })
        }
    }

    async fn call_tool(
        &self,
        request: CallToolRequestParam,
        _context: rmcp::service::RequestContext<rmcp::service::RoleServer>,
    ) -> Result<CallToolResult, McpError> {
        match request.name.as_ref() {
            "echo" => {
                let args: EchoArgs = match request.arguments {
                    Some(arguments) => serde_json::from_value(serde_json::Value::Object(
                        arguments.into_iter().collect(),
                    ))
                    .map_err(|err| McpError::invalid_params(err.to_string(), None))?,
                    None => {
                        return Err(McpError::invalid_params(
                            "missing arguments for echo tool",
                            None,
                        ));
                    }
                };

                let env_snapshot: HashMap<String, String> = std::env::vars().collect();
                let structured_content = json!({
                    "echo": format!("ECHOING: {}", args.message),
                    "env": env_snapshot.get("MCP_TEST_VALUE"),
                });

                Ok(CallToolResult {
                    content: Vec::new(),
                    structured_content: Some(structured_content),
                    is_error: Some(false),
                    meta: None,
                })
            }
            other => Err(McpError::invalid_params(
                format!("unknown tool: {other}"),
                None,
            )),
        }
    }
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    eprintln!("starting rmcp test server");
    // Run the server with STDIO transport. If the client disconnects we simply
    // bubble up the error so the process exits.
    let service = TestToolServer::new();
    let running = service.serve(stdio()).await?;

    // Wait for the client to finish interacting with the server.
    running.waiting().await?;
    // Drain background tasks to ensure clean shutdown.
    task::yield_now().await;
    Ok(())
}
@@ -1,167 +0,0 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::net::SocketAddr;
use std::sync::Arc;

use axum::Router;
use rmcp::ErrorData as McpError;
use rmcp::handler::server::ServerHandler;
use rmcp::model::CallToolRequestParam;
use rmcp::model::CallToolResult;
use rmcp::model::JsonObject;
use rmcp::model::ListToolsResult;
use rmcp::model::PaginatedRequestParam;
use rmcp::model::ServerCapabilities;
use rmcp::model::ServerInfo;
use rmcp::model::Tool;
use rmcp::transport::StreamableHttpServerConfig;
use rmcp::transport::StreamableHttpService;
use rmcp::transport::streamable_http_server::session::local::LocalSessionManager;
use serde::Deserialize;
use serde_json::json;
use tokio::task;

#[derive(Clone)]
struct TestToolServer {
    tools: Arc<Vec<Tool>>,
}

impl TestToolServer {
    fn new() -> Self {
        let tools = vec![Self::echo_tool()];
        Self {
            tools: Arc::new(tools),
        }
    }

    fn echo_tool() -> Tool {
        #[expect(clippy::expect_used)]
        let schema: JsonObject = serde_json::from_value(json!({
            "type": "object",
            "properties": {
                "message": { "type": "string" },
                "env_var": { "type": "string" }
            },
            "required": ["message"],
            "additionalProperties": false
        }))
        .expect("echo tool schema should deserialize");

        Tool::new(
            Cow::Borrowed("echo"),
            Cow::Borrowed("Echo back the provided message and include environment data."),
            Arc::new(schema),
        )
    }
}

#[derive(Deserialize)]
struct EchoArgs {
    message: String,
    #[allow(dead_code)]
    env_var: Option<String>,
}

impl ServerHandler for TestToolServer {
    fn get_info(&self) -> ServerInfo {
        ServerInfo {
            capabilities: ServerCapabilities::builder()
                .enable_tools()
                .enable_tool_list_changed()
                .build(),
            ..ServerInfo::default()
        }
    }

    fn list_tools(
        &self,
        _request: Option<PaginatedRequestParam>,
        _context: rmcp::service::RequestContext<rmcp::service::RoleServer>,
    ) -> impl std::future::Future<Output = Result<ListToolsResult, McpError>> + Send + '_ {
        let tools = self.tools.clone();
        async move {
            Ok(ListToolsResult {
                tools: (*tools).clone(),
                next_cursor: None,
            })
        }
    }

    async fn call_tool(
        &self,
        request: CallToolRequestParam,
        _context: rmcp::service::RequestContext<rmcp::service::RoleServer>,
    ) -> Result<CallToolResult, McpError> {
        match request.name.as_ref() {
            "echo" => {
                let args: EchoArgs = match request.arguments {
                    Some(arguments) => serde_json::from_value(serde_json::Value::Object(
                        arguments.into_iter().collect(),
                    ))
                    .map_err(|err| McpError::invalid_params(err.to_string(), None))?,
                    None => {
                        return Err(McpError::invalid_params(
                            "missing arguments for echo tool",
                            None,
                        ));
                    }
                };

                let env_snapshot: HashMap<String, String> = std::env::vars().collect();
                let structured_content = json!({
                    "echo": format!("ECHOING: {}", args.message),
                    "env": env_snapshot.get("MCP_TEST_VALUE"),
                });

                Ok(CallToolResult {
                    content: Vec::new(),
                    structured_content: Some(structured_content),
                    is_error: Some(false),
                    meta: None,
                })
            }
            other => Err(McpError::invalid_params(
                format!("unknown tool: {other}"),
                None,
            )),
        }
    }
}

fn parse_bind_addr() -> Result<SocketAddr, Box<dyn std::error::Error>> {
    let default_addr = "127.0.0.1:3920";
    let bind_addr = std::env::var("MCP_STREAMABLE_HTTP_BIND_ADDR")
        .or_else(|_| std::env::var("BIND_ADDR"))
        .unwrap_or_else(|_| default_addr.to_string());
    Ok(bind_addr.parse()?)
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let bind_addr = parse_bind_addr()?;
    let listener = match tokio::net::TcpListener::bind(&bind_addr).await {
        Ok(listener) => listener,
        Err(err) if err.kind() == ErrorKind::PermissionDenied => {
            eprintln!(
                "failed to bind to {bind_addr}: {err}. make sure the process has network access"
            );
            return Ok(());
        }
        Err(err) => return Err(err.into()),
    };
    eprintln!("starting rmcp streamable http test server on http://{bind_addr}/mcp");

    let router = Router::new().nest_service(
        "/mcp",
        StreamableHttpService::new(
            || Ok(TestToolServer::new()),
            Arc::new(LocalSessionManager::default()),
            StreamableHttpServerConfig::default(),
        ),
    );

    axum::serve(listener, router).await?;
    task::yield_now().await;
    Ok(())
}
Some files were not shown because too many files have changed in this diff.