Mirror of https://github.com/openai/codex.git, synced 2026-02-02 06:57:03 +00:00

Compare commits: exec-run-a...re-status (1 commit)
| Author | SHA1 | Date |
|---|---|---|
| | ab5a129a7d | |
.github/workflows/ci.yml (vendored, 20 lines changed)
@@ -27,26 +27,12 @@ jobs:
      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      # build_npm_package.py requires DotSlash when staging releases.
      - uses: facebook/install-dotslash@v2
      # Run all tasks using workspace filters

      - name: Stage npm package
      - name: Ensure staging a release works.
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -euo pipefail
          CODEX_VERSION=0.40.0
          PACK_OUTPUT="${RUNNER_TEMP}/codex-npm.tgz"
          python3 ./codex-cli/scripts/build_npm_package.py \
            --release-version "$CODEX_VERSION" \
            --pack-output "$PACK_OUTPUT"
          echo "PACK_OUTPUT=$PACK_OUTPUT" >> "$GITHUB_ENV"

      - name: Upload staged npm package artifact
        uses: actions/upload-artifact@v4
        with:
          name: codex-npm-staging
          path: ${{ env.PACK_OUTPUT }}
        run: ./codex-cli/scripts/stage_release.sh

      - name: Ensure root README.md contains only ASCII and certain Unicode code points
        run: ./scripts/asciicheck.py README.md

.github/workflows/rust-ci.yml (vendored, 6 lines changed)
@@ -57,7 +57,7 @@ jobs:
    working-directory: codex-rs
    steps:
      - uses: actions/checkout@v5
      - uses: dtolnay/rust-toolchain@1.90
      - uses: dtolnay/rust-toolchain@1.89
        with:
          components: rustfmt
      - name: cargo fmt
@@ -75,7 +75,7 @@ jobs:
    working-directory: codex-rs
    steps:
      - uses: actions/checkout@v5
      - uses: dtolnay/rust-toolchain@1.90
      - uses: dtolnay/rust-toolchain@1.89
      - uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
        with:
          tool: cargo-shear
@@ -143,7 +143,7 @@ jobs:

    steps:
      - uses: actions/checkout@v5
      - uses: dtolnay/rust-toolchain@1.90
      - uses: dtolnay/rust-toolchain@1.89
        with:
          targets: ${{ matrix.target }}
          components: clippy

.github/workflows/rust-release.yml (vendored, 52 lines changed)
@@ -77,7 +77,7 @@ jobs:

    steps:
      - uses: actions/checkout@v5
      - uses: dtolnay/rust-toolchain@1.90
      - uses: dtolnay/rust-toolchain@1.89
        with:
          targets: ${{ matrix.target }}

@@ -173,8 +173,6 @@ jobs:
    outputs:
      version: ${{ steps.release_name.outputs.name }}
      tag: ${{ github.ref_name }}
      should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }}
      npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }}

    steps:
      - name: Checkout repository
@@ -195,37 +193,21 @@ jobs:
          version="${GITHUB_REF_NAME#rust-v}"
          echo "name=${version}" >> $GITHUB_OUTPUT

      - name: Determine npm publish settings
        id: npm_publish_settings
        env:
          VERSION: ${{ steps.release_name.outputs.name }}
        run: |
          set -euo pipefail
          version="${VERSION}"

          if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            echo "should_publish=true" >> "$GITHUB_OUTPUT"
            echo "npm_tag=" >> "$GITHUB_OUTPUT"
          elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
            echo "should_publish=true" >> "$GITHUB_OUTPUT"
            echo "npm_tag=alpha" >> "$GITHUB_OUTPUT"
          else
            echo "should_publish=false" >> "$GITHUB_OUTPUT"
            echo "npm_tag=" >> "$GITHUB_OUTPUT"
          fi

      # build_npm_package.py requires DotSlash when staging releases.
      - uses: facebook/install-dotslash@v2
      - name: Stage npm package
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -euo pipefail
          TMP_DIR="${RUNNER_TEMP}/npm-stage"
          ./codex-cli/scripts/build_npm_package.py \
          python3 codex-cli/scripts/stage_rust_release.py \
            --release-version "${{ steps.release_name.outputs.name }}" \
            --staging-dir "${TMP_DIR}" \
            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
            --tmp "${TMP_DIR}"
          mkdir -p dist/npm
          # Produce an npm-ready tarball using `npm pack` and store it in dist/npm.
          # We then rename it to a stable name used by our publishing script.
          (cd "$TMP_DIR" && npm pack --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
          mv "${GITHUB_WORKSPACE}"/dist/npm/*.tgz \
            "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v2
@@ -248,8 +230,8 @@ jobs:
  # July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/
  # npm docs: https://docs.npmjs.com/trusted-publishers
  publish-npm:
    # Publish to npm for stable releases and alpha pre-releases with numeric suffixes.
    if: ${{ needs.release.outputs.should_publish_npm == 'true' }}
    # Skip this step for pre-releases (alpha/beta).
    if: ${{ !contains(needs.release.outputs.version, '-') }}
    name: publish-npm
    needs: release
    runs-on: ubuntu-latest
@@ -284,17 +266,7 @@ jobs:

      # No NODE_AUTH_TOKEN needed because we use OIDC.
      - name: Publish to npm
        env:
          VERSION: ${{ needs.release.outputs.version }}
          NPM_TAG: ${{ needs.release.outputs.npm_tag }}
        run: |
          set -euo pipefail
          tag_args=()
          if [[ -n "${NPM_TAG}" ]]; then
            tag_args+=(--tag "${NPM_TAG}")
          fi

          npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${VERSION}.tgz" "${tag_args[@]}"
        run: npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ needs.release.outputs.version }}.tgz"

  update-branch:
    name: Update latest-alpha-cli branch

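The publish-gating logic in the "Determine npm publish settings" step above is easy to misread in shell. As a reading aid, here is a minimal Python sketch of the same classification; the example version strings are illustrative, not taken from the workflow:

```python
import re

# Mirrors the two regexes in the workflow step above: stable X.Y.Z versions
# publish under npm's default tag, X.Y.Z-alpha.N versions publish under the
# "alpha" dist-tag, and everything else (beta, rc, ...) is not published.
STABLE = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+$")
ALPHA = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$")


def npm_publish_settings(version: str) -> tuple[bool, str]:
    """Return (should_publish, npm_tag) for a release version string."""
    if STABLE.match(version):
        return True, ""
    if ALPHA.match(version):
        return True, "alpha"
    return False, ""


# Hypothetical examples:
assert npm_publish_settings("0.40.0") == (True, "")          # default tag
assert npm_publish_settings("0.41.0-alpha.3") == (True, "alpha")
assert npm_publish_settings("0.41.0-beta.1") == (False, "")  # skipped
```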
codex-cli/.gitignore (vendored, 8 lines changed)
@@ -1 +1,7 @@
/vendor/
# Added by ./scripts/install_native_deps.sh
/bin/codex-aarch64-apple-darwin
/bin/codex-aarch64-unknown-linux-musl
/bin/codex-linux-sandbox-arm64
/bin/codex-linux-sandbox-x64
/bin/codex-x86_64-apple-darwin
/bin/codex-x86_64-unknown-linux-musl

codex-cli/bin/codex.js
@@ -1,7 +1,6 @@
#!/usr/bin/env node
// Unified entry point for the Codex CLI.

import { existsSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";

@@ -41,10 +40,10 @@ switch (platform) {
  case "win32":
    switch (arch) {
      case "x64":
        targetTriple = "x86_64-pc-windows-msvc";
        targetTriple = "x86_64-pc-windows-msvc.exe";
        break;
      case "arm64":
        targetTriple = "aarch64-pc-windows-msvc";
        targetTriple = "aarch64-pc-windows-msvc.exe";
        break;
      default:
        break;
@@ -58,10 +57,7 @@ if (!targetTriple) {
  throw new Error(`Unsupported platform: ${platform} (${arch})`);
}

const vendorRoot = path.join(__dirname, "..", "vendor");
const archRoot = path.join(vendorRoot, targetTriple);
const codexBinaryName = process.platform === "win32" ? "codex.exe" : "codex";
const binaryPath = path.join(archRoot, "codex", codexBinaryName);
const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`);

// Use an asynchronous spawn instead of spawnSync so that Node is able to
// respond to signals (e.g. Ctrl-C / SIGINT) while the native binary is
@@ -70,6 +66,23 @@ const binaryPath = path.join(archRoot, "codex", codexBinaryName);
// receives a fatal signal, both processes exit in a predictable manner.
const { spawn } = await import("child_process");

async function tryImport(moduleName) {
  try {
    // eslint-disable-next-line node/no-unsupported-features/es-syntax
    return await import(moduleName);
  } catch (err) {
    return null;
  }
}

async function resolveRgDir() {
  const ripgrep = await tryImport("@vscode/ripgrep");
  if (!ripgrep?.rgPath) {
    return null;
  }
  return path.dirname(ripgrep.rgPath);
}

function getUpdatedPath(newDirs) {
  const pathSep = process.platform === "win32" ? ";" : ":";
  const existingPath = process.env.PATH || "";
@@ -81,9 +94,9 @@ function getUpdatedPath(newDirs) {
}

const additionalDirs = [];
const pathDir = path.join(archRoot, "path");
if (existsSync(pathDir)) {
  additionalDirs.push(pathDir);
const rgDir = await resolveRgDir();
if (rgDir) {
  additionalDirs.push(rgDir);
}
const updatedPath = getUpdatedPath(additionalDirs);

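To summarize the launcher change above: the native binary is no longer looked up under vendor/&lt;triple&gt;/codex/ but directly as bin/codex-&lt;triple&gt;. A small Python sketch of the resolution follows; the win32 triples come from the switch statement above, while the other entries are inferred from the /bin/codex-* names in codex-cli/.gitignore:

```python
import os.path

# (platform, arch) -> target triple; the Windows triples already carry
# ".exe", so the resulting file name ends in .exe. Non-Windows entries are
# inferred from the /bin/codex-* names listed in codex-cli/.gitignore.
TARGET_TRIPLES = {
    ("linux", "x64"): "x86_64-unknown-linux-musl",
    ("linux", "arm64"): "aarch64-unknown-linux-musl",
    ("darwin", "x64"): "x86_64-apple-darwin",
    ("darwin", "arm64"): "aarch64-apple-darwin",
    ("win32", "x64"): "x86_64-pc-windows-msvc.exe",
    ("win32", "arm64"): "aarch64-pc-windows-msvc.exe",
}


def binary_path(package_root: str, platform: str, arch: str) -> str:
    triple = TARGET_TRIPLES.get((platform, arch))
    if triple is None:
        raise RuntimeError(f"Unsupported platform: {platform} ({arch})")
    # e.g. <package_root>/bin/codex-aarch64-apple-darwin
    return os.path.join(package_root, "bin", f"codex-{triple}")
```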
codex-cli/bin/rg
@@ -1,79 +0,0 @@
#!/usr/bin/env dotslash

{
  "name": "rg",
  "platforms": {
    "macos-aarch64": {
      "size": 1787248,
      "hash": "blake3",
      "digest": "8d9942032585ea8ee805937634238d9aee7b210069f4703c88fbe568e26fb78a",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-aarch64-apple-darwin/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-apple-darwin.tar.gz"
        }
      ]
    },
    "linux-aarch64": {
      "size": 2047405,
      "hash": "blake3",
      "digest": "0b670b8fa0a3df2762af2fc82cc4932f684ca4c02dbd1260d4f3133fd4b2a515",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-aarch64-unknown-linux-gnu/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-unknown-linux-gnu.tar.gz"
        }
      ]
    },
    "macos-x86_64": {
      "size": 2082672,
      "hash": "blake3",
      "digest": "e9b862fc8da3127f92791f0ff6a799504154ca9d36c98bf3e60a81c6b1f7289e",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-x86_64-apple-darwin/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-apple-darwin.tar.gz"
        }
      ]
    },
    "linux-x86_64": {
      "size": 2566310,
      "hash": "blake3",
      "digest": "f73cca4e54d78c31f832c7f6e2c0b4db8b04fa3eaa747915727d570893dbee76",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-x86_64-unknown-linux-musl/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-unknown-linux-musl.tar.gz"
        }
      ]
    },
    "windows-x86_64": {
      "size": 2058893,
      "hash": "blake3",
      "digest": "a8ce1a6fed4f8093ee997e57f33254e94b2cd18e26358b09db599c89882eadbd",
      "format": "zip",
      "path": "ripgrep-14.1.1-x86_64-pc-windows-msvc/rg.exe",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-pc-windows-msvc.zip"
        }
      ]
    },
    "windows-aarch64": {
      "size": 1667740,
      "hash": "blake3",
      "digest": "47b971a8c4fca1d23a4e7c19bd4d88465ebc395598458133139406d3bf85f3fa",
      "format": "zip",
      "path": "rg.exe",
      "providers": [
        {
          "url": "https://github.com/microsoft/ripgrep-prebuilt/releases/download/v13.0.0-13/ripgrep-v13.0.0-13-aarch64-pc-windows-msvc.zip"
        }
      ]
    }
  }
}
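Each platform entry above carries everything a consumer needs: a provider URL, the archive format, and the member path of the rg binary inside the archive. A minimal sketch of reading one entry (field names taken from the manifest; error handling omitted):

```python
def rg_download_spec(manifest: dict, platform: str) -> tuple[str, str, str]:
    """Return (url, archive_format, member_path) for one manifest platform."""
    info = manifest["platforms"][platform]
    # The first provider is used; "path" names the entry inside the archive,
    # e.g. "ripgrep-14.1.1-aarch64-apple-darwin/rg" for macos-aarch64.
    return info["providers"][0]["url"], info["format"], info["path"]
```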
codex-cli/package-lock.json (generated, 101 lines changed)
@@ -2,17 +2,118 @@
  "name": "@openai/codex",
  "version": "0.0.0-dev",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "@openai/codex",
      "version": "0.0.0-dev",
      "license": "Apache-2.0",
      "dependencies": {
        "@vscode/ripgrep": "^1.15.14"
      },
      "bin": {
        "codex": "bin/codex.js"
      },
      "engines": {
        "node": ">=20"
      }
    },
    "node_modules/@vscode/ripgrep": {
      "version": "1.15.14",
      "resolved": "https://registry.npmjs.org/@vscode/ripgrep/-/ripgrep-1.15.14.tgz",
      "integrity": "sha512-/G1UJPYlm+trBWQ6cMO3sv6b8D1+G16WaJH1/DSqw32JOVlzgZbLkDxRyzIpTpv30AcYGMkCf5tUqGlW6HbDWw==",
      "hasInstallScript": true,
      "license": "MIT",
      "dependencies": {
        "https-proxy-agent": "^7.0.2",
        "proxy-from-env": "^1.1.0",
        "yauzl": "^2.9.2"
      }
    },
    "node_modules/agent-base": {
      "version": "7.1.4",
      "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
      "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
      "license": "MIT",
      "engines": {
        "node": ">= 14"
      }
    },
    "node_modules/buffer-crc32": {
      "version": "0.2.13",
      "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
      "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
      "license": "MIT",
      "engines": {
        "node": "*"
      }
    },
    "node_modules/debug": {
      "version": "4.4.1",
      "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
      "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
      "license": "MIT",
      "dependencies": {
        "ms": "^2.1.3"
      },
      "engines": {
        "node": ">=6.0"
      },
      "peerDependenciesMeta": {
        "supports-color": {
          "optional": true
        }
      }
    },
    "node_modules/fd-slicer": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
      "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==",
      "license": "MIT",
      "dependencies": {
        "pend": "~1.2.0"
      }
    },
    "node_modules/https-proxy-agent": {
      "version": "7.0.6",
      "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
      "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
      "license": "MIT",
      "dependencies": {
        "agent-base": "^7.1.2",
        "debug": "4"
      },
      "engines": {
        "node": ">= 14"
      }
    },
    "node_modules/ms": {
      "version": "2.1.3",
      "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
      "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
      "license": "MIT"
    },
    "node_modules/pend": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
      "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
      "license": "MIT"
    },
    "node_modules/proxy-from-env": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
      "license": "MIT"
    },
    "node_modules/yauzl": {
      "version": "2.10.0",
      "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
      "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==",
      "license": "MIT",
      "dependencies": {
        "buffer-crc32": "~0.2.3",
        "fd-slicer": "~1.1.0"
      }
    }
  }
}

codex-cli/package.json
@@ -11,11 +11,17 @@
  },
  "files": [
    "bin",
    "vendor"
    "dist"
  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/openai/codex.git",
    "directory": "codex-cli"
  },
  "dependencies": {
    "@vscode/ripgrep": "^1.15.14"
  },
  "devDependencies": {
    "prettier": "^3.3.3"
  }
}

@@ -5,7 +5,5 @@ Run the following:
To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, build it as follows:

```bash
./codex-cli/scripts/build_npm_package.py --release-version 0.6.0
./codex-cli/scripts/stage_rust_release.py --release-version 0.6.0
```

Note this will create `./codex-cli/vendor/` as a side-effect.

codex-cli/scripts/build_npm_package.py
@@ -1,269 +0,0 @@
#!/usr/bin/env python3
"""Stage and optionally package the @openai/codex npm module."""

import argparse
import json
import re
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path

SCRIPT_DIR = Path(__file__).resolve().parent
CODEX_CLI_ROOT = SCRIPT_DIR.parent
REPO_ROOT = CODEX_CLI_ROOT.parent
GITHUB_REPO = "openai/codex"

# The docs are not clear on what the expected value/format of
# workflow/workflowName is:
# https://cli.github.com/manual/gh_run_list
WORKFLOW_NAME = ".github/workflows/rust-release.yml"


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
    parser.add_argument(
        "--version",
        help="Version number to write to package.json inside the staged package.",
    )
    parser.add_argument(
        "--release-version",
        help=(
            "Version to stage for npm release. When provided, the script also resolves the "
            "matching rust-release workflow unless --workflow-url is supplied."
        ),
    )
    parser.add_argument(
        "--workflow-url",
        help="Optional GitHub Actions workflow run URL used to download native binaries.",
    )
    parser.add_argument(
        "--staging-dir",
        type=Path,
        help=(
            "Directory to stage the package contents. Defaults to a new temporary directory "
            "if omitted. The directory must be empty when provided."
        ),
    )
    parser.add_argument(
        "--tmp",
        dest="staging_dir",
        type=Path,
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--pack-output",
        type=Path,
        help="Path where the generated npm tarball should be written.",
    )
    return parser.parse_args()


def main() -> int:
    args = parse_args()

    version = args.version
    release_version = args.release_version
    if release_version:
        if version and version != release_version:
            raise RuntimeError("--version and --release-version must match when both are provided.")
        version = release_version

    if not version:
        raise RuntimeError("Must specify --version or --release-version.")

    staging_dir, created_temp = prepare_staging_dir(args.staging_dir)

    try:
        stage_sources(staging_dir, version)

        workflow_url = args.workflow_url
        resolved_head_sha: str | None = None
        if not workflow_url:
            if release_version:
                workflow = resolve_release_workflow(version)
                workflow_url = workflow["url"]
                resolved_head_sha = workflow.get("headSha")
            else:
                workflow_url = resolve_latest_alpha_workflow_url()
        elif release_version:
            try:
                workflow = resolve_release_workflow(version)
                resolved_head_sha = workflow.get("headSha")
            except Exception:
                resolved_head_sha = None

        if release_version and resolved_head_sha:
            print(f"should `git checkout {resolved_head_sha}`")

        if not workflow_url:
            raise RuntimeError("Unable to determine workflow URL for native binaries.")

        install_native_binaries(staging_dir, workflow_url)

        if release_version:
            staging_dir_str = str(staging_dir)
            print(
                f"Staged version {version} for release in {staging_dir_str}\n\n"
                "Verify the CLI:\n"
                f"  node {staging_dir_str}/bin/codex.js --version\n"
                f"  node {staging_dir_str}/bin/codex.js --help\n\n"
            )
        else:
            print(f"Staged package in {staging_dir}")

        if args.pack_output is not None:
            output_path = run_npm_pack(staging_dir, args.pack_output)
            print(f"npm pack output written to {output_path}")
    finally:
        if created_temp:
            # Preserve the staging directory for further inspection.
            pass

    return 0


def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
    if staging_dir is not None:
        staging_dir = staging_dir.resolve()
        staging_dir.mkdir(parents=True, exist_ok=True)
        if any(staging_dir.iterdir()):
            raise RuntimeError(f"Staging directory {staging_dir} is not empty.")
        return staging_dir, False

    temp_dir = Path(tempfile.mkdtemp(prefix="codex-npm-stage-"))
    return temp_dir, True


def stage_sources(staging_dir: Path, version: str) -> None:
    bin_dir = staging_dir / "bin"
    bin_dir.mkdir(parents=True, exist_ok=True)

    shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
    rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
    if rg_manifest.exists():
        shutil.copy2(rg_manifest, bin_dir / "rg")

    readme_src = REPO_ROOT / "README.md"
    if readme_src.exists():
        shutil.copy2(readme_src, staging_dir / "README.md")

    with open(CODEX_CLI_ROOT / "package.json", "r", encoding="utf-8") as fh:
        package_json = json.load(fh)
    package_json["version"] = version

    with open(staging_dir / "package.json", "w", encoding="utf-8") as out:
        json.dump(package_json, out, indent=2)
        out.write("\n")


def install_native_binaries(staging_dir: Path, workflow_url: str | None) -> None:
    cmd = ["./scripts/install_native_deps.py"]
    if workflow_url:
        cmd.extend(["--workflow-url", workflow_url])
    cmd.append(str(staging_dir))
    subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT)


def resolve_latest_alpha_workflow_url() -> str:
    version = determine_latest_alpha_version()
    workflow = resolve_release_workflow(version)
    return workflow["url"]


def determine_latest_alpha_version() -> str:
    releases = list_releases()
    best_key: tuple[int, int, int, int] | None = None
    best_version: str | None = None
    pattern = re.compile(r"^rust-v(\d+)\.(\d+)\.(\d+)-alpha\.(\d+)$")
    for release in releases:
        tag = release.get("tag_name", "")
        match = pattern.match(tag)
        if not match:
            continue
        key = tuple(int(match.group(i)) for i in range(1, 5))
        if best_key is None or key > best_key:
            best_key = key
            best_version = (
                f"{match.group(1)}.{match.group(2)}.{match.group(3)}-alpha.{match.group(4)}"
            )

    if best_version is None:
        raise RuntimeError("No alpha releases found when resolving workflow URL.")
    return best_version


def list_releases() -> list[dict]:
    stdout = subprocess.check_output(
        ["gh", "api", f"/repos/{GITHUB_REPO}/releases?per_page=100"],
        text=True,
    )
    try:
        releases = json.loads(stdout or "[]")
    except json.JSONDecodeError as exc:
        raise RuntimeError("Unable to parse releases JSON.") from exc
    if not isinstance(releases, list):
        raise RuntimeError("Unexpected response when listing releases.")
    return releases


def resolve_release_workflow(version: str) -> dict:
    stdout = subprocess.check_output(
        [
            "gh",
            "run",
            "list",
            "--branch",
            f"rust-v{version}",
            "--json",
            "workflowName,url,headSha",
            "--workflow",
            WORKFLOW_NAME,
            "--jq",
            "first(.[])",
        ],
        text=True,
    )
    workflow = json.loads(stdout or "[]")
    if not workflow:
        raise RuntimeError(f"Unable to find rust-release workflow for version {version}.")
    return workflow


def run_npm_pack(staging_dir: Path, output_path: Path) -> Path:
    output_path = output_path.resolve()
    output_path.parent.mkdir(parents=True, exist_ok=True)

    with tempfile.TemporaryDirectory(prefix="codex-npm-pack-") as pack_dir_str:
        pack_dir = Path(pack_dir_str)
        stdout = subprocess.check_output(
            ["npm", "pack", "--json", "--pack-destination", str(pack_dir)],
            cwd=staging_dir,
            text=True,
        )
        try:
            pack_output = json.loads(stdout)
        except json.JSONDecodeError as exc:
            raise RuntimeError("Failed to parse npm pack output.") from exc

        if not pack_output:
            raise RuntimeError("npm pack did not produce an output tarball.")

        tarball_name = pack_output[0].get("filename") or pack_output[0].get("name")
        if not tarball_name:
            raise RuntimeError("Unable to determine npm pack output filename.")

        tarball_path = pack_dir / tarball_name
        if not tarball_path.exists():
            raise RuntimeError(f"Expected npm pack output not found: {tarball_path}")

        shutil.move(str(tarball_path), output_path)

    return output_path


if __name__ == "__main__":
    import sys

    sys.exit(main())
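A worked example for determine_latest_alpha_version() above, with hypothetical release tags, showing why the tuple key picks the right release:

```python
import re

pattern = re.compile(r"^rust-v(\d+)\.(\d+)\.(\d+)-alpha\.(\d+)$")
tags = ["rust-v0.39.0-alpha.9", "rust-v0.40.0-alpha.2", "rust-v0.40.0"]  # hypothetical
keys = [tuple(map(int, m.groups())) for m in map(pattern.match, tags) if m]
# The stable tag does not match the alpha pattern and is skipped. Tuples
# compare field by field, so (0, 40, 0, 2) beats (0, 39, 0, 9) even though
# its trailing alpha number is smaller; the function would return
# "0.40.0-alpha.2".
assert max(keys) == (0, 40, 0, 2)
```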
codex-cli/scripts/install_native_deps.py
@@ -1,318 +0,0 @@
#!/usr/bin/env python3
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""

import argparse
import json
import os
import shutil
import subprocess
import tarfile
import tempfile
import zipfile
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from typing import Iterable, Sequence
from urllib.parse import urlparse
from urllib.request import urlopen

SCRIPT_DIR = Path(__file__).resolve().parent
CODEX_CLI_ROOT = SCRIPT_DIR.parent
DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351"  # rust-v0.40.0
VENDOR_DIR_NAME = "vendor"
RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
CODEX_TARGETS = (
    "x86_64-unknown-linux-musl",
    "aarch64-unknown-linux-musl",
    "x86_64-apple-darwin",
    "aarch64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "aarch64-pc-windows-msvc",
)

RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
    ("x86_64-unknown-linux-musl", "linux-x86_64"),
    ("aarch64-unknown-linux-musl", "linux-aarch64"),
    ("x86_64-apple-darwin", "macos-x86_64"),
    ("aarch64-apple-darwin", "macos-aarch64"),
    ("x86_64-pc-windows-msvc", "windows-x86_64"),
    ("aarch64-pc-windows-msvc", "windows-aarch64"),
]
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Install native Codex binaries.")
    parser.add_argument(
        "--workflow-url",
        help=(
            "GitHub Actions workflow URL that produced the artifacts. Defaults to a "
            "known good run when omitted."
        ),
    )
    parser.add_argument(
        "root",
        nargs="?",
        type=Path,
        help=(
            "Directory containing package.json for the staged package. If omitted, the "
            "repository checkout is used."
        ),
    )
    return parser.parse_args()


def main() -> int:
    args = parse_args()

    codex_cli_root = (args.root or CODEX_CLI_ROOT).resolve()
    vendor_dir = codex_cli_root / VENDOR_DIR_NAME
    vendor_dir.mkdir(parents=True, exist_ok=True)

    workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
    if not workflow_url:
        workflow_url = DEFAULT_WORKFLOW_URL

    workflow_id = workflow_url.rstrip("/").split("/")[-1]

    with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
        artifacts_dir = Path(artifacts_dir_str)
        _download_artifacts(workflow_id, artifacts_dir)
        install_codex_binaries(artifacts_dir, vendor_dir, CODEX_TARGETS)

    fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)

    print(f"Installed native dependencies into {vendor_dir}")
    return 0


def fetch_rg(
    vendor_dir: Path,
    targets: Sequence[str] | None = None,
    *,
    manifest_path: Path,
) -> list[Path]:
    """Download ripgrep binaries described by the DotSlash manifest."""

    if targets is None:
        targets = DEFAULT_RG_TARGETS

    if not manifest_path.exists():
        raise FileNotFoundError(f"DotSlash manifest not found: {manifest_path}")

    manifest = _load_manifest(manifest_path)
    platforms = manifest.get("platforms", {})

    vendor_dir.mkdir(parents=True, exist_ok=True)

    targets = list(targets)
    if not targets:
        return []

    task_configs: list[tuple[str, str, dict]] = []
    for target in targets:
        platform_key = RG_TARGET_TO_PLATFORM.get(target)
        if platform_key is None:
            raise ValueError(f"Unsupported ripgrep target '{target}'.")

        platform_info = platforms.get(platform_key)
        if platform_info is None:
            raise RuntimeError(f"Platform '{platform_key}' not found in manifest {manifest_path}.")

        task_configs.append((target, platform_key, platform_info))

    results: dict[str, Path] = {}
    max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1)))

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_map = {
            executor.submit(
                _fetch_single_rg,
                vendor_dir,
                target,
                platform_key,
                platform_info,
                manifest_path,
            ): target
            for target, platform_key, platform_info in task_configs
        }

        for future in as_completed(future_map):
            target = future_map[future]
            results[target] = future.result()

    return [results[target] for target in targets]


def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
    cmd = [
        "gh",
        "run",
        "download",
        "--dir",
        str(dest_dir),
        "--repo",
        "openai/codex",
        workflow_id,
    ]
    subprocess.check_call(cmd)


def install_codex_binaries(
    artifacts_dir: Path, vendor_dir: Path, targets: Iterable[str]
) -> list[Path]:
    targets = list(targets)
    if not targets:
        return []

    results: dict[str, Path] = {}
    max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_map = {
            executor.submit(_install_single_codex_binary, artifacts_dir, vendor_dir, target): target
            for target in targets
        }

        for future in as_completed(future_map):
            target = future_map[future]
            results[target] = future.result()

    return [results[target] for target in targets]


def _install_single_codex_binary(artifacts_dir: Path, vendor_dir: Path, target: str) -> Path:
    artifact_subdir = artifacts_dir / target
    archive_name = _archive_name_for_target(target)
    archive_path = artifact_subdir / archive_name
    if not archive_path.exists():
        raise FileNotFoundError(f"Expected artifact not found: {archive_path}")

    dest_dir = vendor_dir / target / "codex"
    dest_dir.mkdir(parents=True, exist_ok=True)

    binary_name = "codex.exe" if "windows" in target else "codex"
    dest = dest_dir / binary_name
    dest.unlink(missing_ok=True)
    extract_archive(archive_path, "zst", None, dest)
    if "windows" not in target:
        dest.chmod(0o755)
    return dest


def _archive_name_for_target(target: str) -> str:
    if "windows" in target:
        return f"codex-{target}.exe.zst"
    return f"codex-{target}.zst"


def _fetch_single_rg(
    vendor_dir: Path,
    target: str,
    platform_key: str,
    platform_info: dict,
    manifest_path: Path,
) -> Path:
    providers = platform_info.get("providers", [])
    if not providers:
        raise RuntimeError(f"No providers listed for platform '{platform_key}' in {manifest_path}.")

    url = providers[0]["url"]
    archive_format = platform_info.get("format", "zst")
    archive_member = platform_info.get("path")

    dest_dir = vendor_dir / target / "path"
    dest_dir.mkdir(parents=True, exist_ok=True)

    is_windows = platform_key.startswith("win")
    binary_name = "rg.exe" if is_windows else "rg"
    dest = dest_dir / binary_name

    with tempfile.TemporaryDirectory() as tmp_dir_str:
        tmp_dir = Path(tmp_dir_str)
        archive_filename = os.path.basename(urlparse(url).path)
        download_path = tmp_dir / archive_filename
        _download_file(url, download_path)

        dest.unlink(missing_ok=True)
        extract_archive(download_path, archive_format, archive_member, dest)

    if not is_windows:
        dest.chmod(0o755)

    return dest


def _download_file(url: str, dest: Path) -> None:
    dest.parent.mkdir(parents=True, exist_ok=True)
    with urlopen(url) as response, open(dest, "wb") as out:
        shutil.copyfileobj(response, out)


def extract_archive(
    archive_path: Path,
    archive_format: str,
    archive_member: str | None,
    dest: Path,
) -> None:
    dest.parent.mkdir(parents=True, exist_ok=True)

    if archive_format == "zst":
        output_path = archive_path.parent / dest.name
        subprocess.check_call(
            ["zstd", "-f", "-d", str(archive_path), "-o", str(output_path)]
        )
        shutil.move(str(output_path), dest)
        return

    if archive_format == "tar.gz":
        if not archive_member:
            raise RuntimeError("Missing 'path' for tar.gz archive in DotSlash manifest.")
        with tarfile.open(archive_path, "r:gz") as tar:
            try:
                member = tar.getmember(archive_member)
            except KeyError as exc:
                raise RuntimeError(
                    f"Entry '{archive_member}' not found in archive {archive_path}."
                ) from exc
            tar.extract(member, path=archive_path.parent, filter="data")
        extracted = archive_path.parent / archive_member
        shutil.move(str(extracted), dest)
        return

    if archive_format == "zip":
        if not archive_member:
            raise RuntimeError("Missing 'path' for zip archive in DotSlash manifest.")
        with zipfile.ZipFile(archive_path) as archive:
            try:
                with archive.open(archive_member) as src, open(dest, "wb") as out:
                    shutil.copyfileobj(src, out)
            except KeyError as exc:
                raise RuntimeError(
                    f"Entry '{archive_member}' not found in archive {archive_path}."
                ) from exc
        return

    raise RuntimeError(f"Unsupported archive format '{archive_format}'.")


def _load_manifest(manifest_path: Path) -> dict:
    cmd = ["dotslash", "--", "parse", str(manifest_path)]
    stdout = subprocess.check_output(cmd, text=True)
    try:
        manifest = json.loads(stdout)
    except json.JSONDecodeError as exc:
        raise RuntimeError(f"Invalid DotSlash manifest output from {manifest_path}.") from exc

    if not isinstance(manifest, dict):
        raise RuntimeError(
            f"Unexpected DotSlash manifest structure for {manifest_path}: {type(manifest)!r}"
        )

    return manifest


if __name__ == "__main__":
    import sys

    sys.exit(main())
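For orientation, the vendor layout this installer produced per target, derived from _install_single_codex_binary() and _fetch_single_rg() above (a sketch, not part of the original script):

```python
from pathlib import Path


def expected_vendor_files(vendor_dir: Path, target: str) -> list[Path]:
    """Paths the installer writes for one target triple."""
    exe = ".exe" if "windows" in target else ""
    return [
        vendor_dir / target / "codex" / f"codex{exe}",  # from workflow artifacts
        vendor_dir / target / "path" / f"rg{exe}",      # from the DotSlash manifest
    ]
```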
codex-cli/scripts/install_native_deps.sh (new executable file, 94 lines)
@@ -0,0 +1,94 @@
#!/usr/bin/env bash

# Install native runtime dependencies for codex-cli.
#
# Usage
#   install_native_deps.sh [--workflow-url URL] [CODEX_CLI_ROOT]
#
# The optional RELEASE_ROOT is the path that contains package.json. Omitting
# it installs the binaries into the repository's own bin/ folder to support
# local development.

set -euo pipefail

# ------------------
# Parse arguments
# ------------------

CODEX_CLI_ROOT=""

# Until we start publishing stable GitHub releases, we have to grab the binaries
# from the GitHub Action that created them. Update the URL below to point to the
# appropriate workflow run:
WORKFLOW_URL="https://github.com/openai/codex/actions/runs/17417194663"  # rust-v0.28.0

while [[ $# -gt 0 ]]; do
  case "$1" in
    --workflow-url)
      shift || { echo "--workflow-url requires an argument"; exit 1; }
      if [ -n "$1" ]; then
        WORKFLOW_URL="$1"
      fi
      ;;
    *)
      if [[ -z "$CODEX_CLI_ROOT" ]]; then
        CODEX_CLI_ROOT="$1"
      else
        echo "Unexpected argument: $1" >&2
        exit 1
      fi
      ;;
  esac
  shift
done

# ----------------------------------------------------------------------------
# Determine where the binaries should be installed.
# ----------------------------------------------------------------------------

if [ -n "$CODEX_CLI_ROOT" ]; then
  # The caller supplied a release root directory.
  BIN_DIR="$CODEX_CLI_ROOT/bin"
else
  # No argument; fall back to the repo's own bin directory.
  # Resolve the path of this script, then walk up to the repo root.
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
  BIN_DIR="$CODEX_CLI_ROOT/bin"
fi

# Make sure the destination directory exists.
mkdir -p "$BIN_DIR"

# ----------------------------------------------------------------------------
# Download and decompress the artifacts from the GitHub Actions workflow.
# ----------------------------------------------------------------------------

WORKFLOW_ID="${WORKFLOW_URL##*/}"

ARTIFACTS_DIR="$(mktemp -d)"
trap 'rm -rf "$ARTIFACTS_DIR"' EXIT

# NB: The GitHub CLI `gh` must be installed and authenticated.
gh run download --dir "$ARTIFACTS_DIR" --repo openai/codex "$WORKFLOW_ID"

# x64 Linux
zstd -d "$ARTIFACTS_DIR/x86_64-unknown-linux-musl/codex-x86_64-unknown-linux-musl.zst" \
  -o "$BIN_DIR/codex-x86_64-unknown-linux-musl"
# ARM64 Linux
zstd -d "$ARTIFACTS_DIR/aarch64-unknown-linux-musl/codex-aarch64-unknown-linux-musl.zst" \
  -o "$BIN_DIR/codex-aarch64-unknown-linux-musl"
# x64 macOS
zstd -d "$ARTIFACTS_DIR/x86_64-apple-darwin/codex-x86_64-apple-darwin.zst" \
  -o "$BIN_DIR/codex-x86_64-apple-darwin"
# ARM64 macOS
zstd -d "$ARTIFACTS_DIR/aarch64-apple-darwin/codex-aarch64-apple-darwin.zst" \
  -o "$BIN_DIR/codex-aarch64-apple-darwin"
# x64 Windows
zstd -d "$ARTIFACTS_DIR/x86_64-pc-windows-msvc/codex-x86_64-pc-windows-msvc.exe.zst" \
  -o "$BIN_DIR/codex-x86_64-pc-windows-msvc.exe"
# ARM64 Windows
zstd -d "$ARTIFACTS_DIR/aarch64-pc-windows-msvc/codex-aarch64-pc-windows-msvc.exe.zst" \
  -o "$BIN_DIR/codex-aarch64-pc-windows-msvc.exe"

echo "Installed native dependencies into $BIN_DIR"
codex-cli/scripts/stage_release.sh (new executable file, 120 lines)
@@ -0,0 +1,120 @@
#!/usr/bin/env bash
# -----------------------------------------------------------------------------
# stage_release.sh
# -----------------------------------------------------------------------------
# Stages an npm release for @openai/codex.
#
# Usage:
#
#   --tmp <dir> : Use <dir> instead of a freshly created temp directory.
#   -h|--help   : Print usage.
#
# -----------------------------------------------------------------------------

set -euo pipefail

# Helper - usage / flag parsing

usage() {
  cat <<EOF
Usage: $(basename "$0") [--tmp DIR] [--version VERSION]

Options
  --tmp DIR   Use DIR to stage the release (defaults to a fresh mktemp dir)
  --version   Specify the version to release (defaults to a timestamp-based version)
  -h, --help  Show this help

Legacy positional argument: the first non-flag argument is still interpreted
as the temporary directory (for backwards compatibility) but is deprecated.
EOF
  exit "${1:-0}"
}

TMPDIR=""
# Default to a timestamp-based version (keep same scheme as before)
VERSION="$(printf '0.1.%d' "$(date +%y%m%d%H%M)")"
WORKFLOW_URL=""

# Manual flag parser - Bash getopts does not handle GNU long options well.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --tmp)
      shift || { echo "--tmp requires an argument"; usage 1; }
      TMPDIR="$1"
      ;;
    --tmp=*)
      TMPDIR="${1#*=}"
      ;;
    --version)
      shift || { echo "--version requires an argument"; usage 1; }
      VERSION="$1"
      ;;
    --workflow-url)
      shift || { echo "--workflow-url requires an argument"; exit 1; }
      WORKFLOW_URL="$1"
      ;;
    -h|--help)
      usage 0
      ;;
    --*)
      echo "Unknown option: $1" >&2
      usage 1
      ;;
    *)
      echo "Unexpected extra argument: $1" >&2
      usage 1
      ;;
  esac
  shift
done

# Fallback when the caller did not specify a directory.
# If no directory was specified create a fresh temporary one.
if [[ -z "$TMPDIR" ]]; then
  TMPDIR="$(mktemp -d)"
fi

# Ensure the directory exists, then resolve to an absolute path.
mkdir -p "$TMPDIR"
TMPDIR="$(cd "$TMPDIR" && pwd)"

# Main build logic

echo "Staging release in $TMPDIR"

# The script lives in codex-cli/scripts/ - change into codex-cli root so that
# relative paths keep working.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

pushd "$CODEX_CLI_ROOT" >/dev/null

# 1. Build the JS artifacts ---------------------------------------------------

# Paths inside the staged package
mkdir -p "$TMPDIR/bin"

cp -r bin/codex.js "$TMPDIR/bin/codex.js"
cp ../README.md "$TMPDIR" || true # README is one level up - ignore if missing

# Modify package.json - bump version and optionally add the native directory to
# the files array so that the binaries are published to npm.

jq --arg version "$VERSION" \
  '.version = $version' \
  package.json > "$TMPDIR/package.json"

# 2. Native runtime deps (sandbox plus optional Rust binaries)

./scripts/install_native_deps.sh --workflow-url "$WORKFLOW_URL" "$TMPDIR"

popd >/dev/null

echo "Staged version $VERSION for release in $TMPDIR"

echo "Verify the CLI:"
echo "  node ${TMPDIR}/bin/codex.js --version"
echo "  node ${TMPDIR}/bin/codex.js --help"

# Print final hint for convenience
echo "Next: cd \"$TMPDIR\" && npm publish"
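The default VERSION line above encodes the current timestamp into a 0.1.x version. A small sketch of the equivalent computation (the example date is arbitrary):

```python
from datetime import datetime


def default_stage_version(now: datetime) -> str:
    # Mirrors: VERSION="$(printf '0.1.%d' "$(date +%y%m%d%H%M)")"
    return f"0.1.{now.strftime('%y%m%d%H%M')}"


# e.g. 2025-09-14 15:30 would stage as version "0.1.2509141530"
assert default_stage_version(datetime(2025, 9, 14, 15, 30)) == "0.1.2509141530"
```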
codex-cli/scripts/stage_rust_release.py (new executable file, 70 lines)
@@ -0,0 +1,70 @@
#!/usr/bin/env python3

import json
import subprocess
import sys
import argparse
from pathlib import Path


def main() -> int:
    parser = argparse.ArgumentParser(
        description="""Stage a release for the npm module.

Run this after the GitHub Release has been created and use
`--release-version` to specify the version to release.

Optionally pass `--tmp` to control the temporary staging directory that will be
forwarded to stage_release.sh.
"""
    )
    parser.add_argument(
        "--release-version", required=True, help="Version to release, e.g., 0.3.0"
    )
    parser.add_argument(
        "--tmp",
        help="Optional path to stage the npm package; forwarded to stage_release.sh",
    )
    args = parser.parse_args()
    version = args.release_version

    gh_run = subprocess.run(
        [
            "gh",
            "run",
            "list",
            "--branch",
            f"rust-v{version}",
            "--json",
            "workflowName,url,headSha",
            "--jq",
            'first(.[] | select(.workflowName == "rust-release"))',
        ],
        stdout=subprocess.PIPE,
        check=True,
    )
    gh_run.check_returncode()
    workflow = json.loads(gh_run.stdout)
    sha = workflow["headSha"]

    print(f"should `git checkout {sha}`")

    current_dir = Path(__file__).parent.resolve()
    cmd = [
        str(current_dir / "stage_release.sh"),
        "--version",
        version,
        "--workflow-url",
        workflow["url"],
    ]
    if args.tmp:
        cmd.extend(["--tmp", args.tmp])

    stage_release = subprocess.run(cmd)
    stage_release.check_returncode()

    return 0


if __name__ == "__main__":
    sys.exit(main())
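The gh invocation above selects a single run object. Its shape follows directly from the --json fields requested; the values below are placeholders, not real run data:

```python
import json

# `gh run list --json workflowName,url,headSha --jq
#  'first(.[] | select(.workflowName == "rust-release"))'` yields one object
# with exactly the requested fields (placeholder values shown here).
example = json.loads(
    '{"workflowName": "rust-release",'
    ' "url": "https://github.com/openai/codex/actions/runs/<run-id>",'
    ' "headSha": "<commit-sha>"}'
)
print(f"should `git checkout {example['headSha']}`")
```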
codex-rs/Cargo.lock (generated, 84 lines changed)
@@ -78,6 +78,12 @@ version = "0.2.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
|
||||
|
||||
[[package]]
|
||||
name = "android-tzdata"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
|
||||
|
||||
[[package]]
|
||||
name = "android_system_properties"
|
||||
version = "0.1.5"
|
||||
@@ -490,16 +496,17 @@ checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
|
||||
|
||||
[[package]]
|
||||
name = "chrono"
|
||||
version = "0.4.42"
|
||||
version = "0.4.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2"
|
||||
checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d"
|
||||
dependencies = [
|
||||
"android-tzdata",
|
||||
"iana-time-zone",
|
||||
"js-sys",
|
||||
"num-traits",
|
||||
"serde",
|
||||
"wasm-bindgen",
|
||||
"windows-link 0.2.0",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -588,6 +595,7 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assert_cmd",
|
||||
"once_cell",
|
||||
"pretty_assertions",
|
||||
"similar",
|
||||
"tempfile",
|
||||
@@ -656,10 +664,13 @@ dependencies = [
|
||||
name = "codex-common"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"clap",
|
||||
"codex-core",
|
||||
"codex-protocol",
|
||||
"serde",
|
||||
"thiserror 2.0.16",
|
||||
"tokio",
|
||||
"toml",
|
||||
]
|
||||
|
||||
@@ -948,6 +959,7 @@ dependencies = [
|
||||
"lazy_static",
|
||||
"libc",
|
||||
"mcp-types",
|
||||
"once_cell",
|
||||
"path-clean",
|
||||
"pathdiff",
|
||||
"pretty_assertions",
|
||||
@@ -969,21 +981,11 @@ dependencies = [
|
||||
"tracing-appender",
|
||||
"tracing-subscriber",
|
||||
"unicode-segmentation",
|
||||
"unicode-width 0.2.1",
|
||||
"unicode-width 0.1.14",
|
||||
"url",
|
||||
"vt100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-utils-readiness"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"thiserror 2.0.16",
|
||||
"time",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "color-eyre"
|
||||
version = "0.6.5"
|
||||
@@ -1279,12 +1281,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "deranged"
|
||||
version = "0.5.4"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a41953f86f8a05768a6cda24def994fd2f424b04ec5c719cf89989779f199071"
|
||||
checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
|
||||
dependencies = [
|
||||
"powerfmt",
|
||||
"serde_core",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2684,9 +2686,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.28"
|
||||
version = "0.4.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
|
||||
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
|
||||
|
||||
[[package]]
|
||||
name = "logos"
|
||||
@@ -3927,9 +3929,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.226"
|
||||
version = "1.0.224"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd"
|
||||
checksum = "6aaeb1e94f53b16384af593c71e20b095e958dab1d26939c1b70645c5cfbcc0b"
|
||||
dependencies = [
|
||||
"serde_core",
|
||||
"serde_derive",
|
||||
@@ -3937,18 +3939,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_core"
|
||||
version = "1.0.226"
|
||||
version = "1.0.224"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4"
|
||||
checksum = "32f39390fa6346e24defbcdd3d9544ba8a19985d0af74df8501fbfe9a64341ab"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.226"
|
||||
version = "1.0.224"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33"
|
||||
checksum = "87ff78ab5e8561c9a675bfc1785cb07ae721f0ee53329a595cefd8c04c2ac4e0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -4451,15 +4453,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.23.0"
|
||||
version = "3.20.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16"
|
||||
checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
|
||||
dependencies = [
|
||||
"fastrand",
|
||||
"getrandom 0.3.3",
|
||||
"once_cell",
|
||||
"rustix 1.0.8",
|
||||
"windows-sys 0.60.2",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4583,9 +4585,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "time"
|
||||
version = "0.3.44"
|
||||
version = "0.3.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
|
||||
checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
|
||||
dependencies = [
|
||||
"deranged",
|
||||
"itoa",
|
||||
@@ -4600,15 +4602,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "time-core"
|
||||
version = "0.1.6"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
|
||||
checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
|
||||
|
||||
[[package]]
|
||||
name = "time-macros"
|
||||
version = "0.2.24"
|
||||
version = "0.2.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
|
||||
checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49"
|
||||
dependencies = [
|
||||
"num-conv",
|
||||
"time-core",
|
||||
@@ -5336,7 +5338,7 @@ checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
|
||||
dependencies = [
|
||||
"windows-implement",
|
||||
"windows-interface",
|
||||
"windows-link 0.1.3",
|
||||
"windows-link",
|
||||
"windows-result",
|
||||
"windows-strings",
|
||||
]
|
||||
@@ -5369,19 +5371,13 @@ version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
|
||||
|
||||
[[package]]
|
||||
name = "windows-link"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65"
|
||||
|
||||
[[package]]
|
||||
name = "windows-registry"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e"
|
||||
dependencies = [
|
||||
"windows-link 0.1.3",
|
||||
"windows-link",
|
||||
"windows-result",
|
||||
"windows-strings",
|
||||
]
|
||||
@@ -5392,7 +5388,7 @@ version = "0.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6"
|
||||
dependencies = [
|
||||
"windows-link 0.1.3",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5401,7 +5397,7 @@ version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57"
|
||||
dependencies = [
|
||||
"windows-link 0.1.3",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
codex-rs/Cargo.toml
@@ -19,7 +19,6 @@ members = [
    "protocol",
    "protocol-ts",
    "tui",
    "utils/readiness",
]
resolver = "2"

@@ -50,7 +49,6 @@ codex-ollama = { path = "ollama" }
codex-protocol = { path = "protocol" }
codex-protocol-ts = { path = "protocol-ts" }
codex-tui = { path = "tui" }
codex-utils-readiness = { path = "utils/readiness" }
core_test_support = { path = "core/tests/common" }
mcp-types = { path = "mcp-types" }
mcp_test_support = { path = "mcp-server/tests/common" }
@@ -67,7 +65,7 @@ async-stream = "0.3.6"
async-trait = "0.1.89"
base64 = "0.22.1"
bytes = "1.10.1"
chrono = "0.4.42"
chrono = "0.4.40"
clap = "4"
clap_complete = "4"
color-eyre = "0.6.3"
@@ -94,6 +92,7 @@ maplit = "1.0.2"
mime_guess = "2.0.5"
multimap = "0.10.0"
nucleo-matcher = "0.3.1"
once_cell = "1"
openssl-sys = "*"
os_info = "3.12.0"
owo-colors = "4.2.0"
@@ -122,7 +121,7 @@ strum = "0.27.2"
strum_macros = "0.27.2"
supports-color = "3.0.2"
sys-locale = "0.3.2"
tempfile = "3.23.0"
tempfile = "3.13.0"
textwrap = "0.16.2"
thiserror = "2.0.16"
time = "0.3"
@@ -140,7 +139,7 @@ tree-sitter = "0.25.9"
tree-sitter-bash = "0.25.0"
ts-rs = "11"
unicode-segmentation = "1.12.0"
unicode-width = "0.2"
unicode-width = "0.1"
url = "2"
urlencoding = "2.1"
uuid = "1"
@@ -192,7 +191,7 @@ unwrap_used = "deny"
# cargo-shear cannot see the platform-specific openssl-sys usage, so we
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
ignored = ["openssl-sys", "codex-utils-readiness"]
ignored = ["openssl-sys"]

[profile.release]
lto = "fat"

@@ -20,6 +20,7 @@ similar = { workspace = true }
thiserror = { workspace = true }
tree-sitter = { workspace = true }
tree-sitter-bash = { workspace = true }
once_cell = { workspace = true }

[dev-dependencies]
assert_cmd = { workspace = true }

@@ -6,10 +6,10 @@ use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use std::str::Utf8Error;
use std::sync::LazyLock;

use anyhow::Context;
use anyhow::Result;
use once_cell::sync::Lazy;
pub use parser::Hunk;
pub use parser::ParseError;
use parser::ParseError::*;
@@ -351,7 +351,7 @@ fn extract_apply_patch_from_bash(
// also run an arbitrary query against the AST. This is useful for understanding
// how tree-sitter parses the script and whether the query syntax is correct. Be sure
// to test both positive and negative cases.
static APPLY_PATCH_QUERY: LazyLock<Query> = LazyLock::new(|| {
static APPLY_PATCH_QUERY: Lazy<Query> = Lazy::new(|| {
let language = BASH.into();
#[expect(clippy::expect_used)]
Query::new(

@@ -7,11 +7,14 @@ version = { workspace = true }
workspace = true

[dependencies]
async-trait = { workspace = true }
clap = { workspace = true, features = ["derive", "wrap_help"], optional = true }
codex-core = { workspace = true }
codex-protocol = { workspace = true }
serde = { workspace = true, optional = true }
toml = { workspace = true, optional = true }
thiserror = { workspace = true }
tokio = { workspace = true }

[features]
# Separate feature so that `clap` is not a mandatory dependency.

@@ -34,3 +34,5 @@ pub mod model_presets;
// Shared approval presets (AskForApproval + Sandbox) used by TUI and MCP server
// Not to be confused with AskForApproval, which we should probably rename to EscalationPolicy.
pub mod approval_presets;
// Readiness flag with token-based authorization and async waiting (Tokio).
pub mod readiness;

@@ -1,12 +1,13 @@
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::function_tool::FunctionCallError;
use crate::protocol::FileChange;
use crate::protocol::ReviewDecision;
use crate::safety::SafetyCheck;
use crate::safety::assess_patch_safety;
use codex_apply_patch::ApplyPatchAction;
use codex_apply_patch::ApplyPatchFileChange;
use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::ResponseInputItem;
use std::collections::HashMap;
use std::path::PathBuf;

@@ -16,7 +17,7 @@ pub(crate) enum InternalApplyPatchInvocation {
/// The `apply_patch` call was handled programmatically, without any sort
/// of sandbox, because the user explicitly approved it. This is the
/// result to use with the `shell` function call that contained `apply_patch`.
Output(Result<String, FunctionCallError>),
Output(ResponseInputItem),

/// The `apply_patch` call was approved, either automatically because it
/// appears that it should be allowed based on the user's sandbox policy
@@ -32,6 +33,12 @@ pub(crate) struct ApplyPatchExec {
pub(crate) user_explicitly_approved_this_action: bool,
}

impl From<ResponseInputItem> for InternalApplyPatchInvocation {
fn from(item: ResponseInputItem) -> Self {
InternalApplyPatchInvocation::Output(item)
}
}

pub(crate) async fn apply_patch(
sess: &Session,
turn_context: &TurnContext,
@@ -70,15 +77,25 @@ pub(crate) async fn apply_patch(
})
}
ReviewDecision::Denied | ReviewDecision::Abort => {
InternalApplyPatchInvocation::Output(Err(FunctionCallError::RespondToModel(
"patch rejected by user".to_string(),
)))
ResponseInputItem::FunctionCallOutput {
call_id: call_id.to_owned(),
output: FunctionCallOutputPayload {
content: "patch rejected by user".to_string(),
success: Some(false),
},
}
.into()
}
}
}
SafetyCheck::Reject { reason } => InternalApplyPatchInvocation::Output(Err(
FunctionCallError::RespondToModel(format!("patch rejected: {reason}")),
)),
SafetyCheck::Reject { reason } => ResponseInputItem::FunctionCallOutput {
call_id: call_id.to_owned(),
output: FunctionCallOutputPayload {
content: format!("patch rejected: {reason}"),
success: Some(false),
},
}
.into(),
}
}

@@ -415,6 +415,9 @@ struct SseEvent {
delta: Option<String>,
}

#[derive(Debug, Deserialize)]
struct ResponseCreated {}

#[derive(Debug, Deserialize)]
struct ResponseCompleted {
id: String,

@@ -10,7 +10,6 @@ use std::time::Duration;
use crate::AuthManager;
use crate::client_common::REVIEW_PROMPT;
use crate::event_mapping::map_response_item_to_event_messages;
use crate::function_tool::FunctionCallError;
use crate::review_format::format_review_findings_block;
use crate::user_notification::UserNotifier;
use async_channel::Receiver;
@@ -2233,41 +2232,18 @@ async fn handle_response_item(
..
} => {
info!("FunctionCall: {name}({arguments})");
if let Some((server, tool_name)) = sess.mcp_connection_manager.parse_tool_name(&name) {
let resp = handle_mcp_tool_call(
sess,
sub_id,
call_id.clone(),
server,
tool_name,
arguments,
)
.await;
Some(resp)
} else {
let result = handle_function_call(
Some(
handle_function_call(
sess,
turn_context,
turn_diff_tracker,
sub_id.to_string(),
name,
arguments,
call_id.clone(),
call_id,
)
.await;

let output = match result {
Ok(content) => FunctionCallOutputPayload {
content,
success: Some(true),
},
Err(FunctionCallError::RespondToModel(msg)) => FunctionCallOutputPayload {
content: msg,
success: Some(false),
},
};
Some(ResponseInputItem::FunctionCallOutput { call_id, output })
}
.await,
)
}
ResponseItem::LocalShellCall {
id,
@@ -2300,32 +2276,17 @@ async fn handle_response_item(
};

let exec_params = to_exec_params(params, turn_context);
{
let result = handle_container_exec_with_params(
Some(
handle_container_exec_with_params(
exec_params,
sess,
turn_context,
turn_diff_tracker,
sub_id.to_string(),
effective_call_id.clone(),
effective_call_id,
)
.await;

let output = match result {
Ok(content) => FunctionCallOutputPayload {
content,
success: Some(true),
},
Err(FunctionCallError::RespondToModel(msg)) => FunctionCallOutputPayload {
content: msg,
success: Some(false),
},
};
Some(ResponseInputItem::FunctionCallOutput {
call_id: effective_call_id,
output,
})
}
.await,
)
}
ResponseItem::CustomToolCall {
id: _,
@@ -2333,24 +2294,18 @@ async fn handle_response_item(
name,
input,
status: _,
} => {
let result = handle_custom_tool_call(
} => Some(
handle_custom_tool_call(
sess,
turn_context,
turn_diff_tracker,
sub_id.to_string(),
name,
input,
call_id.clone(),
call_id,
)
.await;

let output = match result {
Ok(content) => content,
Err(FunctionCallError::RespondToModel(msg)) => msg,
};
Some(ResponseInputItem::CustomToolCallOutput { call_id, output })
}
.await,
),
ResponseItem::FunctionCallOutput { .. } => {
debug!("unexpected FunctionCallOutput from stream");
None
@@ -2387,17 +2342,22 @@ async fn handle_response_item(

async fn handle_unified_exec_tool_call(
sess: &Session,
call_id: String,
session_id: Option<String>,
arguments: Vec<String>,
timeout_ms: Option<u64>,
) -> Result<String, FunctionCallError> {
) -> ResponseInputItem {
let parsed_session_id = if let Some(session_id) = session_id {
match session_id.parse::<i32>() {
Ok(parsed) => Some(parsed),
Err(output) => {
return Err(FunctionCallError::RespondToModel(format!(
"invalid session_id: {session_id} due to error {output:?}"
)));
return ResponseInputItem::FunctionCallOutput {
call_id: call_id.to_string(),
output: FunctionCallOutputPayload {
content: format!("invalid session_id: {session_id} due to error {output}"),
success: Some(false),
},
};
}
}
} else {
@@ -2410,29 +2370,40 @@ async fn handle_unified_exec_tool_call(
timeout_ms,
};

let value = sess
.unified_exec_manager
.handle_request(request)
.await
.map_err(|err| {
FunctionCallError::RespondToModel(format!("unified exec failed: {err:?}"))
})?;
let result = sess.unified_exec_manager.handle_request(request).await;

#[derive(Serialize)]
struct SerializedUnifiedExecResult {
session_id: Option<String>,
output: String,
let output_payload = match result {
Ok(value) => {
#[derive(Serialize)]
struct SerializedUnifiedExecResult<'a> {
session_id: Option<String>,
output: &'a str,
}

match serde_json::to_string(&SerializedUnifiedExecResult {
session_id: value.session_id.map(|id| id.to_string()),
output: &value.output,
}) {
Ok(serialized) => FunctionCallOutputPayload {
content: serialized,
success: Some(true),
},
Err(err) => FunctionCallOutputPayload {
content: format!("failed to serialize unified exec output: {err}"),
success: Some(false),
},
}
}
Err(err) => FunctionCallOutputPayload {
content: format!("unified exec failed: {err}"),
success: Some(false),
},
};

ResponseInputItem::FunctionCallOutput {
call_id,
output: output_payload,
}

serde_json::to_string(&SerializedUnifiedExecResult {
session_id: value.session_id.map(|id| id.to_string()),
output: value.output,
})
.map_err(|err| {
FunctionCallError::RespondToModel(format!(
"failed to serialize unified exec output: {err:?}"
))
})
}

async fn handle_function_call(
@@ -2443,10 +2414,15 @@ async fn handle_function_call(
name: String,
arguments: String,
call_id: String,
) -> Result<String, FunctionCallError> {
) -> ResponseInputItem {
match name.as_str() {
"container.exec" | "shell" => {
let params = parse_container_exec_arguments(arguments, turn_context, &call_id)?;
let params = match parse_container_exec_arguments(arguments, turn_context, &call_id) {
Ok(params) => params,
Err(output) => {
return *output;
}
};
handle_container_exec_with_params(
params,
sess,
@@ -2467,41 +2443,74 @@ async fn handle_function_call(
timeout_ms: Option<u64>,
}

let args: UnifiedExecArgs = serde_json::from_str(&arguments).map_err(|err| {
FunctionCallError::RespondToModel(format!(
"failed to parse function arguments: {err:?}"
))
})?;
let args = match serde_json::from_str::<UnifiedExecArgs>(&arguments) {
Ok(args) => args,
Err(err) => {
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("failed to parse function arguments: {err}"),
success: Some(false),
},
};
}
};

handle_unified_exec_tool_call(sess, args.session_id, args.input, args.timeout_ms).await
handle_unified_exec_tool_call(
sess,
call_id,
args.session_id,
args.input,
args.timeout_ms,
)
.await
}
"view_image" => {
#[derive(serde::Deserialize)]
struct SeeImageArgs {
path: String,
}
let args: SeeImageArgs = serde_json::from_str(&arguments).map_err(|e| {
FunctionCallError::RespondToModel(format!(
"failed to parse function arguments: {e:?}"
))
})?;
let args = match serde_json::from_str::<SeeImageArgs>(&arguments) {
Ok(a) => a,
Err(e) => {
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("failed to parse function arguments: {e}"),
success: Some(false),
},
};
}
};
let abs = turn_context.resolve_path(Some(args.path));
sess.inject_input(vec![InputItem::LocalImage { path: abs }])
let output = match sess
.inject_input(vec![InputItem::LocalImage { path: abs }])
.await
.map_err(|_| {
FunctionCallError::RespondToModel(
"unable to attach image (no active task)".to_string(),
)
})?;

Ok("attached local image path".to_string())
{
Ok(()) => FunctionCallOutputPayload {
content: "attached local image path".to_string(),
success: Some(true),
},
Err(_) => FunctionCallOutputPayload {
content: "unable to attach image (no active task)".to_string(),
success: Some(false),
},
};
ResponseInputItem::FunctionCallOutput { call_id, output }
}
"apply_patch" => {
let args: ApplyPatchToolArgs = serde_json::from_str(&arguments).map_err(|e| {
FunctionCallError::RespondToModel(format!(
"failed to parse function arguments: {e:?}"
))
})?;
let args = match serde_json::from_str::<ApplyPatchToolArgs>(&arguments) {
Ok(a) => a,
Err(e) => {
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("failed to parse function arguments: {e}"),
success: None,
},
};
}
};
let exec_params = ExecParams {
command: vec!["apply_patch".to_string(), args.input.clone()],
cwd: turn_context.cwd.clone(),
@@ -2523,39 +2532,69 @@ async fn handle_function_call(
"update_plan" => handle_update_plan(sess, arguments, sub_id, call_id).await,
EXEC_COMMAND_TOOL_NAME => {
// TODO(mbolin): Sandbox check.
let exec_params: ExecCommandParams = serde_json::from_str(&arguments).map_err(|e| {
FunctionCallError::RespondToModel(format!(
"failed to parse function arguments: {e:?}"
))
})?;
let exec_params = match serde_json::from_str::<ExecCommandParams>(&arguments) {
Ok(params) => params,
Err(e) => {
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("failed to parse function arguments: {e}"),
success: Some(false),
},
};
}
};
let result = sess
.session_manager
.handle_exec_command_request(exec_params)
.await;
match result {
Ok(output) => Ok(output.to_text_output()),
Err(err) => Err(FunctionCallError::RespondToModel(err)),
let function_call_output = crate::exec_command::result_into_payload(result);
ResponseInputItem::FunctionCallOutput {
call_id,
output: function_call_output,
}
}
WRITE_STDIN_TOOL_NAME => {
let write_stdin_params =
serde_json::from_str::<WriteStdinParams>(&arguments).map_err(|e| {
FunctionCallError::RespondToModel(format!(
"failed to parse function arguments: {e:?}"
))
})?;

let write_stdin_params = match serde_json::from_str::<WriteStdinParams>(&arguments) {
Ok(params) => params,
Err(e) => {
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("failed to parse function arguments: {e}"),
success: Some(false),
},
};
}
};
let result = sess
.session_manager
.handle_write_stdin_request(write_stdin_params)
.await
.map_err(FunctionCallError::RespondToModel)?;

Ok(result.to_text_output())
.await;
let function_call_output: FunctionCallOutputPayload =
crate::exec_command::result_into_payload(result);
ResponseInputItem::FunctionCallOutput {
call_id,
output: function_call_output,
}
}
_ => {
match sess.mcp_connection_manager.parse_tool_name(&name) {
Some((server, tool_name)) => {
handle_mcp_tool_call(sess, &sub_id, call_id, server, tool_name, arguments).await
}
None => {
// Unknown function: reply with structured failure so the model can adapt.
ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("unsupported call: {name}"),
success: None,
},
}
}
}
}
_ => Err(FunctionCallError::RespondToModel(format!(
"unsupported call: {name}"
))),
}
}

@@ -2567,7 +2606,7 @@ async fn handle_custom_tool_call(
name: String,
input: String,
call_id: String,
) -> Result<String, FunctionCallError> {
) -> ResponseInputItem {
info!("CustomToolCall: {name} {input}");
match name.as_str() {
"apply_patch" => {
@@ -2579,8 +2618,7 @@ async fn handle_custom_tool_call(
with_escalated_permissions: None,
justification: None,
};

handle_container_exec_with_params(
let resp = handle_container_exec_with_params(
exec_params,
sess,
turn_context,
@@ -2588,13 +2626,26 @@ async fn handle_custom_tool_call(
sub_id,
call_id,
)
.await
.await;

// Convert function-call style output into a custom tool call output
match resp {
ResponseInputItem::FunctionCallOutput { call_id, output } => {
ResponseInputItem::CustomToolCallOutput {
call_id,
output: output.content,
}
}
// Pass through if already a custom tool output or other variant
other => other,
}
}
_ => {
debug!("unexpected CustomToolCall from stream");
Err(FunctionCallError::RespondToModel(format!(
"unsupported custom tool call: {name}"
)))
ResponseInputItem::CustomToolCallOutput {
call_id,
output: format!("unsupported custom tool call: {name}"),
}
}
}
}
@@ -2613,13 +2664,23 @@ fn to_exec_params(params: ShellToolCallParams, turn_context: &TurnContext) -> Ex
fn parse_container_exec_arguments(
arguments: String,
turn_context: &TurnContext,
_call_id: &str,
) -> Result<ExecParams, FunctionCallError> {
serde_json::from_str::<ShellToolCallParams>(&arguments)
.map(|p| to_exec_params(p, turn_context))
.map_err(|e| {
FunctionCallError::RespondToModel(format!("failed to parse function arguments: {e:?}"))
})
call_id: &str,
) -> Result<ExecParams, Box<ResponseInputItem>> {
// parse command
match serde_json::from_str::<ShellToolCallParams>(&arguments) {
Ok(shell_tool_call_params) => Ok(to_exec_params(shell_tool_call_params, turn_context)),
Err(e) => {
// allow model to re-sample
let output = ResponseInputItem::FunctionCallOutput {
call_id: call_id.to_string(),
output: FunctionCallOutputPayload {
content: format!("failed to parse function arguments: {e}"),
success: None,
},
};
Err(Box::new(output))
}
}
}

pub struct ExecInvokeArgs<'a> {
@@ -2656,14 +2717,20 @@ async fn handle_container_exec_with_params(
turn_diff_tracker: &mut TurnDiffTracker,
sub_id: String,
call_id: String,
) -> Result<String, FunctionCallError> {
) -> ResponseInputItem {
if params.with_escalated_permissions.unwrap_or(false)
&& !matches!(turn_context.approval_policy, AskForApproval::OnRequest)
{
return Err(FunctionCallError::RespondToModel(format!(
"approval policy is {policy:?}; reject command — you should not ask for escalated permissions if the approval policy is {policy:?}",
policy = turn_context.approval_policy
)));
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!(
"approval policy is {policy:?}; reject command — you should not ask for escalated permissions if the approval policy is {policy:?}",
policy = turn_context.approval_policy
),
success: None,
},
};
}

// check if this was a patch, and apply it if so
@@ -2680,9 +2747,13 @@ async fn handle_container_exec_with_params(
// It looks like an invocation of `apply_patch`, but we
// could not resolve it into a patch that would apply
// cleanly. Return to model for resample.
return Err(FunctionCallError::RespondToModel(format!(
"error: {parse_error:#?}"
)));
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("error: {parse_error:#}"),
success: None,
},
};
}
MaybeApplyPatchVerified::ShellParseError(error) => {
trace!("Failed to parse shell command, {error:?}");
@@ -2700,9 +2771,13 @@ async fn handle_container_exec_with_params(
.ok()
.map(|p| p.to_string_lossy().to_string());
let Some(path_to_codex) = path_to_codex else {
return Err(FunctionCallError::RespondToModel(
"failed to determine path to codex executable".to_string(),
));
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: "failed to determine path to codex executable".to_string(),
success: None,
},
};
};

let params = ExecParams {
@@ -2768,9 +2843,13 @@ async fn handle_container_exec_with_params(
sess.add_approved_command(params.command.clone()).await;
}
ReviewDecision::Denied | ReviewDecision::Abort => {
return Err(FunctionCallError::RespondToModel(
"exec command rejected by user".to_string(),
));
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: "exec command rejected by user".to_string(),
success: None,
},
};
}
}
// No sandboxing is applied because the user has given
@@ -2780,9 +2859,13 @@ async fn handle_container_exec_with_params(
SandboxType::None
}
SafetyCheck::Reject { reason } => {
return Err(FunctionCallError::RespondToModel(format!(
"exec command rejected: {reason:?}"
)));
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!("exec command rejected: {reason}"),
success: None,
},
};
}
};

@@ -2829,11 +2912,15 @@ async fn handle_container_exec_with_params(
match output_result {
Ok(output) => {
let ExecToolCallOutput { exit_code, .. } = &output;

let is_success = *exit_code == 0;
let content = format_exec_output(&output);
if *exit_code == 0 {
Ok(content)
} else {
Err(FunctionCallError::RespondToModel(content))
ResponseInputItem::FunctionCallOutput {
call_id: call_id.clone(),
output: FunctionCallOutputPayload {
content,
success: Some(is_success),
},
}
}
Err(CodexErr::Sandbox(error)) => {
@@ -2848,9 +2935,13 @@ async fn handle_container_exec_with_params(
)
.await
}
Err(e) => Err(FunctionCallError::RespondToModel(format!(
"execution error: {e:?}"
))),
Err(e) => ResponseInputItem::FunctionCallOutput {
call_id: call_id.clone(),
output: FunctionCallOutputPayload {
content: format!("execution error: {e}"),
success: None,
},
},
}
}

@@ -2862,23 +2953,35 @@ async fn handle_sandbox_error(
sandbox_type: SandboxType,
sess: &Session,
turn_context: &TurnContext,
) -> Result<String, FunctionCallError> {
) -> ResponseInputItem {
let call_id = exec_command_context.call_id.clone();
let sub_id = exec_command_context.sub_id.clone();
let cwd = exec_command_context.cwd.clone();

if let SandboxErr::Timeout { output } = &error {
let content = format_exec_output(output);
return Err(FunctionCallError::RespondToModel(content));
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content,
success: Some(false),
},
};
}

// Early out if either the user never wants to be asked for approval, or
// we're letting the model manage escalation requests. Otherwise, continue
match turn_context.approval_policy {
AskForApproval::Never | AskForApproval::OnRequest => {
return Err(FunctionCallError::RespondToModel(format!(
"failed in sandbox {sandbox_type:?} with execution error: {error:?}"
)));
return ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: format!(
"failed in sandbox {sandbox_type:?} with execution error: {error}"
),
success: Some(false),
},
};
}
AskForApproval::UnlessTrusted | AskForApproval::OnFailure => (),
}
@@ -2944,23 +3047,36 @@ async fn handle_sandbox_error(
match retry_output_result {
Ok(retry_output) => {
let ExecToolCallOutput { exit_code, .. } = &retry_output;

let is_success = *exit_code == 0;
let content = format_exec_output(&retry_output);
if *exit_code == 0 {
Ok(content)
} else {
Err(FunctionCallError::RespondToModel(content))

ResponseInputItem::FunctionCallOutput {
call_id: call_id.clone(),
output: FunctionCallOutputPayload {
content,
success: Some(is_success),
},
}
}
Err(e) => Err(FunctionCallError::RespondToModel(format!(
"retry failed: {e}"
))),
Err(e) => ResponseInputItem::FunctionCallOutput {
call_id: call_id.clone(),
output: FunctionCallOutputPayload {
content: format!("retry failed: {e}"),
success: None,
},
},
}
}
ReviewDecision::Denied | ReviewDecision::Abort => {
// Fall through to original failure handling.
Err(FunctionCallError::RespondToModel(
"exec command rejected by user".to_string(),
))
ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: "exec command rejected by user".to_string(),
success: None,
},
}
}
}
}
@@ -3682,8 +3798,8 @@ mod tests {
)
.await;

let Err(FunctionCallError::RespondToModel(output)) = resp else {
panic!("expected error result");
let ResponseInputItem::FunctionCallOutput { output, .. } = resp else {
panic!("expected FunctionCallOutput");
};

let expected = format!(
@@ -3691,7 +3807,7 @@ mod tests {
policy = turn_context.approval_policy
);

pretty_assertions::assert_eq!(output, expected);
pretty_assertions::assert_eq!(output.content, expected);

// Now retry the same command WITHOUT escalated permissions; should succeed.
// Force DangerFullAccess to avoid platform sandbox dependencies in tests.
@@ -3707,7 +3823,9 @@ mod tests {
)
.await;

let output = resp2.expect("expected Ok result");
let ResponseInputItem::FunctionCallOutput { output, .. } = resp2 else {
panic!("expected FunctionCallOutput on retry");
};

#[derive(Deserialize, PartialEq, Eq, Debug)]
struct ResponseExecMetadata {
@@ -3721,9 +3839,10 @@ mod tests {
}

let exec_output: ResponseExecOutput =
serde_json::from_str(&output).expect("valid exec output json");
serde_json::from_str(&output.content).expect("valid exec output json");

pretty_assertions::assert_eq!(exec_output.metadata, ResponseExecMetadata { exit_code: 0 });
assert!(exec_output.output.contains("hi"));
pretty_assertions::assert_eq!(output.success, Some(true));
}
}

@@ -55,7 +55,7 @@ pub(super) async fn run_inline_auto_compact_task(
let input = vec![InputItem::Text {
text: SUMMARIZATION_PROMPT.to_string(),
}];
run_compact_task_inner(sess, turn_context, sub_id, input, false).await;
run_compact_task_inner(sess, turn_context, sub_id, input, None, false).await;
}

pub(super) async fn run_compact_task(
@@ -71,7 +71,15 @@ pub(super) async fn run_compact_task(
}),
};
sess.send_event(start_event).await;
run_compact_task_inner(sess.clone(), turn_context, sub_id.clone(), input, true).await;
run_compact_task_inner(
sess.clone(),
turn_context,
sub_id.clone(),
input,
None,
true,
)
.await;
let event = Event {
id: sub_id,
msg: EventMsg::TaskComplete(TaskCompleteEvent {
@@ -86,6 +94,7 @@ async fn run_compact_task_inner(
turn_context: Arc<TurnContext>,
sub_id: String,
input: Vec<InputItem>,
instructions_override: Option<String>,
remove_task_on_completion: bool,
) {
let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);
@@ -95,7 +104,9 @@ async fn run_compact_task_inner(

let prompt = Prompt {
input: turn_input,
..Default::default()
tools: Vec::new(),
base_instructions_override: instructions_override,
output_schema: None,
};

let max_retries = turn_context.client.get_provider().stream_max_retries();

@@ -12,3 +12,4 @@ pub use responses_api::WRITE_STDIN_TOOL_NAME;
pub use responses_api::create_exec_command_tool_for_responses_api;
pub use responses_api::create_write_stdin_tool_for_responses_api;
pub use session_manager::SessionManager as ExecSessionManager;
pub use session_manager::result_into_payload;

@@ -21,6 +21,7 @@ use crate::exec_command::exec_command_params::WriteStdinParams;
use crate::exec_command::exec_command_session::ExecCommandSession;
use crate::exec_command::session_id::SessionId;
use crate::truncate::truncate_middle;
use codex_protocol::models::FunctionCallOutputPayload;

#[derive(Debug, Default)]
pub struct SessionManager {
@@ -37,7 +38,7 @@ pub struct ExecCommandOutput {
}

impl ExecCommandOutput {
pub(crate) fn to_text_output(&self) -> String {
fn to_text_output(&self) -> String {
let wall_time_secs = self.wall_time.as_secs_f32();
let termination_status = match self.exit_status {
ExitStatus::Exited(code) => format!("Process exited with code {code}"),
@@ -67,6 +68,19 @@ pub enum ExitStatus {
Ongoing(SessionId),
}

pub fn result_into_payload(result: Result<ExecCommandOutput, String>) -> FunctionCallOutputPayload {
match result {
Ok(output) => FunctionCallOutputPayload {
content: output.to_text_output(),
success: Some(true),
},
Err(err) => FunctionCallOutputPayload {
content: err,
success: Some(false),
},
}
}

impl SessionManager {
/// Processes the request and is required to send a response via `outgoing`.
pub async fn handle_exec_command_request(

@@ -1,7 +0,0 @@
use thiserror::Error;

#[derive(Debug, Error, PartialEq)]
pub enum FunctionCallError {
#[error("{0}")]
RespondToModel(String),
}
@@ -75,7 +75,6 @@ pub use rollout::find_conversation_path_by_id_str;
pub use rollout::list::ConversationItem;
pub use rollout::list::ConversationsPage;
pub use rollout::list::Cursor;
mod function_tool;
mod user_notification;
pub mod util;

@@ -7,14 +7,13 @@ use crate::model_family::ModelFamily;
/// Though this would help present more accurate pricing information in the UI.
#[derive(Debug)]
pub(crate) struct ModelInfo {
/// Size of the context window in tokens. This is the maximum size of the input context.
/// Size of the context window in tokens.
pub(crate) context_window: u64,

/// Maximum number of output tokens that can be generated for the model.
pub(crate) max_output_tokens: u64,

/// Token threshold where we should automatically compact conversation history. This considers
/// input tokens + output tokens of this turn.
/// Token threshold where we should automatically compact conversation history.
pub(crate) auto_compact_token_limit: Option<i64>,
}

@@ -65,7 +64,7 @@ pub(crate) fn get_model_info(model_family: &ModelFamily) -> Option<ModelInfo> {
_ if slug.starts_with("gpt-5-codex") => Some(ModelInfo {
context_window: 272_000,
max_output_tokens: 128_000,
auto_compact_token_limit: Some(350_000),
auto_compact_token_limit: Some(220_000),
}),

_ if slug.starts_with("gpt-5") => Some(ModelInfo::new(272_000, 128_000)),

@@ -2,12 +2,13 @@ use std::collections::BTreeMap;
use std::sync::LazyLock;

use crate::codex::Session;
use crate::function_tool::FunctionCallError;
use crate::openai_tools::JsonSchema;
use crate::openai_tools::OpenAiTool;
use crate::openai_tools::ResponsesApiTool;
use crate::protocol::Event;
use crate::protocol::EventMsg;
use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::ResponseInputItem;

// Use the canonical plan tool types from the protocol crate to ensure
// type-identity matches events transported via `codex_protocol`.
@@ -66,20 +67,44 @@ pub(crate) async fn handle_update_plan(
session: &Session,
arguments: String,
sub_id: String,
_call_id: String,
) -> Result<String, FunctionCallError> {
let args = parse_update_plan_arguments(&arguments)?;
session
.send_event(Event {
id: sub_id.to_string(),
msg: EventMsg::PlanUpdate(args),
})
.await;
Ok("Plan updated".to_string())
call_id: String,
) -> ResponseInputItem {
match parse_update_plan_arguments(arguments, &call_id) {
Ok(args) => {
let output = ResponseInputItem::FunctionCallOutput {
call_id,
output: FunctionCallOutputPayload {
content: "Plan updated".to_string(),
success: Some(true),
},
};
session
.send_event(Event {
id: sub_id.to_string(),
msg: EventMsg::PlanUpdate(args),
})
.await;
output
}
Err(output) => *output,
}
}

fn parse_update_plan_arguments(arguments: &str) -> Result<UpdatePlanArgs, FunctionCallError> {
serde_json::from_str::<UpdatePlanArgs>(arguments).map_err(|e| {
FunctionCallError::RespondToModel(format!("failed to parse function arguments: {e}"))
})
fn parse_update_plan_arguments(
arguments: String,
call_id: &str,
) -> Result<UpdatePlanArgs, Box<ResponseInputItem>> {
match serde_json::from_str::<UpdatePlanArgs>(&arguments) {
Ok(args) => Ok(args),
Err(e) => {
let output = ResponseInputItem::FunctionCallOutput {
call_id: call_id.to_string(),
output: FunctionCallOutputPayload {
content: format!("failed to parse function arguments: {e}"),
success: None,
},
};
Err(Box::new(output))
}
}
}

@@ -7,6 +7,8 @@ use std::path::Path;
use std::path::PathBuf;

use codex_protocol::mcp_protocol::ConversationId;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value;
use time::OffsetDateTime;
use time::format_description::FormatItem;
@@ -26,6 +28,7 @@ use super::policy::is_persisted_response_item;
use crate::config::Config;
use crate::default_client::ORIGINATOR;
use crate::git_info::collect_git_info;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::protocol::ResumedHistory;
use codex_protocol::protocol::RolloutItem;
@@ -33,6 +36,19 @@ use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionMeta;
use codex_protocol::protocol::SessionMetaLine;

#[derive(Serialize, Deserialize, Default, Clone)]
pub struct SessionStateSnapshot {}

#[derive(Serialize, Deserialize, Default, Clone)]
pub struct SavedSession {
pub session: SessionMeta,
#[serde(default)]
pub items: Vec<ResponseItem>,
#[serde(default)]
pub state: SessionStateSnapshot,
pub session_id: ConversationId,
}

/// Records all [`ResponseItem`]s for a session and flushes them to disk after
/// every update.
///

@@ -10,6 +10,11 @@ use crate::openai_tools::ResponsesApiTool;

const APPLY_PATCH_LARK_GRAMMAR: &str = include_str!("tool_apply_patch.lark");

#[derive(Serialize, Deserialize)]
pub(crate) struct ApplyPatchToolArgs {
pub(crate) input: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum ApplyPatchToolType {

@@ -67,10 +67,6 @@ pub struct Cli {
#[arg(long = "json", default_value_t = false)]
pub json: bool,

/// Whether to include the plan tool in the conversation.
#[arg(long = "include-plan-tool", default_value_t = false)]
pub include_plan_tool: bool,

/// Specifies file where the last message from the agent should be written.
#[arg(long = "output-last-message")]
pub last_message_file: Option<PathBuf>,

@@ -539,37 +539,8 @@ impl EventProcessor for EventProcessorWithHumanOutput {
}
EventMsg::PlanUpdate(plan_update_event) => {
let UpdatePlanArgs { explanation, plan } = plan_update_event;

// Header
ts_println!(self, "{}", "Plan update".style(self.magenta));

// Optional explanation
if let Some(explanation) = explanation
&& !explanation.trim().is_empty()
{
ts_println!(self, "{}", explanation.style(self.italic));
}

// Pretty-print the plan items with simple status markers.
for item in plan {
use codex_core::plan_tool::StepStatus;
match item.status {
StepStatus::Completed => {
ts_println!(self, " {} {}", "✓".style(self.green), item.step);
}
StepStatus::InProgress => {
ts_println!(self, " {} {}", "→".style(self.cyan), item.step);
}
StepStatus::Pending => {
ts_println!(
self,
" {} {}",
"•".style(self.dimmed),
item.step.style(self.dimmed)
);
}
}
}
ts_println!(self, "explanation: {explanation:?}");
ts_println!(self, "plan: {plan:?}");
}
EventMsg::GetHistoryEntryResponse(_) => {
// Currently ignored in exec output.

@@ -53,7 +53,6 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
sandbox_mode: sandbox_mode_cli_arg,
prompt,
output_schema: output_schema_path,
include_plan_tool,
config_overrides,
} = cli;

@@ -162,7 +161,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
model_provider,
codex_linux_sandbox_exe,
base_instructions: None,
include_plan_tool: Some(include_plan_tool),
include_plan_tool: None,
include_apply_patch_tool: None,
include_view_image_tool: None,
show_raw_agent_reasoning: oss.then_some(true),

@@ -1,3 +1,3 @@
[toolchain]
channel = "1.90.0"
components = ["clippy", "rustfmt", "rust-src"]
channel = "1.89.0"
components = [ "clippy", "rustfmt", "rust-src"]

@@ -53,6 +53,7 @@ image = { workspace = true, features = [
itertools = { workspace = true }
lazy_static = { workspace = true }
mcp-types = { workspace = true }
once_cell = { workspace = true }
path-clean = { workspace = true }
rand = { workspace = true }
ratatui = { workspace = true, features = [

@@ -77,9 +77,11 @@ use crate::history_cell::CommandOutput;
use crate::history_cell::ExecCell;
use crate::history_cell::HistoryCell;
use crate::history_cell::PatchEventType;
use crate::history_cell::RateLimitSnapshotDisplay;
use crate::markdown::append_markdown;
use crate::slash_command::SlashCommand;
use crate::status::RateLimitSnapshotDisplay;
use crate::status::new_status_output;
use crate::status::rate_limit_snapshot_display;
use crate::text_formatting::truncate_text;
use crate::tui::FrameRequester;
// streaming internals are provided by crate::streaming and crate::markdown_stream
@@ -91,6 +93,7 @@ use self::agent::spawn_agent;
use self::agent::spawn_agent_from_existing;
mod session_header;
use self::session_header::SessionHeader;
use crate::streaming::controller::AppEventHistorySink;
use crate::streaming::controller::StreamController;
use std::path::Path;

@@ -251,13 +254,11 @@ fn create_initial_user_message(text: String, image_paths: Vec<PathBuf>) -> Optio

impl ChatWidget {
fn flush_answer_stream_with_separator(&mut self) {
if let Some(mut controller) = self.stream_controller.take()
&& let Some(cell) = controller.finalize()
{
self.add_boxed_history(cell);
if let Some(mut controller) = self.stream_controller.take() {
let sink = AppEventHistorySink(self.app_event_tx.clone());
controller.finalize(&sink);
}
}

// --- Small event handlers ---
fn on_session_configured(&mut self, event: codex_core::protocol::SessionConfiguredEvent) {
self.bottom_pane
@@ -347,8 +348,12 @@ impl ChatWidget {
}

fn on_task_complete(&mut self, last_agent_message: Option<String>) {
// If a stream is currently active, finalize it.
self.flush_answer_stream_with_separator();
// If a stream is currently active, finalize only that stream to flush any tail
// without emitting stray headers for other streams.
if let Some(mut controller) = self.stream_controller.take() {
let sink = AppEventHistorySink(self.app_event_tx.clone());
controller.finalize(&sink);
}
// Mark task stopped and request redraw now that all content is in history.
self.bottom_pane.set_task_running(false);
self.running_commands.clear();
@@ -379,7 +384,7 @@ impl ChatWidget {
snapshot.primary.as_ref().map(|window| window.used_percent),
);

let display = history_cell::rate_limit_snapshot_display(&snapshot, Local::now());
let display = rate_limit_snapshot_display(&snapshot, Local::now());
self.rate_limit_snapshot = Some(display);

if !warnings.is_empty() {
@@ -551,18 +556,14 @@ impl ChatWidget {
self.add_to_history(history_cell::new_stream_error_event(message));
self.request_redraw();
}

/// Periodic tick to commit at most one queued line to history with a small delay,
/// animating the output.
pub(crate) fn on_commit_tick(&mut self) {
if let Some(controller) = self.stream_controller.as_mut() {
let (cell, is_idle) = controller.on_commit_tick();
if let Some(cell) = cell {
self.bottom_pane.set_task_running(false);
self.add_boxed_history(cell);
}
if is_idle {
self.app_event_tx.send(AppEvent::StopCommitAnimation);
let sink = AppEventHistorySink(self.app_event_tx.clone());
let finished = controller.on_commit_tick(&sink);
if finished {
self.handle_stream_finished();
}
}
}
@@ -606,10 +607,9 @@ impl ChatWidget {
if self.stream_controller.is_none() {
self.stream_controller = Some(StreamController::new(self.config.clone()));
}
if let Some(controller) = self.stream_controller.as_mut()
&& controller.push(&delta)
{
self.app_event_tx.send(AppEvent::StartCommitAnimation);
if let Some(controller) = self.stream_controller.as_mut() {
let sink = AppEventHistorySink(self.app_event_tx.clone());
controller.push_and_maybe_commit(&delta, &sink);
}
self.request_redraw();
}
@@ -1466,7 +1466,7 @@ impl ChatWidget {
default_usage = TokenUsage::default();
&default_usage
};
self.add_to_history(history_cell::new_status_output(
self.add_to_history(new_status_output(
&self.config,
usage_ref,
&self.conversation_id,

@@ -11,30 +11,17 @@ use crate::wrapping::RtOptions;
use crate::wrapping::word_wrap_line;
use crate::wrapping::word_wrap_lines;
use base64::Engine;
use chrono::DateTime;
use chrono::Duration as ChronoDuration;
use chrono::Local;
use codex_ansi_escape::ansi_escape_line;
use codex_common::create_config_summary_entries;
use codex_common::elapsed::format_duration;
use codex_core::auth::get_auth_file;
use codex_core::auth::try_read_auth_json;
use codex_core::config::Config;
use codex_core::config_types::ReasoningSummaryFormat;
use codex_core::plan_tool::PlanItemArg;
use codex_core::plan_tool::StepStatus;
use codex_core::plan_tool::UpdatePlanArgs;
use codex_core::project_doc::discover_project_doc_paths;
use codex_core::protocol::FileChange;
use codex_core::protocol::McpInvocation;
use codex_core::protocol::RateLimitSnapshot;
use codex_core::protocol::RateLimitWindow;
use codex_core::protocol::SandboxPolicy;
use codex_core::protocol::SessionConfiguredEvent;
use codex_core::protocol::TokenUsage;
use codex_core::protocol_config_types::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::mcp_protocol::ConversationId;
use codex_protocol::num_format::format_with_separators;
use codex_protocol::parse_command::ParsedCommand;
use image::DynamicImage;
use image::ImageReader;
@@ -51,7 +38,6 @@ use ratatui::widgets::WidgetRef;
use ratatui::widgets::Wrap;
use std::any::Any;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::io::Cursor;
use std::path::Path;
use std::path::PathBuf;
@@ -60,10 +46,6 @@ use std::time::Instant;
use tracing::error;
use unicode_width::UnicodeWidthStr;

const STATUS_LIMIT_BAR_SEGMENTS: usize = 20;
const STATUS_LIMIT_BAR_FILLED: &str = "█";
const STATUS_LIMIT_BAR_EMPTY: &str = " ";

#[derive(Clone, Debug)]
pub(crate) struct CommandOutput {
pub(crate) exit_code: i32,
@@ -225,6 +207,12 @@ pub(crate) struct PlainHistoryCell {
lines: Vec<Line<'static>>,
}

impl PlainHistoryCell {
pub(crate) fn new(lines: Vec<Line<'static>>) -> Self {
Self { lines }
}
}

impl HistoryCell for PlainHistoryCell {
fn display_lines(&self, _width: u16) -> Vec<Line<'static>> {
self.lines.clone()
@@ -636,9 +624,9 @@ impl HistoryCell for CompletedMcpToolCallWithImageOutput {
}

const TOOL_CALL_MAX_LINES: usize = 5;
const SESSION_HEADER_MAX_INNER_WIDTH: usize = 56; // Just an eyeballed value
pub(crate) const SESSION_HEADER_MAX_INNER_WIDTH: usize = 64; // Just an eyeballed value

fn card_inner_width(width: u16, max_inner_width: usize) -> Option<usize> {
pub(crate) fn card_inner_width(width: u16, max_inner_width: usize) -> Option<usize> {
if width < 4 {
return None;
}
@@ -646,7 +634,7 @@ fn card_inner_width(width: u16, max_inner_width: usize) -> Option<usize> {
Some(inner_width)
}

fn with_border(lines: Vec<Line<'static>>) -> Vec<Line<'static>> {
pub(crate) fn with_border(lines: Vec<Line<'static>>) -> Vec<Line<'static>> {
let content_width = lines
.iter()
.map(|line| {
@@ -682,26 +670,29 @@ fn with_border(lines: Vec<Line<'static>>) -> Vec<Line<'static>> {
out
}

fn title_case(s: &str) -> String {
if s.is_empty() {
return String::new();
}
let mut chars = s.chars();
let first = match chars.next() {
Some(c) => c,
None => return String::new(),
pub(crate) fn format_directory_display(directory: &Path, max_width: Option<usize>) -> String {
let formatted = if let Some(rel) = relativize_to_home(directory) {
if rel.as_os_str().is_empty() {
"~".to_string()
} else {
format!("~{}{}", std::path::MAIN_SEPARATOR, rel.display())
}
} else {
directory.display().to_string()
};
let rest: String = chars.as_str().to_ascii_lowercase();
first.to_uppercase().collect::<String>() + &rest

if let Some(max_width) = max_width {
if max_width == 0 {
return String::new();
}
if UnicodeWidthStr::width(formatted.as_str()) > max_width {
return crate::text_formatting::center_truncate_path(&formatted, max_width);
}
}

formatted
}

fn pretty_provider_name(id: &str) -> String {
if id.eq_ignore_ascii_case("openai") {
"OpenAI".to_string()
} else {
title_case(id)
}
}
/// Return the emoji followed by a hair space (U+200A).
/// Using only the hair space avoids excessive padding after the emoji while
/// still providing a small visual gap across terminals.
@@ -827,30 +818,12 @@ impl SessionHeaderHistoryCell {
}

fn format_directory(&self, max_width: Option<usize>) -> String {
Self::format_directory_inner(&self.directory, max_width)
format_directory_display(&self.directory, max_width)
}

#[cfg(test)]
fn format_directory_inner(directory: &Path, max_width: Option<usize>) -> String {
let formatted = if let Some(rel) = relativize_to_home(directory) {
if rel.as_os_str().is_empty() {
"~".to_string()
} else {
format!("~{}{}", std::path::MAIN_SEPARATOR, rel.display())
}
} else {
directory.display().to_string()
};

if let Some(max_width) = max_width {
if max_width == 0 {
return String::new();
}
if UnicodeWidthStr::width(formatted.as_str()) > max_width {
return crate::text_formatting::center_truncate_path(&formatted, max_width);
}
}

formatted
format_directory_display(directory, max_width)
}

fn reasoning_label(&self) -> Option<&'static str> {
@@ -871,7 +844,6 @@ impl HistoryCell for SessionHeaderHistoryCell {

let make_row = |spans: Vec<Span<'static>>| Line::from(spans);

// Title line rendered inside the box: ">_ OpenAI Codex (vX)"
let title_spans: Vec<Span<'static>> = vec![
Span::from(">_ ").dim(),
Span::from("OpenAI Codex").bold(),
@@ -924,6 +896,12 @@ pub(crate) struct CompositeHistoryCell {
parts: Vec<Box<dyn HistoryCell>>,
}

impl CompositeHistoryCell {
pub(crate) fn new(parts: Vec<Box<dyn HistoryCell>>) -> Self {
Self { parts }
}
}

impl HistoryCell for CompositeHistoryCell {
fn display_lines(&self, width: u16) -> Vec<Line<'static>> {
let mut out: Vec<Line<'static>> = Vec::new();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct RateLimitWindowDisplay {
|
||||
pub used_percent: f64,
|
||||
pub resets_at: Option<String>,
|
||||
}
|
||||
|
||||
impl RateLimitWindowDisplay {
|
||||
fn from_window(window: &RateLimitWindow, captured_at: DateTime<Local>) -> Self {
|
||||
let resets_at = window
|
||||
.resets_in_seconds
|
||||
.and_then(|seconds| i64::try_from(seconds).ok())
|
||||
.and_then(|secs| captured_at.checked_add_signed(ChronoDuration::seconds(secs)))
|
||||
.map(|dt| dt.format("%b %-d, %Y %-I:%M %p").to_string());
|
||||
|
||||
Self {
|
||||
used_percent: window.used_percent,
|
||||
resets_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct RateLimitSnapshotDisplay {
|
||||
pub primary: Option<RateLimitWindowDisplay>,
|
||||
pub secondary: Option<RateLimitWindowDisplay>,
|
||||
}
|
||||
|
||||
pub(crate) fn rate_limit_snapshot_display(
|
||||
snapshot: &RateLimitSnapshot,
|
||||
captured_at: DateTime<Local>,
|
||||
) -> RateLimitSnapshotDisplay {
|
||||
RateLimitSnapshotDisplay {
|
||||
primary: snapshot
|
||||
.primary
|
||||
.as_ref()
|
||||
.map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
|
||||
secondary: snapshot
|
||||
.secondary
|
||||
.as_ref()
|
||||
.map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn new_status_output(
    config: &Config,
    usage: &TokenUsage,
    session_id: &Option<ConversationId>,
    rate_limits: Option<&RateLimitSnapshotDisplay>,
) -> PlainHistoryCell {
    let mut lines: Vec<Line<'static>> = Vec::new();
    lines.push("/status".magenta().into());

    let config_entries = create_config_summary_entries(config);
    let lookup = |k: &str| -> String {
        config_entries
            .iter()
            .find(|(key, _)| *key == k)
            .map(|(_, v)| v.clone())
            .unwrap_or_default()
    };

    // 📂 Workspace
    lines.push(vec![padded_emoji("📂").into(), "Workspace".bold()].into());
    // Path (home-relative, e.g., ~/code/project)
    let cwd_str = match relativize_to_home(&config.cwd) {
        Some(rel) if !rel.as_os_str().is_empty() => {
            let sep = std::path::MAIN_SEPARATOR;
            format!("~{sep}{}", rel.display())
        }
        Some(_) => "~".to_string(),
        None => config.cwd.display().to_string(),
    };
    lines.push(vec![" • Path: ".into(), cwd_str.into()].into());
    // Approval mode (as-is)
    lines.push(vec![" • Approval Mode: ".into(), lookup("approval").into()].into());
    // Sandbox (simplified name only)
    let sandbox_name = match &config.sandbox_policy {
        SandboxPolicy::DangerFullAccess => "danger-full-access",
        SandboxPolicy::ReadOnly => "read-only",
        SandboxPolicy::WorkspaceWrite { .. } => "workspace-write",
    };
    lines.push(vec![" • Sandbox: ".into(), sandbox_name.into()].into());

    // AGENTS.md files discovered via core's project_doc logic
    let agents_list = {
        match discover_project_doc_paths(config) {
            Ok(paths) => {
                let mut rels: Vec<String> = Vec::new();
                for p in paths {
                    let display = if let Some(parent) = p.parent() {
                        if parent == config.cwd {
                            "AGENTS.md".to_string()
                        } else {
                            let mut cur = config.cwd.as_path();
                            let mut ups = 0usize;
                            let mut reached = false;
                            while let Some(c) = cur.parent() {
                                if cur == parent {
                                    reached = true;
                                    break;
                                }
                                cur = c;
                                ups += 1;
                            }
                            if reached {
                                let up = format!("..{}", std::path::MAIN_SEPARATOR);
                                format!("{}AGENTS.md", up.repeat(ups))
                            } else if let Ok(stripped) = p.strip_prefix(&config.cwd) {
                                stripped.display().to_string()
                            } else {
                                p.display().to_string()
                            }
                        }
                    } else {
                        p.display().to_string()
                    };
                    rels.push(display);
                }
                rels
            }
            Err(_) => Vec::new(),
        }
    };
    if agents_list.is_empty() {
        lines.push(" • AGENTS files: (none)".into());
    } else {
        lines.push(vec![" • AGENTS files: ".into(), agents_list.join(", ").into()].into());
    }
    lines.push("".into());

    // 👤 Account (only if ChatGPT tokens exist), shown under the first block
    let auth_file = get_auth_file(&config.codex_home);
    let auth = try_read_auth_json(&auth_file).ok();
    let is_chatgpt_auth = auth
        .as_ref()
        .and_then(|auth| auth.tokens.as_ref())
        .is_some();
    if is_chatgpt_auth
        && let Some(auth) = auth.as_ref()
        && let Some(tokens) = auth.tokens.clone()
    {
        lines.push(vec![padded_emoji("👤").into(), "Account".bold()].into());
        lines.push(" • Signed in with ChatGPT".into());

        let info = tokens.id_token;
        if let Some(email) = &info.email {
            lines.push(vec![" • Login: ".into(), email.clone().into()].into());
        }

        match auth.openai_api_key.as_deref() {
            Some(key) if !key.is_empty() => {
                lines.push(" • Using API key. Run codex login to use ChatGPT plan".into());
            }
            _ => {
                let plan_text = info
                    .get_chatgpt_plan_type()
                    .map(|s| title_case(&s))
                    .unwrap_or_else(|| "Unknown".to_string());
                lines.push(vec![" • Plan: ".into(), plan_text.into()].into());
            }
        }

        lines.push("".into());
    }

    // 🧠 Model
    lines.push(vec![padded_emoji("🧠").into(), "Model".bold()].into());
    lines.push(vec![" • Name: ".into(), config.model.clone().into()].into());
    let provider_disp = pretty_provider_name(&config.model_provider_id);
    lines.push(vec![" • Provider: ".into(), provider_disp.into()].into());
    // Only show Reasoning fields if present in config summary
    let reff = lookup("reasoning effort");
    if !reff.is_empty() {
        lines.push(vec![" • Reasoning Effort: ".into(), title_case(&reff).into()].into());
    }
    let rsum = lookup("reasoning summaries");
    if !rsum.is_empty() {
        lines.push(vec![" • Reasoning Summaries: ".into(), title_case(&rsum).into()].into());
    }

    lines.push("".into());

    // 💻 Client
    let cli_version = crate::version::CODEX_CLI_VERSION;
    lines.push(vec![padded_emoji("💻").into(), "Client".bold()].into());
    lines.push(vec![" • CLI Version: ".into(), cli_version.into()].into());
    lines.push("".into());

    // 📊 Token Usage
    lines.push(vec!["📊 ".into(), "Token Usage".bold()].into());
    if let Some(session_id) = session_id {
        lines.push(vec![" • Session ID: ".into(), session_id.to_string().into()].into());
    }
    // Input: <input> [+ <cached> cached]
    let mut input_line_spans: Vec<Span<'static>> = vec![
        " • Input: ".into(),
        format_with_separators(usage.non_cached_input()).into(),
    ];
    if usage.cached_input_tokens > 0 {
        let cached = usage.cached_input_tokens;
        input_line_spans.push(format!(" (+ {cached} cached)").into());
    }
    lines.push(Line::from(input_line_spans));
    // Output: <output>
    lines.push(Line::from(vec![
        " • Output: ".into(),
        format_with_separators(usage.output_tokens).into(),
    ]));
    // Total: <total>
    lines.push(Line::from(vec![
        " • Total: ".into(),
        format_with_separators(usage.blended_total()).into(),
    ]));

    if is_chatgpt_auth {
        lines.push("".into());
        lines.extend(build_status_limit_lines(rate_limits));
    }

    PlainHistoryCell { lines }
}

/// Render a summary of configured MCP servers from the current `Config`.
pub(crate) fn empty_mcp_output() -> PlainHistoryCell {
    let lines: Vec<Line<'static>> = vec![
@@ -1659,82 +1415,6 @@ fn format_mcp_invocation<'a>(invocation: McpInvocation) -> Line<'a> {
    invocation_spans.into()
}

fn build_status_limit_lines(snapshot: Option<&RateLimitSnapshotDisplay>) -> Vec<Line<'static>> {
    let mut lines: Vec<Line<'static>> =
        vec![vec![padded_emoji("⏱️").into(), "Usage Limits".bold()].into()];

    match snapshot {
        Some(snapshot) => {
            let mut windows: Vec<(&str, &RateLimitWindowDisplay)> = Vec::new();
            if let Some(primary) = snapshot.primary.as_ref() {
                windows.push(("5h limit", primary));
            }
            if let Some(secondary) = snapshot.secondary.as_ref() {
                windows.push(("Weekly limit", secondary));
            }

            if windows.is_empty() {
                lines.push(" • No rate limit data available.".into());
            } else {
                let label_width = windows
                    .iter()
                    .map(|(label, _)| UnicodeWidthStr::width(*label))
                    .max()
                    .unwrap_or(0);

                for (label, window) in windows {
                    lines.push(build_status_limit_line(
                        label,
                        window.used_percent,
                        label_width,
                    ));
                    if let Some(resets_at) = window.resets_at.as_deref() {
                        lines.push(build_status_reset_line(resets_at));
                    }
                }
            }
        }
        None => lines.push(" • Send a message to load usage data.".into()),
    }

    lines
}

fn build_status_limit_line(label: &str, percent_used: f64, label_width: usize) -> Line<'static> {
    let clamped_percent = percent_used.clamp(0.0, 100.0);
    let progress = render_status_limit_progress_bar(clamped_percent);
    let summary = format_status_limit_summary(clamped_percent);

    let mut spans: Vec<Span<'static>> = Vec::with_capacity(5);
    let padded_label = format!("{label:<label_width$}");
    spans.push(format!(" • {padded_label}: ").into());
    spans.push(progress.into());
    spans.push(" ".into());
    spans.push(summary.into());

    Line::from(spans)
}

fn build_status_reset_line(resets_at: &str) -> Line<'static> {
    vec![" ".into(), format!("Resets at: {resets_at}").dim()].into()
}

fn render_status_limit_progress_bar(percent_used: f64) -> String {
    let ratio = (percent_used / 100.0).clamp(0.0, 1.0);
    let filled = (ratio * STATUS_LIMIT_BAR_SEGMENTS as f64).round() as usize;
    let filled = filled.min(STATUS_LIMIT_BAR_SEGMENTS);
    let empty = STATUS_LIMIT_BAR_SEGMENTS.saturating_sub(filled);
    format!(
        "[{}{}]",
        STATUS_LIMIT_BAR_FILLED.repeat(filled),
        STATUS_LIMIT_BAR_EMPTY.repeat(empty)
    )
}

fn format_status_limit_summary(percent_used: f64) -> String {
    format!("{percent_used:.0}% used")
}

#[cfg(test)]
mod tests {
    use super::*;
@@ -2214,7 +1894,7 @@ mod tests {
        // Long explanation forces wrapping; include long step text to verify step wrapping and alignment.
        let update = UpdatePlanArgs {
            explanation: Some(
                "I’ll update Grafana call error handling by adding retries and clearer messages when the backend is unreachable."
                "I'll update Grafana call error handling by adding retries and clearer messages when the backend is unreachable."
                    .to_string(),
            ),
            plan: vec![
@@ -60,6 +60,7 @@ mod resume_picker;
mod session_log;
mod shimmer;
mod slash_command;
mod status;
mod status_indicator_widget;
mod streaming;
mod text_formatting;

@@ -1,3 +1,5 @@
use std::collections::VecDeque;

use codex_core::config::Config;
use ratatui::text::Line;

@@ -95,6 +97,59 @@ impl MarkdownStreamCollector {
    }
}

pub(crate) struct StepResult {
    pub history: Vec<Line<'static>>, // lines to insert into history this step
}

/// Streams already-rendered rows into history while computing the newest K
/// rows to show in a live overlay.
pub(crate) struct AnimatedLineStreamer {
    queue: VecDeque<Line<'static>>,
}

impl AnimatedLineStreamer {
    pub fn new() -> Self {
        Self {
            queue: VecDeque::new(),
        }
    }

    pub fn clear(&mut self) {
        self.queue.clear();
    }

    pub fn enqueue(&mut self, lines: Vec<Line<'static>>) {
        for l in lines {
            self.queue.push_back(l);
        }
    }

    pub fn step(&mut self) -> StepResult {
        let mut history = Vec::new();
        // Move exactly one per tick to animate gradual insertion.
        let burst = if self.queue.is_empty() { 0 } else { 1 };
        for _ in 0..burst {
            if let Some(l) = self.queue.pop_front() {
                history.push(l);
            }
        }

        StepResult { history }
    }

    pub fn drain_all(&mut self) -> StepResult {
        let mut history = Vec::new();
        while let Some(l) = self.queue.pop_front() {
            history.push(l);
        }
        StepResult { history }
    }

    pub fn is_idle(&self) -> bool {
        self.queue.is_empty()
    }
}

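// Illustrative sketch (not part of this diff): driving the streamer above.
// `enqueue` takes pre-rendered lines; each `step()` commits exactly one of
// them to history, which is what produces the gradual insertion animation.
#[cfg(test)]
fn animated_line_streamer_sketch() {
    let mut streamer = AnimatedLineStreamer::new();
    streamer.enqueue(vec![Line::from("first"), Line::from("second")]);
    assert_eq!(streamer.step().history.len(), 1); // "first" commits on tick 1
    assert_eq!(streamer.step().history.len(), 1); // "second" commits on tick 2
    assert!(streamer.is_idle()); // queue drained
}
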
#[cfg(test)]
pub(crate) fn simulate_stream_markdown_for_tests(
    deltas: &[&str],

@@ -2,27 +2,27 @@ use std::fs::File;
use std::fs::OpenOptions;
use std::io::Write;
use std::path::PathBuf;
use std::sync::LazyLock;
use std::sync::Mutex;
use std::sync::OnceLock;

use codex_core::config::Config;
use codex_core::protocol::Op;
use once_cell::sync::Lazy;
use once_cell::sync::OnceCell;
use serde::Serialize;
use serde_json::json;

use crate::app_event::AppEvent;

static LOGGER: LazyLock<SessionLogger> = LazyLock::new(SessionLogger::new);
static LOGGER: Lazy<SessionLogger> = Lazy::new(SessionLogger::new);

struct SessionLogger {
    file: OnceLock<Mutex<File>>,
    file: OnceCell<Mutex<File>>,
}

impl SessionLogger {
    fn new() -> Self {
        Self {
            file: OnceLock::new(),
            file: OnceCell::new(),
        }
    }

@@ -37,7 +37,11 @@ impl SessionLogger {
    }

        let file = opts.open(path)?;
        self.file.get_or_init(|| Mutex::new(file));
        // If already initialized, ignore and succeed.
        if self.file.get().is_some() {
            return Ok(());
        }
        let _ = self.file.set(Mutex::new(file));
        Ok(())
    }

@@ -3,7 +3,7 @@ source: tui/src/history_cell.rs
expression: rendered
---
• Updated Plan
  └ I’ll update Grafana call
  └ I'll update Grafana call
    error handling by adding
    retries and clearer
    messages when the backend is

@@ -0,0 +1,21 @@
---
source: tui/src/status.rs
expression: sanitized
---
/status

╭──────────────────────────────────────────────────────────╮
│ >_ OpenAI Codex (v0.0.0) │
│ │
│ Model: gpt-5-codex (reasoning High, summaries Detailed) │
│ Directory: [[workspace]] │
│ Approval: on-request │
│ Sandbox: workspace-write │
│ Agents.md: <none> │
│ │
│ Token Usage: 1.9K (1K input + 900 output) + 200 cached │
│ 5h Limit : [███████████████░░░░░] 72% used │
│ Resets at: Jan 2, 2024 3:14 AM │
│ Weekly Limit: [█████████░░░░░░░░░░░] 45% used │
│ Resets at: Jan 2, 2024 3:24 AM │
╰──────────────────────────────────────────────────────────╯
696
codex-rs/tui/src/status.rs
Normal file
@@ -0,0 +1,696 @@
use crate::history_cell::CompositeHistoryCell;
use crate::history_cell::HistoryCell;
use crate::history_cell::PlainHistoryCell;
use crate::history_cell::SESSION_HEADER_MAX_INNER_WIDTH;
use crate::history_cell::card_inner_width;
use crate::history_cell::format_directory_display;
use crate::history_cell::with_border;
use crate::version::CODEX_CLI_VERSION;
use chrono::DateTime;
use chrono::Duration as ChronoDuration;
use chrono::Local;
use codex_common::create_config_summary_entries;
use codex_core::auth::get_auth_file;
use codex_core::auth::try_read_auth_json;
use codex_core::config::Config;
use codex_core::project_doc::discover_project_doc_paths;
use codex_core::protocol::RateLimitSnapshot;
use codex_core::protocol::RateLimitWindow;
use codex_core::protocol::SandboxPolicy;
use codex_core::protocol::TokenUsage;
use codex_protocol::mcp_protocol::ConversationId;
use ratatui::prelude::*;
use ratatui::style::Stylize;
use std::convert::TryFrom;
use std::path::PathBuf;
use unicode_width::UnicodeWidthStr;

const STATUS_LIMIT_BAR_SEGMENTS: usize = 20;
const STATUS_LIMIT_BAR_FILLED: &str = "█";
const STATUS_LIMIT_BAR_EMPTY: &str = "░";

fn label_display(label: &str) -> String {
    format!(" {label}: ")
}

fn label_span(label: &str) -> Span<'static> {
    Span::from(label_display(label)).dim()
}

fn label_width(label: &str) -> usize {
    UnicodeWidthStr::width(label_display(label).as_str())
}

fn status_header_spans() -> Vec<Span<'static>> {
    vec![
        Span::from(">_ ").dim(),
        Span::from("OpenAI Codex").bold(),
        Span::from(" ").dim(),
        Span::from(format!("(v{CODEX_CLI_VERSION})")).dim(),
    ]
}

#[derive(Debug, Clone)]
pub(crate) struct RateLimitWindowDisplay {
    pub used_percent: f64,
    pub resets_at: Option<String>,
}

impl RateLimitWindowDisplay {
    fn from_window(window: &RateLimitWindow, captured_at: DateTime<Local>) -> Self {
        let resets_at = window
            .resets_in_seconds
            .and_then(|seconds| i64::try_from(seconds).ok())
            .and_then(|secs| captured_at.checked_add_signed(ChronoDuration::seconds(secs)))
            .map(|dt| dt.format("%b %-d, %Y %-I:%M %p").to_string());

        Self {
            used_percent: window.used_percent,
            resets_at,
        }
    }
}

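// Illustrative sketch (not part of this diff): `from_window` converts the
// relative `resets_in_seconds` into an absolute local timestamp. Using the
// snapshot test's values (captured Jan 2, 2024 3:04:05 AM, reset in 600s)
// yields the "Jan 2, 2024 3:14 AM" string seen in the snapshot above.
#[cfg(test)]
fn resets_at_sketch() {
    use chrono::TimeZone;
    let captured_at = Local
        .with_ymd_and_hms(2024, 1, 2, 3, 4, 5)
        .single()
        .expect("valid timestamp");
    let window = RateLimitWindow {
        used_percent: 72.5,
        window_minutes: Some(300),
        resets_in_seconds: Some(600),
    };
    let display = RateLimitWindowDisplay::from_window(&window, captured_at);
    assert_eq!(display.resets_at.as_deref(), Some("Jan 2, 2024 3:14 AM"));
}
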
#[derive(Debug, Clone)]
pub(crate) struct RateLimitSnapshotDisplay {
    pub primary: Option<RateLimitWindowDisplay>,
    pub secondary: Option<RateLimitWindowDisplay>,
}

pub(crate) fn rate_limit_snapshot_display(
    snapshot: &RateLimitSnapshot,
    captured_at: DateTime<Local>,
) -> RateLimitSnapshotDisplay {
    RateLimitSnapshotDisplay {
        primary: snapshot
            .primary
            .as_ref()
            .map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
        secondary: snapshot
            .secondary
            .as_ref()
            .map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
    }
}

#[derive(Debug)]
struct StatusField {
    label: &'static str,
    value: Vec<Span<'static>>,
}

impl StatusField {
    fn text(label: &'static str, value: impl Into<String>) -> Self {
        Self {
            label,
            value: vec![Span::from(value.into())],
        }
    }

    fn spans(label: &'static str, value: Vec<Span<'static>>) -> Self {
        Self { label, value }
    }
}

#[derive(Debug, Default)]
struct StatusRows {
    lines: Vec<Line<'static>>,
}

impl StatusRows {
    fn new() -> Self {
        Self { lines: Vec::new() }
    }

    fn push_blank(&mut self) {
        self.lines.push(Line::from(Vec::<Span<'static>>::new()));
    }

    fn push_line(&mut self, spans: Vec<Span<'static>>) {
        self.lines.push(Line::from(spans));
    }

    fn push_field(&mut self, field: StatusField) {
        let mut spans = Vec::with_capacity(field.value.len() + 1);
        spans.push(label_span(field.label));
        spans.extend(field.value);
        self.lines.push(Line::from(spans));
    }

    fn extend_fields<I>(&mut self, fields: I)
    where
        I: IntoIterator<Item = StatusField>,
    {
        for field in fields {
            self.push_field(field);
        }
    }

    fn extend_lines<I>(&mut self, lines: I)
    where
        I: IntoIterator<Item = Line<'static>>,
    {
        self.lines.extend(lines);
    }

    fn into_lines(self) -> Vec<Line<'static>> {
        self.lines
    }
}

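// Illustrative sketch (not part of this diff): each `StatusField` renders as
// a dimmed " <label>: " prefix followed by its value spans, so a text field
// becomes exactly two spans on one line.
#[cfg(test)]
fn status_rows_sketch() {
    let mut rows = StatusRows::new();
    rows.push_field(StatusField::text("Model", "gpt-5-codex"));
    let lines = rows.into_lines();
    assert_eq!(lines.len(), 1);
    assert_eq!(lines[0].spans.len(), 2); // dimmed label span + value span
    assert_eq!(lines[0].spans[1].content.as_ref(), "gpt-5-codex");
}
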
pub(crate) fn new_status_output(
    config: &Config,
    usage: &TokenUsage,
    session_id: &Option<ConversationId>,
    rate_limits: Option<&RateLimitSnapshotDisplay>,
) -> CompositeHistoryCell {
    let command = PlainHistoryCell::new(vec!["/status".magenta().into()]);
    let card = StatusHistoryCell::new(config, usage, session_id, rate_limits);

    CompositeHistoryCell::new(vec![Box::new(command), Box::new(card)])
}

#[derive(Debug, Clone)]
struct StatusTokenUsageData {
    total: u64,
    input: u64,
    cached_input: u64,
    output: u64,
}

#[derive(Debug, Clone)]
enum StatusAccountDisplay {
    ChatGpt {
        email: Option<String>,
        plan: Option<String>,
    },
    ApiKey,
}

#[derive(Debug, Clone)]
struct StatusRateLimitRow {
    label: String,
    percent_used: f64,
    resets_at: Option<String>,
}

#[derive(Debug, Clone)]
enum StatusRateLimitData {
    Available(Vec<StatusRateLimitRow>),
    Missing,
}

#[derive(Debug)]
struct StatusHistoryCell {
    model_name: String,
    model_details: Vec<String>,
    directory: PathBuf,
    approval: String,
    sandbox: String,
    agents_summary: String,
    account: Option<StatusAccountDisplay>,
    session_id: Option<String>,
    token_usage: StatusTokenUsageData,
    rate_limits: StatusRateLimitData,
}

impl StatusHistoryCell {
    fn new(
        config: &Config,
        usage: &TokenUsage,
        session_id: &Option<ConversationId>,
        rate_limits: Option<&RateLimitSnapshotDisplay>,
    ) -> Self {
        let config_entries = create_config_summary_entries(config);
        let (model_name, model_details) = compose_model_display(config, &config_entries);
        let approval = config_entries
            .iter()
            .find(|(k, _)| *k == "approval")
            .map(|(_, v)| v.clone())
            .unwrap_or_else(|| "<unknown>".to_string());
        let sandbox = match &config.sandbox_policy {
            SandboxPolicy::DangerFullAccess => "danger-full-access".to_string(),
            SandboxPolicy::ReadOnly => "read-only".to_string(),
            SandboxPolicy::WorkspaceWrite { .. } => "workspace-write".to_string(),
        };
        let agents_summary = compose_agents_summary(config);
        let account = compose_account_display(config);
        let session_id = session_id.as_ref().map(std::string::ToString::to_string);
        let token_usage = StatusTokenUsageData {
            total: usage.blended_total(),
            input: usage.non_cached_input(),
            cached_input: usage.cached_input_tokens,
            output: usage.output_tokens,
        };
        let rate_limits = compose_rate_limit_data(rate_limits);

        Self {
            model_name,
            model_details,
            directory: config.cwd.clone(),
            approval,
            sandbox,
            agents_summary,
            account,
            session_id,
            token_usage,
            rate_limits,
        }
    }

    fn primary_fields(&self, inner_width: usize) -> Vec<StatusField> {
        let mut fields = Vec::new();
        let mut model_spans = vec![Span::from(self.model_name.clone())];
        if !self.model_details.is_empty() {
            model_spans.push(Span::from(" (").dim());
            model_spans.push(Span::from(self.model_details.join(", ")).dim());
            model_spans.push(Span::from(")").dim());
        }
        fields.push(StatusField::spans("Model", model_spans));

        let directory_width = inner_width.saturating_sub(label_width("Directory"));
        let directory = format_directory_display(&self.directory, Some(directory_width));
        fields.push(StatusField::text("Directory", directory));

        fields.push(StatusField::text("Approval", self.approval.clone()));
        fields.push(StatusField::text("Sandbox", self.sandbox.clone()));
        fields.push(StatusField::text("Agents.md", self.agents_summary.clone()));

        fields
    }

    fn account_field(&self) -> Option<StatusField> {
        let account = self.account.as_ref()?;
        let value = match account {
            StatusAccountDisplay::ChatGpt { email, plan } => match (email, plan) {
                (Some(email), Some(plan)) => format!("{email} ({plan})"),
                (Some(email), None) => email.clone(),
                (None, Some(plan)) => plan.clone(),
                (None, None) => "ChatGPT".to_string(),
            },
            StatusAccountDisplay::ApiKey => {
                "API key configured (run codex login to use ChatGPT)".to_string()
            }
        };

        Some(StatusField::text("Account", value))
    }

    fn session_field(&self) -> Option<StatusField> {
        self.session_id
            .as_ref()
            .map(|session| StatusField::text("Session", session.clone()))
    }

    fn token_usage_field(&self) -> StatusField {
        StatusField::spans("Token Usage", self.token_usage_spans())
    }

    fn token_usage_spans(&self) -> Vec<Span<'static>> {
        let total_fmt = format_tokens_compact(self.token_usage.total);
        let input_fmt = format_tokens_compact(self.token_usage.input);
        let output_fmt = format_tokens_compact(self.token_usage.output);

        let mut spans: Vec<Span<'static>> = vec![
            Span::from(total_fmt),
            Span::from(" (").dim(),
            Span::from(input_fmt),
            Span::from(" input").dim(),
            Span::from(" + ").dim(),
            Span::from(output_fmt),
            Span::from(" output").dim(),
            Span::from(")").dim(),
        ];

        if self.token_usage.cached_input > 0 {
            let cached_fmt = format_tokens_compact(self.token_usage.cached_input);
            spans.push(Span::from(" + ").dim());
            spans.push(Span::from(format!("{cached_fmt} cached")).dim());
        }

        spans
    }

    fn rate_limit_lines(&self) -> Vec<Line<'static>> {
        match &self.rate_limits {
            StatusRateLimitData::Available(rows_data) => {
                if rows_data.is_empty() {
                    return vec![Line::from(vec![
                        label_span("Limits"),
                        Span::from("data not available yet").dim(),
                    ])];
                }

                let label_width = rows_data
                    .iter()
                    .map(|row| UnicodeWidthStr::width(row.label.as_str()))
                    .max()
                    .unwrap_or(0);

                let mut lines = Vec::new();

                for row in rows_data {
                    let padded = format!("{label:<label_width$}", label = row.label);
                    lines.push(Line::from(vec![
                        Span::from(format!(" {padded}: ")).dim(),
                        Span::from(render_status_limit_progress_bar(row.percent_used)),
                        Span::from(" "),
                        Span::from(format_status_limit_summary(row.percent_used)),
                    ]));

                    if let Some(resets_at) = row.resets_at.as_ref() {
                        lines.push(
                            vec![" ".into(), format!("Resets at: {resets_at}").dim()].into(),
                        );
                    }
                }

                lines
            }
            StatusRateLimitData::Missing => {
                vec![Line::from(vec![
                    label_span("Limits"),
                    Span::from("data not available yet").dim(),
                ])]
            }
        }
    }
}

impl HistoryCell for StatusHistoryCell {
    fn display_lines(&self, width: u16) -> Vec<Line<'static>> {
        let Some(inner_width) = card_inner_width(width, SESSION_HEADER_MAX_INNER_WIDTH) else {
            return Vec::new();
        };

        let mut rows = StatusRows::new();
        rows.push_line(status_header_spans());
        rows.push_blank();
        rows.extend_fields(self.primary_fields(inner_width));

        if let Some(account) = self.account_field() {
            rows.push_field(account);
        }

        if let Some(session) = self.session_field() {
            rows.push_field(session);
        }

        rows.push_blank();
        rows.push_field(self.token_usage_field());
        rows.extend_lines(self.rate_limit_lines());

        with_border(rows.into_lines())
    }
}

fn compose_model_display(config: &Config, entries: &[(&str, String)]) -> (String, Vec<String>) {
    let mut details: Vec<String> = Vec::new();
    if let Some((_, effort)) = entries.iter().find(|(k, _)| *k == "reasoning effort") {
        details.push(format!("reasoning {}", title_case(effort)));
    }
    if let Some((_, summary)) = entries.iter().find(|(k, _)| *k == "reasoning summaries") {
        let summary = summary.trim();
        if summary.is_empty() {
            // nothing to add
        } else if summary.eq_ignore_ascii_case("none") || summary.eq_ignore_ascii_case("off") {
            details.push("summaries off".to_string());
        } else {
            details.push(format!("summaries {}", title_case(summary)));
        }
    }

    (config.model.clone(), details)
}

fn compose_agents_summary(config: &Config) -> String {
    match discover_project_doc_paths(config) {
        Ok(paths) => {
            let mut rels: Vec<String> = Vec::new();
            for p in paths {
                let display = if let Some(parent) = p.parent() {
                    if parent == config.cwd {
                        "AGENTS.md".to_string()
                    } else {
                        let mut cur = config.cwd.as_path();
                        let mut ups = 0usize;
                        let mut reached = false;
                        while let Some(c) = cur.parent() {
                            if cur == parent {
                                reached = true;
                                break;
                            }
                            cur = c;
                            ups += 1;
                        }
                        if reached {
                            let up = format!("..{}", std::path::MAIN_SEPARATOR);
                            format!("{}AGENTS.md", up.repeat(ups))
                        } else if let Ok(stripped) = p.strip_prefix(&config.cwd) {
                            stripped.display().to_string()
                        } else {
                            p.display().to_string()
                        }
                    }
                } else {
                    p.display().to_string()
                };
                rels.push(display);
            }
            if rels.is_empty() {
                "<none>".to_string()
            } else {
                rels.join(", ")
            }
        }
        Err(_) => "<none>".to_string(),
    }
}

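// Illustrative examples (not part of this diff) of the relative-path display
// logic above, assuming cwd = /repo/app:
//   /repo/app/AGENTS.md     -> "AGENTS.md"      (parent is cwd)
//   /repo/AGENTS.md         -> "../AGENTS.md"   (one ".." per level walked up)
//   /repo/app/sub/AGENTS.md -> "sub/AGENTS.md"  (falls back to strip_prefix)
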
fn compose_account_display(config: &Config) -> Option<StatusAccountDisplay> {
    let auth_file = get_auth_file(&config.codex_home);
    let auth = try_read_auth_json(&auth_file).ok()?;

    if let Some(tokens) = auth.tokens.as_ref() {
        let info = &tokens.id_token;
        let email = info.email.clone();
        let plan = info.get_chatgpt_plan_type().map(|p| title_case(p.as_str()));
        return Some(StatusAccountDisplay::ChatGpt { email, plan });
    }

    if let Some(key) = auth.openai_api_key
        && !key.is_empty()
    {
        return Some(StatusAccountDisplay::ApiKey);
    }

    None
}

fn compose_rate_limit_data(snapshot: Option<&RateLimitSnapshotDisplay>) -> StatusRateLimitData {
    match snapshot {
        Some(snapshot) => {
            let mut rows = Vec::new();

            if let Some(primary) = snapshot.primary.as_ref() {
                rows.push(StatusRateLimitRow {
                    label: "5h Limit".to_string(),
                    percent_used: primary.used_percent,
                    resets_at: primary.resets_at.clone(),
                });
            }

            if let Some(secondary) = snapshot.secondary.as_ref() {
                rows.push(StatusRateLimitRow {
                    label: "Weekly Limit".to_string(),
                    percent_used: secondary.used_percent,
                    resets_at: secondary.resets_at.clone(),
                });
            }

            if rows.is_empty() {
                StatusRateLimitData::Missing
            } else {
                StatusRateLimitData::Available(rows)
            }
        }
        None => StatusRateLimitData::Missing,
    }
}

fn format_tokens_compact(value: u64) -> String {
    if value == 0 {
        return "0".to_string();
    }
    if value < 1_000 {
        return value.to_string();
    }

    let (scaled, suffix) = if value >= 1_000_000_000_000 {
        (value as f64 / 1_000_000_000_000.0, "T")
    } else if value >= 1_000_000_000 {
        (value as f64 / 1_000_000_000.0, "B")
    } else if value >= 1_000_000 {
        (value as f64 / 1_000_000.0, "M")
    } else {
        (value as f64 / 1_000.0, "K")
    };

    let decimals = if scaled < 10.0 {
        2
    } else if scaled < 100.0 {
        1
    } else {
        0
    };

    let mut formatted = format!("{scaled:.decimals$}");
    if formatted.contains('.') {
        while formatted.ends_with('0') {
            formatted.pop();
        }
        if formatted.ends_with('.') {
            formatted.pop();
        }
    }

    format!("{formatted}{suffix}")
}

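// Illustrative sketch (not part of this diff): the compact formatter keeps up
// to three significant digits and trims trailing zeros, so the snapshot's
// blended total of 1_900 tokens renders as "1.9K" and its 1_000 non-cached
// input tokens as "1K".
#[cfg(test)]
fn format_tokens_compact_sketch() {
    assert_eq!(format_tokens_compact(900), "900"); // below 1K: unscaled
    assert_eq!(format_tokens_compact(1_000), "1K"); // "1.00" trims to "1"
    assert_eq!(format_tokens_compact(1_900), "1.9K"); // "1.90" trims to "1.9"
    assert_eq!(format_tokens_compact(12_340), "12.3K"); // one decimal under 100
    assert_eq!(format_tokens_compact(2_000_000), "2M");
}
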
fn render_status_limit_progress_bar(percent_used: f64) -> String {
    let ratio = (percent_used / 100.0).clamp(0.0, 1.0);
    let filled = (ratio * STATUS_LIMIT_BAR_SEGMENTS as f64).round() as usize;
    let filled = filled.min(STATUS_LIMIT_BAR_SEGMENTS);
    let empty = STATUS_LIMIT_BAR_SEGMENTS.saturating_sub(filled);
    format!(
        "[{}{}]",
        STATUS_LIMIT_BAR_FILLED.repeat(filled),
        STATUS_LIMIT_BAR_EMPTY.repeat(empty)
    )
}

fn format_status_limit_summary(percent_used: f64) -> String {
    format!("{percent_used:.0}% used")
}

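// Illustrative sketch (not part of this diff): with 20 bar segments, 72.5%
// used rounds to 15 filled cells and a "72% used" summary — exactly the
// "5h Limit" row in the snapshot above.
#[cfg(test)]
fn limit_bar_sketch() {
    assert_eq!(render_status_limit_progress_bar(72.5), "[███████████████░░░░░]");
    assert_eq!(format_status_limit_summary(72.5), "72% used");
}
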
fn title_case(s: &str) -> String {
    if s.is_empty() {
        return String::new();
    }
    let mut chars = s.chars();
    let first = match chars.next() {
        Some(c) => c,
        None => return String::new(),
    };
    let rest: String = chars.as_str().to_ascii_lowercase();
    first.to_uppercase().collect::<String>() + &rest
}

#[cfg(test)]
mod tests {
    use super::*;
    use chrono::TimeZone;
    use codex_core::config::Config;
    use codex_core::config::ConfigOverrides;
    use codex_core::config::ConfigToml;
    use codex_core::protocol::TokenUsage;
    use codex_protocol::config_types::ReasoningEffort;
    use codex_protocol::config_types::ReasoningSummary;
    use tempfile::TempDir;

    fn test_config(temp_home: &TempDir) -> Config {
        Config::load_from_base_config_with_overrides(
            ConfigToml::default(),
            ConfigOverrides::default(),
            temp_home.path().to_path_buf(),
        )
        .expect("load config")
    }

    fn render_lines(lines: &[Line<'static>]) -> Vec<String> {
        lines
            .iter()
            .map(|line| {
                line.spans
                    .iter()
                    .map(|span| span.content.as_ref())
                    .collect::<String>()
            })
            .collect()
    }

    #[test]
    fn status_snapshot_includes_reasoning_details() {
        let temp_home = TempDir::new().expect("temp home");
        let mut config = test_config(&temp_home);
        config.model = "gpt-5-codex".to_string();
        config.model_reasoning_effort = Some(ReasoningEffort::High);
        config.model_reasoning_summary = ReasoningSummary::Detailed;
        config.sandbox_policy = SandboxPolicy::WorkspaceWrite {
            writable_roots: Vec::new(),
            network_access: false,
            exclude_tmpdir_env_var: false,
            exclude_slash_tmp: false,
        };

        let project_root = temp_home.path().join("workspace");
        config.cwd = project_root;

        let usage = TokenUsage {
            input_tokens: 1_200,
            cached_input_tokens: 200,
            output_tokens: 900,
            reasoning_output_tokens: 150,
            total_tokens: 2_250,
        };

        let rate_snapshot = RateLimitSnapshot {
            primary: Some(RateLimitWindow {
                used_percent: 72.5,
                window_minutes: Some(300),
                resets_in_seconds: Some(600),
            }),
            secondary: Some(RateLimitWindow {
                used_percent: 45.0,
                window_minutes: Some(1_440),
                resets_in_seconds: Some(1_200),
            }),
        };
        let captured_at = Local
            .with_ymd_and_hms(2024, 1, 2, 3, 4, 5)
            .single()
            .expect("valid timestamp");
        let rate_display = rate_limit_snapshot_display(&rate_snapshot, captured_at);

        let composite = new_status_output(&config, &usage, &None, Some(&rate_display));
        let lines = composite.display_lines(80);
        let mut rendered = render_lines(&lines).join("\n");
        if cfg!(windows) {
            rendered = rendered.replace('\\', "/");
        }

        let mut sanitized_lines: Vec<String> = Vec::new();
        for line in rendered.lines() {
            if let Some(pos) = line.find("Directory: ") {
                if let Some(pipe_idx) = line.rfind('│') {
                    let prefix = &line[..pos + "Directory: ".len()];
                    let suffix = &line[pipe_idx..];
                    let content_width = pipe_idx.saturating_sub(pos + "Directory: ".len());
                    let replacement = "[[workspace]]";
                    let mut rebuilt = prefix.to_string();
                    rebuilt.push_str(replacement);
                    if content_width > replacement.len() {
                        rebuilt.push_str(&" ".repeat(content_width - replacement.len()));
                    }
                    rebuilt.push_str(suffix);
                    sanitized_lines.push(rebuilt);
                    continue;
                }
            }
            sanitized_lines.push(line.to_string());
        }

        let sanitized = sanitized_lines.join("\n");
        insta::assert_snapshot!(sanitized);
    }
}
@@ -1,10 +1,36 @@
use crate::history_cell;
use crate::history_cell::HistoryCell;
use crate::history_cell::{self};
use codex_core::config::Config;
use ratatui::text::Line;

use super::StreamState;

/// Sink for history insertions and animation control.
pub(crate) trait HistorySink {
    fn insert_history_cell(&self, cell: Box<dyn HistoryCell>);
    fn start_commit_animation(&self);
    fn stop_commit_animation(&self);
}

/// Concrete sink backed by `AppEventSender`.
pub(crate) struct AppEventHistorySink(pub(crate) crate::app_event_sender::AppEventSender);

impl HistorySink for AppEventHistorySink {
    fn insert_history_cell(&self, cell: Box<dyn crate::history_cell::HistoryCell>) {
        self.0
            .send(crate::app_event::AppEvent::InsertHistoryCell(cell))
    }
    fn start_commit_animation(&self) {
        self.0
            .send(crate::app_event::AppEvent::StartCommitAnimation)
    }
    fn stop_commit_animation(&self) {
        self.0.send(crate::app_event::AppEvent::StopCommitAnimation)
    }
}

type Lines = Vec<Line<'static>>;

/// Controller that manages newline-gated streaming, header emission, and
/// commit animation across streams.
pub(crate) struct StreamController {
@@ -25,7 +51,7 @@ impl StreamController {
    }

    /// Push a delta; if it contains a newline, commit completed lines and start animation.
    pub(crate) fn push(&mut self, delta: &str) -> bool {
    pub(crate) fn push_and_maybe_commit(&mut self, delta: &str, sink: &impl HistorySink) {
        let cfg = self.config.clone();
        let state = &mut self.state;
        if !delta.is_empty() {
@@ -36,14 +62,13 @@ impl StreamController {
            let newly_completed = state.collector.commit_complete_lines(&cfg);
            if !newly_completed.is_empty() {
                state.enqueue(newly_completed);
                return true;
                sink.start_commit_animation();
            }
        }
        false
    }

    /// Finalize the active stream. Drain and emit now.
    pub(crate) fn finalize(&mut self) -> Option<Box<dyn HistoryCell>> {
    pub(crate) fn finalize(&mut self, sink: &impl HistorySink) {
        let cfg = self.config.clone();
        // Finalize collector first.
        let remaining = {
@@ -51,37 +76,45 @@ impl StreamController {
            state.collector.finalize_and_drain(&cfg)
        };
        // Collect all output first to avoid emitting headers when there is no content.
        let mut out_lines = Vec::new();
        let mut out_lines: Lines = Vec::new();
        {
            let state = &mut self.state;
            if !remaining.is_empty() {
                state.enqueue(remaining);
            }
            let step = state.drain_all();
            out_lines.extend(step);
            out_lines.extend(step.history);
        }
        if !out_lines.is_empty() {
            // Insert as a HistoryCell so display drops the header while transcript keeps it.
            self.emit(sink, out_lines);
        }

        // Cleanup
        self.state.clear();
        self.finishing_after_drain = false;
        self.emit(out_lines)
    }

    /// Step animation: commit at most one queued line and handle end-of-drain cleanup.
    pub(crate) fn on_commit_tick(&mut self) -> (Option<Box<dyn HistoryCell>>, bool) {
        let step = self.state.step();
        (self.emit(step), self.state.is_idle())
    pub(crate) fn on_commit_tick(&mut self, sink: &impl HistorySink) -> bool {
        let step = { self.state.step() };
        if !step.history.is_empty() {
            self.emit(sink, step.history);
        }

        let is_idle = self.state.is_idle();
        if is_idle {
            sink.stop_commit_animation();
        }
        false
    }

    fn emit(&mut self, lines: Vec<Line<'static>>) -> Option<Box<dyn HistoryCell>> {
        if lines.is_empty() {
            return None;
        }
        Some(Box::new(history_cell::AgentMessageCell::new(lines, {
            let header_emitted = self.header_emitted;
            self.header_emitted = true;
            !header_emitted
        })))
    fn emit(&mut self, sink: &impl HistorySink, lines: Vec<Line<'static>>) {
        sink.insert_history_cell(Box::new(history_cell::AgentMessageCell::new(
            lines,
            !self.header_emitted,
        )));
        self.header_emitted = true;
    }
}

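// Illustrative sketch (not part of this diff): the intended call pattern for
// the sink-based controller above. `NoopSink` is a stand-in for the real
// AppEventHistorySink.
#[cfg(test)]
fn stream_controller_flow_sketch(cfg: Config) {
    struct NoopSink;
    impl HistorySink for NoopSink {
        fn insert_history_cell(&self, _cell: Box<dyn HistoryCell>) {}
        fn start_commit_animation(&self) {}
        fn stop_commit_animation(&self) {}
    }
    let sink = NoopSink;
    let mut ctrl = StreamController::new(cfg);
    ctrl.push_and_maybe_commit("hello\n", &sink); // newline commits a line
    let _ = ctrl.on_commit_tick(&sink); // at most one queued line per tick
    ctrl.finalize(&sink); // drain everything and emit the final cell
}
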
@@ -90,6 +123,7 @@ mod tests {
    use super::*;
    use codex_core::config::Config;
    use codex_core::config::ConfigOverrides;
    use std::cell::RefCell;

    fn test_config() -> Config {
        let overrides = ConfigOverrides {
@@ -102,6 +136,25 @@ mod tests {
        }
    }

    struct TestSink {
        pub lines: RefCell<Vec<Vec<Line<'static>>>>,
    }
    impl TestSink {
        fn new() -> Self {
            Self {
                lines: RefCell::new(Vec::new()),
            }
        }
    }
    impl HistorySink for TestSink {
        fn insert_history_cell(&self, cell: Box<dyn crate::history_cell::HistoryCell>) {
            // For tests, store the transcript representation of the cell.
            self.lines.borrow_mut().push(cell.transcript_lines());
        }
        fn start_commit_animation(&self) {}
        fn stop_commit_animation(&self) {}
    }

    fn lines_to_plain_strings(lines: &[ratatui::text::Line<'_>]) -> Vec<String> {
        lines
            .iter()
@@ -119,7 +172,7 @@ mod tests {
    fn controller_loose_vs_tight_with_commit_ticks_matches_full() {
        let cfg = test_config();
        let mut ctrl = StreamController::new(cfg.clone());
        let mut lines = Vec::new();
        let sink = TestSink::new();

        // Exact deltas from the session log (section: Loose vs. tight list items)
        let deltas = vec![
@@ -193,21 +246,20 @@ mod tests {
        ];

        // Simulate streaming with a commit tick attempt after each delta.
        for d in deltas.iter() {
            ctrl.push(d);
            while let (Some(cell), idle) = ctrl.on_commit_tick() {
                lines.extend(cell.transcript_lines());
                if idle {
                    break;
                }
            }
        for d in &deltas {
            ctrl.push_and_maybe_commit(d, &sink);
            let _ = ctrl.on_commit_tick(&sink);
        }
        // Finalize and flush remaining lines now.
        if let Some(cell) = ctrl.finalize() {
            lines.extend(cell.transcript_lines());
        }
        ctrl.finalize(&sink);

        let mut flat = lines;
        // Flatten sink output and strip the header that the controller inserts (blank + "codex").
        let mut flat: Vec<ratatui::text::Line<'static>> = Vec::new();
        for batch in sink.lines.borrow().iter() {
            for l in batch {
                flat.push(l.clone());
            }
        }
        // Drop leading blank and header line if present.
        if !flat.is_empty() && lines_to_plain_strings(&[flat[0].clone()])[0].is_empty() {
            flat.remove(0);

@@ -1,13 +1,10 @@
use std::collections::VecDeque;

use ratatui::text::Line;

use crate::markdown_stream::AnimatedLineStreamer;
use crate::markdown_stream::MarkdownStreamCollector;
pub(crate) mod controller;

pub(crate) struct StreamState {
    pub(crate) collector: MarkdownStreamCollector,
    queued_lines: VecDeque<Line<'static>>,
    pub(crate) streamer: AnimatedLineStreamer,
    pub(crate) has_seen_delta: bool,
}

@@ -15,25 +12,25 @@ impl StreamState {
    pub(crate) fn new() -> Self {
        Self {
            collector: MarkdownStreamCollector::new(),
            queued_lines: VecDeque::new(),
            streamer: AnimatedLineStreamer::new(),
            has_seen_delta: false,
        }
    }
    pub(crate) fn clear(&mut self) {
        self.collector.clear();
        self.queued_lines.clear();
        self.streamer.clear();
        self.has_seen_delta = false;
    }
    pub(crate) fn step(&mut self) -> Vec<Line<'static>> {
        self.queued_lines.pop_front().into_iter().collect()
    pub(crate) fn step(&mut self) -> crate::markdown_stream::StepResult {
        self.streamer.step()
    }
    pub(crate) fn drain_all(&mut self) -> Vec<Line<'static>> {
        self.queued_lines.drain(..).collect()
    pub(crate) fn drain_all(&mut self) -> crate::markdown_stream::StepResult {
        self.streamer.drain_all()
    }
    pub(crate) fn is_idle(&self) -> bool {
        self.queued_lines.is_empty()
        self.streamer.is_idle()
    }
    pub(crate) fn enqueue(&mut self, lines: Vec<Line<'static>>) {
        self.queued_lines.extend(lines);
    pub(crate) fn enqueue(&mut self, lines: Vec<ratatui::text::Line<'static>>) {
        self.streamer.enqueue(lines)
    }
}

@@ -1,16 +0,0 @@
[package]
name = "codex-utils-readiness"
version.workspace = true
edition.workspace = true

[dependencies]
async-trait = { workspace = true }
thiserror = { workspace = true }
time = { workspace = true }
tokio = { workspace = true, features = ["sync", "time"] }

[dev-dependencies]
tokio = { workspace = true, features = ["macros", "rt", "rt-multi-thread"] }

[lints]
workspace = true