mirror of https://github.com/openai/codex.git
synced 2026-02-02 15:03:38 +00:00

Compare commits: feature/ba ... fix-cli (217 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
|  | e352f37940 |  |
|  | 516acc030b |  |
|  | 5b038135de |  |
|  | d9dbf48828 |  |
|  | 2e95e5602d |  |
|  | 87a654cf6b |  |
|  | 27c6c5d7a7 |  |
|  | c09e131653 |  |
|  | ea82f86662 |  |
|  | a8edc57740 |  |
|  | 52e591ce60 |  |
|  | 079303091f |  |
|  | 4a80059b1b |  |
|  | bf76258cdc |  |
|  | c64da4ff71 |  |
|  | 98efd352ae |  |
|  | 80ccec6530 |  |
|  | c81baaabda |  |
|  | 55b74c95e2 |  |
|  | 16057e76b0 |  |
|  | adbc38a978 |  |
|  | 83a4d4d8ed |  |
|  | 197f45a3be |  |
|  | 04c1782e52 |  |
|  | d15253415a |  |
|  | c4120a265b |  |
|  | 618a42adf5 |  |
|  | a9d54b9e92 |  |
|  | 79e51dd607 |  |
|  | ff6dbff0b6 |  |
|  | 99841332e2 |  |
|  | 7407469791 |  |
|  | 43615becf0 |  |
|  | 9ee6e6f342 |  |
|  | d7286e9829 |  |
|  | bcf2bc0aa5 |  |
|  | 68765214b3 |  |
|  | 5c67dc3af1 |  |
|  | c0960c0f49 |  |
|  | 90c3a5650c |  |
|  | a3254696c8 |  |
|  | 2719fdd12a |  |
|  | 3a1be084f9 |  |
|  | 43b63ccae8 |  |
|  | cc1b21e47f |  |
|  | 55801700de |  |
|  | 1fba99ed85 |  |
|  | d3f6f6629b |  |
|  | e555a36c6a |  |
|  | ea095e30c1 |  |
|  | c549481513 |  |
|  | 8797145678 |  |
|  | a53720e278 |  |
|  | 41f5d61f24 |  |
|  | 02609184be |  |
|  | 1fc3413a46 |  |
|  | eb2b739d6a |  |
|  | a10403d697 |  |
|  | 8e3a048fec |  |
|  | 9f2ab97fbc |  |
|  | 38c9d7dca1 |  |
|  | 67aab04c66 |  |
|  | 7355ca48c5 |  |
|  | affb5fc1d0 |  |
|  | 4a5f05c136 |  |
|  | acc2b63dfb |  |
|  | 344d4a1d68 |  |
|  | a0c37f5d07 |  |
|  | 103adcdf2d |  |
|  | d61dea6fe6 |  |
|  | e363dac249 |  |
|  | 250b244ab4 |  |
|  | d1ed3a4cef |  |
|  | e85742635f |  |
|  | 87b299aa3f |  |
|  | 0e58870634 |  |
|  | 42847baaf7 |  |
|  | 6032d784ee |  |
|  | 7bff8df10e |  |
|  | addc946d13 |  |
|  | bffdbec2c5 |  |
|  | 353a5c2046 |  |
|  | 00c7f7a16c |  |
|  | 82e65975b2 |  |
|  | 639a6fd2f3 |  |
|  | db4aa6f916 |  |
|  | cb96f4f596 |  |
|  | 5b910f1f05 |  |
|  | af6304c641 |  |
|  | b90eeabd74 |  |
|  | f7d2f3e54d |  |
|  | 3fe3b6328b |  |
|  | 8144ddb3da |  |
|  | 9336f2b84b |  |
|  | af37785bca |  |
|  | 594248f415 |  |
|  | 8227a5ba1b |  |
|  | fdb8dadcae |  |
|  | 0f9a796617 |  |
|  | c6e8671b2a |  |
|  | b84a920067 |  |
|  | 6cd5309d91 |  |
|  | 664ee07540 |  |
|  | 51c465bddc |  |
|  | e0fbc112c7 |  |
|  | 76ecbb3d8e |  |
|  | 2451b19d13 |  |
|  | 5c7d9e27b1 |  |
|  | c93e77b68b |  |
|  | c415827ac2 |  |
|  | 4e0550b995 |  |
|  | f54a49157b |  |
|  | dd56750612 |  |
|  | 8bc73a2bfd |  |
|  | be366a31ab |  |
|  | c75920a071 |  |
|  | 8daba53808 |  |
|  | d2940bd4c3 |  |
|  | 76a9b11678 |  |
|  | fa80bbb587 |  |
|  | 434eb4fd49 |  |
|  | 19f46439ae |  |
|  | e258ca61b4 |  |
|  | e5fe50d3ce |  |
|  | 14a115d488 |  |
|  | 5996ee0e5f |  |
|  | a4ebd069e5 |  |
|  | 04504d8218 |  |
|  | 42d335deb8 |  |
|  | ad0c2b4db3 |  |
|  | ff389dc52f |  |
|  | 9b18875a42 |  |
|  | 881c7978f1 |  |
|  | a7fda70053 |  |
|  | de64f5f007 |  |
|  | 8595237505 |  |
|  | 62258df92f |  |
|  | b34e906396 |  |
|  | 71038381aa |  |
|  | 277fc6254e |  |
|  | 992b531180 |  |
|  | 84a0ba9bf5 |  |
|  | 4a5d6f7c71 |  |
|  | 1b3c8b8e94 |  |
|  | d4aba772cb |  |
|  | 4c97eeb32a |  |
|  | c9505488a1 |  |
|  | 530382db05 |  |
|  | 208089e58e |  |
|  | e5fdb5b0fd |  |
|  | 5332f6e215 |  |
|  | 5d87f5d24a |  |
|  | 791d7b125f |  |
|  | 72733e34c4 |  |
|  | b8d2b1a576 |  |
|  | 7fe4021f95 |  |
|  | 11285655c4 |  |
|  | 244687303b |  |
|  | 5e2c4f7e35 |  |
|  | a8026d3846 |  |
|  | 45bccd36b0 |  |
|  | 404c126fc3 |  |
|  | 88027552dd |  |
|  | ca8bd09d56 |  |
|  | 39ed8a7d26 |  |
|  | 2df7f7efe5 |  |
|  | 0560079c41 |  |
|  | 0de154194d |  |
|  | 5c583fe89b |  |
|  | cf63cbf153 |  |
|  | b1c291e2bb |  |
|  | 934d728946 |  |
|  | f037b2fd56 |  |
|  | d60cbed691 |  |
|  | 6aafe37752 |  |
|  | d555b68469 |  |
|  | 9baa5c33da |  |
|  | fdf4a68646 |  |
|  | adc9e1526b |  |
|  | b9af1d2b16 |  |
|  | 2d52e3b40a |  |
|  | 6039f8a126 |  |
|  | 50262a44ce |  |
|  | 839b2ae7cf |  |
|  | 6a8e743d57 |  |
|  | a797051921 |  |
|  | d7d9d96d6c |  |
|  | 26f1246a89 |  |
|  | 6581da9b57 |  |
|  | 900bb01486 |  |
|  | 2ad6a37192 |  |
|  | e5dd7f0934 |  |
|  | b6673838e8 |  |
|  | 1823906215 |  |
|  | 5185d69f13 |  |
|  | 4dffa496ac |  |
|  | ce984b2c71 |  |
|  | c47febf221 |  |
|  | 76c37c5493 |  |
|  | 2aa84b8891 |  |
|  | 9177bdae5e |  |
|  | a30e5e40ee |  |
|  | 99e1d33bd1 |  |
|  | b2f6fc3b9a |  |
|  | 51f88fd04a |  |
|  | 916fdc2a37 |  |
|  | 863d9c237e |  |
|  | 7e1543f5d8 |  |
|  | d701eb32d7 |  |
|  | 9baae77533 |  |
|  | e932722292 |  |
|  | bbea6bbf7e |  |
|  | 4891ee29c5 |  |
|  | bac8a427f3 |  |
|  | 14ab1063a7 |  |
|  | a77364bbaa |  |
|  | 19b4ed3c96 |  |
.codespellrc

@@ -1,6 +1,6 @@
 [codespell]
 # Ref: https://github.com/codespell-project/codespell#using-a-config-file
-skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl
+skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
 check-hidden = true
 ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
 ignore-words-list = ratatui,ser
28  .github/dotslash-config.json  vendored

@@ -27,6 +27,34 @@
         "path": "codex.exe"
       }
     }
-  }
+  },
+  "codex-responses-api-proxy": {
+    "platforms": {
+      "macos-aarch64": {
+        "regex": "^codex-responses-api-proxy-aarch64-apple-darwin\\.zst$",
+        "path": "codex-responses-api-proxy"
+      },
+      "macos-x86_64": {
+        "regex": "^codex-responses-api-proxy-x86_64-apple-darwin\\.zst$",
+        "path": "codex-responses-api-proxy"
+      },
+      "linux-x86_64": {
+        "regex": "^codex-responses-api-proxy-x86_64-unknown-linux-musl\\.zst$",
+        "path": "codex-responses-api-proxy"
+      },
+      "linux-aarch64": {
+        "regex": "^codex-responses-api-proxy-aarch64-unknown-linux-musl\\.zst$",
+        "path": "codex-responses-api-proxy"
+      },
+      "windows-x86_64": {
+        "regex": "^codex-responses-api-proxy-x86_64-pc-windows-msvc\\.exe\\.zst$",
+        "path": "codex-responses-api-proxy.exe"
+      },
+      "windows-aarch64": {
+        "regex": "^codex-responses-api-proxy-aarch64-pc-windows-msvc\\.exe\\.zst$",
+        "path": "codex-responses-api-proxy.exe"
+      }
+    }
+  }
 }
23  .github/workflows/ci.yml  vendored

@@ -1,7 +1,7 @@
 name: ci
 
 on:
-  pull_request: { branches: [main] }
+  pull_request: {}
   push: { branches: [main] }
 
 jobs:
@@ -27,12 +27,27 @@ jobs:
       - name: Install dependencies
        run: pnpm install --frozen-lockfile
 
-      # Run all tasks using workspace filters
+      # build_npm_package.py requires DotSlash when staging releases.
+      - uses: facebook/install-dotslash@v2
+
-      - name: Ensure staging a release works.
+      - name: Stage npm package
+        id: stage_npm_package
         env:
           GH_TOKEN: ${{ github.token }}
-        run: ./codex-cli/scripts/stage_release.sh
+        run: |
+          set -euo pipefail
+          CODEX_VERSION=0.40.0
+          PACK_OUTPUT="${RUNNER_TEMP}/codex-npm.tgz"
+          python3 ./codex-cli/scripts/build_npm_package.py \
+            --release-version "$CODEX_VERSION" \
+            --pack-output "$PACK_OUTPUT"
+          echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
+
+      - name: Upload staged npm package artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: codex-npm-staging
+          path: ${{ steps.stage_npm_package.outputs.pack_output }}
 
       - name: Ensure root README.md contains only ASCII and certain Unicode code points
         run: ./scripts/asciicheck.py README.md
2  .github/workflows/codespell.yml  vendored

@@ -22,6 +22,6 @@ jobs:
       - name: Annotate locations with typos
         uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
       - name: Codespell
-        uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2
+        uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1
        with:
          ignore_words_file: .codespellignore
6  .github/workflows/rust-ci.yml  vendored

@@ -57,7 +57,7 @@ jobs:
     working-directory: codex-rs
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.90
        with:
          components: rustfmt
       - name: cargo fmt
@@ -75,7 +75,7 @@ jobs:
     working-directory: codex-rs
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.90
       - uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
        with:
          tool: cargo-shear
@@ -143,7 +143,7 @@
 
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.90
        with:
          targets: ${{ matrix.target }}
          components: clippy
124  .github/workflows/rust-release.yml  vendored

@@ -77,7 +77,7 @@ jobs:
 
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.89
+      - uses: dtolnay/rust-toolchain@1.90
        with:
          targets: ${{ matrix.target }}

@@ -97,7 +97,7 @@
           sudo apt install -y musl-tools pkg-config
 
       - name: Cargo build
-        run: cargo build --target ${{ matrix.target }} --release --bin codex
+        run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy
 
       - name: Stage artifacts
         shell: bash
@@ -107,8 +107,10 @@
 
           if [[ "${{ matrix.runner }}" == windows* ]]; then
             cp target/${{ matrix.target }}/release/codex.exe "$dest/codex-${{ matrix.target }}.exe"
+            cp target/${{ matrix.target }}/release/codex-responses-api-proxy.exe "$dest/codex-responses-api-proxy-${{ matrix.target }}.exe"
           else
             cp target/${{ matrix.target }}/release/codex "$dest/codex-${{ matrix.target }}"
+            cp target/${{ matrix.target }}/release/codex-responses-api-proxy "$dest/codex-responses-api-proxy-${{ matrix.target }}"
           fi
 
       - if: ${{ matrix.runner == 'windows-11-arm' }}
@@ -167,6 +169,14 @@
   needs: build
   name: release
   runs-on: ubuntu-latest
   permissions:
     contents: write
+    actions: read
+  outputs:
+    version: ${{ steps.release_name.outputs.name }}
+    tag: ${{ github.ref_name }}
+    should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }}
+    npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }}
 
   steps:
     - name: Checkout repository
@@ -187,21 +197,50 @@
           version="${GITHUB_REF_NAME#rust-v}"
           echo "name=${version}" >> $GITHUB_OUTPUT
 
-      - name: Stage npm package
+      - name: Determine npm publish settings
+        id: npm_publish_settings
+        env:
+          VERSION: ${{ steps.release_name.outputs.name }}
+        run: |
+          set -euo pipefail
+          version="${VERSION}"
+
+          if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+            echo "should_publish=true" >> "$GITHUB_OUTPUT"
+            echo "npm_tag=" >> "$GITHUB_OUTPUT"
+          elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
+            echo "should_publish=true" >> "$GITHUB_OUTPUT"
+            echo "npm_tag=alpha" >> "$GITHUB_OUTPUT"
+          else
+            echo "should_publish=false" >> "$GITHUB_OUTPUT"
+            echo "npm_tag=" >> "$GITHUB_OUTPUT"
+          fi
+
+      # build_npm_package.py requires DotSlash when staging releases.
+      - uses: facebook/install-dotslash@v2
+      - name: Stage codex CLI npm package
         env:
           GH_TOKEN: ${{ github.token }}
         run: |
           set -euo pipefail
           TMP_DIR="${RUNNER_TEMP}/npm-stage"
-          python3 codex-cli/scripts/stage_rust_release.py \
+          ./codex-cli/scripts/build_npm_package.py \
+            --package codex \
             --release-version "${{ steps.release_name.outputs.name }}" \
-            --tmp "${TMP_DIR}"
-          mkdir -p dist/npm
-          # Produce an npm-ready tarball using `npm pack` and store it in dist/npm.
-          # We then rename it to a stable name used by our publishing script.
-          (cd "$TMP_DIR" && npm pack --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
-          mv "${GITHUB_WORKSPACE}"/dist/npm/*.tgz \
-            "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
+            --staging-dir "${TMP_DIR}" \
+            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
 
+      - name: Stage responses API proxy npm package
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          set -euo pipefail
+          TMP_DIR="${RUNNER_TEMP}/npm-stage-responses"
+          ./codex-cli/scripts/build_npm_package.py \
+            --package codex-responses-api-proxy \
+            --release-version "${{ steps.release_name.outputs.name }}" \
+            --staging-dir "${TMP_DIR}" \
+            --pack-output "${GITHUB_WORKSPACE}/dist/npm/codex-responses-api-proxy-npm-${{ steps.release_name.outputs.name }}.tgz"
 
       - name: Create GitHub Release
         uses: softprops/action-gh-release@v2
@@ -220,6 +259,69 @@
           tag: ${{ github.ref_name }}
           config: .github/dotslash-config.json
 
+  # Publish to npm using OIDC authentication.
+  # July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/
+  # npm docs: https://docs.npmjs.com/trusted-publishers
+  publish-npm:
+    # Publish to npm for stable releases and alpha pre-releases with numeric suffixes.
+    if: ${{ needs.release.outputs.should_publish_npm == 'true' }}
+    name: publish-npm
+    needs: release
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # Required for OIDC
+      contents: read
+
+    steps:
+      - name: Setup Node.js
+        uses: actions/setup-node@v5
+        with:
+          node-version: 22
+          registry-url: "https://registry.npmjs.org"
+          scope: "@openai"
+
+      # Trusted publishing requires npm CLI version 11.5.1 or later.
+      - name: Update npm
+        run: npm install -g npm@latest
+
+      - name: Download npm tarballs from release
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          set -euo pipefail
+          version="${{ needs.release.outputs.version }}"
+          tag="${{ needs.release.outputs.tag }}"
+          mkdir -p dist/npm
+          gh release download "$tag" \
+            --repo "${GITHUB_REPOSITORY}" \
+            --pattern "codex-npm-${version}.tgz" \
+            --dir dist/npm
+          gh release download "$tag" \
+            --repo "${GITHUB_REPOSITORY}" \
+            --pattern "codex-responses-api-proxy-npm-${version}.tgz" \
+            --dir dist/npm
+
+      # No NODE_AUTH_TOKEN needed because we use OIDC.
+      - name: Publish to npm
+        env:
+          VERSION: ${{ needs.release.outputs.version }}
+          NPM_TAG: ${{ needs.release.outputs.npm_tag }}
+        run: |
+          set -euo pipefail
+          tag_args=()
+          if [[ -n "${NPM_TAG}" ]]; then
+            tag_args+=(--tag "${NPM_TAG}")
+          fi
+
+          tarballs=(
+            "codex-npm-${VERSION}.tgz"
+            "codex-responses-api-proxy-npm-${VERSION}.tgz"
+          )
+
+          for tarball in "${tarballs[@]}"; do
+            npm publish "${GITHUB_WORKSPACE}/dist/npm/${tarball}" "${tag_args[@]}"
+          done
 
   update-branch:
     name: Update latest-alpha-cli branch
     permissions:
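The gating rule encoded in the `npm_publish_settings` step above is easy to miss inside the workflow quoting. A minimal Python sketch of the same logic, using the exact regexes from the diff (illustrative only; the workflow itself runs the bash shown above):

```python
import re

# Mirrors the two bash regexes in the npm_publish_settings step.
STABLE = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+$")
ALPHA = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$")

def npm_publish_settings(version: str) -> tuple[bool, str]:
    """Return (should_publish, npm_tag) for a release version string."""
    if STABLE.match(version):
        return True, ""       # stable release: publish under the default dist-tag
    if ALPHA.match(version):
        return True, "alpha"  # numbered alpha pre-release: publish under the alpha tag
    return False, ""          # anything else (e.g. an rc build) is not published

assert npm_publish_settings("0.40.0") == (True, "")
assert npm_publish_settings("0.41.0-alpha.3") == (True, "alpha")
assert npm_publish_settings("0.41.0-rc.1") == (False, "")
```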
43  .github/workflows/sdk.yml  vendored  (new file)

@@ -0,0 +1,43 @@
name: sdk

on:
  push:
    branches: [main]
  pull_request: {}

jobs:
  sdks:
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v5
        with:
          node-version: 22
          cache: pnpm

      - uses: dtolnay/rust-toolchain@1.90

      - name: build codex
        run: cargo build --bin codex
        working-directory: codex-rs

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Build SDK packages
        run: pnpm -r --filter ./sdk/typescript run build

      - name: Lint SDK packages
        run: pnpm -r --filter ./sdk/typescript run lint

      - name: Test SDK packages
        run: pnpm -r --filter ./sdk/typescript run test
AGENTS.md

@@ -4,6 +4,7 @@ In the codex-rs folder where the rust code lives:
 
 - Crate names are prefixed with `codex-`. For example, the `core` folder's crate is named `codex-core`
 - When using format! and you can inline variables into {}, always do that.
+- Install any commands the repo relies on (for example `just`, `rg`, or `cargo-insta`) if they aren't already available before running instructions here.
 - Never add or modify any code related to `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` or `CODEX_SANDBOX_ENV_VAR`.
 - You operate in a sandbox where `CODEX_SANDBOX_NETWORK_DISABLED=1` will be set whenever you use the `shell` tool. Any existing code that uses `CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR` was authored with this fact in mind. It is often used to early exit out of tests that the author knew you would not be able to run given your sandbox limitations.
 - Similarly, when you spawn a process using Seatbelt (`/usr/bin/sandbox-exec`), `CODEX_SANDBOX=seatbelt` will be set on the child process. Integration tests that want to run Seatbelt themselves cannot be run under Seatbelt, so checks for `CODEX_SANDBOX=seatbelt` are also often used to early exit out of tests, as appropriate.
README.md

@@ -1,4 +1,3 @@
 <h1 align="center">OpenAI Codex CLI</h1>
 
 <p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install codex</code></p>
 
@@ -102,4 +101,3 @@
 ## License
 
 This repository is licensed under the [Apache-2.0 License](LICENSE).
8  codex-cli/.gitignore  vendored

@@ -1,7 +1 @@
-# Added by ./scripts/install_native_deps.sh
-/bin/codex-aarch64-apple-darwin
-/bin/codex-aarch64-unknown-linux-musl
-/bin/codex-linux-sandbox-arm64
-/bin/codex-linux-sandbox-x64
-/bin/codex-x86_64-apple-darwin
-/bin/codex-x86_64-unknown-linux-musl
+/vendor/
@@ -208,7 +208,7 @@ The hardening mechanism Codex uses depends on your OS:
 
 | Requirement                 | Details                                                         |
 | --------------------------- | --------------------------------------------------------------- |
 | Operating systems           | macOS 12+, Ubuntu 20.04+/Debian 10+, or Windows 11 **via WSL2** |
-| Node.js                     | **22 or newer** (LTS recommended)                               |
+| Node.js                     | **16 or newer** (Node 20 LTS recommended)                       |
 | Git (optional, recommended) | 2.23+ for built-in PR helpers                                   |
 | RAM                         | 4-GB minimum (8-GB recommended)                                 |
 
@@ -513,7 +513,7 @@ Codex runs model-generated commands in a sandbox. If a proposed command or file
 <details>
 <summary>Does it work on Windows?</summary>
 
-Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex has been tested on macOS and Linux with Node 22.
+Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex is regularly tested on macOS and Linux with Node 20+, and also supports Node 16.
 
 </details>
codex-cli/bin/codex.js

@@ -1,6 +1,8 @@
 #!/usr/bin/env node
 // Unified entry point for the Codex CLI.
 
+import { spawn } from "node:child_process";
+import { existsSync } from "fs";
 import path from "path";
 import { fileURLToPath } from "url";
 
@@ -40,10 +42,10 @@ switch (platform) {
   case "win32":
     switch (arch) {
       case "x64":
-        targetTriple = "x86_64-pc-windows-msvc.exe";
+        targetTriple = "x86_64-pc-windows-msvc";
         break;
       case "arm64":
-        targetTriple = "aarch64-pc-windows-msvc.exe";
+        targetTriple = "aarch64-pc-windows-msvc";
         break;
       default:
         break;
@@ -57,31 +59,16 @@ if (!targetTriple) {
   throw new Error(`Unsupported platform: ${platform} (${arch})`);
 }
 
-const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`);
+const vendorRoot = path.join(__dirname, "..", "vendor");
+const archRoot = path.join(vendorRoot, targetTriple);
+const codexBinaryName = process.platform === "win32" ? "codex.exe" : "codex";
+const binaryPath = path.join(archRoot, "codex", codexBinaryName);
 
 // Use an asynchronous spawn instead of spawnSync so that Node is able to
 // respond to signals (e.g. Ctrl-C / SIGINT) while the native binary is
 // executing. This allows us to forward those signals to the child process
 // and guarantees that when either the child terminates or the parent
 // receives a fatal signal, both processes exit in a predictable manner.
-const { spawn } = await import("child_process");
-
-async function tryImport(moduleName) {
-  try {
-    // eslint-disable-next-line node/no-unsupported-features/es-syntax
-    return await import(moduleName);
-  } catch (err) {
-    return null;
-  }
-}
-
-async function resolveRgDir() {
-  const ripgrep = await tryImport("@vscode/ripgrep");
-  if (!ripgrep?.rgPath) {
-    return null;
-  }
-  return path.dirname(ripgrep.rgPath);
-}
-
 function getUpdatedPath(newDirs) {
   const pathSep = process.platform === "win32" ? ";" : ":";
@@ -94,9 +81,9 @@ function getUpdatedPath(newDirs) {
 }
 
 const additionalDirs = [];
-const rgDir = await resolveRgDir();
-if (rgDir) {
-  additionalDirs.push(rgDir);
+const pathDir = path.join(archRoot, "path");
+if (existsSync(pathDir)) {
+  additionalDirs.push(pathDir);
 }
 const updatedPath = getUpdatedPath(additionalDirs);
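The rewritten launcher stops resolving ripgrep through the `@vscode/ripgrep` npm dependency and instead reads everything from the packaged `vendor/` tree. A rough Python rendering of the path logic it now performs (the real launcher is the JavaScript above; this sketch only mirrors its lookup rules):

```python
import os
from pathlib import Path

def resolve_codex(package_root: Path, target_triple: str, is_windows: bool):
    """Mirror of the vendor-tree lookup in bin/codex.js (illustrative sketch)."""
    arch_root = package_root / "vendor" / target_triple
    # The CLI binary lives at vendor/<triple>/codex/codex (codex.exe on Windows).
    binary = arch_root / "codex" / ("codex.exe" if is_windows else "codex")
    # vendor/<triple>/path holds bundled helpers (ripgrep); it is prepended to
    # PATH before the child process is spawned, when the directory exists.
    extra_dirs = []
    path_dir = arch_root / "path"
    if path_dir.is_dir():
        extra_dirs.append(str(path_dir))
    sep = ";" if is_windows else ":"
    env_path = sep.join(extra_dirs + [os.environ.get("PATH", "")])
    return binary, env_path
```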
79  codex-cli/bin/rg  (new executable file)

@@ -0,0 +1,79 @@
#!/usr/bin/env dotslash

{
  "name": "rg",
  "platforms": {
    "macos-aarch64": {
      "size": 1787248,
      "hash": "blake3",
      "digest": "8d9942032585ea8ee805937634238d9aee7b210069f4703c88fbe568e26fb78a",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-aarch64-apple-darwin/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-apple-darwin.tar.gz"
        }
      ]
    },
    "linux-aarch64": {
      "size": 2047405,
      "hash": "blake3",
      "digest": "0b670b8fa0a3df2762af2fc82cc4932f684ca4c02dbd1260d4f3133fd4b2a515",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-aarch64-unknown-linux-gnu/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-unknown-linux-gnu.tar.gz"
        }
      ]
    },
    "macos-x86_64": {
      "size": 2082672,
      "hash": "blake3",
      "digest": "e9b862fc8da3127f92791f0ff6a799504154ca9d36c98bf3e60a81c6b1f7289e",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-x86_64-apple-darwin/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-apple-darwin.tar.gz"
        }
      ]
    },
    "linux-x86_64": {
      "size": 2566310,
      "hash": "blake3",
      "digest": "f73cca4e54d78c31f832c7f6e2c0b4db8b04fa3eaa747915727d570893dbee76",
      "format": "tar.gz",
      "path": "ripgrep-14.1.1-x86_64-unknown-linux-musl/rg",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-unknown-linux-musl.tar.gz"
        }
      ]
    },
    "windows-x86_64": {
      "size": 2058893,
      "hash": "blake3",
      "digest": "a8ce1a6fed4f8093ee997e57f33254e94b2cd18e26358b09db599c89882eadbd",
      "format": "zip",
      "path": "ripgrep-14.1.1-x86_64-pc-windows-msvc/rg.exe",
      "providers": [
        {
          "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-pc-windows-msvc.zip"
        }
      ]
    },
    "windows-aarch64": {
      "size": 1667740,
      "hash": "blake3",
      "digest": "47b971a8c4fca1d23a4e7c19bd4d88465ebc395598458133139406d3bf85f3fa",
      "format": "zip",
      "path": "rg.exe",
      "providers": [
        {
          "url": "https://github.com/microsoft/ripgrep-prebuilt/releases/download/v13.0.0-13/ripgrep-v13.0.0-13-aarch64-pc-windows-msvc.zip"
        }
      ]
    }
  }
}
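This manifest is consumed two ways: DotSlash runs it directly via the `#!/usr/bin/env dotslash` shebang, and `install_native_deps.py` (added later in this diff) shells out to `dotslash -- parse` and then downloads `providers[0]["url"]` for each platform entry. A small sketch of that read path (assumes the `dotslash` CLI is installed and the manifest is at its repo location):

```python
import json
import subprocess

# Parse the DotSlash manifest the same way install_native_deps.py does.
manifest = json.loads(
    subprocess.check_output(
        ["dotslash", "--", "parse", "codex-cli/bin/rg"], text=True
    )
)
info = manifest["platforms"]["linux-x86_64"]
print(info["providers"][0]["url"])   # ripgrep 14.1.1 musl tarball
print(info["format"], info["path"])  # archive format and the member to extract
```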
103  codex-cli/package-lock.json  (generated)

@@ -2,117 +2,16 @@
   "name": "@openai/codex",
   "version": "0.0.0-dev",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@openai/codex",
       "version": "0.0.0-dev",
       "license": "Apache-2.0",
-      "dependencies": {
-        "@vscode/ripgrep": "^1.15.14"
-      },
       "bin": {
         "codex": "bin/codex.js"
       },
       "engines": {
-        "node": ">=20"
+        "node": ">=16"
       }
-    },
-    "node_modules/@vscode/ripgrep": {
-      "version": "1.15.14",
-      "resolved": "https://registry.npmjs.org/@vscode/ripgrep/-/ripgrep-1.15.14.tgz",
-      "integrity": "sha512-/G1UJPYlm+trBWQ6cMO3sv6b8D1+G16WaJH1/DSqw32JOVlzgZbLkDxRyzIpTpv30AcYGMkCf5tUqGlW6HbDWw==",
-      "hasInstallScript": true,
-      "license": "MIT",
-      "dependencies": {
-        "https-proxy-agent": "^7.0.2",
-        "proxy-from-env": "^1.1.0",
-        "yauzl": "^2.9.2"
-      }
-    },
-    "node_modules/agent-base": {
-      "version": "7.1.4",
-      "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
-      "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
-      "license": "MIT",
-      "engines": {
-        "node": ">= 14"
-      }
-    },
-    "node_modules/buffer-crc32": {
-      "version": "0.2.13",
-      "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
-      "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
-      "license": "MIT",
-      "engines": {
-        "node": "*"
-      }
-    },
-    "node_modules/debug": {
-      "version": "4.4.1",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
-      "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
-      "license": "MIT",
-      "dependencies": {
-        "ms": "^2.1.3"
-      },
-      "engines": {
-        "node": ">=6.0"
-      },
-      "peerDependenciesMeta": {
-        "supports-color": {
-          "optional": true
-        }
-      }
-    },
-    "node_modules/fd-slicer": {
-      "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
-      "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==",
-      "license": "MIT",
-      "dependencies": {
-        "pend": "~1.2.0"
-      }
-    },
-    "node_modules/https-proxy-agent": {
-      "version": "7.0.6",
-      "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
-      "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
-      "license": "MIT",
-      "dependencies": {
-        "agent-base": "^7.1.2",
-        "debug": "4"
-      },
-      "engines": {
-        "node": ">= 14"
-      }
-    },
-    "node_modules/ms": {
-      "version": "2.1.3",
-      "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
-      "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
-      "license": "MIT"
-    },
-    "node_modules/pend": {
-      "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
-      "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
-      "license": "MIT"
-    },
-    "node_modules/proxy-from-env": {
-      "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
-      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
-      "license": "MIT"
-    },
-    "node_modules/yauzl": {
-      "version": "2.10.0",
-      "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
-      "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==",
-      "license": "MIT",
-      "dependencies": {
-        "buffer-crc32": "~0.2.3",
-        "fd-slicer": "~1.1.0"
-      }
-    }
   }
 }
codex-cli/package.json

@@ -7,20 +7,15 @@
   },
   "type": "module",
   "engines": {
-    "node": ">=20"
+    "node": ">=16"
   },
   "files": [
     "bin",
-    "dist"
+    "vendor"
   ],
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/openai/codex.git"
-  },
-  "dependencies": {
-    "@vscode/ripgrep": "^1.15.14"
-  },
-  "devDependencies": {
-    "prettier": "^3.3.3"
+    "url": "git+https://github.com/openai/codex.git",
+    "directory": "codex-cli"
   }
 }
@@ -5,5 +5,7 @@ Run the following:
 
 To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, build it as follows:
 
 ```bash
-./codex-cli/scripts/stage_rust_release.py --release-version 0.6.0
+./codex-cli/scripts/build_npm_package.py --release-version 0.6.0
 ```
+
+Note this will create `./codex-cli/vendor/` as a side-effect.
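For a release-style build that also produces an npm tarball, the script accepts the same staging and packing flags used by the workflow earlier in this diff. A hypothetical local invocation, driven from Python purely for illustration (the script needs an authenticated `gh` CLI and DotSlash on PATH, and the output path below is made up):

```python
import subprocess
import tempfile
from pathlib import Path

# Hypothetical example: stage version 0.6.0 and pack it into a tarball.
# --staging-dir must point at an empty directory; --pack-output is optional.
staging = Path(tempfile.mkdtemp(prefix="codex-npm-stage-"))
subprocess.check_call([
    "./codex-cli/scripts/build_npm_package.py",
    "--release-version", "0.6.0",
    "--staging-dir", str(staging),
    "--pack-output", "dist/codex-npm-0.6.0.tgz",
])
```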
307  codex-cli/scripts/build_npm_package.py  (new executable file)

@@ -0,0 +1,307 @@
#!/usr/bin/env python3
"""Stage and optionally package the @openai/codex npm module."""

import argparse
import json
import re
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path

SCRIPT_DIR = Path(__file__).resolve().parent
CODEX_CLI_ROOT = SCRIPT_DIR.parent
REPO_ROOT = CODEX_CLI_ROOT.parent
RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm"
GITHUB_REPO = "openai/codex"

# The docs are not clear on what the expected value/format of
# workflow/workflowName is:
# https://cli.github.com/manual/gh_run_list
WORKFLOW_NAME = ".github/workflows/rust-release.yml"


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
    parser.add_argument(
        "--package",
        choices=("codex", "codex-responses-api-proxy"),
        default="codex",
        help="Which npm package to stage (default: codex).",
    )
    parser.add_argument(
        "--version",
        help="Version number to write to package.json inside the staged package.",
    )
    parser.add_argument(
        "--release-version",
        help=(
            "Version to stage for npm release. When provided, the script also resolves the "
            "matching rust-release workflow unless --workflow-url is supplied."
        ),
    )
    parser.add_argument(
        "--workflow-url",
        help="Optional GitHub Actions workflow run URL used to download native binaries.",
    )
    parser.add_argument(
        "--staging-dir",
        type=Path,
        help=(
            "Directory to stage the package contents. Defaults to a new temporary directory "
            "if omitted. The directory must be empty when provided."
        ),
    )
    parser.add_argument(
        "--tmp",
        dest="staging_dir",
        type=Path,
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        "--pack-output",
        type=Path,
        help="Path where the generated npm tarball should be written.",
    )
    return parser.parse_args()


def main() -> int:
    args = parse_args()

    package = args.package
    version = args.version
    release_version = args.release_version
    if release_version:
        if version and version != release_version:
            raise RuntimeError("--version and --release-version must match when both are provided.")
        version = release_version

    if not version:
        raise RuntimeError("Must specify --version or --release-version.")

    staging_dir, created_temp = prepare_staging_dir(args.staging_dir)

    try:
        stage_sources(staging_dir, version, package)

        workflow_url = args.workflow_url
        resolved_head_sha: str | None = None
        if not workflow_url:
            if release_version:
                workflow = resolve_release_workflow(version)
                workflow_url = workflow["url"]
                resolved_head_sha = workflow.get("headSha")
            else:
                workflow_url = resolve_latest_alpha_workflow_url()
        elif release_version:
            try:
                workflow = resolve_release_workflow(version)
                resolved_head_sha = workflow.get("headSha")
            except Exception:
                resolved_head_sha = None

        if release_version and resolved_head_sha:
            print(f"should `git checkout {resolved_head_sha}`")

        if not workflow_url:
            raise RuntimeError("Unable to determine workflow URL for native binaries.")

        install_native_binaries(staging_dir, workflow_url, package)

        if release_version:
            staging_dir_str = str(staging_dir)
            if package == "codex":
                print(
                    f"Staged version {version} for release in {staging_dir_str}\n\n"
                    "Verify the CLI:\n"
                    f" node {staging_dir_str}/bin/codex.js --version\n"
                    f" node {staging_dir_str}/bin/codex.js --help\n\n"
                )
            else:
                print(
                    f"Staged version {version} for release in {staging_dir_str}\n\n"
                    "Verify the responses API proxy:\n"
                    f" node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n"
                )
        else:
            print(f"Staged package in {staging_dir}")

        if args.pack_output is not None:
            output_path = run_npm_pack(staging_dir, args.pack_output)
            print(f"npm pack output written to {output_path}")
    finally:
        if created_temp:
            # Preserve the staging directory for further inspection.
            pass

    return 0


def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
    if staging_dir is not None:
        staging_dir = staging_dir.resolve()
        staging_dir.mkdir(parents=True, exist_ok=True)
        if any(staging_dir.iterdir()):
            raise RuntimeError(f"Staging directory {staging_dir} is not empty.")
        return staging_dir, False

    temp_dir = Path(tempfile.mkdtemp(prefix="codex-npm-stage-"))
    return temp_dir, True


def stage_sources(staging_dir: Path, version: str, package: str) -> None:
    bin_dir = staging_dir / "bin"
    bin_dir.mkdir(parents=True, exist_ok=True)

    if package == "codex":
        shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
        rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
        if rg_manifest.exists():
            shutil.copy2(rg_manifest, bin_dir / "rg")

        readme_src = REPO_ROOT / "README.md"
        if readme_src.exists():
            shutil.copy2(readme_src, staging_dir / "README.md")

        package_json_path = CODEX_CLI_ROOT / "package.json"
    elif package == "codex-responses-api-proxy":
        launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js"
        shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js")

        readme_src = RESPONSES_API_PROXY_NPM_ROOT / "README.md"
        if readme_src.exists():
            shutil.copy2(readme_src, staging_dir / "README.md")

        package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json"
    else:
        raise RuntimeError(f"Unknown package '{package}'.")

    with open(package_json_path, "r", encoding="utf-8") as fh:
        package_json = json.load(fh)
    package_json["version"] = version

    with open(staging_dir / "package.json", "w", encoding="utf-8") as out:
        json.dump(package_json, out, indent=2)
        out.write("\n")


def install_native_binaries(staging_dir: Path, workflow_url: str, package: str) -> None:
    package_components = {
        "codex": ["codex", "rg"],
        "codex-responses-api-proxy": ["codex-responses-api-proxy"],
    }

    components = package_components.get(package)
    if components is None:
        raise RuntimeError(f"Unknown package '{package}'.")

    cmd = ["./scripts/install_native_deps.py", "--workflow-url", workflow_url]
    for component in components:
        cmd.extend(["--component", component])
    cmd.append(str(staging_dir))
    subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT)


def resolve_latest_alpha_workflow_url() -> str:
    version = determine_latest_alpha_version()
    workflow = resolve_release_workflow(version)
    return workflow["url"]


def determine_latest_alpha_version() -> str:
    releases = list_releases()
    best_key: tuple[int, int, int, int] | None = None
    best_version: str | None = None
    pattern = re.compile(r"^rust-v(\d+)\.(\d+)\.(\d+)-alpha\.(\d+)$")
    for release in releases:
        tag = release.get("tag_name", "")
        match = pattern.match(tag)
        if not match:
            continue
        key = tuple(int(match.group(i)) for i in range(1, 5))
        if best_key is None or key > best_key:
            best_key = key
            best_version = (
                f"{match.group(1)}.{match.group(2)}.{match.group(3)}-alpha.{match.group(4)}"
            )

    if best_version is None:
        raise RuntimeError("No alpha releases found when resolving workflow URL.")
    return best_version


def list_releases() -> list[dict]:
    stdout = subprocess.check_output(
        ["gh", "api", f"/repos/{GITHUB_REPO}/releases?per_page=100"],
        text=True,
    )
    try:
        releases = json.loads(stdout or "[]")
    except json.JSONDecodeError as exc:
        raise RuntimeError("Unable to parse releases JSON.") from exc
    if not isinstance(releases, list):
        raise RuntimeError("Unexpected response when listing releases.")
    return releases


def resolve_release_workflow(version: str) -> dict:
    stdout = subprocess.check_output(
        [
            "gh",
            "run",
            "list",
            "--branch",
            f"rust-v{version}",
            "--json",
            "workflowName,url,headSha",
            "--workflow",
            WORKFLOW_NAME,
            "--jq",
            "first(.[])",
        ],
        text=True,
    )
    workflow = json.loads(stdout or "[]")
    if not workflow:
        raise RuntimeError(f"Unable to find rust-release workflow for version {version}.")
    return workflow


def run_npm_pack(staging_dir: Path, output_path: Path) -> Path:
    output_path = output_path.resolve()
    output_path.parent.mkdir(parents=True, exist_ok=True)

    with tempfile.TemporaryDirectory(prefix="codex-npm-pack-") as pack_dir_str:
        pack_dir = Path(pack_dir_str)
        stdout = subprocess.check_output(
            ["npm", "pack", "--json", "--pack-destination", str(pack_dir)],
            cwd=staging_dir,
            text=True,
        )
        try:
            pack_output = json.loads(stdout)
        except json.JSONDecodeError as exc:
            raise RuntimeError("Failed to parse npm pack output.") from exc

        if not pack_output:
            raise RuntimeError("npm pack did not produce an output tarball.")

        tarball_name = pack_output[0].get("filename") or pack_output[0].get("name")
        if not tarball_name:
            raise RuntimeError("Unable to determine npm pack output filename.")

        tarball_path = pack_dir / tarball_name
        if not tarball_path.exists():
            raise RuntimeError(f"Expected npm pack output not found: {tarball_path}")

        shutil.move(str(tarball_path), output_path)

    return output_path


if __name__ == "__main__":
    import sys

    sys.exit(main())
383  codex-cli/scripts/install_native_deps.py  (new executable file)

@@ -0,0 +1,383 @@
#!/usr/bin/env python3
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""

import argparse
import json
import os
import shutil
import subprocess
import tarfile
import tempfile
import zipfile
from dataclasses import dataclass
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from typing import Iterable, Sequence
from urllib.parse import urlparse
from urllib.request import urlopen

SCRIPT_DIR = Path(__file__).resolve().parent
CODEX_CLI_ROOT = SCRIPT_DIR.parent
DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351"  # rust-v0.40.0
VENDOR_DIR_NAME = "vendor"
RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
BINARY_TARGETS = (
    "x86_64-unknown-linux-musl",
    "aarch64-unknown-linux-musl",
    "x86_64-apple-darwin",
    "aarch64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "aarch64-pc-windows-msvc",
)


@dataclass(frozen=True)
class BinaryComponent:
    artifact_prefix: str  # matches the artifact filename prefix (e.g. codex-<target>.zst)
    dest_dir: str  # directory under vendor/<target>/ where the binary is installed
    binary_basename: str  # executable name inside dest_dir (before optional .exe)


BINARY_COMPONENTS = {
    "codex": BinaryComponent(
        artifact_prefix="codex",
        dest_dir="codex",
        binary_basename="codex",
    ),
    "codex-responses-api-proxy": BinaryComponent(
        artifact_prefix="codex-responses-api-proxy",
        dest_dir="codex-responses-api-proxy",
        binary_basename="codex-responses-api-proxy",
    ),
}

RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
    ("x86_64-unknown-linux-musl", "linux-x86_64"),
    ("aarch64-unknown-linux-musl", "linux-aarch64"),
    ("x86_64-apple-darwin", "macos-x86_64"),
    ("aarch64-apple-darwin", "macos-aarch64"),
    ("x86_64-pc-windows-msvc", "windows-x86_64"),
    ("aarch64-pc-windows-msvc", "windows-aarch64"),
]
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Install native Codex binaries.")
    parser.add_argument(
        "--workflow-url",
        help=(
            "GitHub Actions workflow URL that produced the artifacts. Defaults to a "
            "known good run when omitted."
        ),
    )
    parser.add_argument(
        "--component",
        dest="components",
        action="append",
        choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
        help=(
            "Limit installation to the specified components."
            " May be repeated. Defaults to 'codex' and 'rg'."
        ),
    )
    parser.add_argument(
        "root",
        nargs="?",
        type=Path,
        help=(
            "Directory containing package.json for the staged package. If omitted, the "
            "repository checkout is used."
        ),
    )
    return parser.parse_args()


def main() -> int:
    args = parse_args()

    codex_cli_root = (args.root or CODEX_CLI_ROOT).resolve()
    vendor_dir = codex_cli_root / VENDOR_DIR_NAME
    vendor_dir.mkdir(parents=True, exist_ok=True)

    components = args.components or ["codex", "rg"]

    workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
    if not workflow_url:
        workflow_url = DEFAULT_WORKFLOW_URL

    workflow_id = workflow_url.rstrip("/").split("/")[-1]
    print(f"Downloading native artifacts from workflow {workflow_id}...")

    with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
        artifacts_dir = Path(artifacts_dir_str)
        _download_artifacts(workflow_id, artifacts_dir)
        install_binary_components(
            artifacts_dir,
            vendor_dir,
            BINARY_TARGETS,
            [name for name in components if name in BINARY_COMPONENTS],
        )

    if "rg" in components:
        print("Fetching ripgrep binaries...")
        fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)

    print(f"Installed native dependencies into {vendor_dir}")
    return 0


def fetch_rg(
    vendor_dir: Path,
    targets: Sequence[str] | None = None,
    *,
    manifest_path: Path,
) -> list[Path]:
    """Download ripgrep binaries described by the DotSlash manifest."""

    if targets is None:
        targets = DEFAULT_RG_TARGETS

    if not manifest_path.exists():
        raise FileNotFoundError(f"DotSlash manifest not found: {manifest_path}")

    manifest = _load_manifest(manifest_path)
    platforms = manifest.get("platforms", {})

    vendor_dir.mkdir(parents=True, exist_ok=True)

    targets = list(targets)
    if not targets:
        return []

    task_configs: list[tuple[str, str, dict]] = []
    for target in targets:
        platform_key = RG_TARGET_TO_PLATFORM.get(target)
        if platform_key is None:
            raise ValueError(f"Unsupported ripgrep target '{target}'.")

        platform_info = platforms.get(platform_key)
        if platform_info is None:
            raise RuntimeError(f"Platform '{platform_key}' not found in manifest {manifest_path}.")

        task_configs.append((target, platform_key, platform_info))

    results: dict[str, Path] = {}
    max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1)))

    print("Installing ripgrep binaries for targets: " + ", ".join(targets))

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_map = {
            executor.submit(
                _fetch_single_rg,
                vendor_dir,
                target,
                platform_key,
                platform_info,
                manifest_path,
            ): target
            for target, platform_key, platform_info in task_configs
        }

        for future in as_completed(future_map):
            target = future_map[future]
            results[target] = future.result()
            print(f" installed ripgrep for {target}")

    return [results[target] for target in targets]


def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
    cmd = [
        "gh",
        "run",
        "download",
        "--dir",
        str(dest_dir),
        "--repo",
        "openai/codex",
        workflow_id,
    ]
    subprocess.check_call(cmd)


def install_binary_components(
    artifacts_dir: Path,
    vendor_dir: Path,
    targets: Iterable[str],
    component_names: Sequence[str],
) -> None:
    selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS]
    if not selected_components:
        return

    targets = list(targets)
    if not targets:
        return

    for component in selected_components:
        print(
            f"Installing {component.binary_basename} binaries for targets: "
            + ", ".join(targets)
        )
        max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = {
                executor.submit(
                    _install_single_binary,
                    artifacts_dir,
                    vendor_dir,
                    target,
                    component,
                ): target
                for target in targets
            }
            for future in as_completed(futures):
                installed_path = future.result()
                print(f" installed {installed_path}")


def _install_single_binary(
    artifacts_dir: Path,
    vendor_dir: Path,
    target: str,
    component: BinaryComponent,
) -> Path:
    artifact_subdir = artifacts_dir / target
    archive_name = _archive_name_for_target(component.artifact_prefix, target)
    archive_path = artifact_subdir / archive_name
    if not archive_path.exists():
        raise FileNotFoundError(f"Expected artifact not found: {archive_path}")

    dest_dir = vendor_dir / target / component.dest_dir
    dest_dir.mkdir(parents=True, exist_ok=True)

    binary_name = (
        f"{component.binary_basename}.exe" if "windows" in target else component.binary_basename
    )
    dest = dest_dir / binary_name
    dest.unlink(missing_ok=True)
    extract_archive(archive_path, "zst", None, dest)
    if "windows" not in target:
        dest.chmod(0o755)
    return dest


def _archive_name_for_target(artifact_prefix: str, target: str) -> str:
    if "windows" in target:
        return f"{artifact_prefix}-{target}.exe.zst"
    return f"{artifact_prefix}-{target}.zst"


def _fetch_single_rg(
    vendor_dir: Path,
    target: str,
    platform_key: str,
    platform_info: dict,
    manifest_path: Path,
) -> Path:
    providers = platform_info.get("providers", [])
    if not providers:
        raise RuntimeError(f"No providers listed for platform '{platform_key}' in {manifest_path}.")

    url = providers[0]["url"]
    archive_format = platform_info.get("format", "zst")
    archive_member = platform_info.get("path")

    dest_dir = vendor_dir / target / "path"
    dest_dir.mkdir(parents=True, exist_ok=True)

    is_windows = platform_key.startswith("win")
    binary_name = "rg.exe" if is_windows else "rg"
    dest = dest_dir / binary_name

    with tempfile.TemporaryDirectory() as tmp_dir_str:
        tmp_dir = Path(tmp_dir_str)
        archive_filename = os.path.basename(urlparse(url).path)
        download_path = tmp_dir / archive_filename
        _download_file(url, download_path)

        dest.unlink(missing_ok=True)
        extract_archive(download_path, archive_format, archive_member, dest)

    if not is_windows:
        dest.chmod(0o755)

    return dest


def _download_file(url: str, dest: Path) -> None:
    dest.parent.mkdir(parents=True, exist_ok=True)
    with urlopen(url) as response, open(dest, "wb") as out:
        shutil.copyfileobj(response, out)


def extract_archive(
    archive_path: Path,
    archive_format: str,
    archive_member: str | None,
    dest: Path,
) -> None:
    dest.parent.mkdir(parents=True, exist_ok=True)

    if archive_format == "zst":
        output_path = archive_path.parent / dest.name
        subprocess.check_call(
            ["zstd", "-f", "-d", str(archive_path), "-o", str(output_path)]
        )
        shutil.move(str(output_path), dest)
        return

    if archive_format == "tar.gz":
        if not archive_member:
            raise RuntimeError("Missing 'path' for tar.gz archive in DotSlash manifest.")
        with tarfile.open(archive_path, "r:gz") as tar:
            try:
                member = tar.getmember(archive_member)
            except KeyError as exc:
                raise RuntimeError(
                    f"Entry '{archive_member}' not found in archive {archive_path}."
                ) from exc
            tar.extract(member, path=archive_path.parent, filter="data")
        extracted = archive_path.parent / archive_member
        shutil.move(str(extracted), dest)
        return

    if archive_format == "zip":
        if not archive_member:
            raise RuntimeError("Missing 'path' for zip archive in DotSlash manifest.")
        with zipfile.ZipFile(archive_path) as archive:
            try:
                with archive.open(archive_member) as src, open(dest, "wb") as out:
                    shutil.copyfileobj(src, out)
            except KeyError as exc:
                raise RuntimeError(
                    f"Entry '{archive_member}' not found in archive {archive_path}."
                ) from exc
        return

    raise RuntimeError(f"Unsupported archive format '{archive_format}'.")


def _load_manifest(manifest_path: Path) -> dict:
    cmd = ["dotslash", "--", "parse", str(manifest_path)]
    stdout = subprocess.check_output(cmd, text=True)
    try:
        manifest = json.loads(stdout)
    except json.JSONDecodeError as exc:
        raise RuntimeError(f"Invalid DotSlash manifest output from {manifest_path}.") from exc

    if not isinstance(manifest, dict):
        raise RuntimeError(
            f"Unexpected DotSlash manifest structure for {manifest_path}: {type(manifest)!r}"
        )

    return manifest


if __name__ == "__main__":
    import sys

    sys.exit(main())
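The layout the installer produces under `vendor/` follows from `dest_dir` and the ripgrep `"path"` handling above: per target triple, the CLI lands in `vendor/<triple>/codex/` and ripgrep in `vendor/<triple>/path/`. A small illustrative check (assumes the installer has already been run in a checkout, so the directory exists):

```python
from pathlib import Path

# Verify the vendor layout described by the installer above (sketch only).
vendor = Path("codex-cli/vendor")
for triple_dir in sorted(vendor.iterdir()):
    exe = ".exe" if "windows" in triple_dir.name else ""
    codex = triple_dir / "codex" / f"codex{exe}"
    rg = triple_dir / "path" / f"rg{exe}"
    print(f"{triple_dir.name}: codex={codex.exists()} rg={rg.exists()}")
```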
codex-cli/scripts/install_native_deps.sh  (deleted)

@@ -1,94 +0,0 @@
#!/usr/bin/env bash

# Install native runtime dependencies for codex-cli.
#
# Usage
#   install_native_deps.sh [--workflow-url URL] [CODEX_CLI_ROOT]
#
# The optional RELEASE_ROOT is the path that contains package.json. Omitting
# it installs the binaries into the repository's own bin/ folder to support
# local development.

set -euo pipefail

# ------------------
# Parse arguments
# ------------------

CODEX_CLI_ROOT=""

# Until we start publishing stable GitHub releases, we have to grab the binaries
# from the GitHub Action that created them. Update the URL below to point to the
# appropriate workflow run:
WORKFLOW_URL="https://github.com/openai/codex/actions/runs/17417194663" # rust-v0.28.0

while [[ $# -gt 0 ]]; do
  case "$1" in
    --workflow-url)
      shift || { echo "--workflow-url requires an argument"; exit 1; }
      if [ -n "$1" ]; then
        WORKFLOW_URL="$1"
      fi
      ;;
    *)
      if [[ -z "$CODEX_CLI_ROOT" ]]; then
        CODEX_CLI_ROOT="$1"
      else
        echo "Unexpected argument: $1" >&2
        exit 1
      fi
      ;;
  esac
  shift
done

# ----------------------------------------------------------------------------
# Determine where the binaries should be installed.
# ----------------------------------------------------------------------------

if [ -n "$CODEX_CLI_ROOT" ]; then
  # The caller supplied a release root directory.
  BIN_DIR="$CODEX_CLI_ROOT/bin"
else
  # No argument; fall back to the repo's own bin directory.
  # Resolve the path of this script, then walk up to the repo root.
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
  BIN_DIR="$CODEX_CLI_ROOT/bin"
fi

# Make sure the destination directory exists.
mkdir -p "$BIN_DIR"

# ----------------------------------------------------------------------------
# Download and decompress the artifacts from the GitHub Actions workflow.
# ----------------------------------------------------------------------------

WORKFLOW_ID="${WORKFLOW_URL##*/}"

ARTIFACTS_DIR="$(mktemp -d)"
trap 'rm -rf "$ARTIFACTS_DIR"' EXIT

# NB: The GitHub CLI `gh` must be installed and authenticated.
gh run download --dir "$ARTIFACTS_DIR" --repo openai/codex "$WORKFLOW_ID"

# x64 Linux
zstd -d "$ARTIFACTS_DIR/x86_64-unknown-linux-musl/codex-x86_64-unknown-linux-musl.zst" \
  -o "$BIN_DIR/codex-x86_64-unknown-linux-musl"
# ARM64 Linux
zstd -d "$ARTIFACTS_DIR/aarch64-unknown-linux-musl/codex-aarch64-unknown-linux-musl.zst" \
  -o "$BIN_DIR/codex-aarch64-unknown-linux-musl"
# x64 macOS
zstd -d "$ARTIFACTS_DIR/x86_64-apple-darwin/codex-x86_64-apple-darwin.zst" \
  -o "$BIN_DIR/codex-x86_64-apple-darwin"
# ARM64 macOS
zstd -d "$ARTIFACTS_DIR/aarch64-apple-darwin/codex-aarch64-apple-darwin.zst" \
  -o "$BIN_DIR/codex-aarch64-apple-darwin"
# x64 Windows
zstd -d "$ARTIFACTS_DIR/x86_64-pc-windows-msvc/codex-x86_64-pc-windows-msvc.exe.zst" \
  -o "$BIN_DIR/codex-x86_64-pc-windows-msvc.exe"
# ARM64 Windows
zstd -d "$ARTIFACTS_DIR/aarch64-pc-windows-msvc/codex-aarch64-pc-windows-msvc.exe.zst" \
  -o "$BIN_DIR/codex-aarch64-pc-windows-msvc.exe"

echo "Installed native dependencies into $BIN_DIR"
@@ -1,120 +0,0 @@
#!/usr/bin/env bash
# -----------------------------------------------------------------------------
# stage_release.sh
# -----------------------------------------------------------------------------
# Stages an npm release for @openai/codex.
#
# Usage:
#
#   --tmp <dir> : Use <dir> instead of a freshly created temp directory.
#   -h|--help   : Print usage.
#
# -----------------------------------------------------------------------------

set -euo pipefail

# Helper - usage / flag parsing

usage() {
  cat <<EOF
Usage: $(basename "$0") [--tmp DIR] [--version VERSION]

Options
  --tmp DIR   Use DIR to stage the release (defaults to a fresh mktemp dir)
  --version   Specify the version to release (defaults to a timestamp-based version)
  -h, --help  Show this help

Legacy positional argument: the first non-flag argument is still interpreted
as the temporary directory (for backwards compatibility) but is deprecated.
EOF
  exit "${1:-0}"
}

TMPDIR=""
# Default to a timestamp-based version (keep same scheme as before)
VERSION="$(printf '0.1.%d' "$(date +%y%m%d%H%M)")"
WORKFLOW_URL=""

# Manual flag parser - Bash getopts does not handle GNU long options well.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --tmp)
      shift || { echo "--tmp requires an argument"; usage 1; }
      TMPDIR="$1"
      ;;
    --tmp=*)
      TMPDIR="${1#*=}"
      ;;
    --version)
      shift || { echo "--version requires an argument"; usage 1; }
      VERSION="$1"
      ;;
    --workflow-url)
      shift || { echo "--workflow-url requires an argument"; exit 1; }
      WORKFLOW_URL="$1"
      ;;
    -h|--help)
      usage 0
      ;;
    --*)
      echo "Unknown option: $1" >&2
      usage 1
      ;;
    *)
      echo "Unexpected extra argument: $1" >&2
      usage 1
      ;;
  esac
  shift
done

# If the caller did not specify a directory, create a fresh temporary one.
if [[ -z "$TMPDIR" ]]; then
  TMPDIR="$(mktemp -d)"
fi

# Ensure the directory exists, then resolve to an absolute path.
mkdir -p "$TMPDIR"
TMPDIR="$(cd "$TMPDIR" && pwd)"

# Main build logic

echo "Staging release in $TMPDIR"

# The script lives in codex-cli/scripts/ - change into the codex-cli root so
# that relative paths keep working.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

pushd "$CODEX_CLI_ROOT" >/dev/null

# 1. Build the JS artifacts ---------------------------------------------------

# Paths inside the staged package
mkdir -p "$TMPDIR/bin"

cp -r bin/codex.js "$TMPDIR/bin/codex.js"
cp ../README.md "$TMPDIR" || true # README is one level up - ignore if missing

# Modify package.json - bump version and optionally add the native directory to
# the files array so that the binaries are published to npm.

jq --arg version "$VERSION" \
  '.version = $version' \
  package.json > "$TMPDIR/package.json"

# 2. Native runtime deps (sandbox plus optional Rust binaries)

./scripts/install_native_deps.sh --workflow-url "$WORKFLOW_URL" "$TMPDIR"

popd >/dev/null

echo "Staged version $VERSION for release in $TMPDIR"

echo "Verify the CLI:"
echo "  node ${TMPDIR}/bin/codex.js --version"
echo "  node ${TMPDIR}/bin/codex.js --help"

# Print final hint for convenience
echo "Next: cd \"$TMPDIR\" && npm publish"
@@ -1,70 +0,0 @@
#!/usr/bin/env python3

import argparse
import json
import subprocess
import sys
from pathlib import Path


def main() -> int:
    parser = argparse.ArgumentParser(
        description="""Stage a release for the npm module.

Run this after the GitHub Release has been created and use
`--release-version` to specify the version to release.

Optionally pass `--tmp` to control the temporary staging directory that will be
forwarded to stage_release.sh.
"""
    )
    parser.add_argument(
        "--release-version", required=True, help="Version to release, e.g., 0.3.0"
    )
    parser.add_argument(
        "--tmp",
        help="Optional path to stage the npm package; forwarded to stage_release.sh",
    )
    args = parser.parse_args()
    version = args.release_version

    gh_run = subprocess.run(
        [
            "gh",
            "run",
            "list",
            "--branch",
            f"rust-v{version}",
            "--json",
            "workflowName,url,headSha",
            "--jq",
            'first(.[] | select(.workflowName == "rust-release"))',
        ],
        stdout=subprocess.PIPE,
        check=True,
    )
    gh_run.check_returncode()
    workflow = json.loads(gh_run.stdout)
    sha = workflow["headSha"]

    print(f"should `git checkout {sha}`")

    current_dir = Path(__file__).parent.resolve()
    cmd = [
        str(current_dir / "stage_release.sh"),
        "--version",
        version,
        "--workflow-url",
        workflow["url"],
    ]
    if args.tmp:
        cmd.extend(["--tmp", args.tmp])

    stage_release = subprocess.run(cmd)
    stage_release.check_returncode()

    return 0


if __name__ == "__main__":
    sys.exit(main())
codex-rs/Cargo.lock (generated, 1789 lines changed): diff suppressed because it is too large.
@@ -1,23 +1,36 @@
[workspace]
members = [
    "backend-client",
    "ansi-escape",
    "app-server",
    "apply-patch",
    "arg0",
    "codex-backend-openapi-models",
    "cloud-tasks",
    "cloud-tasks-client",
    "cli",
    "common",
    "core",
    "exec",
    "execpolicy",
    "file-search",
    "git-tooling",
    "linux-sandbox",
    "login",
    "mcp-client",
    "mcp-server",
    "mcp-types",
    "ollama",
    "process-hardening",
    "protocol",
    "protocol-ts",
    "rmcp-client",
    "responses-api-proxy",
    "otel",
    "tui",
    "git-apply",
    "utils/json-to-toml",
    "utils/readiness",
]
resolver = "2"

@@ -29,15 +42,187 @@ version = "0.0.0"
# edition.
edition = "2024"

[workspace.dependencies]
# Internal
app_test_support = { path = "app-server/tests/common" }
codex-ansi-escape = { path = "ansi-escape" }
codex-app-server = { path = "app-server" }
codex-apply-patch = { path = "apply-patch" }
codex-arg0 = { path = "arg0" }
codex-chatgpt = { path = "chatgpt" }
codex-common = { path = "common" }
codex-core = { path = "core" }
codex-exec = { path = "exec" }
codex-file-search = { path = "file-search" }
codex-git-tooling = { path = "git-tooling" }
codex-linux-sandbox = { path = "linux-sandbox" }
codex-login = { path = "login" }
codex-mcp-client = { path = "mcp-client" }
codex-mcp-server = { path = "mcp-server" }
codex-ollama = { path = "ollama" }
codex-otel = { path = "otel" }
codex-process-hardening = { path = "process-hardening" }
codex-protocol = { path = "protocol" }
codex-protocol-ts = { path = "protocol-ts" }
codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-rmcp-client = { path = "rmcp-client" }
codex-tui = { path = "tui" }
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
codex-utils-readiness = { path = "utils/readiness" }
core_test_support = { path = "core/tests/common" }
mcp-types = { path = "mcp-types" }
mcp_test_support = { path = "mcp-server/tests/common" }

# External
allocative = "0.3.3"
ansi-to-tui = "7.0.0"
anyhow = "1"
arboard = "3"
askama = "0.12"
assert_cmd = "2"
async-channel = "2.3.1"
async-stream = "0.3.6"
async-trait = "0.1.89"
base64 = "0.22.1"
bytes = "1.10.1"
chrono = "0.4.42"
clap = "4"
clap_complete = "4"
color-eyre = "0.6.3"
crossterm = "0.28.1"
ctor = "0.5.0"
derive_more = "2"
diffy = "0.4.2"
dirs = "6"
dotenvy = "0.15.7"
env-flags = "0.1.1"
env_logger = "0.11.5"
escargot = "0.5"
eventsource-stream = "0.2.3"
futures = "0.3"
icu_decimal = "2.0.0"
icu_locale_core = "2.0.0"
ignore = "0.4.23"
image = { version = "^0.25.8", default-features = false }
indexmap = "2.6.0"
insta = "1.43.2"
itertools = "0.14.0"
landlock = "0.4.1"
lazy_static = "1"
libc = "0.2.175"
log = "0.4"
maplit = "1.0.2"
mime_guess = "2.0.5"
multimap = "0.10.0"
nucleo-matcher = "0.3.1"
openssl-sys = "*"
opentelemetry = "0.30.0"
opentelemetry-appender-tracing = "0.30.0"
opentelemetry-otlp = "0.30.0"
opentelemetry-semantic-conventions = "0.30.0"
opentelemetry_sdk = "0.30.0"
os_info = "3.12.0"
owo-colors = "4.2.0"
path-absolutize = "3.1.1"
path-clean = "1.0.1"
pathdiff = "0.2"
portable-pty = "0.9.0"
predicates = "3"
pretty_assertions = "1.4.1"
pulldown-cmark = "0.10"
rand = "0.9"
ratatui = "0.29.0"
regex-lite = "0.1.7"
reqwest = "0.12"
schemars = "0.8.22"
seccompiler = "0.5.0"
serde = "1"
serde_json = "1"
serde_with = "3.14"
sha1 = "0.10.6"
sha2 = "0.10"
shlex = "1.3.0"
similar = "2.7.0"
starlark = "0.13.0"
strum = "0.27.2"
strum_macros = "0.27.2"
supports-color = "3.0.2"
sys-locale = "0.3.2"
tempfile = "3.23.0"
textwrap = "0.16.2"
thiserror = "2.0.16"
time = "0.3"
tiny_http = "0.12"
tokio = "1"
tokio-stream = "0.1.17"
tokio-test = "0.4"
tokio-util = "0.7.16"
toml = "0.9.5"
toml_edit = "0.23.4"
tonic = "0.13.1"
tracing = "0.1.41"
tracing-appender = "0.2.3"
tracing-subscriber = "0.3.20"
tracing-test = "0.2.5"
tree-sitter = "0.25.9"
tree-sitter-bash = "0.25.0"
ts-rs = "11"
unicode-segmentation = "1.12.0"
unicode-width = "0.2"
url = "2"
urlencoding = "2.1"
uuid = "1"
vt100 = "0.16.2"
walkdir = "2.5.0"
webbrowser = "1.0"
which = "6"
wildmatch = "2.5.0"
wiremock = "0.6"
zeroize = "1.8.1"

[workspace.lints]
rust = {}

[workspace.lints.clippy]
expect_used = "deny"
identity_op = "deny"
manual_clamp = "deny"
manual_filter = "deny"
manual_find = "deny"
manual_flatten = "deny"
manual_map = "deny"
manual_memcpy = "deny"
manual_non_exhaustive = "deny"
manual_ok_or = "deny"
manual_range_contains = "deny"
manual_retain = "deny"
manual_strip = "deny"
manual_try_fold = "deny"
manual_unwrap_or = "deny"
needless_borrow = "deny"
needless_borrowed_reference = "deny"
needless_collect = "deny"
needless_late_init = "deny"
needless_option_as_deref = "deny"
needless_question_mark = "deny"
needless_update = "deny"
redundant_clone = "deny"
redundant_closure = "deny"
redundant_closure_for_method_calls = "deny"
redundant_static_lifetimes = "deny"
trivially_copy_pass_by_ref = "deny"
uninlined_format_args = "deny"
unnecessary_filter_map = "deny"
unnecessary_lazy_evaluations = "deny"
unnecessary_sort_by = "deny"
unnecessary_to_owned = "deny"
unwrap_used = "deny"

# cargo-shear cannot see the platform-specific openssl-sys usage, so we
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
ignored = ["openssl-sys", "codex-utils-readiness"]

[profile.release]
lto = "fat"
# Because we bundle some of these executables with the TypeScript CLI, we
@@ -4,18 +4,18 @@ We provide Codex CLI as a standalone, native executable to ensure a zero-depende

## Installing Codex

-Today, the easiest way to install Codex is via `npm`, though we plan to publish Codex to other package managers soon.
+Today, the easiest way to install Codex is via `npm`:

```shell
-npm i -g @openai/codex@native
+npm i -g @openai/codex
codex
```

-You can also download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).
+You can also install via Homebrew (`brew install codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases).

## What's new in the Rust CLI

-While we are [working to close the gap between the TypeScript and Rust implementations of Codex CLI](https://github.com/openai/codex/issues/1262), note that the Rust CLI has a number of features that the TypeScript CLI does not!
+The Rust implementation is now the maintained Codex CLI and serves as the default experience. It includes a number of features that the legacy TypeScript CLI never supported.

### Config

@@ -25,12 +25,14 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses

Codex CLI functions as an MCP client that can connect to MCP servers on startup. See the [`mcp_servers`](../docs/config.md#mcp_servers) section in the configuration documentation for details.

-It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp`. Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:
+It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp-server`. Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out:

```shell
-npx @modelcontextprotocol/inspector codex mcp
+npx @modelcontextprotocol/inspector codex mcp-server
```

+Use `codex mcp` to add/list/get/remove MCP server launchers defined in `config.toml`, and `codex mcp-server` to run the MCP server directly.
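As a rough sketch of the management side described above: the add/list/get/remove verbs come from that sentence, but the exact argument shapes below are assumptions, so verify them with `codex mcp --help`.

```shell
# `docs-server` is a hypothetical launcher name; flag syntax is assumed.
codex mcp list                # show launchers configured in config.toml
codex mcp get docs-server     # print one launcher's definition
codex mcp remove docs-server  # drop it from config.toml
```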
### Notifications

You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS.
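A minimal sketch of that configuration, assuming the `notify` key in `config.toml` takes the program to run (the script path is a placeholder; the linked docs have the real schema):

```shell
# Append a notify hook; ~/.codex/config.toml is Codex's default config location.
cat >> ~/.codex/config.toml <<'EOF'
notify = ["/path/to/notify.sh"]
EOF
```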
@@ -53,6 +55,14 @@ In the transcript preview, the footer shows an `Esc edit prev` hint while editin

Sometimes it is not convenient to `cd` to the directory you want Codex to use as the "working root" before running Codex. Fortunately, `codex` supports a `--cd` option so you can specify whatever folder you want. You can confirm that Codex is honoring `--cd` by double-checking the **workdir** it reports in the TUI at the start of a new session.
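For example (the path and prompt here are illustrative):

```shell
# Use ~/projects/my-app as the working root without cd'ing there first.
codex --cd ~/projects/my-app "summarize the failing tests"
```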
+### Resuming sessions
+
+When you use `codex resume`, provide any follow-up prompt *before* an optional session id. This keeps combinations like `codex resume --last "fix the tests"` working while still letting you resume a specific session when needed:
+
+- `codex resume --last "kick off linting"` — resume the most recent session and immediately send a new prompt.
+- `codex resume "draft release notes" d9b7b8b8-3a1f-4a4d-b0a2-4f04bb8d58df` — resume a specific session and send a follow-up prompt.
+- `codex resume d9b7b8b8-3a1f-4a4d-b0a2-4f04bb8d58df` — resume a session without sending a prompt (the CLI treats lone UUIDs as session ids).

### Shell completions

Generate shell completion scripts via:
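The concrete command falls just outside this hunk; for a clap-based CLI like this one it is typically shaped as follows (the subcommand name is an assumption, so check `codex --help`):

```shell
# Assumed subcommand; substitute zsh or fish for your shell.
codex completion bash > /tmp/codex.bash
source /tmp/codex.bash
```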
@@ -8,9 +8,9 @@ name = "codex_ansi_escape"
path = "src/lib.rs"

[dependencies]
-ansi-to-tui = "7.0.0"
-ratatui = { version = "0.29.0", features = [
+ansi-to-tui = { workspace = true }
+ratatui = { workspace = true, features = [
    "unstable-rendered-line-info",
    "unstable-widget-ref",
] }
-tracing = { version = "0.1.41", features = ["log"] }
+tracing = { workspace = true, features = ["log"] }
codex-rs/app-server/Cargo.toml (new file, 51 lines):
@@ -0,0 +1,51 @@
[package]
edition = "2024"
name = "codex-app-server"
version = { workspace = true }

[[bin]]
name = "codex-app-server"
path = "src/main.rs"

[lib]
name = "codex_app_server"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
codex-arg0 = { workspace = true }
codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-file-search = { workspace = true }
codex-login = { workspace = true }
codex-protocol = { workspace = true }
codex-utils-json-to-toml = { workspace = true }
# We should only be using mcp-types for JSON-RPC types: it would be nice to
# split this out into a separate crate at some point.
mcp-types = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
    "macros",
    "process",
    "rt-multi-thread",
    "signal",
] }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
uuid = { workspace = true, features = ["serde", "v7"] }

[dev-dependencies]
app_test_support = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
core_test_support = { workspace = true }
os_info = { workspace = true }
pretty_assertions = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }
wiremock = { workspace = true }
@@ -1,6 +1,6 @@
use crate::error_code::INTERNAL_ERROR_CODE;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
-use crate::json_to_toml::json_to_toml;
+use crate::fuzzy_file_search::run_fuzzy_file_search;
use crate::outgoing_message::OutgoingMessageSender;
use crate::outgoing_message::OutgoingNotification;
use codex_core::AuthManager;

@@ -52,6 +52,8 @@ use codex_protocol::mcp_protocol::ExecArbitraryCommandResponse;
use codex_protocol::mcp_protocol::ExecCommandApprovalParams;
use codex_protocol::mcp_protocol::ExecCommandApprovalResponse;
use codex_protocol::mcp_protocol::ExecOneOffCommandParams;
+use codex_protocol::mcp_protocol::FuzzyFileSearchParams;
+use codex_protocol::mcp_protocol::FuzzyFileSearchResponse;
use codex_protocol::mcp_protocol::GetUserAgentResponse;
use codex_protocol::mcp_protocol::GetUserSavedConfigResponse;
use codex_protocol::mcp_protocol::GitDiffToRemoteResponse;

@@ -74,6 +76,7 @@ use codex_protocol::mcp_protocol::SendUserMessageResponse;
use codex_protocol::mcp_protocol::SendUserTurnParams;
use codex_protocol::mcp_protocol::SendUserTurnResponse;
use codex_protocol::mcp_protocol::ServerNotification;
+use codex_protocol::mcp_protocol::SessionConfiguredNotification;
use codex_protocol::mcp_protocol::SetDefaultModelParams;
use codex_protocol::mcp_protocol::SetDefaultModelResponse;
use codex_protocol::mcp_protocol::UserInfoResponse;

@@ -82,12 +85,15 @@ use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::InputMessageKind;
use codex_protocol::protocol::USER_MESSAGE_BEGIN;
+use codex_utils_json_to_toml::json_to_toml;
use mcp_types::JSONRPCErrorError;
use mcp_types::RequestId;
use std::collections::HashMap;
use std::ffi::OsStr;
use std::path::PathBuf;
use std::sync::Arc;
+use std::sync::atomic::AtomicBool;
+use std::sync::atomic::Ordering;
use std::time::Duration;
use tokio::select;
use tokio::sync::Mutex;

@@ -122,6 +128,7 @@ pub(crate) struct CodexMessageProcessor {
    active_login: Arc<Mutex<Option<ActiveLogin>>>,
    // Queue of pending interrupt requests per conversation. We reply when TurnAborted arrives.
    pending_interrupts: Arc<Mutex<HashMap<ConversationId, Vec<RequestId>>>>,
+    pending_fuzzy_searches: Arc<Mutex<HashMap<String, Arc<AtomicBool>>>>,
}

impl CodexMessageProcessor {

@@ -141,11 +148,15 @@ impl CodexMessageProcessor {
            conversation_listeners: HashMap::new(),
            active_login: Arc::new(Mutex::new(None)),
            pending_interrupts: Arc::new(Mutex::new(HashMap::new())),
+            pending_fuzzy_searches: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    pub async fn process_request(&mut self, request: ClientRequest) {
        match request {
+            ClientRequest::Initialize { .. } => {
+                panic!("Initialize should be handled in MessageProcessor");
+            }
            ClientRequest::NewConversation { request_id, params } => {
                // Do not tokio::spawn() to process new_conversation()
                // asynchronously because we need to ensure the conversation is

@@ -206,6 +217,9 @@ impl CodexMessageProcessor {
            ClientRequest::UserInfo { request_id } => {
                self.get_user_info(request_id).await;
            }
+            ClientRequest::FuzzyFileSearch { request_id, params } => {
+                self.fuzzy_file_search(request_id, params).await;
+            }
            ClientRequest::ExecOneOffCommand { request_id, params } => {
                self.exec_one_off_command(request_id, params).await;
            }
@@ -423,32 +437,41 @@
        // Determine whether auth is required based on the active model provider.
        // If a custom provider is configured with `requires_openai_auth == false`,
        // then no auth step is required; otherwise, default to requiring auth.
-        let requires_openai_auth = Some(self.config.model_provider.requires_openai_auth);
+        let requires_openai_auth = self.config.model_provider.requires_openai_auth;

-        let response = match self.auth_manager.auth() {
-            Some(auth) => {
-                let (reported_auth_method, token_opt) = match auth.get_token().await {
-                    Ok(token) if !token.is_empty() => {
-                        let tok = if include_token { Some(token) } else { None };
-                        (Some(auth.mode), tok)
-                    }
-                    Ok(_) => (None, None),
-                    Err(err) => {
-                        tracing::warn!("failed to get token for auth status: {err}");
-                        (None, None)
-                    }
-                };
-                codex_protocol::mcp_protocol::GetAuthStatusResponse {
-                    auth_method: reported_auth_method,
-                    auth_token: token_opt,
-                    requires_openai_auth,
-                }
-            }
-            None => codex_protocol::mcp_protocol::GetAuthStatusResponse {
-                auth_method: None,
-                auth_token: None,
-                requires_openai_auth,
-            },
+        let response = if !requires_openai_auth {
+            codex_protocol::mcp_protocol::GetAuthStatusResponse {
+                auth_method: None,
+                auth_token: None,
+                requires_openai_auth: Some(false),
+            }
+        } else {
+            match self.auth_manager.auth() {
+                Some(auth) => {
+                    let auth_mode = auth.mode;
+                    let (reported_auth_method, token_opt) = match auth.get_token().await {
+                        Ok(token) if !token.is_empty() => {
+                            let tok = if include_token { Some(token) } else { None };
+                            (Some(auth_mode), tok)
+                        }
+                        Ok(_) => (None, None),
+                        Err(err) => {
+                            tracing::warn!("failed to get token for auth status: {err}");
+                            (None, None)
+                        }
+                    };
+                    codex_protocol::mcp_protocol::GetAuthStatusResponse {
+                        auth_method: reported_auth_method,
+                        auth_token: token_opt,
+                        requires_openai_auth: Some(true),
+                    }
+                }
+                None => codex_protocol::mcp_protocol::GetAuthStatusResponse {
+                    auth_method: None,
+                    auth_token: None,
+                    requires_openai_auth: Some(true),
+                },
+            }
        };

        self.outgoing.send_response(request_id, response).await;
@@ -580,12 +603,14 @@
        let codex_linux_sandbox_exe = self.config.codex_linux_sandbox_exe.clone();
        let outgoing = self.outgoing.clone();
        let req_id = request_id;
+        let sandbox_cwd = self.config.cwd.clone();

        tokio::spawn(async move {
            match codex_core::exec::process_exec_tool_call(
                exec_params,
                sandbox_type,
                &effective_policy,
+                sandbox_cwd.as_path(),
                &codex_linux_sandbox_exe,
                None,
            )

@@ -741,11 +766,19 @@
                session_configured,
                ..
            }) => {
-                let event = Event {
-                    id: "".to_string(),
-                    msg: EventMsg::SessionConfigured(session_configured.clone()),
-                };
-                self.outgoing.send_event_as_notification(&event, None).await;
+                self.outgoing
+                    .send_server_notification(ServerNotification::SessionConfigured(
+                        SessionConfiguredNotification {
+                            session_id: session_configured.session_id,
+                            model: session_configured.model.clone(),
+                            reasoning_effort: session_configured.reasoning_effort,
+                            history_log_id: session_configured.history_log_id,
+                            history_entry_count: session_configured.history_entry_count,
+                            initial_messages: session_configured.initial_messages.clone(),
+                            rollout_path: session_configured.rollout_path.clone(),
+                        },
+                    ))
+                    .await;
                let initial_messages = session_configured.initial_messages.map(|msgs| {
                    msgs.into_iter()
                        .filter(|event| {

@@ -805,7 +838,7 @@
            return;
        };

-        let required_suffix = format!("{}.jsonl", conversation_id.0);
+        let required_suffix = format!("{conversation_id}.jsonl");
        let Some(file_name) = canonical_rollout_path.file_name().map(OsStr::to_owned) else {
            let error = JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,

@@ -998,6 +1031,7 @@
                model,
                effort,
                summary,
+                final_output_json_schema: None,
            })
            .await;

@@ -1155,6 +1189,46 @@
            }
        }
    }

+    async fn fuzzy_file_search(&mut self, request_id: RequestId, params: FuzzyFileSearchParams) {
+        let FuzzyFileSearchParams {
+            query,
+            roots,
+            cancellation_token,
+        } = params;
+
+        let cancel_flag = match cancellation_token.clone() {
+            Some(token) => {
+                let mut pending_fuzzy_searches = self.pending_fuzzy_searches.lock().await;
+                // if a cancellation_token is provided and a pending_request exists for
+                // that token, cancel it
+                if let Some(existing) = pending_fuzzy_searches.get(&token) {
+                    existing.store(true, Ordering::Relaxed);
+                }
+                let flag = Arc::new(AtomicBool::new(false));
+                pending_fuzzy_searches.insert(token.clone(), flag.clone());
+                flag
+            }
+            None => Arc::new(AtomicBool::new(false)),
+        };
+
+        let results = match query.as_str() {
+            "" => vec![],
+            _ => run_fuzzy_file_search(query, roots, cancel_flag.clone()).await,
+        };
+
+        if let Some(token) = cancellation_token {
+            let mut pending_fuzzy_searches = self.pending_fuzzy_searches.lock().await;
+            if let Some(current_flag) = pending_fuzzy_searches.get(&token)
+                && Arc::ptr_eq(current_flag, &cancel_flag)
+            {
+                pending_fuzzy_searches.remove(&token);
+            }
+        }
+
+        let response = FuzzyFileSearchResponse { files: results };
+        self.outgoing.send_response(request_id, response).await;
+    }
}

async fn apply_bespoke_event_handling(

@@ -1399,19 +1473,19 @@ fn extract_conversation_summary(
#[cfg(test)]
mod tests {
    use super::*;
+    use anyhow::Result;
    use pretty_assertions::assert_eq;
    use serde_json::json;

    #[test]
-    fn extract_conversation_summary_prefers_plain_user_messages() {
-        let conversation_id =
-            ConversationId(Uuid::parse_str("3f941c35-29b3-493b-b0a4-e25800d9aeb0").unwrap());
+    fn extract_conversation_summary_prefers_plain_user_messages() -> Result<()> {
+        let conversation_id = ConversationId::from_string("3f941c35-29b3-493b-b0a4-e25800d9aeb0")?;
        let timestamp = Some("2025-09-05T16:53:11.850Z".to_string());
        let path = PathBuf::from("rollout.jsonl");

        let head = vec![
            json!({
-                "id": conversation_id.0,
+                "id": conversation_id.to_string(),
                "timestamp": timestamp,
                "cwd": "/",
                "originator": "codex",

@@ -1445,5 +1519,6 @@ mod tests {
        );
        assert_eq!(summary.path, path);
        assert_eq!(summary.preview, "Count to 5");
+        Ok(())
    }
}
codex-rs/app-server/src/error_code.rs (new file, 2 lines):
@@ -0,0 +1,2 @@
pub(crate) const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
pub(crate) const INTERNAL_ERROR_CODE: i64 = -32603;
codex-rs/app-server/src/fuzzy_file_search.rs (new file, 84 lines):
@@ -0,0 +1,84 @@
use std::num::NonZero;
use std::num::NonZeroUsize;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

use codex_file_search as file_search;
use codex_protocol::mcp_protocol::FuzzyFileSearchResult;
use tokio::task::JoinSet;
use tracing::warn;

const LIMIT_PER_ROOT: usize = 50;
const MAX_THREADS: usize = 12;
const COMPUTE_INDICES: bool = true;

pub(crate) async fn run_fuzzy_file_search(
    query: String,
    roots: Vec<String>,
    cancellation_flag: Arc<AtomicBool>,
) -> Vec<FuzzyFileSearchResult> {
    #[expect(clippy::expect_used)]
    let limit_per_root =
        NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize");

    let cores = std::thread::available_parallelism()
        .map(std::num::NonZero::get)
        .unwrap_or(1);
    let threads = cores.min(MAX_THREADS);
    let threads_per_root = (threads / roots.len()).max(1);
    let threads = NonZero::new(threads_per_root).unwrap_or(NonZeroUsize::MIN);

    let mut files: Vec<FuzzyFileSearchResult> = Vec::new();
    let mut join_set = JoinSet::new();

    for root in roots {
        let search_dir = PathBuf::from(&root);
        let query = query.clone();
        let cancel_flag = cancellation_flag.clone();
        join_set.spawn_blocking(move || {
            match file_search::run(
                query.as_str(),
                limit_per_root,
                &search_dir,
                Vec::new(),
                threads,
                cancel_flag,
                COMPUTE_INDICES,
            ) {
                Ok(res) => Ok((root, res)),
                Err(err) => Err((root, err)),
            }
        });
    }

    while let Some(res) = join_set.join_next().await {
        match res {
            Ok(Ok((root, res))) => {
                for m in res.matches {
                    let result = FuzzyFileSearchResult {
                        root: root.clone(),
                        path: m.path,
                        score: m.score,
                        indices: m.indices,
                    };
                    files.push(result);
                }
            }
            Ok(Err((root, err))) => {
                warn!("fuzzy-file-search in dir '{root}' failed: {err}");
            }
            Err(err) => {
                warn!("fuzzy-file-search join_next failed: {err}");
            }
        }
    }

    files.sort_by(file_search::cmp_by_score_desc_then_path_asc::<
        FuzzyFileSearchResult,
        _,
        _,
    >(|f| f.score, |f| f.path.as_str()));

    files
}
codex-rs/app-server/src/lib.rs (new file, 139 lines):
@@ -0,0 +1,139 @@
#![deny(clippy::print_stdout, clippy::print_stderr)]

use std::io::ErrorKind;
use std::io::Result as IoResult;
use std::path::PathBuf;

use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;

use mcp_types::JSONRPCMessage;
use tokio::io::AsyncBufReadExt;
use tokio::io::AsyncWriteExt;
use tokio::io::BufReader;
use tokio::io::{self};
use tokio::sync::mpsc;
use tracing::debug;
use tracing::error;
use tracing::info;
use tracing_subscriber::EnvFilter;

use crate::message_processor::MessageProcessor;
use crate::outgoing_message::OutgoingMessage;
use crate::outgoing_message::OutgoingMessageSender;

mod codex_message_processor;
mod error_code;
mod fuzzy_file_search;
mod message_processor;
mod outgoing_message;

/// Size of the bounded channels used to communicate between tasks. The value
/// is a balance between throughput and memory usage – 128 messages should be
/// plenty for an interactive CLI.
const CHANNEL_CAPACITY: usize = 128;

pub async fn run_main(
    codex_linux_sandbox_exe: Option<PathBuf>,
    cli_config_overrides: CliConfigOverrides,
) -> IoResult<()> {
    // Install a simple subscriber so `tracing` output is visible. Users can
    // control the log level with `RUST_LOG`.
    tracing_subscriber::fmt()
        .with_writer(std::io::stderr)
        .with_env_filter(EnvFilter::from_default_env())
        .init();

    // Set up channels.
    let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
    let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded_channel::<OutgoingMessage>();

    // Task: read from stdin, push to `incoming_tx`.
    let stdin_reader_handle = tokio::spawn({
        async move {
            let stdin = io::stdin();
            let reader = BufReader::new(stdin);
            let mut lines = reader.lines();

            while let Some(line) = lines.next_line().await.unwrap_or_default() {
                match serde_json::from_str::<JSONRPCMessage>(&line) {
                    Ok(msg) => {
                        if incoming_tx.send(msg).await.is_err() {
                            // Receiver gone – nothing left to do.
                            break;
                        }
                    }
                    Err(e) => error!("Failed to deserialize JSONRPCMessage: {e}"),
                }
            }

            debug!("stdin reader finished (EOF)");
        }
    });

    // Parse CLI overrides once and derive the base Config eagerly so later
    // components do not need to work with raw TOML values.
    let cli_kv_overrides = cli_config_overrides.parse_overrides().map_err(|e| {
        std::io::Error::new(
            ErrorKind::InvalidInput,
            format!("error parsing -c overrides: {e}"),
        )
    })?;
    let config = Config::load_with_cli_overrides(cli_kv_overrides, ConfigOverrides::default())
        .map_err(|e| {
            std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}"))
        })?;

    // Task: process incoming messages.
    let processor_handle = tokio::spawn({
        let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
        let mut processor = MessageProcessor::new(
            outgoing_message_sender,
            codex_linux_sandbox_exe,
            std::sync::Arc::new(config),
        );
        async move {
            while let Some(msg) = incoming_rx.recv().await {
                match msg {
                    JSONRPCMessage::Request(r) => processor.process_request(r).await,
                    JSONRPCMessage::Response(r) => processor.process_response(r).await,
                    JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
                    JSONRPCMessage::Error(e) => processor.process_error(e),
                }
            }

            info!("processor task exited (channel closed)");
        }
    });

    // Task: write outgoing messages to stdout.
    let stdout_writer_handle = tokio::spawn(async move {
        let mut stdout = io::stdout();
        while let Some(outgoing_message) = outgoing_rx.recv().await {
            let msg: JSONRPCMessage = outgoing_message.into();
            match serde_json::to_string(&msg) {
                Ok(json) => {
                    if let Err(e) = stdout.write_all(json.as_bytes()).await {
                        error!("Failed to write to stdout: {e}");
                        break;
                    }
                    if let Err(e) = stdout.write_all(b"\n").await {
                        error!("Failed to write newline to stdout: {e}");
                        break;
                    }
                }
                Err(e) => error!("Failed to serialize JSONRPCMessage: {e}"),
            }
        }

        info!("stdout writer exited (channel closed)");
    });

    // Wait for all tasks to finish. The typical exit path is the stdin reader
    // hitting EOF which, once it drops `incoming_tx`, propagates shutdown to
    // the processor and then to the stdout task.
    let _ = tokio::join!(stdin_reader_handle, processor_handle, stdout_writer_handle);

    Ok(())
}
codex-rs/app-server/src/main.rs (new file, 10 lines):
@@ -0,0 +1,10 @@
use codex_app_server::run_main;
use codex_arg0::arg0_dispatch_or_else;
use codex_common::CliConfigOverrides;

fn main() -> anyhow::Result<()> {
    arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
        run_main(codex_linux_sandbox_exe, CliConfigOverrides::default()).await?;
        Ok(())
    })
}
codex-rs/app-server/src/message_processor.rs (new file, 133 lines):
@@ -0,0 +1,133 @@
use std::path::PathBuf;

use crate::codex_message_processor::CodexMessageProcessor;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::OutgoingMessageSender;
use codex_protocol::mcp_protocol::ClientInfo;
use codex_protocol::mcp_protocol::ClientRequest;
use codex_protocol::mcp_protocol::InitializeResponse;

use codex_core::AuthManager;
use codex_core::ConversationManager;
use codex_core::config::Config;
use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
use mcp_types::JSONRPCError;
use mcp_types::JSONRPCErrorError;
use mcp_types::JSONRPCNotification;
use mcp_types::JSONRPCRequest;
use mcp_types::JSONRPCResponse;
use std::sync::Arc;

pub(crate) struct MessageProcessor {
    outgoing: Arc<OutgoingMessageSender>,
    codex_message_processor: CodexMessageProcessor,
    initialized: bool,
}

impl MessageProcessor {
    /// Create a new `MessageProcessor`, retaining a handle to the outgoing
    /// `Sender` so handlers can enqueue messages to be written to stdout.
    pub(crate) fn new(
        outgoing: OutgoingMessageSender,
        codex_linux_sandbox_exe: Option<PathBuf>,
        config: Arc<Config>,
    ) -> Self {
        let outgoing = Arc::new(outgoing);
        let auth_manager = AuthManager::shared(config.codex_home.clone());
        let conversation_manager = Arc::new(ConversationManager::new(auth_manager.clone()));
        let codex_message_processor = CodexMessageProcessor::new(
            auth_manager,
            conversation_manager,
            outgoing.clone(),
            codex_linux_sandbox_exe,
            config,
        );

        Self {
            outgoing,
            codex_message_processor,
            initialized: false,
        }
    }

    pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) {
        let request_id = request.id.clone();
        if let Ok(request_json) = serde_json::to_value(request)
            && let Ok(codex_request) = serde_json::from_value::<ClientRequest>(request_json)
        {
            match codex_request {
                // Handle Initialize internally so CodexMessageProcessor does not have to concern
                // itself with the `initialized` bool.
                ClientRequest::Initialize { request_id, params } => {
                    if self.initialized {
                        let error = JSONRPCErrorError {
                            code: INVALID_REQUEST_ERROR_CODE,
                            message: "Already initialized".to_string(),
                            data: None,
                        };
                        self.outgoing.send_error(request_id, error).await;
                        return;
                    } else {
                        let ClientInfo {
                            name,
                            title: _title,
                            version,
                        } = params.client_info;
                        let user_agent_suffix = format!("{name}; {version}");
                        if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
                            *suffix = Some(user_agent_suffix);
                        }

                        let user_agent = get_codex_user_agent();
                        let response = InitializeResponse { user_agent };
                        self.outgoing.send_response(request_id, response).await;

                        self.initialized = true;
                        return;
                    }
                }
                _ => {
                    if !self.initialized {
                        let error = JSONRPCErrorError {
                            code: INVALID_REQUEST_ERROR_CODE,
                            message: "Not initialized".to_string(),
                            data: None,
                        };
                        self.outgoing.send_error(request_id, error).await;
                        return;
                    }
                }
            }

            self.codex_message_processor
                .process_request(codex_request)
                .await;
        } else {
            let error = JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,
                message: "Invalid request".to_string(),
                data: None,
            };
            self.outgoing.send_error(request_id, error).await;
        }
    }

    pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) {
        // Currently, we do not expect to receive any notifications from the
        // client, so we just log them.
        tracing::info!("<- notification: {:?}", notification);
    }

    /// Handle a standalone JSON-RPC response originating from the peer.
    pub(crate) async fn process_response(&mut self, response: JSONRPCResponse) {
        tracing::info!("<- response: {:?}", response);
        let JSONRPCResponse { id, result, .. } = response;
        self.outgoing.notify_client_response(id, result).await
    }

    /// Handle an error object received from the peer.
    pub(crate) fn process_error(&mut self, err: JSONRPCError) {
        tracing::error!("<- error: {:?}", err);
    }
}
codex-rs/app-server/src/outgoing_message.rs (new file, 239 lines):
@@ -0,0 +1,239 @@
use std::collections::HashMap;
use std::sync::atomic::AtomicI64;
use std::sync::atomic::Ordering;

use codex_protocol::mcp_protocol::ServerNotification;
use mcp_types::JSONRPC_VERSION;
use mcp_types::JSONRPCError;
use mcp_types::JSONRPCErrorError;
use mcp_types::JSONRPCMessage;
use mcp_types::JSONRPCNotification;
use mcp_types::JSONRPCRequest;
use mcp_types::JSONRPCResponse;
use mcp_types::RequestId;
use mcp_types::Result;
use serde::Serialize;
use tokio::sync::Mutex;
use tokio::sync::mpsc;
use tokio::sync::oneshot;
use tracing::warn;

use crate::error_code::INTERNAL_ERROR_CODE;

/// Sends messages to the client and manages request callbacks.
pub(crate) struct OutgoingMessageSender {
    next_request_id: AtomicI64,
    sender: mpsc::UnboundedSender<OutgoingMessage>,
    request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<Result>>>,
}

impl OutgoingMessageSender {
    pub(crate) fn new(sender: mpsc::UnboundedSender<OutgoingMessage>) -> Self {
        Self {
            next_request_id: AtomicI64::new(0),
            sender,
            request_id_to_callback: Mutex::new(HashMap::new()),
        }
    }

    pub(crate) async fn send_request(
        &self,
        method: &str,
        params: Option<serde_json::Value>,
    ) -> oneshot::Receiver<Result> {
        let id = RequestId::Integer(self.next_request_id.fetch_add(1, Ordering::Relaxed));
        let outgoing_message_id = id.clone();
        let (tx_approve, rx_approve) = oneshot::channel();
        {
            let mut request_id_to_callback = self.request_id_to_callback.lock().await;
            request_id_to_callback.insert(id, tx_approve);
        }

        let outgoing_message = OutgoingMessage::Request(OutgoingRequest {
            id: outgoing_message_id,
            method: method.to_string(),
            params,
        });
        let _ = self.sender.send(outgoing_message);
        rx_approve
    }

    pub(crate) async fn notify_client_response(&self, id: RequestId, result: Result) {
        let entry = {
            let mut request_id_to_callback = self.request_id_to_callback.lock().await;
            request_id_to_callback.remove_entry(&id)
        };

        match entry {
            Some((id, sender)) => {
                if let Err(err) = sender.send(result) {
                    warn!("could not notify callback for {id:?} due to: {err:?}");
                }
            }
            None => {
                warn!("could not find callback for {id:?}");
            }
        }
    }

    pub(crate) async fn send_response<T: Serialize>(&self, id: RequestId, response: T) {
        match serde_json::to_value(response) {
            Ok(result) => {
                let outgoing_message = OutgoingMessage::Response(OutgoingResponse { id, result });
                let _ = self.sender.send(outgoing_message);
            }
            Err(err) => {
                self.send_error(
                    id,
                    JSONRPCErrorError {
                        code: INTERNAL_ERROR_CODE,
                        message: format!("failed to serialize response: {err}"),
                        data: None,
                    },
                )
                .await;
            }
        }
    }

    pub(crate) async fn send_server_notification(&self, notification: ServerNotification) {
        let _ = self
            .sender
            .send(OutgoingMessage::AppServerNotification(notification));
    }

    /// All notifications should be migrated to [`ServerNotification`] and
    /// [`OutgoingMessage::Notification`] should be removed.
    pub(crate) async fn send_notification(&self, notification: OutgoingNotification) {
        let outgoing_message = OutgoingMessage::Notification(notification);
        let _ = self.sender.send(outgoing_message);
    }

    pub(crate) async fn send_error(&self, id: RequestId, error: JSONRPCErrorError) {
        let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error });
        let _ = self.sender.send(outgoing_message);
    }
}

/// Outgoing message from the server to the client.
pub(crate) enum OutgoingMessage {
    Request(OutgoingRequest),
    Notification(OutgoingNotification),
    /// AppServerNotification is specific to the case where this is run as an
    /// "app server" as opposed to an MCP server.
    AppServerNotification(ServerNotification),
    Response(OutgoingResponse),
    Error(OutgoingError),
}

impl From<OutgoingMessage> for JSONRPCMessage {
    fn from(val: OutgoingMessage) -> Self {
        use OutgoingMessage::*;
        match val {
            Request(OutgoingRequest { id, method, params }) => {
                JSONRPCMessage::Request(JSONRPCRequest {
                    jsonrpc: JSONRPC_VERSION.into(),
                    id,
                    method,
                    params,
                })
            }
            Notification(OutgoingNotification { method, params }) => {
                JSONRPCMessage::Notification(JSONRPCNotification {
                    jsonrpc: JSONRPC_VERSION.into(),
                    method,
                    params,
                })
            }
            AppServerNotification(notification) => {
                let method = notification.to_string();
                let params = match notification.to_params() {
                    Ok(params) => Some(params),
                    Err(err) => {
                        warn!("failed to serialize notification params: {err}");
                        None
                    }
                };
                JSONRPCMessage::Notification(JSONRPCNotification {
                    jsonrpc: JSONRPC_VERSION.into(),
                    method,
                    params,
                })
            }
            Response(OutgoingResponse { id, result }) => {
                JSONRPCMessage::Response(JSONRPCResponse {
                    jsonrpc: JSONRPC_VERSION.into(),
                    id,
                    result,
                })
            }
            Error(OutgoingError { id, error }) => JSONRPCMessage::Error(JSONRPCError {
                jsonrpc: JSONRPC_VERSION.into(),
                id,
                error,
            }),
        }
    }
}

#[derive(Debug, Clone, PartialEq, Serialize)]
pub(crate) struct OutgoingRequest {
    pub id: RequestId,
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub params: Option<serde_json::Value>,
}

#[derive(Debug, Clone, PartialEq, Serialize)]
pub(crate) struct OutgoingNotification {
    pub method: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub params: Option<serde_json::Value>,
}

#[derive(Debug, Clone, PartialEq, Serialize)]
pub(crate) struct OutgoingResponse {
    pub id: RequestId,
    pub result: Result,
}

#[derive(Debug, Clone, PartialEq, Serialize)]
pub(crate) struct OutgoingError {
    pub error: JSONRPCErrorError,
    pub id: RequestId,
}

#[cfg(test)]
mod tests {
    use codex_protocol::mcp_protocol::LoginChatGptCompleteNotification;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use uuid::Uuid;

    use super::*;

    #[test]
    fn verify_server_notification_serialization() {
        let notification =
            ServerNotification::LoginChatGptComplete(LoginChatGptCompleteNotification {
                login_id: Uuid::nil(),
                success: true,
                error: None,
            });

        let jsonrpc_notification: JSONRPCMessage =
            OutgoingMessage::AppServerNotification(notification).into();
        assert_eq!(
            JSONRPCMessage::Notification(JSONRPCNotification {
                jsonrpc: "2.0".into(),
                method: "loginChatGptComplete".into(),
                params: Some(json!({
                    "loginId": Uuid::nil(),
                    "success": true,
                })),
            }),
            jsonrpc_notification,
            "ensure the strum macros serialize the method field correctly"
        );
    }
}
codex-rs/app-server/tests/all.rs (new file, 3 lines):
@@ -0,0 +1,3 @@
// Single integration test binary that aggregates all test modules.
// The submodules live in `tests/suite/`.
mod suite;
codex-rs/app-server/tests/common/Cargo.toml (new file, 22 lines):
@@ -0,0 +1,22 @@
[package]
edition = "2024"
name = "app_test_support"
version = { workspace = true }

[lib]
path = "lib.rs"

[dependencies]
anyhow = { workspace = true }
assert_cmd = { workspace = true }
codex-protocol = { workspace = true }
mcp-types = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
    "macros",
    "process",
    "rt-multi-thread",
] }
wiremock = { workspace = true }
codex-rs/app-server/tests/common/lib.rs (new file, 17 lines):
@@ -0,0 +1,17 @@
mod mcp_process;
mod mock_model_server;
mod responses;

pub use mcp_process::McpProcess;
use mcp_types::JSONRPCResponse;
pub use mock_model_server::create_mock_chat_completions_server;
pub use responses::create_apply_patch_sse_response;
pub use responses::create_final_assistant_message_sse_response;
pub use responses::create_shell_sse_response;
use serde::de::DeserializeOwned;

pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {
    let value = serde_json::to_value(response.result)?;
    let codex_response = serde_json::from_value(value)?;
    Ok(codex_response)
}
codex-rs/app-server/tests/common/mcp_process.rs (new file, 477 lines; listing truncated below):
@@ -0,0 +1,477 @@
|
||||
use std::path::Path;
|
||||
use std::process::Stdio;
|
||||
use std::sync::atomic::AtomicI64;
|
||||
use std::sync::atomic::Ordering;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::io::BufReader;
|
||||
use tokio::process::Child;
|
||||
use tokio::process::ChildStdin;
|
||||
use tokio::process::ChildStdout;
|
||||
|
||||
use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use codex_protocol::mcp_protocol::AddConversationListenerParams;
|
||||
use codex_protocol::mcp_protocol::ArchiveConversationParams;
|
||||
use codex_protocol::mcp_protocol::CancelLoginChatGptParams;
|
||||
use codex_protocol::mcp_protocol::ClientInfo;
|
||||
use codex_protocol::mcp_protocol::ClientNotification;
|
||||
use codex_protocol::mcp_protocol::GetAuthStatusParams;
|
||||
use codex_protocol::mcp_protocol::InitializeParams;
|
||||
use codex_protocol::mcp_protocol::InterruptConversationParams;
|
||||
use codex_protocol::mcp_protocol::ListConversationsParams;
|
||||
use codex_protocol::mcp_protocol::LoginApiKeyParams;
|
||||
use codex_protocol::mcp_protocol::NewConversationParams;
|
||||
use codex_protocol::mcp_protocol::RemoveConversationListenerParams;
|
||||
use codex_protocol::mcp_protocol::ResumeConversationParams;
|
||||
use codex_protocol::mcp_protocol::SendUserMessageParams;
|
||||
use codex_protocol::mcp_protocol::SendUserTurnParams;
|
||||
use codex_protocol::mcp_protocol::SetDefaultModelParams;
|
||||
|
||||
use mcp_types::JSONRPC_VERSION;
|
||||
use mcp_types::JSONRPCMessage;
|
||||
use mcp_types::JSONRPCNotification;
|
||||
use mcp_types::JSONRPCRequest;
|
||||
use mcp_types::JSONRPCResponse;
|
||||
use mcp_types::RequestId;
|
||||
use std::process::Command as StdCommand;
|
||||
use tokio::process::Command;
|
||||
|
||||
pub struct McpProcess {
    next_request_id: AtomicI64,
    /// Retain this child process until the client is dropped. The Tokio runtime
    /// will make a "best effort" to reap the process after it exits, but it is
    /// not a guarantee. See the `kill_on_drop` documentation for details.
    #[allow(dead_code)]
    process: Child,
    stdin: ChildStdin,
    stdout: BufReader<ChildStdout>,
}

impl McpProcess {
    pub async fn new(codex_home: &Path) -> anyhow::Result<Self> {
        Self::new_with_env(codex_home, &[]).await
    }

    /// Creates a new MCP process, allowing tests to override or remove
    /// specific environment variables for the child process only.
    ///
    /// Pass a tuple of (key, Some(value)) to set/override, or (key, None) to
    /// remove a variable from the child's environment.
    pub async fn new_with_env(
        codex_home: &Path,
        env_overrides: &[(&str, Option<&str>)],
    ) -> anyhow::Result<Self> {
        // Use assert_cmd to locate the binary path and then switch to tokio::process::Command.
        let std_cmd = StdCommand::cargo_bin("codex-app-server")
            .context("should find binary for codex-app-server")?;

        let program = std_cmd.get_program().to_owned();

        let mut cmd = Command::new(program);

        cmd.stdin(Stdio::piped());
        cmd.stdout(Stdio::piped());
        cmd.stderr(Stdio::piped());
        cmd.env("CODEX_HOME", codex_home);
        cmd.env("RUST_LOG", "debug");

        for (k, v) in env_overrides {
            match v {
                Some(val) => {
                    cmd.env(k, val);
                }
                None => {
                    cmd.env_remove(k);
                }
            }
        }

        let mut process = cmd
            .kill_on_drop(true)
            .spawn()
            .context("codex-app-server proc should start")?;
        let stdin = process
            .stdin
            .take()
            .ok_or_else(|| anyhow::format_err!("mcp should have stdin fd"))?;
        let stdout = process
            .stdout
            .take()
            .ok_or_else(|| anyhow::format_err!("mcp should have stdout fd"))?;
        let stdout = BufReader::new(stdout);

        // Forward child's stderr to our stderr so failures are visible even
        // when stdout/stderr are captured by the test harness.
        if let Some(stderr) = process.stderr.take() {
            let mut stderr_reader = BufReader::new(stderr).lines();
            tokio::spawn(async move {
                while let Ok(Some(line)) = stderr_reader.next_line().await {
                    eprintln!("[mcp stderr] {line}");
                }
            });
        }
        Ok(Self {
            next_request_id: AtomicI64::new(0),
            process,
            stdin,
            stdout,
        })
    }

    /// Performs the initialization handshake with the MCP server.
    pub async fn initialize(&mut self) -> anyhow::Result<()> {
        let params = Some(serde_json::to_value(InitializeParams {
            client_info: ClientInfo {
                name: "codex-app-server-tests".to_string(),
                title: None,
                version: "0.1.0".to_string(),
            },
        })?);
        let req_id = self.send_request("initialize", params).await?;
        let initialized = self.read_jsonrpc_message().await?;
        let JSONRPCMessage::Response(response) = initialized else {
            unreachable!("expected JSONRPCMessage::Response for initialize, got {initialized:?}");
        };
        if response.id != RequestId::Integer(req_id) {
            anyhow::bail!(
                "initialize response id mismatch: expected {}, got {:?}",
                req_id,
                response.id
            );
        }

        // Send notifications/initialized to ack the response.
        self.send_notification(ClientNotification::Initialized)
            .await?;

        Ok(())
    }

    /// Send a `newConversation` JSON-RPC request.
    pub async fn send_new_conversation_request(
        &mut self,
        params: NewConversationParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("newConversation", params).await
    }

    /// Send an `archiveConversation` JSON-RPC request.
    pub async fn send_archive_conversation_request(
        &mut self,
        params: ArchiveConversationParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("archiveConversation", params).await
    }

    /// Send an `addConversationListener` JSON-RPC request.
    pub async fn send_add_conversation_listener_request(
        &mut self,
        params: AddConversationListenerParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("addConversationListener", params).await
    }

    /// Send a `sendUserMessage` JSON-RPC request with a single text item.
    pub async fn send_send_user_message_request(
        &mut self,
        params: SendUserMessageParams,
    ) -> anyhow::Result<i64> {
        // Wire format expects variants in camelCase; text item uses external tagging.
        let params = Some(serde_json::to_value(params)?);
        self.send_request("sendUserMessage", params).await
    }

    /// Send a `removeConversationListener` JSON-RPC request.
    pub async fn send_remove_conversation_listener_request(
        &mut self,
        params: RemoveConversationListenerParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("removeConversationListener", params)
            .await
    }

    /// Send a `sendUserTurn` JSON-RPC request.
    pub async fn send_send_user_turn_request(
        &mut self,
        params: SendUserTurnParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("sendUserTurn", params).await
    }

    /// Send an `interruptConversation` JSON-RPC request.
    pub async fn send_interrupt_conversation_request(
        &mut self,
        params: InterruptConversationParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("interruptConversation", params).await
    }

    /// Send a `getAuthStatus` JSON-RPC request.
    pub async fn send_get_auth_status_request(
        &mut self,
        params: GetAuthStatusParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("getAuthStatus", params).await
    }

    /// Send a `getUserSavedConfig` JSON-RPC request.
    pub async fn send_get_user_saved_config_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("getUserSavedConfig", None).await
    }

    /// Send a `getUserAgent` JSON-RPC request.
    pub async fn send_get_user_agent_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("getUserAgent", None).await
    }

    /// Send a `userInfo` JSON-RPC request.
    pub async fn send_user_info_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("userInfo", None).await
    }

    /// Send a `setDefaultModel` JSON-RPC request.
    pub async fn send_set_default_model_request(
        &mut self,
        params: SetDefaultModelParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("setDefaultModel", params).await
    }

    /// Send a `listConversations` JSON-RPC request.
    pub async fn send_list_conversations_request(
        &mut self,
        params: ListConversationsParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("listConversations", params).await
    }

    /// Send a `resumeConversation` JSON-RPC request.
    pub async fn send_resume_conversation_request(
        &mut self,
        params: ResumeConversationParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("resumeConversation", params).await
    }

    /// Send a `loginApiKey` JSON-RPC request.
    pub async fn send_login_api_key_request(
        &mut self,
        params: LoginApiKeyParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("loginApiKey", params).await
    }

    /// Send a `loginChatGpt` JSON-RPC request.
    pub async fn send_login_chat_gpt_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("loginChatGpt", None).await
    }

    /// Send a `cancelLoginChatGpt` JSON-RPC request.
    pub async fn send_cancel_login_chat_gpt_request(
        &mut self,
        params: CancelLoginChatGptParams,
    ) -> anyhow::Result<i64> {
        let params = Some(serde_json::to_value(params)?);
        self.send_request("cancelLoginChatGpt", params).await
    }

    /// Send a `logoutChatGpt` JSON-RPC request.
    pub async fn send_logout_chat_gpt_request(&mut self) -> anyhow::Result<i64> {
        self.send_request("logoutChatGpt", None).await
    }

    /// Send a `fuzzyFileSearch` JSON-RPC request.
    pub async fn send_fuzzy_file_search_request(
        &mut self,
        query: &str,
        roots: Vec<String>,
        cancellation_token: Option<String>,
    ) -> anyhow::Result<i64> {
        let mut params = serde_json::json!({
            "query": query,
            "roots": roots,
        });
        if let Some(token) = cancellation_token {
            params["cancellationToken"] = serde_json::json!(token);
        }
        self.send_request("fuzzyFileSearch", Some(params)).await
    }

    async fn send_request(
        &mut self,
        method: &str,
        params: Option<serde_json::Value>,
    ) -> anyhow::Result<i64> {
        let request_id = self.next_request_id.fetch_add(1, Ordering::Relaxed);

        let message = JSONRPCMessage::Request(JSONRPCRequest {
            jsonrpc: JSONRPC_VERSION.into(),
            id: RequestId::Integer(request_id),
            method: method.to_string(),
            params,
        });
        self.send_jsonrpc_message(message).await?;
        Ok(request_id)
    }

    pub async fn send_response(
        &mut self,
        id: RequestId,
        result: serde_json::Value,
    ) -> anyhow::Result<()> {
        self.send_jsonrpc_message(JSONRPCMessage::Response(JSONRPCResponse {
            jsonrpc: JSONRPC_VERSION.into(),
            id,
            result,
        }))
        .await
    }

    pub async fn send_notification(
        &mut self,
        notification: ClientNotification,
    ) -> anyhow::Result<()> {
        let value = serde_json::to_value(notification)?;
        self.send_jsonrpc_message(JSONRPCMessage::Notification(JSONRPCNotification {
            jsonrpc: JSONRPC_VERSION.into(),
            method: value
                .get("method")
                .and_then(|m| m.as_str())
                .ok_or_else(|| anyhow::format_err!("notification missing method field"))?
                .to_string(),
            params: value.get("params").cloned(),
        }))
        .await
    }

    async fn send_jsonrpc_message(&mut self, message: JSONRPCMessage) -> anyhow::Result<()> {
        eprintln!("writing message to stdin: {message:?}");
        let payload = serde_json::to_string(&message)?;
        self.stdin.write_all(payload.as_bytes()).await?;
        self.stdin.write_all(b"\n").await?;
        self.stdin.flush().await?;
        Ok(())
    }

    async fn read_jsonrpc_message(&mut self) -> anyhow::Result<JSONRPCMessage> {
        let mut line = String::new();
        self.stdout.read_line(&mut line).await?;
        let message = serde_json::from_str::<JSONRPCMessage>(&line)?;
        eprintln!("read message from stdout: {message:?}");
        Ok(message)
    }

    pub async fn read_stream_until_request_message(&mut self) -> anyhow::Result<JSONRPCRequest> {
        eprintln!("in read_stream_until_request_message()");

        loop {
            let message = self.read_jsonrpc_message().await?;

            match message {
                JSONRPCMessage::Notification(_) => {
                    eprintln!("notification: {message:?}");
                }
                JSONRPCMessage::Request(jsonrpc_request) => {
                    return Ok(jsonrpc_request);
                }
                JSONRPCMessage::Error(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
                }
                JSONRPCMessage::Response(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
                }
            }
        }
    }

    pub async fn read_stream_until_response_message(
        &mut self,
        request_id: RequestId,
    ) -> anyhow::Result<JSONRPCResponse> {
        eprintln!("in read_stream_until_response_message({request_id:?})");

        loop {
            let message = self.read_jsonrpc_message().await?;
            match message {
                JSONRPCMessage::Notification(_) => {
                    eprintln!("notification: {message:?}");
                }
                JSONRPCMessage::Request(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
                }
                JSONRPCMessage::Error(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
                }
                JSONRPCMessage::Response(jsonrpc_response) => {
                    if jsonrpc_response.id == request_id {
                        return Ok(jsonrpc_response);
                    }
                }
            }
        }
    }

    pub async fn read_stream_until_error_message(
        &mut self,
        request_id: RequestId,
    ) -> anyhow::Result<mcp_types::JSONRPCError> {
        loop {
            let message = self.read_jsonrpc_message().await?;
            match message {
                JSONRPCMessage::Notification(_) => {
                    eprintln!("notification: {message:?}");
                }
                JSONRPCMessage::Request(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
                }
                JSONRPCMessage::Response(_) => {
                    // Keep scanning; we're waiting for an error with matching id.
                }
                JSONRPCMessage::Error(err) => {
                    if err.id == request_id {
                        return Ok(err);
                    }
                }
            }
        }
    }

    pub async fn read_stream_until_notification_message(
        &mut self,
        method: &str,
    ) -> anyhow::Result<JSONRPCNotification> {
        eprintln!("in read_stream_until_notification_message({method})");

        loop {
            let message = self.read_jsonrpc_message().await?;
            match message {
                JSONRPCMessage::Notification(notification) => {
                    if notification.method == method {
                        return Ok(notification);
                    }
                }
                JSONRPCMessage::Request(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
                }
                JSONRPCMessage::Error(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
                }
                JSONRPCMessage::Response(_) => {
                    anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
                }
            }
        }
    }
}
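A minimal sketch of how a test drives this harness end to end (the tempdir and choice of request are illustrative; error handling via anyhow):

    // Spawn the app server with an isolated CODEX_HOME, handshake, then issue a request.
    let codex_home = tempfile::TempDir::new()?;
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    mcp.initialize().await?;
    let req_id = mcp.send_user_info_request().await?;
    let resp = mcp
        .read_stream_until_response_message(mcp_types::RequestId::Integer(req_id))
        .await?;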
codex-rs/app-server/tests/common/mock_model_server.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;

use wiremock::Mock;
use wiremock::MockServer;
use wiremock::Respond;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;

/// Create a mock server that will provide the responses, in order, for
/// requests to the `/v1/chat/completions` endpoint.
pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> MockServer {
    let server = MockServer::start().await;

    let num_calls = responses.len();
    let seq_responder = SeqResponder {
        num_calls: AtomicUsize::new(0),
        responses,
    };

    Mock::given(method("POST"))
        .and(path("/v1/chat/completions"))
        .respond_with(seq_responder)
        .expect(num_calls as u64)
        .mount(&server)
        .await;

    server
}

struct SeqResponder {
    num_calls: AtomicUsize,
    responses: Vec<String>,
}

impl Respond for SeqResponder {
    fn respond(&self, _: &wiremock::Request) -> ResponseTemplate {
        let call_num = self.num_calls.fetch_add(1, Ordering::SeqCst);
        match self.responses.get(call_num) {
            Some(response) => ResponseTemplate::new(200)
                .insert_header("content-type", "text/event-stream")
                .set_body_raw(response.clone(), "text/event-stream"),
            None => panic!("no response for {call_num}"),
        }
    }
}
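A sketch of queueing one SSE body per expected model turn (bodies built with the responses.rs helpers below; the command and call id are illustrative):

    let server = create_mock_chat_completions_server(vec![
        create_shell_sse_response(vec!["echo".to_string(), "hi".to_string()], None, None, "call-1")?,
        create_final_assistant_message_sse_response("done")?,
    ])
    .await;
    // Tests then point the model provider's base_url in config.toml at server.uri().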
codex-rs/app-server/tests/common/responses.rs (new file, 95 lines)
@@ -0,0 +1,95 @@
use serde_json::json;
use std::path::Path;

pub fn create_shell_sse_response(
    command: Vec<String>,
    workdir: Option<&Path>,
    timeout_ms: Option<u64>,
    call_id: &str,
) -> anyhow::Result<String> {
    // The `arguments` for the `shell` tool is a serialized JSON object.
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": command,
        "workdir": workdir.map(|w| w.to_string_lossy()),
        "timeout": timeout_ms
    }))?;
    let tool_call = json!({
        "choices": [
            {
                "delta": {
                    "tool_calls": [
                        {
                            "id": call_id,
                            "function": {
                                "name": "shell",
                                "arguments": tool_call_arguments
                            }
                        }
                    ]
                },
                "finish_reason": "tool_calls"
            }
        ]
    });

    let sse = format!(
        "data: {}\n\ndata: DONE\n\n",
        serde_json::to_string(&tool_call)?
    );
    Ok(sse)
}

pub fn create_final_assistant_message_sse_response(message: &str) -> anyhow::Result<String> {
    let assistant_message = json!({
        "choices": [
            {
                "delta": {
                    "content": message
                },
                "finish_reason": "stop"
            }
        ]
    });

    let sse = format!(
        "data: {}\n\ndata: DONE\n\n",
        serde_json::to_string(&assistant_message)?
    );
    Ok(sse)
}

pub fn create_apply_patch_sse_response(
    patch_content: &str,
    call_id: &str,
) -> anyhow::Result<String> {
    // Use shell command to call apply_patch with heredoc format.
    let shell_command = format!("apply_patch <<'EOF'\n{patch_content}\nEOF");
    let tool_call_arguments = serde_json::to_string(&json!({
        "command": ["bash", "-lc", shell_command]
    }))?;

    let tool_call = json!({
        "choices": [
            {
                "delta": {
                    "tool_calls": [
                        {
                            "id": call_id,
                            "function": {
                                "name": "shell",
                                "arguments": tool_call_arguments
                            }
                        }
                    ]
                },
                "finish_reason": "tool_calls"
            }
        ]
    });

    let sse = format!(
        "data: {}\n\ndata: DONE\n\n",
        serde_json::to_string(&tool_call)?
    );
    Ok(sse)
}
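For reference, create_final_assistant_message_sse_response("done") serializes to roughly this event-stream body:

    data: {"choices":[{"delta":{"content":"done"},"finish_reason":"stop"}]}

    data: DONE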
@@ -1,12 +1,12 @@
 use std::path::Path;
 
+use app_test_support::McpProcess;
+use app_test_support::to_response;
 use codex_core::ARCHIVED_SESSIONS_SUBDIR;
 use codex_protocol::mcp_protocol::ArchiveConversationParams;
 use codex_protocol::mcp_protocol::ArchiveConversationResponse;
 use codex_protocol::mcp_protocol::NewConversationParams;
 use codex_protocol::mcp_protocol::NewConversationResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
 use tempfile::TempDir;
@@ -1,12 +1,12 @@
 use std::path::Path;
 
+use app_test_support::McpProcess;
+use app_test_support::to_response;
 use codex_protocol::mcp_protocol::AuthMode;
 use codex_protocol::mcp_protocol::GetAuthStatusParams;
 use codex_protocol::mcp_protocol::GetAuthStatusResponse;
 use codex_protocol::mcp_protocol::LoginApiKeyParams;
 use codex_protocol::mcp_protocol::LoginApiKeyResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
 use pretty_assertions::assert_eq;
@@ -15,11 +15,17 @@ use tokio::time::timeout;
 
 const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
 
-// Helper to create a config.toml; mirrors create_conversation.rs
-fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
+fn create_config_toml_custom_provider(
+    codex_home: &Path,
+    requires_openai_auth: bool,
+) -> std::io::Result<()> {
     let config_toml = codex_home.join("config.toml");
-    std::fs::write(
-        config_toml,
+    let requires_line = if requires_openai_auth {
+        "requires_openai_auth = true\n"
+    } else {
+        ""
+    };
+    let contents = format!(
         r#"
 model = "mock-model"
 approval_policy = "never"
@@ -33,6 +39,20 @@ base_url = "http://127.0.0.1:0/v1"
 wire_api = "chat"
 request_max_retries = 0
 stream_max_retries = 0
+{requires_line}
-"#,
-    )
+"#
+    );
+    std::fs::write(config_toml, contents)
 }
+
+fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
+    let config_toml = codex_home.join("config.toml");
+    std::fs::write(
+        config_toml,
+        r#"
+model = "mock-model"
+approval_policy = "never"
+sandbox_mode = "danger-full-access"
+"#,
+    )
+}
@@ -124,6 +144,47 @@ async fn get_auth_status_with_api_key() {
     assert_eq!(status.auth_token, Some("sk-test-key".to_string()));
 }
 
+#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
+async fn get_auth_status_with_api_key_when_auth_not_required() {
+    let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
+    create_config_toml_custom_provider(codex_home.path(), false)
+        .unwrap_or_else(|err| panic!("write config.toml: {err}"));
+
+    let mut mcp = McpProcess::new(codex_home.path())
+        .await
+        .expect("spawn mcp process");
+    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
+        .await
+        .expect("init timeout")
+        .expect("init failed");
+
+    login_with_api_key_via_request(&mut mcp, "sk-test-key").await;
+
+    let request_id = mcp
+        .send_get_auth_status_request(GetAuthStatusParams {
+            include_token: Some(true),
+            refresh_token: Some(false),
+        })
+        .await
+        .expect("send getAuthStatus");
+
+    let resp: JSONRPCResponse = timeout(
+        DEFAULT_READ_TIMEOUT,
+        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
+    )
+    .await
+    .expect("getAuthStatus timeout")
+    .expect("getAuthStatus response");
+    let status: GetAuthStatusResponse = to_response(resp).expect("deserialize status");
+    assert_eq!(status.auth_method, None, "expected no auth method");
+    assert_eq!(status.auth_token, None, "expected no token");
+    assert_eq!(
+        status.requires_openai_auth,
+        Some(false),
+        "requires_openai_auth should be false",
+    );
+}
+
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn get_auth_status_with_api_key_no_include_token() {
     let codex_home = TempDir::new().unwrap_or_else(|e| panic!("create tempdir: {e}"));
@@ -1,5 +1,10 @@
 use std::path::Path;
 
+use app_test_support::McpProcess;
+use app_test_support::create_final_assistant_message_sse_response;
+use app_test_support::create_mock_chat_completions_server;
+use app_test_support::create_shell_sse_response;
+use app_test_support::to_response;
 use codex_core::protocol::AskForApproval;
 use codex_core::protocol::SandboxPolicy;
 use codex_core::protocol_config_types::ReasoningEffort;
@@ -16,11 +21,6 @@ use codex_protocol::mcp_protocol::SendUserMessageParams;
 use codex_protocol::mcp_protocol::SendUserMessageResponse;
 use codex_protocol::mcp_protocol::SendUserTurnParams;
 use codex_protocol::mcp_protocol::SendUserTurnResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::create_final_assistant_message_sse_response;
-use mcp_test_support::create_mock_chat_completions_server;
-use mcp_test_support::create_shell_sse_response;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCNotification;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
@@ -1,6 +1,8 @@
 use std::collections::HashMap;
 use std::path::Path;
 
+use app_test_support::McpProcess;
+use app_test_support::to_response;
 use codex_core::protocol::AskForApproval;
 use codex_protocol::config_types::ReasoningEffort;
 use codex_protocol::config_types::ReasoningSummary;
@@ -11,8 +13,6 @@ use codex_protocol::mcp_protocol::Profile;
 use codex_protocol::mcp_protocol::SandboxSettings;
 use codex_protocol::mcp_protocol::Tools;
 use codex_protocol::mcp_protocol::UserSavedConfig;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
 use pretty_assertions::assert_eq;
@@ -26,7 +26,7 @@ fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
     std::fs::write(
         config_toml,
         r#"
-model = "gpt-5"
+model = "gpt-5-codex"
 approval_policy = "on-request"
 sandbox_mode = "workspace-write"
 model_reasoning_summary = "detailed"
@@ -92,7 +92,7 @@ async fn get_config_toml_parses_all_fields() {
             exclude_tmpdir_env_var: Some(true),
             exclude_slash_tmp: Some(true),
         }),
-        model: Some("gpt-5".into()),
+        model: Some("gpt-5-codex".into()),
         model_reasoning_effort: Some(ReasoningEffort::High),
         model_reasoning_summary: Some(ReasoningSummary::Detailed),
         model_verbosity: Some(Verbosity::Medium),
@@ -1,5 +1,9 @@
 use std::path::Path;
 
+use app_test_support::McpProcess;
+use app_test_support::create_final_assistant_message_sse_response;
+use app_test_support::create_mock_chat_completions_server;
+use app_test_support::to_response;
 use codex_protocol::mcp_protocol::AddConversationListenerParams;
 use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse;
 use codex_protocol::mcp_protocol::InputItem;
@@ -7,10 +11,6 @@ use codex_protocol::mcp_protocol::NewConversationParams;
 use codex_protocol::mcp_protocol::NewConversationResponse;
 use codex_protocol::mcp_protocol::SendUserMessageParams;
 use codex_protocol::mcp_protocol::SendUserMessageResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::create_final_assistant_message_sse_response;
-use mcp_test_support::create_mock_chat_completions_server;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
 use pretty_assertions::assert_eq;
codex-rs/app-server/tests/suite/fuzzy_file_search.rs (new file, 104 lines)
@@ -0,0 +1,104 @@
use app_test_support::McpProcess;
use mcp_types::JSONRPCResponse;
use mcp_types::RequestId;
use pretty_assertions::assert_eq;
use serde_json::json;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_sorts_and_includes_indices() {
    // Prepare a temporary Codex home and a separate root with test files.
    let codex_home = TempDir::new().expect("create temp codex home");
    let root = TempDir::new().expect("create temp search root");

    // Create files designed to have deterministic ordering for query "abe".
    std::fs::write(root.path().join("abc"), "x").expect("write file abc");
    std::fs::write(root.path().join("abcde"), "x").expect("write file abcde");
    std::fs::write(root.path().join("abexy"), "x").expect("write file abexy");
    std::fs::write(root.path().join("zzz.txt"), "x").expect("write file zzz");

    // Start MCP server and initialize.
    let mut mcp = McpProcess::new(codex_home.path()).await.expect("spawn mcp");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timeout")
        .expect("init failed");

    let root_path = root.path().to_string_lossy().to_string();
    // Send fuzzyFileSearch request.
    let request_id = mcp
        .send_fuzzy_file_search_request("abe", vec![root_path.clone()], None)
        .await
        .expect("send fuzzyFileSearch");

    // Read response and verify shape and ordering.
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await
    .expect("fuzzyFileSearch timeout")
    .expect("fuzzyFileSearch resp");

    let value = resp.result;
    assert_eq!(
        value,
        json!({
            "files": [
                { "root": root_path.clone(), "path": "abexy", "score": 88, "indices": [0, 1, 2] },
                { "root": root_path.clone(), "path": "abcde", "score": 74, "indices": [0, 1, 4] },
            ]
        })
    );
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_accepts_cancellation_token() {
    let codex_home = TempDir::new().expect("create temp codex home");
    let root = TempDir::new().expect("create temp search root");

    std::fs::write(root.path().join("alpha.txt"), "contents").expect("write alpha");

    let mut mcp = McpProcess::new(codex_home.path()).await.expect("spawn mcp");
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize())
        .await
        .expect("init timeout")
        .expect("init failed");

    let root_path = root.path().to_string_lossy().to_string();
    let request_id = mcp
        .send_fuzzy_file_search_request("alp", vec![root_path.clone()], None)
        .await
        .expect("send fuzzyFileSearch");

    let request_id_2 = mcp
        .send_fuzzy_file_search_request(
            "alp",
            vec![root_path.clone()],
            Some(request_id.to_string()),
        )
        .await
        .expect("send fuzzyFileSearch");

    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id_2)),
    )
    .await
    .expect("fuzzyFileSearch timeout")
    .expect("fuzzyFileSearch resp");

    let files = resp
        .result
        .get("files")
        .and_then(|value| value.as_array())
        .cloned()
        .expect("files array");

    assert_eq!(files.len(), 1);
    assert_eq!(files[0]["root"], root_path);
    assert_eq!(files[0]["path"], "alpha.txt");
}
@@ -1,10 +1,9 @@
 #![cfg(unix)]
-// Support code lives in the `mcp_test_support` crate under tests/common.
-
+// Support code lives in the `app_test_support` crate under tests/common.
 use std::path::Path;
 
 use codex_core::protocol::TurnAbortReason;
 use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
 use codex_protocol::mcp_protocol::AddConversationListenerParams;
 use codex_protocol::mcp_protocol::InterruptConversationParams;
 use codex_protocol::mcp_protocol::InterruptConversationResponse;
@@ -12,26 +11,22 @@ use codex_protocol::mcp_protocol::NewConversationParams;
 use codex_protocol::mcp_protocol::NewConversationResponse;
 use codex_protocol::mcp_protocol::SendUserMessageParams;
 use codex_protocol::mcp_protocol::SendUserMessageResponse;
+use core_test_support::skip_if_no_network;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
 use tempfile::TempDir;
 use tokio::time::timeout;
 
-use mcp_test_support::McpProcess;
-use mcp_test_support::create_mock_chat_completions_server;
-use mcp_test_support::create_shell_sse_response;
-use mcp_test_support::to_response;
+use app_test_support::McpProcess;
+use app_test_support::create_mock_chat_completions_server;
+use app_test_support::create_shell_sse_response;
+use app_test_support::to_response;
 
 const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
 
 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
 async fn test_shell_command_interruption() {
-    if std::env::var(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR).is_ok() {
-        println!(
-            "Skipping test because it cannot execute when network is disabled in a Codex sandbox."
-        );
-        return;
-    }
+    skip_if_no_network!();
 
     if let Err(err) = shell_command_interruption().await {
         panic!("failure: {err}");
@@ -1,13 +1,15 @@
 use std::fs;
 use std::path::Path;
 
+use app_test_support::McpProcess;
+use app_test_support::to_response;
 use codex_protocol::mcp_protocol::ListConversationsParams;
 use codex_protocol::mcp_protocol::ListConversationsResponse;
 use codex_protocol::mcp_protocol::NewConversationParams; // reused for overrides shape
 use codex_protocol::mcp_protocol::ResumeConversationParams;
 use codex_protocol::mcp_protocol::ResumeConversationResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
+use codex_protocol::mcp_protocol::ServerNotification;
+use codex_protocol::mcp_protocol::SessionConfiguredNotification;
 use mcp_types::JSONRPCNotification;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
@@ -111,23 +113,28 @@ async fn test_list_and_resume_conversations() {
         .await
         .expect("send resumeConversation");
 
-    // Expect a codex/event notification with msg.type == session_configured
+    // Expect a codex/event notification with msg.type == sessionConfigured
     let notification: JSONRPCNotification = timeout(
         DEFAULT_READ_TIMEOUT,
-        mcp.read_stream_until_notification_message("codex/event"),
+        mcp.read_stream_until_notification_message("sessionConfigured"),
     )
     .await
-    .expect("session_configured notification timeout")
-    .expect("session_configured notification");
-    // Basic shape assertion: ensure event type is session_configured
-    let msg_type = notification
-        .params
-        .as_ref()
-        .and_then(|p| p.get("msg"))
-        .and_then(|m| m.get("type"))
-        .and_then(|t| t.as_str())
-        .unwrap_or("");
-    assert_eq!(msg_type, "session_configured");
+    .expect("sessionConfigured notification timeout")
+    .expect("sessionConfigured notification");
+    let session_configured: ServerNotification = notification
+        .try_into()
+        .expect("deserialize sessionConfigured notification");
+    // Basic shape assertion: ensure event type is sessionConfigured
+    let ServerNotification::SessionConfigured(SessionConfiguredNotification {
+        model,
+        rollout_path,
+        ..
+    }) = session_configured
+    else {
+        unreachable!("expected sessionConfigured notification");
+    };
+    assert_eq!(model, "o3");
+    assert_eq!(items[0].path.clone(), rollout_path);
 
     // Then the response for resumeConversation
     let resume_resp: JSONRPCResponse = timeout(
@@ -142,7 +149,7 @@ async fn test_list_and_resume_conversations() {
     } = to_response::<ResumeConversationResponse>(resume_resp)
         .expect("deserialize resumeConversation response");
     // conversation id should be a valid UUID
-    let _: uuid::Uuid = conversation_id.into();
+    assert!(!conversation_id.to_string().is_empty());
 }
 
 fn create_fake_rollout(codex_home: &Path, filename_ts: &str, meta_rfc3339: &str, preview: &str) {
@@ -1,6 +1,8 @@
 use std::path::Path;
 use std::time::Duration;
 
+use app_test_support::McpProcess;
+use app_test_support::to_response;
 use codex_login::login_with_api_key;
 use codex_protocol::mcp_protocol::CancelLoginChatGptParams;
 use codex_protocol::mcp_protocol::CancelLoginChatGptResponse;
@@ -8,8 +10,6 @@ use codex_protocol::mcp_protocol::GetAuthStatusParams;
 use codex_protocol::mcp_protocol::GetAuthStatusResponse;
 use codex_protocol::mcp_protocol::LoginChatGptResponse;
 use codex_protocol::mcp_protocol::LogoutChatGptResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
 use tempfile::TempDir;
codex-rs/app-server/tests/suite/mod.rs (new file, 13 lines)
@@ -0,0 +1,13 @@
mod archive_conversation;
mod auth;
mod codex_message_processor_flow;
mod config;
mod create_conversation;
mod fuzzy_file_search;
mod interrupt;
mod list_resume;
mod login;
mod send_message;
mod set_default_model;
mod user_agent;
mod user_info;
@@ -1,5 +1,9 @@
 use std::path::Path;
 
+use app_test_support::McpProcess;
+use app_test_support::create_final_assistant_message_sse_response;
+use app_test_support::create_mock_chat_completions_server;
+use app_test_support::to_response;
 use codex_protocol::mcp_protocol::AddConversationListenerParams;
 use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse;
 use codex_protocol::mcp_protocol::ConversationId;
@@ -8,10 +12,6 @@ use codex_protocol::mcp_protocol::NewConversationParams;
 use codex_protocol::mcp_protocol::NewConversationResponse;
 use codex_protocol::mcp_protocol::SendUserMessageParams;
 use codex_protocol::mcp_protocol::SendUserMessageResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::create_final_assistant_message_sse_response;
-use mcp_test_support::create_mock_chat_completions_server;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCNotification;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
@@ -136,7 +136,7 @@ async fn test_send_message_session_not_found() {
         .expect("timeout")
         .expect("init");
 
-    let unknown = ConversationId(uuid::Uuid::new_v4());
+    let unknown = ConversationId::new();
     let req_id = mcp
         .send_send_user_message_request(SendUserMessageParams {
             conversation_id: unknown,
@@ -1,10 +1,10 @@
 use std::path::Path;
 
+use app_test_support::McpProcess;
+use app_test_support::to_response;
 use codex_core::config::ConfigToml;
 use codex_protocol::mcp_protocol::SetDefaultModelParams;
 use codex_protocol::mcp_protocol::SetDefaultModelResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
 use pretty_assertions::assert_eq;
@@ -69,7 +69,7 @@ fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
     std::fs::write(
         config_toml,
         r#"
-model = "gpt-5"
+model = "gpt-5-codex"
 model_reasoning_effort = "medium"
 "#,
     )
@@ -1,6 +1,6 @@
+use app_test_support::McpProcess;
+use app_test_support::to_response;
 use codex_protocol::mcp_protocol::GetUserAgentResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
 use pretty_assertions::assert_eq;
@@ -35,7 +35,7 @@ async fn get_user_agent_returns_current_codex_user_agent() {
 
     let os_info = os_info::get();
     let user_agent = format!(
-        "codex_cli_rs/0.0.0 ({} {}; {}) {} (elicitation test; 0.0.0)",
+        "codex_cli_rs/0.0.0 ({} {}; {}) {} (codex-app-server-tests; 0.1.0)",
         os_info.os_type(),
         os_info.version(),
         os_info.architecture().unwrap_or("unknown"),
@@ -1,6 +1,8 @@
 use std::time::Duration;
 
 use anyhow::Context;
+use app_test_support::McpProcess;
+use app_test_support::to_response;
 use base64::Engine;
 use base64::engine::general_purpose::URL_SAFE_NO_PAD;
 use codex_core::auth::AuthDotJson;
@@ -9,8 +11,6 @@ use codex_core::auth::write_auth_json;
 use codex_core::token_data::IdTokenInfo;
 use codex_core::token_data::TokenData;
 use codex_protocol::mcp_protocol::UserInfoResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
 use mcp_types::JSONRPCResponse;
 use mcp_types::RequestId;
 use pretty_assertions::assert_eq;
@@ -15,14 +15,13 @@ path = "src/main.rs"
 workspace = true
 
 [dependencies]
-anyhow = "1"
-similar = "2.7.0"
-thiserror = "2.0.16"
-tree-sitter = "0.25.9"
-tree-sitter-bash = "0.25.0"
-once_cell = "1"
+anyhow = { workspace = true }
+similar = { workspace = true }
+thiserror = { workspace = true }
+tree-sitter = { workspace = true }
+tree-sitter-bash = { workspace = true }
 
 [dev-dependencies]
-assert_cmd = "2"
-pretty_assertions = "1.4.1"
-tempfile = "3.13.0"
+assert_cmd = { workspace = true }
+pretty_assertions = { workspace = true }
+tempfile = { workspace = true }
 
@@ -6,10 +6,10 @@ use std::collections::HashMap;
 use std::path::Path;
 use std::path::PathBuf;
 use std::str::Utf8Error;
+use std::sync::LazyLock;
 
 use anyhow::Context;
 use anyhow::Result;
-use once_cell::sync::Lazy;
 pub use parser::Hunk;
 pub use parser::ParseError;
 use parser::ParseError::*;
@@ -351,7 +351,7 @@ fn extract_apply_patch_from_bash(
 // also run an arbitrary query against the AST. This is useful for understanding
 // how tree-sitter parses the script and whether the query syntax is correct. Be sure
 // to test both positive and negative cases.
-static APPLY_PATCH_QUERY: Lazy<Query> = Lazy::new(|| {
+static APPLY_PATCH_QUERY: LazyLock<Query> = LazyLock::new(|| {
     let language = BASH.into();
     #[expect(clippy::expect_used)]
     Query::new(
@@ -648,21 +648,18 @@ fn derive_new_contents_from_chunks(
         }
     };
 
-    let mut original_lines: Vec<String> = original_contents
-        .split('\n')
-        .map(|s| s.to_string())
-        .collect();
+    let mut original_lines: Vec<String> = original_contents.split('\n').map(String::from).collect();
 
     // Drop the trailing empty element that results from the final newline so
     // that line counts match the behaviour of standard `diff`.
-    if original_lines.last().is_some_and(|s| s.is_empty()) {
+    if original_lines.last().is_some_and(String::is_empty) {
         original_lines.pop();
     }
 
     let replacements = compute_replacements(&original_lines, path, chunks)?;
     let new_lines = apply_replacements(original_lines, &replacements);
     let mut new_lines = new_lines;
-    if !new_lines.last().is_some_and(|s| s.is_empty()) {
+    if !new_lines.last().is_some_and(String::is_empty) {
         new_lines.push(String::new());
     }
     let new_contents = new_lines.join("\n");
@@ -706,7 +703,7 @@ fn compute_replacements(
         if chunk.old_lines.is_empty() {
             // Pure addition (no old lines). We'll add them at the end or just
             // before the final empty line if one exists.
-            let insertion_idx = if original_lines.last().is_some_and(|s| s.is_empty()) {
+            let insertion_idx = if original_lines.last().is_some_and(String::is_empty) {
                 original_lines.len() - 1
             } else {
                 original_lines.len()
@@ -732,11 +729,11 @@ fn compute_replacements(
 
         let mut new_slice: &[String] = &chunk.new_lines;
 
-        if found.is_none() && pattern.last().is_some_and(|s| s.is_empty()) {
+        if found.is_none() && pattern.last().is_some_and(String::is_empty) {
             // Retry without the trailing empty line which represents the final
             // newline in the file.
             pattern = &pattern[..pattern.len() - 1];
-            if new_slice.last().is_some_and(|s| s.is_empty()) {
+            if new_slice.last().is_some_and(String::is_empty) {
                 new_slice = &new_slice[..new_slice.len() - 1];
             }
 
@@ -848,6 +845,7 @@ mod tests {
     use super::*;
     use pretty_assertions::assert_eq;
     use std::fs;
+    use std::string::ToString;
     use tempfile::tempdir;
 
     /// Helper to construct a patch with the given body.
@@ -856,7 +854,7 @@ mod tests {
     }
 
     fn strs_to_strings(strs: &[&str]) -> Vec<String> {
-        strs.iter().map(|s| s.to_string()).collect()
+        strs.iter().map(ToString::to_string).collect()
     }
 
     // Test helpers to reduce repetition when building bash -lc heredoc scripts
 
@@ -112,9 +112,10 @@ pub(crate) fn seek_sequence(
 #[cfg(test)]
 mod tests {
     use super::seek_sequence;
+    use std::string::ToString;
 
     fn to_vec(strings: &[&str]) -> Vec<String> {
-        strings.iter().map(|s| s.to_string()).collect()
+        strings.iter().map(ToString::to_string).collect()
     }
 
     #[test]
 
@@ -11,10 +11,10 @@ path = "src/lib.rs"
 workspace = true
 
 [dependencies]
-anyhow = "1"
-codex-apply-patch = { path = "../apply-patch" }
-codex-core = { path = "../core" }
-codex-linux-sandbox = { path = "../linux-sandbox" }
-dotenvy = "0.15.7"
-tempfile = "3"
-tokio = { version = "1", features = ["rt-multi-thread"] }
+anyhow = { workspace = true }
+codex-apply-patch = { workspace = true }
+codex-core = { workspace = true }
+codex-linux-sandbox = { workspace = true }
+dotenvy = { workspace = true }
+tempfile = { workspace = true }
+tokio = { workspace = true, features = ["rt-multi-thread"] }
 
@@ -54,7 +54,7 @@ where
 
     let argv1 = args.next().unwrap_or_default();
     if argv1 == CODEX_APPLY_PATCH_ARG1 {
-        let patch_arg = args.next().and_then(|s| s.to_str().map(|s| s.to_owned()));
+        let patch_arg = args.next().and_then(|s| s.to_str().map(str::to_owned));
         let exit_code = match patch_arg {
             Some(patch_arg) => {
                 let mut stdout = std::io::stdout();
 
codex-rs/backend-client/Cargo.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[package]
name = "codex-backend-client"
version = "0.0.0"
edition = "2024"
publish = false

[lib]
path = "src/lib.rs"

[dependencies]
anyhow = "1"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
codex-backend-openapi-models = { path = "../codex-backend-openapi-models" }

[dev-dependencies]
pretty_assertions = "1"
codex-rs/backend-client/src/client.rs (new file, 244 lines)
@@ -0,0 +1,244 @@
use crate::types::CodeTaskDetailsResponse;
use crate::types::PaginatedListTaskListItem;
use crate::types::TurnAttemptsSiblingTurnsResponse;
use anyhow::Result;
use reqwest::header::AUTHORIZATION;
use reqwest::header::CONTENT_TYPE;
use reqwest::header::HeaderMap;
use reqwest::header::HeaderName;
use reqwest::header::HeaderValue;
use reqwest::header::USER_AGENT;
use serde::de::DeserializeOwned;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PathStyle {
    /// /api/codex/…
    CodexApi,
    /// /wham/…
    ChatGptApi,
}

impl PathStyle {
    pub fn from_base_url(base_url: &str) -> Self {
        if base_url.contains("/backend-api") {
            PathStyle::ChatGptApi
        } else {
            PathStyle::CodexApi
        }
    }
}

#[derive(Clone, Debug)]
pub struct Client {
    base_url: String,
    http: reqwest::Client,
    bearer_token: Option<String>,
    user_agent: Option<HeaderValue>,
    chatgpt_account_id: Option<String>,
    path_style: PathStyle,
}

impl Client {
    pub fn new(base_url: impl Into<String>) -> Result<Self> {
        let mut base_url = base_url.into();
        // Normalize common ChatGPT hostnames to include /backend-api so we hit the WHAM paths.
        // Also trim trailing slashes for consistent URL building.
        while base_url.ends_with('/') {
            base_url.pop();
        }
        if (base_url.starts_with("https://chatgpt.com")
            || base_url.starts_with("https://chat.openai.com"))
            && !base_url.contains("/backend-api")
        {
            base_url = format!("{base_url}/backend-api");
        }
        let http = reqwest::Client::builder().build()?;
        let path_style = PathStyle::from_base_url(&base_url);
        Ok(Self {
            base_url,
            http,
            bearer_token: None,
            user_agent: None,
            chatgpt_account_id: None,
            path_style,
        })
    }

    pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
        self.bearer_token = Some(token.into());
        self
    }

    pub fn with_user_agent(mut self, ua: impl Into<String>) -> Self {
        if let Ok(hv) = HeaderValue::from_str(&ua.into()) {
            self.user_agent = Some(hv);
        }
        self
    }

    pub fn with_chatgpt_account_id(mut self, account_id: impl Into<String>) -> Self {
        self.chatgpt_account_id = Some(account_id.into());
        self
    }

    pub fn with_path_style(mut self, style: PathStyle) -> Self {
        self.path_style = style;
        self
    }

    fn headers(&self) -> HeaderMap {
        let mut h = HeaderMap::new();
        if let Some(ua) = &self.user_agent {
            h.insert(USER_AGENT, ua.clone());
        } else {
            h.insert(USER_AGENT, HeaderValue::from_static("codex-cli"));
        }
        if let Some(token) = &self.bearer_token {
            let value = format!("Bearer {token}");
            if let Ok(hv) = HeaderValue::from_str(&value) {
                h.insert(AUTHORIZATION, hv);
            }
        }
        if let Some(acc) = &self.chatgpt_account_id
            && let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
            && let Ok(hv) = HeaderValue::from_str(acc)
        {
            h.insert(name, hv);
        }
        h
    }

    async fn exec_request(
        &self,
        req: reqwest::RequestBuilder,
        method: &str,
        url: &str,
    ) -> Result<(String, String)> {
        let res = req.send().await?;
        let status = res.status();
        let ct = res
            .headers()
            .get(CONTENT_TYPE)
            .and_then(|v| v.to_str().ok())
            .unwrap_or("")
            .to_string();
        let body = res.text().await.unwrap_or_default();
        if !status.is_success() {
            anyhow::bail!("{method} {url} failed: {status}; content-type={ct}; body={body}");
        }
        Ok((body, ct))
    }

    fn decode_json<T: DeserializeOwned>(&self, url: &str, ct: &str, body: &str) -> Result<T> {
        match serde_json::from_str::<T>(body) {
            Ok(v) => Ok(v),
            Err(e) => {
                anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}");
            }
        }
    }

    pub async fn list_tasks(
        &self,
        limit: Option<i32>,
        task_filter: Option<&str>,
        environment_id: Option<&str>,
    ) -> Result<PaginatedListTaskListItem> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!("{}/api/codex/tasks/list", self.base_url),
            PathStyle::ChatGptApi => format!("{}/wham/tasks/list", self.base_url),
        };
        let req = self.http.get(&url).headers(self.headers());
        let req = if let Some(lim) = limit {
            req.query(&[("limit", lim)])
        } else {
            req
        };
        let req = if let Some(tf) = task_filter {
            req.query(&[("task_filter", tf)])
        } else {
            req
        };
        let req = if let Some(id) = environment_id {
            req.query(&[("environment_id", id)])
        } else {
            req
        };
        let (body, ct) = self.exec_request(req, "GET", &url).await?;
        self.decode_json::<PaginatedListTaskListItem>(&url, &ct, &body)
    }

    pub async fn get_task_details(&self, task_id: &str) -> Result<CodeTaskDetailsResponse> {
        let (parsed, _body, _ct) = self.get_task_details_with_body(task_id).await?;
        Ok(parsed)
    }

    pub async fn get_task_details_with_body(
        &self,
        task_id: &str,
    ) -> Result<(CodeTaskDetailsResponse, String, String)> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!("{}/api/codex/tasks/{}", self.base_url, task_id),
            PathStyle::ChatGptApi => format!("{}/wham/tasks/{}", self.base_url, task_id),
        };
        let req = self.http.get(&url).headers(self.headers());
        let (body, ct) = self.exec_request(req, "GET", &url).await?;
        let parsed: CodeTaskDetailsResponse = self.decode_json(&url, &ct, &body)?;
        Ok((parsed, body, ct))
    }

    pub async fn list_sibling_turns(
        &self,
        task_id: &str,
        turn_id: &str,
    ) -> Result<TurnAttemptsSiblingTurnsResponse> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!(
                "{}/api/codex/tasks/{}/turns/{}/sibling_turns",
                self.base_url, task_id, turn_id
            ),
            PathStyle::ChatGptApi => format!(
                "{}/wham/tasks/{}/turns/{}/sibling_turns",
                self.base_url, task_id, turn_id
            ),
        };
        let req = self.http.get(&url).headers(self.headers());
        let (body, ct) = self.exec_request(req, "GET", &url).await?;
        self.decode_json::<TurnAttemptsSiblingTurnsResponse>(&url, &ct, &body)
    }

    /// Create a new task (user turn) by POSTing to the appropriate backend path
    /// based on `path_style`. Returns the created task id.
    pub async fn create_task(&self, request_body: serde_json::Value) -> Result<String> {
        let url = match self.path_style {
            PathStyle::CodexApi => format!("{}/api/codex/tasks", self.base_url),
            PathStyle::ChatGptApi => format!("{}/wham/tasks", self.base_url),
        };
        let req = self
            .http
            .post(&url)
            .headers(self.headers())
            .header(CONTENT_TYPE, HeaderValue::from_static("application/json"))
            .json(&request_body);
        let (body, ct) = self.exec_request(req, "POST", &url).await?;
        // Extract id from JSON: prefer `task.id`; fall back to top-level `id` when present.
        match serde_json::from_str::<serde_json::Value>(&body) {
            Ok(v) => {
                if let Some(id) = v
                    .get("task")
                    .and_then(|t| t.get("id"))
                    .and_then(|s| s.as_str())
                {
                    Ok(id.to_string())
                } else if let Some(id) = v.get("id").and_then(|s| s.as_str()) {
                    Ok(id.to_string())
                } else {
                    anyhow::bail!(
                        "POST {url} succeeded but no task id found; content-type={ct}; body={body}"
                    );
                }
            }
            Err(e) => anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}"),
        }
    }
}
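A rough usage sketch of the builder-style client (base URL and token are placeholders; requires a Tokio runtime):

    let client = Client::new("https://chatgpt.com")?
        .with_bearer_token("oauth-or-api-token")
        .with_user_agent("codex-cli-tests");
    // Hits /wham/tasks/list because the normalized base URL contains /backend-api.
    let tasks = client.list_tasks(Some(10), None, None).await?;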
codex-rs/backend-client/src/lib.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
mod client;
pub mod types;

pub use client::Client;
pub use types::CodeTaskDetailsResponse;
pub use types::CodeTaskDetailsResponseExt;
pub use types::PaginatedListTaskListItem;
pub use types::TaskListItem;
pub use types::TurnAttemptsSiblingTurnsResponse;
codex-rs/backend-client/src/types.rs (new file, +369 lines)
@@ -0,0 +1,369 @@
pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
pub use codex_backend_openapi_models::models::TaskListItem;

use serde::Deserialize;
use serde::de::Deserializer;
use serde_json::Value;
use std::collections::HashMap;

/// Hand-rolled models for the Cloud Tasks task-details response.
/// The generated OpenAPI models are pretty bad. This is a half-step
/// towards hand-rolling them.
#[derive(Clone, Debug, Deserialize)]
pub struct CodeTaskDetailsResponse {
    #[serde(default)]
    pub current_user_turn: Option<Turn>,
    #[serde(default)]
    pub current_assistant_turn: Option<Turn>,
    #[serde(default)]
    pub current_diff_task_turn: Option<Turn>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct Turn {
    #[serde(default)]
    pub id: Option<String>,
    #[serde(default)]
    pub attempt_placement: Option<i64>,
    #[serde(default, rename = "turn_status")]
    pub turn_status: Option<String>,
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub sibling_turn_ids: Vec<String>,
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub input_items: Vec<TurnItem>,
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub output_items: Vec<TurnItem>,
    #[serde(default)]
    pub worklog: Option<Worklog>,
    #[serde(default)]
    pub error: Option<TurnError>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct TurnItem {
    #[serde(rename = "type", default)]
    pub kind: String,
    #[serde(default)]
    pub role: Option<String>,
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub content: Vec<ContentFragment>,
    #[serde(default)]
    pub diff: Option<String>,
    #[serde(default)]
    pub output_diff: Option<DiffPayload>,
}

#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub enum ContentFragment {
    Structured(StructuredContent),
    Text(String),
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct StructuredContent {
    #[serde(rename = "content_type", default)]
    pub content_type: Option<String>,
    #[serde(default)]
    pub text: Option<String>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct DiffPayload {
    #[serde(default)]
    pub diff: Option<String>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct Worklog {
    #[serde(default, deserialize_with = "deserialize_vec")]
    pub messages: Vec<WorklogMessage>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct WorklogMessage {
    #[serde(default)]
    pub author: Option<Author>,
    #[serde(default)]
    pub content: Option<WorklogContent>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct Author {
    #[serde(default)]
    pub role: Option<String>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct WorklogContent {
    #[serde(default)]
    pub parts: Vec<ContentFragment>,
}

#[derive(Clone, Debug, Default, Deserialize)]
pub struct TurnError {
    #[serde(default)]
    pub code: Option<String>,
    #[serde(default)]
    pub message: Option<String>,
}

impl ContentFragment {
    fn text(&self) -> Option<&str> {
        match self {
            ContentFragment::Structured(inner) => {
                if inner
                    .content_type
                    .as_deref()
                    .map(|ct| ct.eq_ignore_ascii_case("text"))
                    .unwrap_or(false)
                {
                    inner.text.as_deref().filter(|s| !s.is_empty())
                } else {
                    None
                }
            }
            ContentFragment::Text(raw) => {
                if raw.trim().is_empty() {
                    None
                } else {
                    Some(raw.as_str())
                }
            }
        }
    }
}

impl TurnItem {
    fn text_values(&self) -> Vec<String> {
        self.content
            .iter()
            .filter_map(|fragment| fragment.text().map(str::to_string))
            .collect()
    }

    fn diff_text(&self) -> Option<String> {
        if self.kind == "output_diff" {
            if let Some(diff) = &self.diff
                && !diff.is_empty()
            {
                return Some(diff.clone());
            }
        } else if self.kind == "pr"
            && let Some(payload) = &self.output_diff
            && let Some(diff) = &payload.diff
            && !diff.is_empty()
        {
            return Some(diff.clone());
        }
        None
    }
}

impl Turn {
    fn unified_diff(&self) -> Option<String> {
        self.output_items.iter().find_map(TurnItem::diff_text)
    }

    fn message_texts(&self) -> Vec<String> {
        let mut out: Vec<String> = self
            .output_items
            .iter()
            .filter(|item| item.kind == "message")
            .flat_map(TurnItem::text_values)
            .collect();

        if let Some(log) = &self.worklog {
            for message in &log.messages {
                if message.is_assistant() {
                    out.extend(message.text_values());
                }
            }
        }

        out
    }

    fn user_prompt(&self) -> Option<String> {
        let parts: Vec<String> = self
            .input_items
            .iter()
            .filter(|item| item.kind == "message")
            .filter(|item| {
                item.role
                    .as_deref()
                    .map(|r| r.eq_ignore_ascii_case("user"))
                    .unwrap_or(true)
            })
            .flat_map(TurnItem::text_values)
            .collect();

        if parts.is_empty() {
            None
        } else {
            Some(parts.join("\n\n"))
        }
    }

    fn error_summary(&self) -> Option<String> {
        self.error.as_ref().and_then(TurnError::summary)
    }
}

impl WorklogMessage {
    fn is_assistant(&self) -> bool {
        self.author
            .as_ref()
            .and_then(|a| a.role.as_deref())
            .map(|role| role.eq_ignore_ascii_case("assistant"))
            .unwrap_or(false)
    }

    fn text_values(&self) -> Vec<String> {
        self.content
            .as_ref()
            .map(|content| {
                content
                    .parts
                    .iter()
                    .filter_map(|fragment| fragment.text().map(str::to_string))
                    .collect()
            })
            .unwrap_or_default()
    }
}

impl TurnError {
    fn summary(&self) -> Option<String> {
        let code = self.code.as_deref().unwrap_or("");
        let message = self.message.as_deref().unwrap_or("");
        match (code.is_empty(), message.is_empty()) {
            (true, true) => None,
            (false, true) => Some(code.to_string()),
            (true, false) => Some(message.to_string()),
            (false, false) => Some(format!("{code}: {message}")),
        }
    }
}

pub trait CodeTaskDetailsResponseExt {
    /// Attempt to extract a unified diff string from the assistant or diff turn.
    fn unified_diff(&self) -> Option<String>;
    /// Extract assistant text output messages (no diff) from current turns.
    fn assistant_text_messages(&self) -> Vec<String>;
    /// Extract the user's prompt text from the current user turn, when present.
    fn user_text_prompt(&self) -> Option<String>;
    /// Extract an assistant error message (if the turn failed and provided one).
    fn assistant_error_message(&self) -> Option<String>;
}

impl CodeTaskDetailsResponseExt for CodeTaskDetailsResponse {
    fn unified_diff(&self) -> Option<String> {
        [
            self.current_diff_task_turn.as_ref(),
            self.current_assistant_turn.as_ref(),
        ]
        .into_iter()
        .flatten()
        .find_map(Turn::unified_diff)
    }

    fn assistant_text_messages(&self) -> Vec<String> {
        let mut out = Vec::new();
        for turn in [
            self.current_diff_task_turn.as_ref(),
            self.current_assistant_turn.as_ref(),
        ]
        .into_iter()
        .flatten()
        {
            out.extend(turn.message_texts());
        }
        out
    }

    fn user_text_prompt(&self) -> Option<String> {
        self.current_user_turn.as_ref().and_then(Turn::user_prompt)
    }

    fn assistant_error_message(&self) -> Option<String> {
        self.current_assistant_turn
            .as_ref()
            .and_then(Turn::error_summary)
    }
}

fn deserialize_vec<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
where
    D: Deserializer<'de>,
    T: Deserialize<'de>,
{
    Option::<Vec<T>>::deserialize(deserializer).map(|opt| opt.unwrap_or_default())
}

#[derive(Clone, Debug, Deserialize)]
pub struct TurnAttemptsSiblingTurnsResponse {
    #[serde(default)]
    pub sibling_turns: Vec<HashMap<String, Value>>,
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    fn fixture(name: &str) -> CodeTaskDetailsResponse {
        let json = match name {
            "diff" => include_str!("../tests/fixtures/task_details_with_diff.json"),
            "error" => include_str!("../tests/fixtures/task_details_with_error.json"),
            other => panic!("unknown fixture {other}"),
        };
        serde_json::from_str(json).expect("fixture should deserialize")
    }

    #[test]
    fn unified_diff_prefers_current_diff_task_turn() {
        let details = fixture("diff");
        let diff = details.unified_diff().expect("diff present");
        assert!(diff.contains("diff --git"));
    }

    #[test]
    fn unified_diff_falls_back_to_pr_output_diff() {
        let details = fixture("error");
        let diff = details.unified_diff().expect("diff from pr output");
        assert!(diff.contains("lib.rs"));
    }

    #[test]
    fn assistant_text_messages_extracts_text_content() {
        let details = fixture("diff");
        let messages = details.assistant_text_messages();
        assert_eq!(messages, vec!["Assistant response".to_string()]);
    }

    #[test]
    fn user_text_prompt_joins_parts_with_spacing() {
        let details = fixture("diff");
        let prompt = details.user_text_prompt().expect("prompt present");
        assert_eq!(prompt, "First line\n\nSecond line");
    }

    #[test]
    fn assistant_error_message_combines_code_and_message() {
        let details = fixture("error");
        let msg = details
            .assistant_error_message()
            .expect("error should be present");
        assert_eq!(msg, "APPLY_FAILED: Patch could not be applied");
    }
}
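Worth noting about `deserialize_vec`: it exists because the backend can send `null`, or omit a field entirely, where these models expect a list. A hedged sketch of the resulting behavior (not a test from the diff):

// Both decode to an empty Vec rather than failing:
let a: Turn = serde_json::from_str(r#"{"sibling_turn_ids": null}"#).expect("null list");
let b: Turn = serde_json::from_str("{}").expect("missing list");
assert!(a.sibling_turn_ids.is_empty() && b.sibling_turn_ids.is_empty());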
codex-rs/backend-client/tests/fixtures/task_details_with_diff.json (new file, vendored, +38 lines)
@@ -0,0 +1,38 @@
{
  "task": {
    "id": "task_123",
    "title": "Refactor cloud task client",
    "archived": false,
    "external_pull_requests": []
  },
  "current_user_turn": {
    "input_items": [
      {
        "type": "message",
        "role": "user",
        "content": [
          { "content_type": "text", "text": "First line" },
          { "content_type": "text", "text": "Second line" }
        ]
      }
    ]
  },
  "current_assistant_turn": {
    "output_items": [
      {
        "type": "message",
        "content": [
          { "content_type": "text", "text": "Assistant response" }
        ]
      }
    ]
  },
  "current_diff_task_turn": {
    "output_items": [
      {
        "type": "output_diff",
        "diff": "diff --git a/src/main.rs b/src/main.rs\n+fn main() { println!(\"hi\"); }\n"
      }
    ]
  }
}
codex-rs/backend-client/tests/fixtures/task_details_with_error.json (new file, vendored, +22 lines)
@@ -0,0 +1,22 @@
{
  "task": {
    "id": "task_456",
    "title": "Investigate failure",
    "archived": false,
    "external_pull_requests": []
  },
  "current_assistant_turn": {
    "output_items": [
      {
        "type": "pr",
        "output_diff": {
          "diff": "diff --git a/lib.rs b/lib.rs\n+pub fn hello() {}\n"
        }
      }
    ],
    "error": {
      "code": "APPLY_FAILED",
      "message": "Patch could not be applied"
    }
  }
}
@@ -7,13 +7,14 @@ version = { workspace = true }
 workspace = true
 
 [dependencies]
-anyhow = "1"
-clap = { version = "4", features = ["derive"] }
-codex-common = { path = "../common", features = ["cli"] }
-codex-core = { path = "../core" }
-serde = { version = "1", features = ["derive"] }
-serde_json = "1"
-tokio = { version = "1", features = ["full"] }
+anyhow = { workspace = true }
+clap = { workspace = true, features = ["derive"] }
+codex-common = { workspace = true, features = ["cli"] }
+codex-core = { workspace = true }
+serde = { workspace = true, features = ["derive"] }
+serde_json = { workspace = true }
+tokio = { workspace = true, features = ["full"] }
+codex-git-apply = { path = "../git-apply" }
 
 [dev-dependencies]
-tempfile = "3"
+tempfile = { workspace = true }

@@ -56,46 +56,24 @@ pub async fn apply_diff_from_task(
 }
 
 async fn apply_diff(diff: &str, cwd: Option<PathBuf>) -> anyhow::Result<()> {
-    let mut cmd = tokio::process::Command::new("git");
-    if let Some(cwd) = cwd {
-        cmd.current_dir(cwd);
-    }
-    let toplevel_output = cmd
-        .args(vec!["rev-parse", "--show-toplevel"])
-        .output()
-        .await?;
-
-    if !toplevel_output.status.success() {
-        anyhow::bail!("apply must be run from a git repository.");
-    }
-
-    let repo_root = String::from_utf8(toplevel_output.stdout)?
-        .trim()
-        .to_string();
-
-    let mut git_apply_cmd = tokio::process::Command::new("git")
-        .args(vec!["apply", "--3way"])
-        .current_dir(&repo_root)
-        .stdin(std::process::Stdio::piped())
-        .stdout(std::process::Stdio::piped())
-        .stderr(std::process::Stdio::piped())
-        .spawn()?;
-
-    if let Some(mut stdin) = git_apply_cmd.stdin.take() {
-        tokio::io::AsyncWriteExt::write_all(&mut stdin, diff.as_bytes()).await?;
-        drop(stdin);
-    }
-
-    let output = git_apply_cmd.wait_with_output().await?;
-
-    if !output.status.success() {
+    let cwd = cwd.unwrap_or(std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()));
+    let req = codex_git_apply::ApplyGitRequest {
+        cwd,
+        diff: diff.to_string(),
+        revert: false,
+        preflight: false,
+    };
+    let res = codex_git_apply::apply_git_patch(&req)?;
+    if res.exit_code != 0 {
         anyhow::bail!(
-            "Git apply failed with status {}: {}",
-            output.status,
-            String::from_utf8_lossy(&output.stderr)
+            "Git apply failed (applied={}, skipped={}, conflicts={})\nstdout:\n{}\nstderr:\n{}",
+            res.applied_paths.len(),
+            res.skipped_paths.len(),
+            res.conflicted_paths.len(),
+            res.stdout,
+            res.stderr
         );
     }
 
     println!("Successfully applied diff");
     Ok(())
 }

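With this hunk, `apply_diff` no longer shells out to `git apply --3way` at all; it builds an `ApplyGitRequest` and reports a structured result. A sketch of the same call, using only the fields visible above (`preflight` presumably requests a dry run; that reading is an assumption, as is the `patch_text` placeholder):

// Sketch, not part of the diff.
fn apply_with_report(patch_text: &str) -> anyhow::Result<()> {
    let req = codex_git_apply::ApplyGitRequest {
        cwd: std::env::current_dir()?,
        diff: patch_text.to_string(),
        revert: false,
        preflight: true, // assumed dry-run mode; `false` applies for real
    };
    let res = codex_git_apply::apply_git_patch(&req)?;
    println!(
        "applied={} skipped={} conflicts={}",
        res.applied_paths.len(),
        res.skipped_paths.len(),
        res.conflicted_paths.len()
    );
    Ok(())
}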
@@ -44,6 +44,6 @@ pub(crate) async fn chatgpt_get_request<T: DeserializeOwned>(
     } else {
         let status = response.status();
         let body = response.text().await.unwrap_or_default();
-        anyhow::bail!("Request failed with status {}: {}", status, body)
+        anyhow::bail!("Request failed with status {status}: {body}")
     }
 }

@@ -15,26 +15,40 @@ path = "src/lib.rs"
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
clap_complete = "4"
|
||||
codex-arg0 = { path = "../arg0" }
|
||||
codex-chatgpt = { path = "../chatgpt" }
|
||||
codex-common = { path = "../common", features = ["cli"] }
|
||||
codex-core = { path = "../core" }
|
||||
codex-exec = { path = "../exec" }
|
||||
codex-login = { path = "../login" }
|
||||
codex-mcp-server = { path = "../mcp-server" }
|
||||
codex-protocol = { path = "../protocol" }
|
||||
codex-tui = { path = "../tui" }
|
||||
serde_json = "1"
|
||||
tokio = { version = "1", features = [
|
||||
anyhow = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
clap_complete = { workspace = true }
|
||||
codex-app-server = { workspace = true }
|
||||
codex-arg0 = { workspace = true }
|
||||
codex-chatgpt = { workspace = true }
|
||||
codex-common = { workspace = true, features = ["cli"] }
|
||||
codex-core = { workspace = true }
|
||||
codex-exec = { workspace = true }
|
||||
codex-login = { workspace = true }
|
||||
codex-mcp-server = { workspace = true }
|
||||
codex-process-hardening = { workspace = true }
|
||||
codex-protocol = { workspace = true }
|
||||
codex-protocol-ts = { workspace = true }
|
||||
codex-responses-api-proxy = { workspace = true }
|
||||
codex-tui = { workspace = true }
|
||||
codex-cloud-tasks = { path = "../cloud-tasks" }
|
||||
ctor = { workspace = true }
|
||||
owo-colors = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
supports-color = { workspace = true }
|
||||
uuid = { workspace = true }
|
||||
tokio = { workspace = true, features = [
|
||||
"io-std",
|
||||
"macros",
|
||||
"process",
|
||||
"rt-multi-thread",
|
||||
"signal",
|
||||
] }
|
||||
tracing = "0.1.41"
|
||||
tracing-subscriber = "0.3.19"
|
||||
codex-protocol-ts = { path = "../protocol-ts" }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = { workspace = true }
|
||||
predicates = { workspace = true }
|
||||
pretty_assertions = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
@@ -64,7 +64,6 @@ async fn run_command_under_sandbox(
     sandbox_type: SandboxType,
 ) -> anyhow::Result<()> {
     let sandbox_mode = create_sandbox_mode(full_auto);
-    let cwd = std::env::current_dir()?;
     let config = Config::load_with_cli_overrides(
         config_overrides
             .parse_overrides()
@@ -75,13 +74,29 @@ async fn run_command_under_sandbox(
             ..Default::default()
         },
     )?;
 
+    // In practice, this should be `std::env::current_dir()` because this CLI
+    // does not support `--cwd`, but let's use the config value for consistency.
+    let cwd = config.cwd.clone();
+    // For now, we always use the same cwd for both the command and the
+    // sandbox policy. In the future, we could add a CLI option to set them
+    // separately.
+    let sandbox_policy_cwd = cwd.clone();
+
     let stdio_policy = StdioPolicy::Inherit;
     let env = create_env(&config.shell_environment_policy);
 
     let mut child = match sandbox_type {
         SandboxType::Seatbelt => {
-            spawn_command_under_seatbelt(command, &config.sandbox_policy, cwd, stdio_policy, env)
-                .await?
+            spawn_command_under_seatbelt(
+                command,
+                cwd,
+                &config.sandbox_policy,
+                sandbox_policy_cwd.as_path(),
+                stdio_policy,
+                env,
+            )
+            .await?
         }
         SandboxType::Landlock => {
             #[expect(clippy::expect_used)]
@@ -91,8 +106,9 @@ async fn run_command_under_sandbox(
             spawn_command_under_linux_sandbox(
                 codex_linux_sandbox_exe,
                 command,
-                &config.sandbox_policy,
                 cwd,
+                &config.sandbox_policy,
+                sandbox_policy_cwd.as_path(),
                 stdio_policy,
                 env,
             )

@@ -6,6 +6,7 @@ use codex_core::auth::logout;
 use codex_core::config::Config;
 use codex_core::config::ConfigOverrides;
 use codex_login::ServerOptions;
+use codex_login::run_device_code_login;
 use codex_login::run_login_server;
 use codex_protocol::mcp_protocol::AuthMode;
 use std::path::PathBuf;
@@ -55,6 +56,32 @@ pub async fn run_login_with_api_key(
     }
 }
 
+/// Login using the OAuth device code flow.
+pub async fn run_login_with_device_code(
+    cli_config_overrides: CliConfigOverrides,
+    issuer_base_url: Option<String>,
+    client_id: Option<String>,
+) -> ! {
+    let config = load_config_or_exit(cli_config_overrides);
+    let mut opts = ServerOptions::new(
+        config.codex_home,
+        client_id.unwrap_or(CLIENT_ID.to_string()),
+    );
+    if let Some(iss) = issuer_base_url {
+        opts.issuer = iss;
+    }
+    match run_device_code_login(opts).await {
+        Ok(()) => {
+            eprintln!("Successfully logged in");
+            std::process::exit(0);
+        }
+        Err(e) => {
+            eprintln!("Error logging in with device code: {e}");
+            std::process::exit(1);
+        }
+    }
+}
+
 pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
     let config = load_config_or_exit(cli_config_overrides);
 

@@ -1,5 +1,7 @@
 use clap::CommandFactory;
 use clap::Parser;
+use clap::error::Error as ClapError;
+use clap::error::ErrorKind as ClapErrorKind;
 use clap_complete::Shell;
 use clap_complete::generate;
 use codex_arg0::arg0_dispatch_or_else;
@@ -10,13 +12,23 @@ use codex_cli::SeatbeltCommand;
 use codex_cli::login::run_login_status;
 use codex_cli::login::run_login_with_api_key;
 use codex_cli::login::run_login_with_chatgpt;
+use codex_cli::login::run_login_with_device_code;
 use codex_cli::login::run_logout;
 use codex_cli::proto;
+use codex_cloud_tasks::Cli as CloudTasksCli;
 use codex_common::CliConfigOverrides;
 use codex_exec::Cli as ExecCli;
+use codex_responses_api_proxy::Args as ResponsesApiProxyArgs;
+use codex_tui::AppExitInfo;
 use codex_tui::Cli as TuiCli;
+use owo_colors::OwoColorize;
 use std::path::PathBuf;
+use supports_color::Stream;
+use uuid::Uuid;
 
+mod mcp_cmd;
+
+use crate::mcp_cmd::McpCli;
 use crate::proto::ProtoCli;
 
 /// Codex CLI
@@ -56,8 +68,14 @@ enum Subcommand {
     /// Remove stored authentication credentials.
     Logout(LogoutCommand),
 
-    /// Experimental: run Codex as an MCP server.
-    Mcp,
+    /// [experimental] Run Codex as an MCP server and manage MCP servers.
+    Mcp(McpCli),
+
+    /// [experimental] Run the Codex MCP server (stdio transport).
+    McpServer,
+
+    /// [experimental] Run the app server.
+    AppServer,
 
     /// Run the Protocol stream via stdin/stdout
     #[clap(visible_alias = "p")]
@@ -73,9 +91,19 @@ enum Subcommand {
     #[clap(visible_alias = "a")]
     Apply(ApplyCommand),
 
+    /// Resume a previous interactive session (picker by default; use --last to continue the most recent).
+    Resume(ResumeCommand),
+
     /// Internal: generate TypeScript protocol bindings.
     #[clap(hide = true)]
     GenerateTs(GenerateTsCommand),
+
+    /// [EXPERIMENTAL] Browse tasks from Codex Cloud and apply changes locally.
+    #[clap(name = "cloud", alias = "cloud-tasks")]
+    Cloud(CloudTasksCli),
+
+    /// Internal: run the responses API proxy.
+    #[clap(hide = true)]
+    ResponsesApiProxy(ResponsesApiProxyArgs),
 }

#[derive(Debug, Parser)]
|
||||
@@ -85,6 +113,21 @@ struct CompletionCommand {
|
||||
shell: Shell,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct ResumeCommand {
|
||||
#[clap(flatten)]
|
||||
config_overrides: TuiCli,
|
||||
|
||||
/// Continue the most recent session without showing the picker.
|
||||
#[arg(long = "last", default_value_t = false)]
|
||||
last: bool,
|
||||
|
||||
/// Conversation/session id (UUID). When provided, resumes this session.
|
||||
/// If omitted, use --last to pick the most recent recorded session.
|
||||
#[arg(value_name = "SESSION_ID", index = 2)]
|
||||
session_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct DebugArgs {
|
||||
#[command(subcommand)]
|
||||
@@ -108,6 +151,20 @@ struct LoginCommand {
     #[arg(long = "api-key", value_name = "API_KEY")]
     api_key: Option<String>,
 
+    /// EXPERIMENTAL: Use the OAuth device code flow (not yet supported).
+    /// This feature is experimental and may change in future releases.
+    #[arg(long = "experimental_use-device-code", hide = true)]
+    use_device_code: bool,
+
+    /// EXPERIMENTAL: Override the OAuth issuer base URL (advanced).
+    #[arg(long = "experimental_issuer", value_name = "URL", hide = true)]
+    issuer_base_url: Option<String>,
+
+    /// EXPERIMENTAL: Use a custom OAuth client ID (advanced).
+    #[arg(long = "experimental_client-id", value_name = "CLIENT_ID", hide = true)]
+    client_id: Option<String>,
+
     #[command(subcommand)]
     action: Option<LoginSubcommand>,
 }
@@ -135,6 +192,62 @@ struct GenerateTsCommand {
     prettier: Option<PathBuf>,
 }
 
+fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec<String> {
+    let AppExitInfo {
+        token_usage,
+        conversation_id,
+    } = exit_info;
+
+    if token_usage.is_zero() {
+        return Vec::new();
+    }
+
+    let mut lines = vec![format!(
+        "{}",
+        codex_core::protocol::FinalOutput::from(token_usage)
+    )];
+
+    if let Some(session_id) = conversation_id {
+        let resume_cmd = format!("codex resume {session_id}");
+        let command = if color_enabled {
+            resume_cmd.cyan().to_string()
+        } else {
+            resume_cmd
+        };
+        lines.push(format!("To continue this session, run {command}"));
+    }
+
+    lines
+}
+
+fn print_exit_messages(exit_info: AppExitInfo) {
+    let color_enabled = supports_color::on(Stream::Stdout).is_some();
+    for line in format_exit_messages(exit_info, color_enabled) {
+        println!("{line}");
+    }
+}
+
+pub(crate) const CODEX_SECURE_MODE_ENV_VAR: &str = "CODEX_SECURE_MODE";
+
+/// As early as possible in the process lifecycle, apply hardening measures
+/// if the CODEX_SECURE_MODE environment variable is set to "1".
+#[ctor::ctor]
+fn pre_main_hardening() {
+    let secure_mode = match std::env::var(CODEX_SECURE_MODE_ENV_VAR) {
+        Ok(value) => value,
+        Err(_) => return,
+    };
+
+    if secure_mode == "1" {
+        codex_process_hardening::pre_main_hardening();
+    }
+
+    // Always clear this env var so child processes don't inherit it.
+    unsafe {
+        std::env::remove_var(CODEX_SECURE_MODE_ENV_VAR);
+    }
+}
+
 fn main() -> anyhow::Result<()> {
     arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
         cli_main(codex_linux_sandbox_exe).await?;
@@ -143,32 +256,75 @@ fn main() -> anyhow::Result<()> {
|
||||
}
|
||||
|
||||
async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()> {
|
||||
let cli = MultitoolCli::parse();
|
||||
let MultitoolCli {
|
||||
config_overrides: root_config_overrides,
|
||||
mut interactive,
|
||||
subcommand,
|
||||
} = MultitoolCli::parse();
|
||||
|
||||
match cli.subcommand {
|
||||
match subcommand {
|
||||
None => {
|
||||
let mut tui_cli = cli.interactive;
|
||||
prepend_config_flags(&mut tui_cli.config_overrides, cli.config_overrides);
|
||||
let usage = codex_tui::run_main(tui_cli, codex_linux_sandbox_exe).await?;
|
||||
if !usage.is_zero() {
|
||||
println!("{}", codex_core::protocol::FinalOutput::from(usage));
|
||||
}
|
||||
prepend_config_flags(
|
||||
&mut interactive.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
let exit_info = codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
|
||||
print_exit_messages(exit_info);
|
||||
}
|
||||
Some(Subcommand::Exec(mut exec_cli)) => {
|
||||
prepend_config_flags(&mut exec_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut exec_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_exec::run_main(exec_cli, codex_linux_sandbox_exe).await?;
|
||||
}
|
||||
Some(Subcommand::Mcp) => {
|
||||
codex_mcp_server::run_main(codex_linux_sandbox_exe, cli.config_overrides).await?;
|
||||
Some(Subcommand::McpServer) => {
|
||||
codex_mcp_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(Subcommand::Mcp(mut mcp_cli)) => {
|
||||
// Propagate any root-level config overrides (e.g. `-c key=value`).
|
||||
prepend_config_flags(&mut mcp_cli.config_overrides, root_config_overrides.clone());
|
||||
mcp_cli.run().await?;
|
||||
}
|
||||
Some(Subcommand::AppServer) => {
|
||||
codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?;
|
||||
}
|
||||
Some(Subcommand::Resume(mut resume_cmd)) => {
|
||||
if let Err(err) = resume_cmd.normalize() {
|
||||
err.exit();
|
||||
}
|
||||
let ResumeCommand {
|
||||
config_overrides,
|
||||
last,
|
||||
session_id,
|
||||
} = resume_cmd;
|
||||
interactive = finalize_resume_interactive(
|
||||
interactive,
|
||||
root_config_overrides.clone(),
|
||||
session_id,
|
||||
last,
|
||||
config_overrides,
|
||||
);
|
||||
codex_tui::run_main(interactive, codex_linux_sandbox_exe).await?;
|
||||
}
|
||||
Some(Subcommand::Login(mut login_cli)) => {
|
||||
prepend_config_flags(&mut login_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut login_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
match login_cli.action {
|
||||
Some(LoginSubcommand::Status) => {
|
||||
run_login_status(login_cli.config_overrides).await;
|
||||
}
|
||||
None => {
|
||||
if let Some(api_key) = login_cli.api_key {
|
||||
if login_cli.use_device_code {
|
||||
run_login_with_device_code(
|
||||
login_cli.config_overrides,
|
||||
login_cli.issuer_base_url,
|
||||
login_cli.client_id,
|
||||
)
|
||||
.await;
|
||||
} else if let Some(api_key) = login_cli.api_key {
|
||||
run_login_with_api_key(login_cli.config_overrides, api_key).await;
|
||||
} else {
|
||||
run_login_with_chatgpt(login_cli.config_overrides).await;
|
||||
@@ -177,19 +333,35 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
}
|
||||
}
|
||||
Some(Subcommand::Logout(mut logout_cli)) => {
|
||||
prepend_config_flags(&mut logout_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut logout_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
run_logout(logout_cli.config_overrides).await;
|
||||
}
|
||||
Some(Subcommand::Proto(mut proto_cli)) => {
|
||||
prepend_config_flags(&mut proto_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut proto_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
proto::run_main(proto_cli).await?;
|
||||
}
|
||||
Some(Subcommand::Completion(completion_cli)) => {
|
||||
print_completion(completion_cli);
|
||||
}
|
||||
Some(Subcommand::Cloud(mut cloud_cli)) => {
|
||||
prepend_config_flags(
|
||||
&mut cloud_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_cloud_tasks::run_main(cloud_cli, codex_linux_sandbox_exe).await?;
|
||||
}
|
||||
Some(Subcommand::Debug(debug_args)) => match debug_args.cmd {
|
||||
DebugCommand::Seatbelt(mut seatbelt_cli) => {
|
||||
prepend_config_flags(&mut seatbelt_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut seatbelt_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_cli::debug_sandbox::run_command_under_seatbelt(
|
||||
seatbelt_cli,
|
||||
codex_linux_sandbox_exe,
|
||||
@@ -197,7 +369,10 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
.await?;
|
||||
}
|
||||
DebugCommand::Landlock(mut landlock_cli) => {
|
||||
prepend_config_flags(&mut landlock_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut landlock_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
codex_cli::debug_sandbox::run_command_under_landlock(
|
||||
landlock_cli,
|
||||
codex_linux_sandbox_exe,
|
||||
@@ -206,9 +381,16 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
|
||||
}
|
||||
},
|
||||
Some(Subcommand::Apply(mut apply_cli)) => {
|
||||
prepend_config_flags(&mut apply_cli.config_overrides, cli.config_overrides);
|
||||
prepend_config_flags(
|
||||
&mut apply_cli.config_overrides,
|
||||
root_config_overrides.clone(),
|
||||
);
|
||||
run_apply_command(apply_cli, None).await?;
|
||||
}
|
||||
Some(Subcommand::ResponsesApiProxy(args)) => {
|
||||
tokio::task::spawn_blocking(move || codex_responses_api_proxy::run_main(args))
|
||||
.await??;
|
||||
}
|
||||
Some(Subcommand::GenerateTs(gen_cli)) => {
|
||||
codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?;
|
||||
}
|
||||
@@ -228,8 +410,336 @@ fn prepend_config_flags(
         .splice(0..0, cli_config_overrides.raw_overrides);
 }
 
+/// Build the final `TuiCli` for a `codex resume` invocation.
+fn finalize_resume_interactive(
+    mut interactive: TuiCli,
+    root_config_overrides: CliConfigOverrides,
+    session_id: Option<String>,
+    last: bool,
+    resume_cli: TuiCli,
+) -> TuiCli {
+    // Start with the parsed interactive CLI so resume shares the same
+    // configuration surface area as `codex` without additional flags.
+    let resume_session_id = session_id;
+    interactive.resume_picker = resume_session_id.is_none() && !last;
+    interactive.resume_last = last;
+    interactive.resume_session_id = resume_session_id;
+
+    // Merge resume-scoped flags and overrides with highest precedence.
+    merge_resume_cli_flags(&mut interactive, resume_cli);
+
+    // Propagate any root-level config overrides (e.g. `-c key=value`).
+    prepend_config_flags(&mut interactive.config_overrides, root_config_overrides);
+
+    interactive
+}
+
+/// Merge flags provided to `codex resume` so they take precedence over any
+/// root-level flags. Only overrides fields explicitly set on the resume-scoped
+/// CLI. Also appends `-c key=value` overrides with highest precedence.
+fn merge_resume_cli_flags(interactive: &mut TuiCli, resume_cli: TuiCli) {
+    if let Some(model) = resume_cli.model {
+        interactive.model = Some(model);
+    }
+    if resume_cli.oss {
+        interactive.oss = true;
+    }
+    if let Some(profile) = resume_cli.config_profile {
+        interactive.config_profile = Some(profile);
+    }
+    if let Some(sandbox) = resume_cli.sandbox_mode {
+        interactive.sandbox_mode = Some(sandbox);
+    }
+    if let Some(approval) = resume_cli.approval_policy {
+        interactive.approval_policy = Some(approval);
+    }
+    if resume_cli.full_auto {
+        interactive.full_auto = true;
+    }
+    if resume_cli.dangerously_bypass_approvals_and_sandbox {
+        interactive.dangerously_bypass_approvals_and_sandbox = true;
+    }
+    if let Some(cwd) = resume_cli.cwd {
+        interactive.cwd = Some(cwd);
+    }
+    if resume_cli.web_search {
+        interactive.web_search = true;
+    }
+    if !resume_cli.images.is_empty() {
+        interactive.images = resume_cli.images;
+    }
+    if let Some(prompt) = resume_cli.prompt {
+        interactive.prompt = Some(prompt);
+    }
+
+    interactive
+        .config_overrides
+        .raw_overrides
+        .extend(resume_cli.config_overrides.raw_overrides);
+}
+
 fn print_completion(cmd: CompletionCommand) {
     let mut app = MultitoolCli::command();
     let name = "codex";
     generate(cmd.shell, &mut app, name, &mut std::io::stdout());
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use codex_core::protocol::TokenUsage;
+    use codex_protocol::mcp_protocol::ConversationId;
+
+    fn finalize_from_args(args: &[&str]) -> TuiCli {
+        let cli = MultitoolCli::try_parse_from(args).expect("parse");
+        let MultitoolCli {
+            interactive,
+            config_overrides: root_overrides,
+            subcommand,
+        } = cli;
+
+        let mut resume_cmd = match subcommand.expect("resume present") {
+            Subcommand::Resume(cmd) => cmd,
+            _ => unreachable!(),
+        };
+        resume_cmd.normalize().expect("normalize resume args");
+        let ResumeCommand {
+            config_overrides: resume_cli,
+            last,
+            session_id,
+        } = resume_cmd;
+
+        finalize_resume_interactive(interactive, root_overrides, session_id, last, resume_cli)
+    }
+
+    fn sample_exit_info(conversation: Option<&str>) -> AppExitInfo {
+        let token_usage = TokenUsage {
+            output_tokens: 2,
+            total_tokens: 2,
+            ..Default::default()
+        };
+        AppExitInfo {
+            token_usage,
+            conversation_id: conversation
+                .map(ConversationId::from_string)
+                .map(Result::unwrap),
+        }
+    }
+
+    #[test]
+    fn format_exit_messages_skips_zero_usage() {
+        let exit_info = AppExitInfo {
+            token_usage: TokenUsage::default(),
+            conversation_id: None,
+        };
+        let lines = format_exit_messages(exit_info, false);
+        assert!(lines.is_empty());
+    }
+
+    #[test]
+    fn format_exit_messages_includes_resume_hint_without_color() {
+        let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"));
+        let lines = format_exit_messages(exit_info, false);
+        assert_eq!(
+            lines,
+            vec![
+                "Token usage: total=2 input=0 output=2".to_string(),
+                "To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000"
+                    .to_string(),
+            ]
+        );
+    }
+
+    #[test]
+    fn format_exit_messages_applies_color_when_enabled() {
+        let exit_info = sample_exit_info(Some("123e4567-e89b-12d3-a456-426614174000"));
+        let lines = format_exit_messages(exit_info, true);
+        assert_eq!(lines.len(), 2);
+        assert!(lines[1].contains("\u{1b}[36m"));
+    }
+
+    #[test]
+    fn resume_model_flag_applies_when_no_root_flags() {
+        let interactive = finalize_from_args(["codex", "resume", "-m", "gpt-5-test"].as_ref());
+
+        assert_eq!(interactive.model.as_deref(), Some("gpt-5-test"));
+        assert!(interactive.resume_picker);
+        assert!(!interactive.resume_last);
+        assert_eq!(interactive.resume_session_id, None);
+    }
+
+    #[test]
+    fn resume_picker_logic_none_and_not_last() {
+        let interactive = finalize_from_args(["codex", "resume"].as_ref());
+        assert!(interactive.resume_picker);
+        assert!(!interactive.resume_last);
+        assert_eq!(interactive.resume_session_id, None);
+    }
+
+    #[test]
+    fn resume_picker_logic_last() {
+        let interactive = finalize_from_args(["codex", "resume", "--last"].as_ref());
+        assert!(!interactive.resume_picker);
+        assert!(interactive.resume_last);
+        assert_eq!(interactive.resume_session_id, None);
+    }
+
+    #[test]
+    fn resume_last_accepts_follow_up_prompt() {
+        let interactive = finalize_from_args(["codex", "resume", "--last", "hi there"].as_ref());
+        assert!(interactive.resume_last);
+        assert_eq!(interactive.prompt.as_deref(), Some("hi there"));
+        assert_eq!(interactive.resume_session_id, None);
+    }
+
+    #[test]
+    fn resume_prompt_before_session_id() {
+        let interactive = finalize_from_args(
+            [
+                "codex",
+                "resume",
+                "summarize progress",
+                "123e4567-e89b-12d3-a456-426614174000",
+            ]
+            .as_ref(),
+        );
+        assert_eq!(interactive.prompt.as_deref(), Some("summarize progress"));
+        assert_eq!(
+            interactive.resume_session_id.as_deref(),
+            Some("123e4567-e89b-12d3-a456-426614174000"),
+        );
+        assert!(!interactive.resume_last);
+        assert!(!interactive.resume_picker);
+    }
+
+    #[test]
+    fn resume_picker_logic_with_session_id() {
+        let interactive = finalize_from_args(
+            ["codex", "resume", "123e4567-e89b-12d3-a456-426614174000"].as_ref(),
+        );
+        assert!(!interactive.resume_picker);
+        assert!(!interactive.resume_last);
+        assert_eq!(
+            interactive.resume_session_id.as_deref(),
+            Some("123e4567-e89b-12d3-a456-426614174000")
+        );
+    }
+
+    #[test]
+    fn resume_merges_option_flags_and_full_auto() {
+        let interactive = finalize_from_args(
+            [
+                "codex",
+                "resume",
+                "123e4567-e89b-12d3-a456-426614174000",
+                "--oss",
+                "--full-auto",
+                "--search",
+                "--sandbox",
+                "workspace-write",
+                "--ask-for-approval",
+                "on-request",
+                "-m",
+                "gpt-5-test",
+                "-p",
+                "my-profile",
+                "-C",
+                "/tmp",
+                "-i",
+                "/tmp/a.png,/tmp/b.png",
+            ]
+            .as_ref(),
+        );
+
+        assert_eq!(interactive.model.as_deref(), Some("gpt-5-test"));
+        assert!(interactive.oss);
+        assert_eq!(interactive.config_profile.as_deref(), Some("my-profile"));
+        assert!(matches!(
+            interactive.sandbox_mode,
+            Some(codex_common::SandboxModeCliArg::WorkspaceWrite)
+        ));
+        assert!(matches!(
+            interactive.approval_policy,
+            Some(codex_common::ApprovalModeCliArg::OnRequest)
+        ));
+        assert!(interactive.full_auto);
+        assert_eq!(
+            interactive.cwd.as_deref(),
+            Some(std::path::Path::new("/tmp"))
+        );
+        assert!(interactive.web_search);
+        let has_a = interactive
+            .images
+            .iter()
+            .any(|p| p == std::path::Path::new("/tmp/a.png"));
+        let has_b = interactive
+            .images
+            .iter()
+            .any(|p| p == std::path::Path::new("/tmp/b.png"));
+        assert!(has_a && has_b);
+        assert!(!interactive.resume_picker);
+        assert!(!interactive.resume_last);
+        assert_eq!(
+            interactive.resume_session_id.as_deref(),
+            Some("123e4567-e89b-12d3-a456-426614174000")
+        );
+    }
+
+    #[test]
+    fn resume_merges_dangerously_bypass_flag() {
+        let interactive = finalize_from_args(
+            [
+                "codex",
+                "resume",
+                "--dangerously-bypass-approvals-and-sandbox",
+            ]
+            .as_ref(),
+        );
+        assert!(interactive.dangerously_bypass_approvals_and_sandbox);
+        assert!(interactive.resume_picker);
+        assert!(!interactive.resume_last);
+        assert_eq!(interactive.resume_session_id, None);
+    }
+}
+
+impl ResumeCommand {
+    fn normalize(&mut self) -> Result<(), ClapError> {
+        if self.last {
+            if let Some(value) = self.session_id.take() {
+                if Self::looks_like_session_id(&value) {
+                    return Err(ClapError::raw(
+                        ClapErrorKind::ArgumentConflict,
+                        "The argument '--last' cannot be used with '[SESSION_ID]'",
+                    ));
+                }
+                if let Some(existing) = &mut self.config_overrides.prompt {
+                    if !existing.is_empty() {
+                        existing.push(' ');
+                    }
+                    existing.push_str(&value);
+                } else {
+                    self.config_overrides.prompt = Some(value);
+                }
+            }
+            return Ok(());
+        }
+
+        if self.session_id.is_some() {
+            return Ok(());
+        }
+
+        if let Some(prompt) = self.config_overrides.prompt.take() {
+            if Self::looks_like_session_id(&prompt) {
+                self.session_id = Some(prompt);
+            } else {
+                self.config_overrides.prompt = Some(prompt);
+            }
+        }
+
+        Ok(())
+    }
+
+    fn looks_like_session_id(value: &str) -> bool {
+        Uuid::parse_str(value).is_ok()
+    }
+}

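One disambiguation case the suite above leaves implicit: a positional argument that does not parse as a UUID stays a prompt, and the picker still opens. A sketch in the same style as the existing tests (not part of the diff):

#[test]
fn resume_free_text_stays_a_prompt() {
    // Non-UUID positional text is kept as the prompt rather than being
    // mistaken for a session id, so the picker is still shown.
    let interactive = finalize_from_args(["codex", "resume", "ship it"].as_ref());
    assert_eq!(interactive.prompt.as_deref(), Some("ship it"));
    assert_eq!(interactive.resume_session_id, None);
    assert!(interactive.resume_picker);
}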
codex-rs/cli/src/mcp_cmd.rs (new file, +453 lines)
@@ -0,0 +1,453 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use anyhow::bail;
|
||||
use codex_common::CliConfigOverrides;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::config::find_codex_home;
|
||||
use codex_core::config::load_global_mcp_servers;
|
||||
use codex_core::config::write_global_mcp_servers;
|
||||
use codex_core::config_types::McpServerConfig;
|
||||
use codex_core::config_types::McpServerTransportConfig;
|
||||
|
||||
/// [experimental] Launch Codex as an MCP server or manage configured MCP servers.
|
||||
///
|
||||
/// Subcommands:
|
||||
/// - `serve` — run the MCP server on stdio
|
||||
/// - `list` — list configured servers (with `--json`)
|
||||
/// - `get` — show a single server (with `--json`)
|
||||
/// - `add` — add a server launcher entry to `~/.codex/config.toml`
|
||||
/// - `remove` — delete a server entry
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct McpCli {
|
||||
#[clap(flatten)]
|
||||
pub config_overrides: CliConfigOverrides,
|
||||
|
||||
#[command(subcommand)]
|
||||
pub subcommand: McpSubcommand,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
pub enum McpSubcommand {
|
||||
/// [experimental] List configured MCP servers.
|
||||
List(ListArgs),
|
||||
|
||||
/// [experimental] Show details for a configured MCP server.
|
||||
Get(GetArgs),
|
||||
|
||||
/// [experimental] Add a global MCP server entry.
|
||||
Add(AddArgs),
|
||||
|
||||
/// [experimental] Remove a global MCP server entry.
|
||||
Remove(RemoveArgs),
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct ListArgs {
|
||||
/// Output the configured servers as JSON.
|
||||
#[arg(long)]
|
||||
pub json: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct GetArgs {
|
||||
/// Name of the MCP server to display.
|
||||
pub name: String,
|
||||
|
||||
/// Output the server configuration as JSON.
|
||||
#[arg(long)]
|
||||
pub json: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct AddArgs {
|
||||
/// Name for the MCP server configuration.
|
||||
pub name: String,
|
||||
|
||||
/// Environment variables to set when launching the server.
|
||||
#[arg(long, value_parser = parse_env_pair, value_name = "KEY=VALUE")]
|
||||
pub env: Vec<(String, String)>,
|
||||
|
||||
/// Command to launch the MCP server.
|
||||
#[arg(trailing_var_arg = true, num_args = 1..)]
|
||||
pub command: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, clap::Parser)]
|
||||
pub struct RemoveArgs {
|
||||
/// Name of the MCP server configuration to remove.
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
impl McpCli {
|
||||
pub async fn run(self) -> Result<()> {
|
||||
let McpCli {
|
||||
config_overrides,
|
||||
subcommand,
|
||||
} = self;
|
||||
|
||||
match subcommand {
|
||||
McpSubcommand::List(args) => {
|
||||
run_list(&config_overrides, args)?;
|
||||
}
|
||||
McpSubcommand::Get(args) => {
|
||||
run_get(&config_overrides, args)?;
|
||||
}
|
||||
McpSubcommand::Add(args) => {
|
||||
run_add(&config_overrides, args)?;
|
||||
}
|
||||
McpSubcommand::Remove(args) => {
|
||||
run_remove(&config_overrides, args)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<()> {
|
||||
// Validate any provided overrides even though they are not currently applied.
|
||||
config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
|
||||
let AddArgs { name, env, command } = add_args;
|
||||
|
||||
validate_server_name(&name)?;
|
||||
|
||||
let mut command_parts = command.into_iter();
|
||||
let command_bin = command_parts
|
||||
.next()
|
||||
.ok_or_else(|| anyhow!("command is required"))?;
|
||||
let command_args: Vec<String> = command_parts.collect();
|
||||
|
||||
let env_map = if env.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let mut map = HashMap::new();
|
||||
for (key, value) in env {
|
||||
map.insert(key, value);
|
||||
}
|
||||
Some(map)
|
||||
};
|
||||
|
||||
let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
|
||||
let mut servers = load_global_mcp_servers(&codex_home)
|
||||
.with_context(|| format!("failed to load MCP servers from {}", codex_home.display()))?;
|
||||
|
||||
let new_entry = McpServerConfig {
|
||||
transport: McpServerTransportConfig::Stdio {
|
||||
command: command_bin,
|
||||
args: command_args,
|
||||
env: env_map,
|
||||
},
|
||||
startup_timeout_sec: None,
|
||||
tool_timeout_sec: None,
|
||||
};
|
||||
|
||||
servers.insert(name.clone(), new_entry);
|
||||
|
||||
write_global_mcp_servers(&codex_home, &servers)
|
||||
.with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;
|
||||
|
||||
println!("Added global MCP server '{name}'.");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_remove(config_overrides: &CliConfigOverrides, remove_args: RemoveArgs) -> Result<()> {
|
||||
config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
|
||||
let RemoveArgs { name } = remove_args;
|
||||
|
||||
validate_server_name(&name)?;
|
||||
|
||||
let codex_home = find_codex_home().context("failed to resolve CODEX_HOME")?;
|
||||
let mut servers = load_global_mcp_servers(&codex_home)
|
||||
.with_context(|| format!("failed to load MCP servers from {}", codex_home.display()))?;
|
||||
|
||||
let removed = servers.remove(&name).is_some();
|
||||
|
||||
if removed {
|
||||
write_global_mcp_servers(&codex_home, &servers)
|
||||
.with_context(|| format!("failed to write MCP servers to {}", codex_home.display()))?;
|
||||
}
|
||||
|
||||
if removed {
|
||||
println!("Removed global MCP server '{name}'.");
|
||||
} else {
|
||||
println!("No MCP server named '{name}' found.");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Result<()> {
|
||||
let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
|
||||
let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
|
||||
.context("failed to load configuration")?;
|
||||
|
||||
let mut entries: Vec<_> = config.mcp_servers.iter().collect();
|
||||
entries.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
|
||||
if list_args.json {
|
||||
let json_entries: Vec<_> = entries
|
||||
.into_iter()
|
||||
.map(|(name, cfg)| {
|
||||
let transport = match &cfg.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
|
||||
"type": "stdio",
|
||||
"command": command,
|
||||
"args": args,
|
||||
"env": env,
|
||||
}),
|
||||
McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
|
||||
serde_json::json!({
|
||||
"type": "streamable_http",
|
||||
"url": url,
|
||||
"bearer_token": bearer_token,
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
serde_json::json!({
|
||||
"name": name,
|
||||
"transport": transport,
|
||||
"startup_timeout_sec": cfg
|
||||
.startup_timeout_sec
|
||||
.map(|timeout| timeout.as_secs_f64()),
|
||||
"tool_timeout_sec": cfg
|
||||
.tool_timeout_sec
|
||||
.map(|timeout| timeout.as_secs_f64()),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
let output = serde_json::to_string_pretty(&json_entries)?;
|
||||
println!("{output}");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if entries.is_empty() {
|
||||
println!("No MCP servers configured yet. Try `codex mcp add my-tool -- my-command`.");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut stdio_rows: Vec<[String; 4]> = Vec::new();
|
||||
let mut http_rows: Vec<[String; 3]> = Vec::new();
|
||||
|
||||
for (name, cfg) in entries {
|
||||
match &cfg.transport {
|
||||
McpServerTransportConfig::Stdio { command, args, env } => {
|
||||
let args_display = if args.is_empty() {
|
||||
"-".to_string()
|
||||
} else {
|
||||
args.join(" ")
|
||||
};
|
||||
let env_display = match env.as_ref() {
|
||||
None => "-".to_string(),
|
||||
Some(map) if map.is_empty() => "-".to_string(),
|
||||
Some(map) => {
|
||||
let mut pairs: Vec<_> = map.iter().collect();
|
||||
pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
|
||||
pairs
|
||||
.into_iter()
|
||||
.map(|(k, v)| format!("{k}={v}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
}
|
||||
};
|
||||
                stdio_rows.push([name.clone(), command.clone(), args_display, env_display]);
            }
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
                let has_bearer = if bearer_token.is_some() {
                    "True"
                } else {
                    "False"
                };
                http_rows.push([name.clone(), url.clone(), has_bearer.into()]);
            }
        }
    }

    if !stdio_rows.is_empty() {
        let mut widths = ["Name".len(), "Command".len(), "Args".len(), "Env".len()];
        for row in &stdio_rows {
            for (i, cell) in row.iter().enumerate() {
                widths[i] = widths[i].max(cell.len());
            }
        }

        println!(
            "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
            "Name",
            "Command",
            "Args",
            "Env",
            name_w = widths[0],
            cmd_w = widths[1],
            args_w = widths[2],
            env_w = widths[3],
        );

        for row in &stdio_rows {
            println!(
                "{:<name_w$} {:<cmd_w$} {:<args_w$} {:<env_w$}",
                row[0],
                row[1],
                row[2],
                row[3],
                name_w = widths[0],
                cmd_w = widths[1],
                args_w = widths[2],
                env_w = widths[3],
            );
        }
    }

    if !stdio_rows.is_empty() && !http_rows.is_empty() {
        println!();
    }

    if !http_rows.is_empty() {
        let mut widths = ["Name".len(), "Url".len(), "Has Bearer Token".len()];
        for row in &http_rows {
            for (i, cell) in row.iter().enumerate() {
                widths[i] = widths[i].max(cell.len());
            }
        }

        println!(
            "{:<name_w$} {:<url_w$} {:<token_w$}",
            "Name",
            "Url",
            "Has Bearer Token",
            name_w = widths[0],
            url_w = widths[1],
            token_w = widths[2],
        );

        for row in &http_rows {
            println!(
                "{:<name_w$} {:<url_w$} {:<token_w$}",
                row[0],
                row[1],
                row[2],
                name_w = widths[0],
                url_w = widths[1],
                token_w = widths[2],
            );
        }
    }

    Ok(())
}

fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<()> {
    let overrides = config_overrides.parse_overrides().map_err(|e| anyhow!(e))?;
    let config = Config::load_with_cli_overrides(overrides, ConfigOverrides::default())
        .context("failed to load configuration")?;

    let Some(server) = config.mcp_servers.get(&get_args.name) else {
        bail!("No MCP server named '{name}' found.", name = get_args.name);
    };

    if get_args.json {
        let transport = match &server.transport {
            McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({
                "type": "stdio",
                "command": command,
                "args": args,
                "env": env,
            }),
            McpServerTransportConfig::StreamableHttp { url, bearer_token } => serde_json::json!({
                "type": "streamable_http",
                "url": url,
                "bearer_token": bearer_token,
            }),
        };
        let output = serde_json::to_string_pretty(&serde_json::json!({
            "name": get_args.name,
            "transport": transport,
            "startup_timeout_sec": server
                .startup_timeout_sec
                .map(|timeout| timeout.as_secs_f64()),
            "tool_timeout_sec": server
                .tool_timeout_sec
                .map(|timeout| timeout.as_secs_f64()),
        }))?;
        println!("{output}");
        return Ok(());
    }

    println!("{}", get_args.name);
    match &server.transport {
        McpServerTransportConfig::Stdio { command, args, env } => {
            println!(" transport: stdio");
            println!(" command: {command}");
            let args_display = if args.is_empty() {
                "-".to_string()
            } else {
                args.join(" ")
            };
            println!(" args: {args_display}");
            let env_display = match env.as_ref() {
                None => "-".to_string(),
                Some(map) if map.is_empty() => "-".to_string(),
                Some(map) => {
                    let mut pairs: Vec<_> = map.iter().collect();
                    pairs.sort_by(|(a, _), (b, _)| a.cmp(b));
                    pairs
                        .into_iter()
                        .map(|(k, v)| format!("{k}={v}"))
                        .collect::<Vec<_>>()
                        .join(", ")
                }
            };
            println!(" env: {env_display}");
        }
        McpServerTransportConfig::StreamableHttp { url, bearer_token } => {
            println!(" transport: streamable_http");
            println!(" url: {url}");
            let bearer = bearer_token.as_deref().unwrap_or("-");
            println!(" bearer_token: {bearer}");
        }
    }
    if let Some(timeout) = server.startup_timeout_sec {
        println!(" startup_timeout_sec: {}", timeout.as_secs_f64());
    }
    if let Some(timeout) = server.tool_timeout_sec {
        println!(" tool_timeout_sec: {}", timeout.as_secs_f64());
    }
    println!(" remove: codex mcp remove {}", get_args.name);

    Ok(())
}

fn parse_env_pair(raw: &str) -> Result<(String, String), String> {
    let mut parts = raw.splitn(2, '=');
    let key = parts
        .next()
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .ok_or_else(|| "environment entries must be in KEY=VALUE form".to_string())?;
    let value = parts
        .next()
        .map(str::to_string)
        .ok_or_else(|| "environment entries must be in KEY=VALUE form".to_string())?;

    Ok((key.to_string(), value))
}
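
// Editor's note: a minimal sketch of the KEY=VALUE contract above, with cases
// inferred from the code itself (not taken from the repository's own tests).
// The key is trimmed and must be non-empty; the value is kept verbatim and may
// be empty as long as an '=' separator is present.
//
//     assert_eq!(parse_env_pair("FOO=bar"), Ok(("FOO".to_string(), "bar".to_string())));
//     assert_eq!(parse_env_pair("FOO="), Ok(("FOO".to_string(), String::new())));
//     assert!(parse_env_pair("FOO").is_err());   // no '=' separator
//     assert!(parse_env_pair("=bar").is_err());  // empty key after trimming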

fn validate_server_name(name: &str) -> Result<()> {
    let is_valid = !name.is_empty()
        && name
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_');

    if is_valid {
        Ok(())
    } else {
        bail!("invalid server name '{name}' (use letters, numbers, '-', '_')");
    }
}
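
// Editor's note: illustrative only — names accepted or rejected by the
// predicate above.
//
//     assert!(validate_server_name("docs-server_2").is_ok());
//     assert!(validate_server_name("").is_err());             // empty
//     assert!(validate_server_name("docs server").is_err());  // whitespace
//     assert!(validate_server_name("docs/server").is_err());  // '/'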
codex-rs/cli/tests/mcp_add_remove.rs (new file, 95 lines)
@@ -0,0 +1,95 @@
use std::path::Path;

use anyhow::Result;
use codex_core::config::load_global_mcp_servers;
use codex_core::config_types::McpServerTransportConfig;
use predicates::str::contains;
use pretty_assertions::assert_eq;
use tempfile::TempDir;

fn codex_command(codex_home: &Path) -> Result<assert_cmd::Command> {
    let mut cmd = assert_cmd::Command::cargo_bin("codex")?;
    cmd.env("CODEX_HOME", codex_home);
    Ok(cmd)
}

#[test]
fn add_and_remove_server_updates_global_config() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut add_cmd = codex_command(codex_home.path())?;
    add_cmd
        .args(["mcp", "add", "docs", "--", "echo", "hello"])
        .assert()
        .success()
        .stdout(contains("Added global MCP server 'docs'."));

    let servers = load_global_mcp_servers(codex_home.path())?;
    assert_eq!(servers.len(), 1);
    let docs = servers.get("docs").expect("server should exist");
    match &docs.transport {
        McpServerTransportConfig::Stdio { command, args, env } => {
            assert_eq!(command, "echo");
            assert_eq!(args, &vec!["hello".to_string()]);
            assert!(env.is_none());
        }
        other => panic!("unexpected transport: {other:?}"),
    }

    let mut remove_cmd = codex_command(codex_home.path())?;
    remove_cmd
        .args(["mcp", "remove", "docs"])
        .assert()
        .success()
        .stdout(contains("Removed global MCP server 'docs'."));

    let servers = load_global_mcp_servers(codex_home.path())?;
    assert!(servers.is_empty());

    let mut remove_again_cmd = codex_command(codex_home.path())?;
    remove_again_cmd
        .args(["mcp", "remove", "docs"])
        .assert()
        .success()
        .stdout(contains("No MCP server named 'docs' found."));

    let servers = load_global_mcp_servers(codex_home.path())?;
    assert!(servers.is_empty());

    Ok(())
}

#[test]
fn add_with_env_preserves_key_order_and_values() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut add_cmd = codex_command(codex_home.path())?;
    add_cmd
        .args([
            "mcp",
            "add",
            "envy",
            "--env",
            "FOO=bar",
            "--env",
            "ALPHA=beta",
            "--",
            "python",
            "server.py",
        ])
        .assert()
        .success();

    let servers = load_global_mcp_servers(codex_home.path())?;
    let envy = servers.get("envy").expect("server should exist");
    let env = match &envy.transport {
        McpServerTransportConfig::Stdio { env: Some(env), .. } => env,
        other => panic!("unexpected transport: {other:?}"),
    };

    assert_eq!(env.len(), 2);
    assert_eq!(env.get("FOO"), Some(&"bar".to_string()));
    assert_eq!(env.get("ALPHA"), Some(&"beta".to_string()));

    Ok(())
}
codex-rs/cli/tests/mcp_list.rs (new file, 104 lines)
@@ -0,0 +1,104 @@
use std::path::Path;

use anyhow::Result;
use predicates::str::contains;
use pretty_assertions::assert_eq;
use serde_json::Value as JsonValue;
use serde_json::json;
use tempfile::TempDir;

fn codex_command(codex_home: &Path) -> Result<assert_cmd::Command> {
    let mut cmd = assert_cmd::Command::cargo_bin("codex")?;
    cmd.env("CODEX_HOME", codex_home);
    Ok(cmd)
}

#[test]
fn list_shows_empty_state() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut cmd = codex_command(codex_home.path())?;
    let output = cmd.args(["mcp", "list"]).output()?;
    assert!(output.status.success());
    let stdout = String::from_utf8(output.stdout)?;
    assert!(stdout.contains("No MCP servers configured yet."));

    Ok(())
}

#[test]
fn list_and_get_render_expected_output() -> Result<()> {
    let codex_home = TempDir::new()?;

    let mut add = codex_command(codex_home.path())?;
    add.args([
        "mcp",
        "add",
        "docs",
        "--env",
        "TOKEN=secret",
        "--",
        "docs-server",
        "--port",
        "4000",
    ])
    .assert()
    .success();

    let mut list_cmd = codex_command(codex_home.path())?;
    let list_output = list_cmd.args(["mcp", "list"]).output()?;
    assert!(list_output.status.success());
    let stdout = String::from_utf8(list_output.stdout)?;
    assert!(stdout.contains("Name"));
    assert!(stdout.contains("docs"));
    assert!(stdout.contains("docs-server"));
    assert!(stdout.contains("TOKEN=secret"));

    let mut list_json_cmd = codex_command(codex_home.path())?;
    let json_output = list_json_cmd.args(["mcp", "list", "--json"]).output()?;
    assert!(json_output.status.success());
    let stdout = String::from_utf8(json_output.stdout)?;
    let parsed: JsonValue = serde_json::from_str(&stdout)?;
    assert_eq!(
        parsed,
        json!([
            {
                "name": "docs",
                "transport": {
                    "type": "stdio",
                    "command": "docs-server",
                    "args": [
                        "--port",
                        "4000"
                    ],
                    "env": {
                        "TOKEN": "secret"
                    }
                },
                "startup_timeout_sec": null,
                "tool_timeout_sec": null
            }
        ])
    );

    let mut get_cmd = codex_command(codex_home.path())?;
    let get_output = get_cmd.args(["mcp", "get", "docs"]).output()?;
    assert!(get_output.status.success());
    let stdout = String::from_utf8(get_output.stdout)?;
    assert!(stdout.contains("docs"));
    assert!(stdout.contains("transport: stdio"));
    assert!(stdout.contains("command: docs-server"));
    assert!(stdout.contains("args: --port 4000"));
    assert!(stdout.contains("env: TOKEN=secret"));
    assert!(stdout.contains("remove: codex mcp remove docs"));

    let mut get_json_cmd = codex_command(codex_home.path())?;
    get_json_cmd
        .args(["mcp", "get", "docs", "--json"])
        .assert()
        .success()
        .stdout(contains("\"name\": \"docs\""));

    Ok(())
}
codex-rs/cloud-tasks-client/Cargo.toml (new file, 27 lines)
@@ -0,0 +1,27 @@
[package]
name = "codex-cloud-tasks-client"
version = { workspace = true }
edition = "2024"

[lib]
name = "codex_cloud_tasks_client"
path = "src/lib.rs"

[lints]
workspace = true

[features]
default = ["online"]
online = ["dep:codex-backend-client"]
mock = []

[dependencies]
anyhow = "1"
async-trait = "0.1"
chrono = { version = "0.4", features = ["serde"] }
diffy = "0.4.2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "2.0.12"
codex-backend-client = { path = "../backend-client", optional = true }
codex-git-apply = { path = "../git-apply" }
codex-rs/cloud-tasks-client/src/api.rs (new file, 158 lines)
@@ -0,0 +1,158 @@
use chrono::DateTime;
use chrono::Utc;
use serde::Deserialize;
use serde::Serialize;

pub type Result<T> = std::result::Result<T, CloudTaskError>;

#[derive(Debug, thiserror::Error)]
pub enum CloudTaskError {
    #[error("unimplemented: {0}")]
    Unimplemented(&'static str),
    #[error("http error: {0}")]
    Http(String),
    #[error("io error: {0}")]
    Io(String),
    #[error("{0}")]
    Msg(String),
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct TaskId(pub String);

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum TaskStatus {
    Pending,
    Ready,
    Applied,
    Error,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct TaskSummary {
    pub id: TaskId,
    pub title: String,
    pub status: TaskStatus,
    pub updated_at: DateTime<Utc>,
    /// Backend environment identifier (when available)
    pub environment_id: Option<String>,
    /// Human-friendly environment label (when available)
    pub environment_label: Option<String>,
    pub summary: DiffSummary,
    /// True when the backend reports this task as a code review.
    #[serde(default)]
    pub is_review: bool,
    /// Number of assistant attempts (best-of-N), when reported by the backend.
    #[serde(default)]
    pub attempt_total: Option<usize>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub enum AttemptStatus {
    Pending,
    InProgress,
    Completed,
    Failed,
    Cancelled,
    #[default]
    Unknown,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TurnAttempt {
    pub turn_id: String,
    pub attempt_placement: Option<i64>,
    pub created_at: Option<DateTime<Utc>>,
    pub status: AttemptStatus,
    pub diff: Option<String>,
    pub messages: Vec<String>,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ApplyStatus {
    Success,
    Partial,
    Error,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ApplyOutcome {
    pub applied: bool,
    pub status: ApplyStatus,
    pub message: String,
    #[serde(default)]
    pub skipped_paths: Vec<String>,
    #[serde(default)]
    pub conflict_paths: Vec<String>,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CreatedTask {
    pub id: TaskId,
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct DiffSummary {
    pub files_changed: usize,
    pub lines_added: usize,
    pub lines_removed: usize,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TaskText {
    pub prompt: Option<String>,
    pub messages: Vec<String>,
    pub turn_id: Option<String>,
    pub sibling_turn_ids: Vec<String>,
    pub attempt_placement: Option<i64>,
    pub attempt_status: AttemptStatus,
}

impl Default for TaskText {
    fn default() -> Self {
        Self {
            prompt: None,
            messages: Vec::new(),
            turn_id: None,
            sibling_turn_ids: Vec::new(),
            attempt_placement: None,
            attempt_status: AttemptStatus::Unknown,
        }
    }
}

#[async_trait::async_trait]
pub trait CloudBackend: Send + Sync {
    async fn list_tasks(&self, env: Option<&str>) -> Result<Vec<TaskSummary>>;
    async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>>;
    /// Return assistant output messages (no diff) when available.
    async fn get_task_messages(&self, id: TaskId) -> Result<Vec<String>>;
    /// Return the creating prompt and assistant messages (when available).
    async fn get_task_text(&self, id: TaskId) -> Result<TaskText>;
    /// Return any sibling attempts (best-of-N) for the given assistant turn.
    async fn list_sibling_attempts(
        &self,
        task: TaskId,
        turn_id: String,
    ) -> Result<Vec<TurnAttempt>>;
    /// Dry-run apply (preflight) that validates whether the patch would apply cleanly.
    /// Never modifies the working tree. When `diff_override` is supplied, the provided diff is
    /// used instead of re-fetching the task details so callers can apply alternate attempts.
    async fn apply_task_preflight(
        &self,
        id: TaskId,
        diff_override: Option<String>,
    ) -> Result<ApplyOutcome>;
    async fn apply_task(&self, id: TaskId, diff_override: Option<String>) -> Result<ApplyOutcome>;
    async fn create_task(
        &self,
        env_id: &str,
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        best_of_n: usize,
    ) -> Result<CreatedTask>;
}
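
// Editor's note: a quick illustration of the serde attributes above (inferred
// from the derives, not from the repository's own tests): `#[serde(transparent)]`
// makes TaskId serialize as a bare string, and the rename rules lower-case the
// single-word enum variants.
//
//     assert_eq!(serde_json::to_string(&TaskId("T-1".into())).unwrap(), "\"T-1\"");
//     assert_eq!(serde_json::to_string(&TaskStatus::Ready).unwrap(), "\"ready\"");
//     assert_eq!(serde_json::to_string(&ApplyStatus::Partial).unwrap(), "\"partial\"");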
codex-rs/cloud-tasks-client/src/http.rs (new file, 769 lines)
@@ -0,0 +1,769 @@
use crate::ApplyOutcome;
use crate::ApplyStatus;
use crate::AttemptStatus;
use crate::CloudBackend;
use crate::CloudTaskError;
use crate::DiffSummary;
use crate::Result;
use crate::TaskId;
use crate::TaskStatus;
use crate::TaskSummary;
use crate::TurnAttempt;
use crate::api::TaskText;
use chrono::DateTime;
use chrono::Utc;

use codex_backend_client as backend;
use codex_backend_client::CodeTaskDetailsResponseExt;

#[derive(Clone)]
pub struct HttpClient {
    pub base_url: String,
    backend: backend::Client,
}

impl HttpClient {
    pub fn new(base_url: impl Into<String>) -> anyhow::Result<Self> {
        let base_url = base_url.into();
        let backend = backend::Client::new(base_url.clone())?;
        Ok(Self { base_url, backend })
    }

    pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
        self.backend = self.backend.clone().with_bearer_token(token);
        self
    }

    pub fn with_user_agent(mut self, ua: impl Into<String>) -> Self {
        self.backend = self.backend.clone().with_user_agent(ua);
        self
    }

    pub fn with_chatgpt_account_id(mut self, account_id: impl Into<String>) -> Self {
        self.backend = self.backend.clone().with_chatgpt_account_id(account_id);
        self
    }

    fn tasks_api(&self) -> api::Tasks<'_> {
        api::Tasks::new(self)
    }

    fn attempts_api(&self) -> api::Attempts<'_> {
        api::Attempts::new(self)
    }

    fn apply_api(&self) -> api::Apply<'_> {
        api::Apply::new(self)
    }
}

#[async_trait::async_trait]
impl CloudBackend for HttpClient {
    async fn list_tasks(&self, env: Option<&str>) -> Result<Vec<TaskSummary>> {
        self.tasks_api().list(env).await
    }

    async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>> {
        self.tasks_api().diff(id).await
    }

    async fn get_task_messages(&self, id: TaskId) -> Result<Vec<String>> {
        self.tasks_api().messages(id).await
    }

    async fn get_task_text(&self, id: TaskId) -> Result<TaskText> {
        self.tasks_api().task_text(id).await
    }

    async fn list_sibling_attempts(
        &self,
        task: TaskId,
        turn_id: String,
    ) -> Result<Vec<TurnAttempt>> {
        self.attempts_api().list(task, turn_id).await
    }

    async fn apply_task(&self, id: TaskId, diff_override: Option<String>) -> Result<ApplyOutcome> {
        self.apply_api().run(id, diff_override, false).await
    }

    async fn apply_task_preflight(
        &self,
        id: TaskId,
        diff_override: Option<String>,
    ) -> Result<ApplyOutcome> {
        self.apply_api().run(id, diff_override, true).await
    }

    async fn create_task(
        &self,
        env_id: &str,
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        best_of_n: usize,
    ) -> Result<crate::CreatedTask> {
        self.tasks_api()
            .create(env_id, prompt, git_ref, qa_mode, best_of_n)
            .await
    }
}

mod api {
    use super::*;
    use serde_json::Value;
    use std::cmp::Ordering;
    use std::collections::HashMap;

    pub(crate) struct Tasks<'a> {
        base_url: &'a str,
        backend: &'a backend::Client,
    }

    impl<'a> Tasks<'a> {
        pub(crate) fn new(client: &'a HttpClient) -> Self {
            Self {
                base_url: &client.base_url,
                backend: &client.backend,
            }
        }

        pub(crate) async fn list(&self, env: Option<&str>) -> Result<Vec<TaskSummary>> {
            let resp = self
                .backend
                .list_tasks(Some(20), Some("current"), env)
                .await
                .map_err(|e| CloudTaskError::Http(format!("list_tasks failed: {e}")))?;

            let tasks: Vec<TaskSummary> = resp
                .items
                .into_iter()
                .map(map_task_list_item_to_summary)
                .collect();

            append_error_log(&format!(
                "http.list_tasks: env={} items={}",
                env.unwrap_or("<all>"),
                tasks.len()
            ));
            Ok(tasks)
        }

        pub(crate) async fn diff(&self, id: TaskId) -> Result<Option<String>> {
            let (details, body, ct) = self
                .details_with_body(&id.0)
                .await
                .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
            if let Some(diff) = details.unified_diff() {
                return Ok(Some(diff));
            }
            let _ = (body, ct);
            Ok(None)
        }

        pub(crate) async fn messages(&self, id: TaskId) -> Result<Vec<String>> {
            let (details, body, ct) = self
                .details_with_body(&id.0)
                .await
                .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;

            let mut msgs = details.assistant_text_messages();
            if msgs.is_empty() {
                msgs.extend(extract_assistant_messages_from_body(&body));
            }
            if !msgs.is_empty() {
                return Ok(msgs);
            }
            if let Some(err) = details.assistant_error_message() {
                return Ok(vec![format!("Task failed: {err}")]);
            }

            let url = match details_path(self.base_url, &id.0) {
                Some(url) => url,
                None => format!("{}/api/codex/tasks/{}", self.base_url, id.0),
            };
            Err(CloudTaskError::Http(format!(
                "No assistant text messages in response. GET {url}; content-type={ct}; body={body}"
            )))
        }

        pub(crate) async fn task_text(&self, id: TaskId) -> Result<TaskText> {
            let (details, body, _ct) = self
                .details_with_body(&id.0)
                .await
                .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?;
            let prompt = details.user_text_prompt();
            let mut messages = details.assistant_text_messages();
            if messages.is_empty() {
                messages.extend(extract_assistant_messages_from_body(&body));
            }
            let assistant_turn = details.current_assistant_turn.as_ref();
            let turn_id = assistant_turn.and_then(|turn| turn.id.clone());
            let sibling_turn_ids = assistant_turn
                .map(|turn| turn.sibling_turn_ids.clone())
                .unwrap_or_default();
            let attempt_placement = assistant_turn.and_then(|turn| turn.attempt_placement);
            let attempt_status = attempt_status_from_str(
                assistant_turn.and_then(|turn| turn.turn_status.as_deref()),
            );
            Ok(TaskText {
                prompt,
                messages,
                turn_id,
                sibling_turn_ids,
                attempt_placement,
                attempt_status,
            })
        }

        pub(crate) async fn create(
            &self,
            env_id: &str,
            prompt: &str,
            git_ref: &str,
            qa_mode: bool,
            best_of_n: usize,
        ) -> Result<crate::CreatedTask> {
            let mut input_items: Vec<serde_json::Value> = Vec::new();
            input_items.push(serde_json::json!({
                "type": "message",
                "role": "user",
                "content": [{ "content_type": "text", "text": prompt }]
            }));

            if let Ok(diff) = std::env::var("CODEX_STARTING_DIFF")
                && !diff.is_empty()
            {
                input_items.push(serde_json::json!({
                    "type": "pre_apply_patch",
                    "output_diff": { "diff": diff }
                }));
            }

            let mut request_body = serde_json::json!({
                "new_task": {
                    "environment_id": env_id,
                    "branch": git_ref,
                    "run_environment_in_qa_mode": qa_mode,
                },
                "input_items": input_items,
            });

            if best_of_n > 1
                && let Some(obj) = request_body.as_object_mut()
            {
                obj.insert(
                    "metadata".to_string(),
                    serde_json::json!({ "best_of_n": best_of_n }),
                );
            }

            match self.backend.create_task(request_body).await {
                Ok(id) => {
                    append_error_log(&format!(
                        "new_task: created id={id} env={} prompt_chars={}",
                        env_id,
                        prompt.chars().count()
                    ));
                    Ok(crate::CreatedTask { id: TaskId(id) })
                }
                Err(e) => {
                    append_error_log(&format!(
                        "new_task: create failed env={} prompt_chars={}: {}",
                        env_id,
                        prompt.chars().count(),
                        e
                    ));
                    Err(CloudTaskError::Http(format!("create_task failed: {e}")))
                }
            }
        }

        async fn details_with_body(
            &self,
            id: &str,
        ) -> anyhow::Result<(backend::CodeTaskDetailsResponse, String, String)> {
            let (parsed, body, ct) = self.backend.get_task_details_with_body(id).await?;
            Ok((parsed, body, ct))
        }
    }

    pub(crate) struct Attempts<'a> {
        backend: &'a backend::Client,
    }

    impl<'a> Attempts<'a> {
        pub(crate) fn new(client: &'a HttpClient) -> Self {
            Self {
                backend: &client.backend,
            }
        }

        pub(crate) async fn list(&self, task: TaskId, turn_id: String) -> Result<Vec<TurnAttempt>> {
            let resp = self
                .backend
                .list_sibling_turns(&task.0, &turn_id)
                .await
                .map_err(|e| CloudTaskError::Http(format!("list_sibling_turns failed: {e}")))?;

            let mut attempts: Vec<TurnAttempt> = resp
                .sibling_turns
                .iter()
                .filter_map(turn_attempt_from_map)
                .collect();
            attempts.sort_by(compare_attempts);
            Ok(attempts)
        }
    }

    pub(crate) struct Apply<'a> {
        backend: &'a backend::Client,
    }

    impl<'a> Apply<'a> {
        pub(crate) fn new(client: &'a HttpClient) -> Self {
            Self {
                backend: &client.backend,
            }
        }

        pub(crate) async fn run(
            &self,
            task_id: TaskId,
            diff_override: Option<String>,
            preflight: bool,
        ) -> Result<ApplyOutcome> {
            let id = task_id.0.clone();
            let diff = match diff_override {
                Some(diff) => diff,
                None => {
                    let details = self.backend.get_task_details(&id).await.map_err(|e| {
                        CloudTaskError::Http(format!("get_task_details failed: {e}"))
                    })?;
                    details.unified_diff().ok_or_else(|| {
                        CloudTaskError::Msg(format!("No diff available for task {id}"))
                    })?
                }
            };

            if !is_unified_diff(&diff) {
                let summary = summarize_patch_for_logging(&diff);
                let mode = if preflight { "preflight" } else { "apply" };
                append_error_log(&format!(
                    "apply_error: id={id} mode={mode} format=non-unified; {summary}"
                ));
                return Ok(ApplyOutcome {
                    applied: false,
                    status: ApplyStatus::Error,
                    message: "Expected unified git diff; backend returned an incompatible format."
                        .to_string(),
                    skipped_paths: Vec::new(),
                    conflict_paths: Vec::new(),
                });
            }

            let req = codex_git_apply::ApplyGitRequest {
                cwd: std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()),
                diff: diff.clone(),
                revert: false,
                preflight,
            };
            let r = codex_git_apply::apply_git_patch(&req)
                .map_err(|e| CloudTaskError::Io(format!("git apply failed to run: {e}")))?;

            let status = if r.exit_code == 0 {
                ApplyStatus::Success
            } else if !r.applied_paths.is_empty() || !r.conflicted_paths.is_empty() {
                ApplyStatus::Partial
            } else {
                ApplyStatus::Error
            };
            let applied = matches!(status, ApplyStatus::Success) && !preflight;

            let message = if preflight {
                match status {
                    ApplyStatus::Success => {
                        format!("Preflight passed for task {id} (applies cleanly)")
                    }
                    ApplyStatus::Partial => format!(
                        "Preflight: patch does not fully apply for task {id} (applied={}, skipped={}, conflicts={})",
                        r.applied_paths.len(),
                        r.skipped_paths.len(),
                        r.conflicted_paths.len()
                    ),
                    ApplyStatus::Error => format!(
                        "Preflight failed for task {id} (applied={}, skipped={}, conflicts={})",
                        r.applied_paths.len(),
                        r.skipped_paths.len(),
                        r.conflicted_paths.len()
                    ),
                }
            } else {
                match status {
                    ApplyStatus::Success => format!(
                        "Applied task {id} locally ({} files)",
                        r.applied_paths.len()
                    ),
                    ApplyStatus::Partial => format!(
                        "Apply partially succeeded for task {id} (applied={}, skipped={}, conflicts={})",
                        r.applied_paths.len(),
                        r.skipped_paths.len(),
                        r.conflicted_paths.len()
                    ),
                    ApplyStatus::Error => format!(
                        "Apply failed for task {id} (applied={}, skipped={}, conflicts={})",
                        r.applied_paths.len(),
                        r.skipped_paths.len(),
                        r.conflicted_paths.len()
                    ),
                }
            };

            if matches!(status, ApplyStatus::Partial | ApplyStatus::Error)
                || (preflight && !matches!(status, ApplyStatus::Success))
            {
                let mut log = String::new();
                let summary = summarize_patch_for_logging(&diff);
                let mode = if preflight { "preflight" } else { "apply" };
                use std::fmt::Write as _;
                let _ = writeln!(
                    &mut log,
                    "apply_result: mode={} id={} status={:?} applied={} skipped={} conflicts={} cmd={}",
                    mode,
                    id,
                    status,
                    r.applied_paths.len(),
                    r.skipped_paths.len(),
                    r.conflicted_paths.len(),
                    r.cmd_for_log
                );
                let _ = writeln!(
                    &mut log,
                    "stdout_tail=\n{}\nstderr_tail=\n{}",
                    tail(&r.stdout, 2000),
                    tail(&r.stderr, 2000)
                );
                let _ = writeln!(&mut log, "{summary}");
                let _ = writeln!(
                    &mut log,
                    "----- PATCH BEGIN -----\n{diff}\n----- PATCH END -----"
                );
                append_error_log(&log);
            }

            Ok(ApplyOutcome {
                applied,
                status,
                message,
                skipped_paths: r.skipped_paths,
                conflict_paths: r.conflicted_paths,
            })
        }
    }

    fn details_path(base_url: &str, id: &str) -> Option<String> {
        if base_url.contains("/backend-api") {
            Some(format!("{base_url}/wham/tasks/{id}"))
        } else if base_url.contains("/api/codex") {
            Some(format!("{base_url}/tasks/{id}"))
        } else {
            None
        }
    }
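
    // Editor's note: illustrative mapping only, inferred from the branch logic
    // above (the hosts are placeholders, not repository documentation):
    //
    //     details_path("https://example.com/backend-api", "T-1")
    //         => Some("https://example.com/backend-api/wham/tasks/T-1")
    //     details_path("https://example.com/api/codex", "T-1")
    //         => Some("https://example.com/api/codex/tasks/T-1")
    //     details_path("https://example.com", "T-1") => None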

    fn extract_assistant_messages_from_body(body: &str) -> Vec<String> {
        let mut msgs = Vec::new();
        if let Ok(full) = serde_json::from_str::<serde_json::Value>(body)
            && let Some(arr) = full
                .get("current_assistant_turn")
                .and_then(|v| v.get("worklog"))
                .and_then(|v| v.get("messages"))
                .and_then(|v| v.as_array())
        {
            for m in arr {
                let is_assistant = m
                    .get("author")
                    .and_then(|a| a.get("role"))
                    .and_then(|r| r.as_str())
                    == Some("assistant");
                if !is_assistant {
                    continue;
                }
                if let Some(parts) = m
                    .get("content")
                    .and_then(|c| c.get("parts"))
                    .and_then(|p| p.as_array())
                {
                    for p in parts {
                        if let Some(s) = p.as_str() {
                            if !s.is_empty() {
                                msgs.push(s.to_string());
                            }
                            continue;
                        }
                        if let Some(obj) = p.as_object()
                            && obj.get("content_type").and_then(|t| t.as_str()) == Some("text")
                            && let Some(txt) = obj.get("text").and_then(|t| t.as_str())
                        {
                            msgs.push(txt.to_string());
                        }
                    }
                }
            }
        }
        msgs
    }

    fn turn_attempt_from_map(turn: &HashMap<String, Value>) -> Option<TurnAttempt> {
        let turn_id = turn.get("id").and_then(Value::as_str)?.to_string();
        let attempt_placement = turn.get("attempt_placement").and_then(Value::as_i64);
        let created_at = parse_timestamp_value(turn.get("created_at"));
        let status = attempt_status_from_str(turn.get("turn_status").and_then(Value::as_str));
        let diff = extract_diff_from_turn(turn);
        let messages = extract_assistant_messages_from_turn(turn);
        Some(TurnAttempt {
            turn_id,
            attempt_placement,
            created_at,
            status,
            diff,
            messages,
        })
    }

    fn compare_attempts(a: &TurnAttempt, b: &TurnAttempt) -> Ordering {
        match (a.attempt_placement, b.attempt_placement) {
            (Some(lhs), Some(rhs)) => lhs.cmp(&rhs),
            (Some(_), None) => Ordering::Less,
            (None, Some(_)) => Ordering::Greater,
            (None, None) => match (a.created_at, b.created_at) {
                (Some(lhs), Some(rhs)) => lhs.cmp(&rhs),
                (Some(_), None) => Ordering::Less,
                (None, Some(_)) => Ordering::Greater,
                (None, None) => a.turn_id.cmp(&b.turn_id),
            },
        }
    }
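
    // Editor's note: the ordering above is a three-level fallback — explicit
    // attempt_placement first, then created_at, then turn_id — and at each
    // level an attempt that carries a value sorts before one that lacks it.
    // Illustrative: placement Some(0) < Some(1) < None, and among the
    // placement-None attempts the earlier created_at wins.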

    fn extract_diff_from_turn(turn: &HashMap<String, Value>) -> Option<String> {
        let items = turn.get("output_items").and_then(Value::as_array)?;
        for item in items {
            match item.get("type").and_then(Value::as_str) {
                Some("output_diff") => {
                    if let Some(diff) = item.get("diff").and_then(Value::as_str)
                        && !diff.is_empty()
                    {
                        return Some(diff.to_string());
                    }
                }
                Some("pr") => {
                    if let Some(diff) = item
                        .get("output_diff")
                        .and_then(Value::as_object)
                        .and_then(|od| od.get("diff"))
                        .and_then(Value::as_str)
                        && !diff.is_empty()
                    {
                        return Some(diff.to_string());
                    }
                }
                _ => {}
            }
        }
        None
    }

    fn extract_assistant_messages_from_turn(turn: &HashMap<String, Value>) -> Vec<String> {
        let mut msgs = Vec::new();
        if let Some(items) = turn.get("output_items").and_then(Value::as_array) {
            for item in items {
                if item.get("type").and_then(Value::as_str) != Some("message") {
                    continue;
                }
                if let Some(content) = item.get("content").and_then(Value::as_array) {
                    for part in content {
                        if part.get("content_type").and_then(Value::as_str) == Some("text")
                            && let Some(txt) = part.get("text").and_then(Value::as_str)
                            && !txt.is_empty()
                        {
                            msgs.push(txt.to_string());
                        }
                    }
                }
            }
        }
        msgs
    }

    fn attempt_status_from_str(raw: Option<&str>) -> AttemptStatus {
        match raw.unwrap_or_default() {
            "failed" => AttemptStatus::Failed,
            "completed" => AttemptStatus::Completed,
            "in_progress" => AttemptStatus::InProgress,
            "pending" => AttemptStatus::Pending,
            _ => AttemptStatus::Pending,
        }
    }

    fn parse_timestamp_value(v: Option<&Value>) -> Option<DateTime<Utc>> {
        let ts = v?.as_f64()?;
        let secs = ts as i64;
        let nanos = ((ts - secs as f64) * 1_000_000_000.0) as u32;
        Some(DateTime::<Utc>::from(
            std::time::UNIX_EPOCH + std::time::Duration::new(secs.max(0) as u64, nanos),
        ))
    }
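
    // Editor's note: a worked example of the float-to-timestamp split above
    // (illustrative): for ts = 1_700_000_000.25, secs = 1_700_000_000 and
    // nanos = 250_000_000, i.e. 2023-11-14T22:13:20.250Z. Negative inputs are
    // clamped to the Unix epoch by `secs.max(0)`.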

    fn map_task_list_item_to_summary(src: backend::TaskListItem) -> TaskSummary {
        let status_display = src.task_status_display.as_ref();
        TaskSummary {
            id: TaskId(src.id),
            title: src.title,
            status: map_status(status_display),
            updated_at: parse_updated_at(src.updated_at.as_ref()),
            environment_id: None,
            environment_label: env_label_from_status_display(status_display),
            summary: diff_summary_from_status_display(status_display),
            is_review: src
                .pull_requests
                .as_ref()
                .is_some_and(|prs| !prs.is_empty()),
            attempt_total: attempt_total_from_status_display(status_display),
        }
    }

    fn map_status(v: Option<&HashMap<String, Value>>) -> TaskStatus {
        if let Some(val) = v {
            if let Some(turn) = val
                .get("latest_turn_status_display")
                .and_then(Value::as_object)
                && let Some(s) = turn.get("turn_status").and_then(Value::as_str)
            {
                return match s {
                    "failed" => TaskStatus::Error,
                    "completed" => TaskStatus::Ready,
                    "in_progress" => TaskStatus::Pending,
                    "pending" => TaskStatus::Pending,
                    "cancelled" => TaskStatus::Error,
                    _ => TaskStatus::Pending,
                };
            }
            if let Some(state) = val.get("state").and_then(Value::as_str) {
                return match state {
                    "pending" => TaskStatus::Pending,
                    "ready" => TaskStatus::Ready,
                    "applied" => TaskStatus::Applied,
                    "error" => TaskStatus::Error,
                    _ => TaskStatus::Pending,
                };
            }
        }
        TaskStatus::Pending
    }

    fn parse_updated_at(ts: Option<&f64>) -> DateTime<Utc> {
        if let Some(v) = ts {
            let secs = *v as i64;
            let nanos = ((*v - secs as f64) * 1_000_000_000.0) as u32;
            return DateTime::<Utc>::from(
                std::time::UNIX_EPOCH + std::time::Duration::new(secs.max(0) as u64, nanos),
            );
        }
        Utc::now()
    }

    fn env_label_from_status_display(v: Option<&HashMap<String, Value>>) -> Option<String> {
        let map = v?;
        map.get("environment_label")
            .and_then(Value::as_str)
            .map(str::to_string)
    }

    fn diff_summary_from_status_display(v: Option<&HashMap<String, Value>>) -> DiffSummary {
        let mut out = DiffSummary::default();
        let Some(map) = v else { return out };
        let latest = map
            .get("latest_turn_status_display")
            .and_then(Value::as_object);
        let Some(latest) = latest else { return out };
        if let Some(ds) = latest.get("diff_stats").and_then(Value::as_object) {
            if let Some(n) = ds.get("files_modified").and_then(Value::as_i64) {
                out.files_changed = n.max(0) as usize;
            }
            if let Some(n) = ds.get("lines_added").and_then(Value::as_i64) {
                out.lines_added = n.max(0) as usize;
            }
            if let Some(n) = ds.get("lines_removed").and_then(Value::as_i64) {
                out.lines_removed = n.max(0) as usize;
            }
        }
        out
    }

    fn attempt_total_from_status_display(v: Option<&HashMap<String, Value>>) -> Option<usize> {
        let map = v?;
        let latest = map
            .get("latest_turn_status_display")
            .and_then(Value::as_object)?;
        let siblings = latest.get("sibling_turn_ids").and_then(Value::as_array)?;
        Some(siblings.len().saturating_add(1))
    }

    fn is_unified_diff(diff: &str) -> bool {
        let t = diff.trim_start();
        if t.starts_with("diff --git ") {
            return true;
        }
        let has_dash_headers = diff.contains("\n--- ") && diff.contains("\n+++ ");
        let has_hunk = diff.contains("\n@@ ") || diff.starts_with("@@ ");
        has_dash_headers && has_hunk
    }
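
    // Editor's note: illustrative inputs for the heuristic above (not from the
    // repository's tests):
    //
    //     is_unified_diff("diff --git a/x b/x\n...")                     // true
    //     is_unified_diff("x\n--- a/x\n+++ b/x\n@@ -1 +1 @@\n-a\n+b\n")  // true
    //     is_unified_diff("*** Begin Patch\n...")                        // false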

    fn tail(s: &str, max: usize) -> String {
        if s.len() <= max {
            s.to_string()
        } else {
            s[s.len() - max..].to_string()
        }
    }
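
    // Editor's caveat (not in the source): the slice above is byte-indexed, so
    // a cutoff landing inside a multi-byte UTF-8 character would panic. The
    // strings logged here are git output and typically ASCII; a boundary-safe
    // variant would advance the start index with `s.is_char_boundary(..)`
    // before slicing.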

    fn summarize_patch_for_logging(patch: &str) -> String {
        let trimmed = patch.trim_start();
        let kind = if trimmed.starts_with("*** Begin Patch") {
            "codex-patch"
        } else if trimmed.starts_with("diff --git ") || trimmed.contains("\n*** End Patch\n") {
            "git-diff"
        } else if trimmed.starts_with("@@ ") || trimmed.contains("\n@@ ") {
            "unified-diff"
        } else {
            "unknown"
        };
        let lines = patch.lines().count();
        let chars = patch.len();
        let cwd = std::env::current_dir()
            .ok()
            .map(|p| p.display().to_string())
            .unwrap_or_else(|| "<unknown>".to_string());
        let head: String = patch.lines().take(20).collect::<Vec<&str>>().join("\n");
        let head_trunc = if head.len() > 800 {
            format!("{}…", &head[..800])
        } else {
            head
        };
        format!(
            "patch_summary: kind={kind} lines={lines} chars={chars} cwd={cwd} ; head=\n{head_trunc}"
        )
    }
}

fn append_error_log(message: &str) {
    let ts = Utc::now().to_rfc3339();
    if let Ok(mut f) = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open("error.log")
    {
        use std::io::Write as _;
        let _ = writeln!(f, "[{ts}] {message}");
    }
}
codex-rs/cloud-tasks-client/src/lib.rs (new file, 29 lines)
@@ -0,0 +1,29 @@
mod api;

pub use api::ApplyOutcome;
pub use api::ApplyStatus;
pub use api::AttemptStatus;
pub use api::CloudBackend;
pub use api::CloudTaskError;
pub use api::CreatedTask;
pub use api::DiffSummary;
pub use api::Result;
pub use api::TaskId;
pub use api::TaskStatus;
pub use api::TaskSummary;
pub use api::TaskText;
pub use api::TurnAttempt;

#[cfg(feature = "mock")]
mod mock;

#[cfg(feature = "online")]
mod http;

#[cfg(feature = "mock")]
pub use mock::MockClient;

#[cfg(feature = "online")]
pub use http::HttpClient;

// Reusable apply engine now lives in the shared crate `codex-git-apply`.
codex-rs/cloud-tasks-client/src/mock.rs (new file, 180 lines)
@@ -0,0 +1,180 @@
use crate::ApplyOutcome;
use crate::AttemptStatus;
use crate::CloudBackend;
use crate::DiffSummary;
use crate::Result;
use crate::TaskId;
use crate::TaskStatus;
use crate::TaskSummary;
use crate::TurnAttempt;
use crate::api::TaskText;
use chrono::Utc;

#[derive(Clone, Default)]
pub struct MockClient;

#[async_trait::async_trait]
impl CloudBackend for MockClient {
    async fn list_tasks(&self, _env: Option<&str>) -> Result<Vec<TaskSummary>> {
        // Slightly vary content by env to aid tests that rely on the mock
        let rows = match _env {
            Some("env-A") => vec![("T-2000", "A: First", TaskStatus::Ready)],
            Some("env-B") => vec![
                ("T-3000", "B: One", TaskStatus::Ready),
                ("T-3001", "B: Two", TaskStatus::Pending),
            ],
            _ => vec![
                ("T-1000", "Update README formatting", TaskStatus::Ready),
                ("T-1001", "Fix clippy warnings in core", TaskStatus::Pending),
                ("T-1002", "Add contributing guide", TaskStatus::Ready),
            ],
        };
        let environment_id = _env.map(str::to_string);
        let environment_label = match _env {
            Some("env-A") => Some("Env A".to_string()),
            Some("env-B") => Some("Env B".to_string()),
            Some(other) => Some(other.to_string()),
            None => Some("Global".to_string()),
        };
        let mut out = Vec::new();
        for (id_str, title, status) in rows {
            let id = TaskId(id_str.to_string());
            let diff = mock_diff_for(&id);
            let (a, d) = count_from_unified(&diff);
            out.push(TaskSummary {
                id,
                title: title.to_string(),
                status,
                updated_at: Utc::now(),
                environment_id: environment_id.clone(),
                environment_label: environment_label.clone(),
                summary: DiffSummary {
                    files_changed: 1,
                    lines_added: a,
                    lines_removed: d,
                },
                is_review: false,
                attempt_total: Some(if id_str == "T-1000" { 2 } else { 1 }),
            });
        }
        Ok(out)
    }

    async fn get_task_diff(&self, id: TaskId) -> Result<Option<String>> {
        Ok(Some(mock_diff_for(&id)))
    }

    async fn get_task_messages(&self, _id: TaskId) -> Result<Vec<String>> {
        Ok(vec![
            "Mock assistant output: this task contains no diff.".to_string(),
        ])
    }

    async fn get_task_text(&self, _id: TaskId) -> Result<TaskText> {
        Ok(TaskText {
            prompt: Some("Why is there no diff?".to_string()),
            messages: vec!["Mock assistant output: this task contains no diff.".to_string()],
            turn_id: Some("mock-turn".to_string()),
            sibling_turn_ids: Vec::new(),
            attempt_placement: Some(0),
            attempt_status: AttemptStatus::Completed,
        })
    }

    async fn apply_task(&self, id: TaskId, _diff_override: Option<String>) -> Result<ApplyOutcome> {
        Ok(ApplyOutcome {
            applied: true,
            status: crate::ApplyStatus::Success,
            message: format!("Applied task {} locally (mock)", id.0),
            skipped_paths: Vec::new(),
            conflict_paths: Vec::new(),
        })
    }

    async fn apply_task_preflight(
        &self,
        id: TaskId,
        _diff_override: Option<String>,
    ) -> Result<ApplyOutcome> {
        Ok(ApplyOutcome {
            applied: false,
            status: crate::ApplyStatus::Success,
            message: format!("Preflight passed for task {} (mock)", id.0),
            skipped_paths: Vec::new(),
            conflict_paths: Vec::new(),
        })
    }

    async fn list_sibling_attempts(
        &self,
        task: TaskId,
        _turn_id: String,
    ) -> Result<Vec<TurnAttempt>> {
        if task.0 == "T-1000" {
            return Ok(vec![TurnAttempt {
                turn_id: "T-1000-attempt-2".to_string(),
                attempt_placement: Some(1),
                created_at: Some(Utc::now()),
                status: AttemptStatus::Completed,
                diff: Some(mock_diff_for(&task)),
                messages: vec!["Mock alternate attempt".to_string()],
            }]);
        }
        Ok(Vec::new())
    }

    async fn create_task(
        &self,
        env_id: &str,
        prompt: &str,
        git_ref: &str,
        qa_mode: bool,
        best_of_n: usize,
    ) -> Result<crate::CreatedTask> {
        let _ = (env_id, prompt, git_ref, qa_mode, best_of_n);
        let id = format!("task_local_{}", chrono::Utc::now().timestamp_millis());
        Ok(crate::CreatedTask { id: TaskId(id) })
    }
}

fn mock_diff_for(id: &TaskId) -> String {
    match id.0.as_str() {
        "T-1000" => {
            "diff --git a/README.md b/README.md\nindex 000000..111111 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,2 +1,3 @@\n Intro\n-Hello\n+Hello, world!\n+Task: T-1000\n".to_string()
        }
        "T-1001" => {
            "diff --git a/core/src/lib.rs b/core/src/lib.rs\nindex 000000..111111 100644\n--- a/core/src/lib.rs\n+++ b/core/src/lib.rs\n@@ -1,2 +1,1 @@\n-use foo;\n use bar;\n".to_string()
        }
        _ => {
            "diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md\nindex 000000..111111 100644\n--- /dev/null\n+++ b/CONTRIBUTING.md\n@@ -0,0 +1,3 @@\n+## Contributing\n+Please open PRs.\n+Thanks!\n".to_string()
        }
    }
}

fn count_from_unified(diff: &str) -> (usize, usize) {
    if let Ok(patch) = diffy::Patch::from_str(diff) {
        patch
            .hunks()
            .iter()
            .flat_map(diffy::Hunk::lines)
            .fold((0, 0), |(a, d), l| match l {
                diffy::Line::Insert(_) => (a + 1, d),
                diffy::Line::Delete(_) => (a, d + 1),
                _ => (a, d),
            })
    } else {
        let mut a = 0;
        let mut d = 0;
        for l in diff.lines() {
            if l.starts_with("+++") || l.starts_with("---") || l.starts_with("@@") {
                continue;
            }
            match l.as_bytes().first() {
                Some(b'+') => a += 1,
                Some(b'-') => d += 1,
                _ => {}
            }
        }
        (a, d)
    }
}
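
// Editor's note: a worked example of the counter above against the T-1000 mock
// diff (illustrative): its hunk has one deletion (`-Hello`) and two insertions
// (`+Hello, world!`, `+Task: T-1000`), so count_from_unified returns (2, 1) —
// lines added first, lines removed second.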
codex-rs/cloud-tasks/Cargo.toml (new file, 36 lines)
@@ -0,0 +1,36 @@
[package]
name = "codex-cloud-tasks"
version = { workspace = true }
edition = "2024"

[lib]
name = "codex_cloud_tasks"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
anyhow = "1"
clap = { version = "4", features = ["derive"] }
codex-common = { path = "../common", features = ["cli"] }
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
tracing = { version = "0.1.41", features = ["log"] }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
codex-cloud-tasks-client = { path = "../cloud-tasks-client", features = ["mock", "online"] }
ratatui = { version = "0.29.0" }
crossterm = { version = "0.28.1", features = ["event-stream"] }
tokio-stream = "0.1.17"
chrono = { version = "0.4", features = ["serde"] }
codex-login = { path = "../login" }
codex-core = { path = "../core" }
throbber-widgets-tui = "0.8.0"
base64 = "0.22"
serde_json = "1"
reqwest = { version = "0.12", features = ["json"] }
serde = { version = "1", features = ["derive"] }
unicode-width = "0.1"
codex-tui = { path = "../tui" }

[dev-dependencies]
async-trait = "0.1"
codex-rs/cloud-tasks/src/app.rs (new file, 482 lines)
@@ -0,0 +1,482 @@
use std::time::Duration;

// Environment filter data models for the TUI
#[derive(Clone, Debug, Default)]
pub struct EnvironmentRow {
    pub id: String,
    pub label: Option<String>,
    pub is_pinned: bool,
    pub repo_hints: Option<String>, // e.g., "openai/codex"
}

#[derive(Clone, Debug, Default)]
pub struct EnvModalState {
    pub query: String,
    pub selected: usize,
}

#[derive(Clone, Debug, Default)]
pub struct BestOfModalState {
    pub selected: usize,
}

#[derive(Clone, Debug, Copy, PartialEq, Eq)]
pub enum ApplyResultLevel {
    Success,
    Partial,
    Error,
}

#[derive(Clone, Debug)]
pub struct ApplyModalState {
    pub task_id: TaskId,
    pub title: String,
    pub result_message: Option<String>,
    pub result_level: Option<ApplyResultLevel>,
    pub skipped_paths: Vec<String>,
    pub conflict_paths: Vec<String>,
    pub diff_override: Option<String>,
}

use crate::scrollable_diff::ScrollableDiff;
use codex_cloud_tasks_client::CloudBackend;
use codex_cloud_tasks_client::TaskId;
use codex_cloud_tasks_client::TaskSummary;
use throbber_widgets_tui::ThrobberState;

#[derive(Default)]
pub struct App {
    pub tasks: Vec<TaskSummary>,
    pub selected: usize,
    pub status: String,
    pub diff_overlay: Option<DiffOverlay>,
    pub throbber: ThrobberState,
    pub refresh_inflight: bool,
    pub details_inflight: bool,
    // Environment filter state
    pub env_filter: Option<String>,
    pub env_modal: Option<EnvModalState>,
    pub apply_modal: Option<ApplyModalState>,
    pub best_of_modal: Option<BestOfModalState>,
    pub environments: Vec<EnvironmentRow>,
    pub env_last_loaded: Option<std::time::Instant>,
    pub env_loading: bool,
    pub env_error: Option<String>,
    // New Task page
    pub new_task: Option<crate::new_task::NewTaskPage>,
    pub best_of_n: usize,
    // Apply preflight spinner state
    pub apply_preflight_inflight: bool,
    // Apply action spinner state
    pub apply_inflight: bool,
    // Background enrichment coordination
    pub list_generation: u64,
    pub in_flight: std::collections::HashSet<String>,
    // Background enrichment caches were planned; currently unused.
}

impl App {
    pub fn new() -> Self {
        Self {
            tasks: Vec::new(),
            selected: 0,
            status: "Press r to refresh".to_string(),
            diff_overlay: None,
            throbber: ThrobberState::default(),
            refresh_inflight: false,
            details_inflight: false,
            env_filter: None,
            env_modal: None,
            apply_modal: None,
            best_of_modal: None,
            environments: Vec::new(),
            env_last_loaded: None,
            env_loading: false,
            env_error: None,
            new_task: None,
            best_of_n: 1,
            apply_preflight_inflight: false,
            apply_inflight: false,
            list_generation: 0,
            in_flight: std::collections::HashSet::new(),
        }
    }

    pub fn next(&mut self) {
        if self.tasks.is_empty() {
            return;
        }
        self.selected = (self.selected + 1).min(self.tasks.len().saturating_sub(1));
    }

    pub fn prev(&mut self) {
        if self.tasks.is_empty() {
            return;
        }
        if self.selected > 0 {
            self.selected -= 1;
        }
    }
}

pub async fn load_tasks(
    backend: &dyn CloudBackend,
    env: Option<&str>,
) -> anyhow::Result<Vec<TaskSummary>> {
    // In later milestones, add a small debounce, spinner, and error display.
    let tasks = tokio::time::timeout(Duration::from_secs(5), backend.list_tasks(env)).await??;
    // Hide review-only tasks from the main list.
    let filtered: Vec<TaskSummary> = tasks.into_iter().filter(|t| !t.is_review).collect();
    Ok(filtered)
}

pub struct DiffOverlay {
    pub title: String,
    pub task_id: TaskId,
    pub sd: ScrollableDiff,
    pub base_can_apply: bool,
    pub diff_lines: Vec<String>,
    pub text_lines: Vec<String>,
    pub prompt: Option<String>,
    pub attempts: Vec<AttemptView>,
    pub selected_attempt: usize,
    pub current_view: DetailView,
    pub base_turn_id: Option<String>,
    pub sibling_turn_ids: Vec<String>,
    pub attempt_total_hint: Option<usize>,
}

#[derive(Clone, Debug, Default)]
pub struct AttemptView {
    pub turn_id: Option<String>,
    pub status: codex_cloud_tasks_client::AttemptStatus,
    pub attempt_placement: Option<i64>,
    pub diff_lines: Vec<String>,
    pub text_lines: Vec<String>,
    pub prompt: Option<String>,
    pub diff_raw: Option<String>,
}

impl AttemptView {
    pub fn has_diff(&self) -> bool {
        !self.diff_lines.is_empty()
    }

    pub fn has_text(&self) -> bool {
        !self.text_lines.is_empty() || self.prompt.is_some()
    }
}

impl DiffOverlay {
    pub fn new(task_id: TaskId, title: String, attempt_total_hint: Option<usize>) -> Self {
        let mut sd = ScrollableDiff::new();
        sd.set_content(Vec::new());
        Self {
            title,
            task_id,
            sd,
            base_can_apply: false,
            diff_lines: Vec::new(),
            text_lines: Vec::new(),
            prompt: None,
            attempts: vec![AttemptView::default()],
            selected_attempt: 0,
            current_view: DetailView::Prompt,
            base_turn_id: None,
            sibling_turn_ids: Vec::new(),
            attempt_total_hint,
        }
    }

    pub fn current_attempt(&self) -> Option<&AttemptView> {
        self.attempts.get(self.selected_attempt)
    }

    pub fn base_attempt_mut(&mut self) -> &mut AttemptView {
        if self.attempts.is_empty() {
            self.attempts.push(AttemptView::default());
        }
        &mut self.attempts[0]
    }

    pub fn set_view(&mut self, view: DetailView) {
        self.current_view = view;
        self.apply_selection_to_fields();
    }

    pub fn expected_attempts(&self) -> Option<usize> {
        self.attempt_total_hint.or({
            if self.attempts.is_empty() {
                None
            } else {
                Some(self.attempts.len())
            }
        })
    }

    pub fn attempt_count(&self) -> usize {
        self.attempts.len()
    }

    pub fn attempt_display_total(&self) -> usize {
        self.expected_attempts()
            .unwrap_or_else(|| self.attempts.len().max(1))
    }

    pub fn step_attempt(&mut self, delta: isize) -> bool {
        let total = self.attempts.len();
        if total <= 1 {
            return false;
        }
        let total_isize = total as isize;
        let current = self.selected_attempt as isize;
        let mut next = current + delta;
        next = ((next % total_isize) + total_isize) % total_isize;
        let next = next as usize;
        self.selected_attempt = next;
        self.apply_selection_to_fields();
        true
    }
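
    // Editor's note: the double-modulo above is the usual Euclidean-remainder
    // trick for wrap-around stepping; illustrative values with 3 attempts:
    // from index 0, delta -1 gives ((-1 % 3) + 3) % 3 = 2 (wrapping to the
    // last attempt), and from index 2, delta +1 gives 0.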
|
||||
|
||||
pub fn current_can_apply(&self) -> bool {
|
||||
matches!(self.current_view, DetailView::Diff)
|
||||
&& self
|
||||
.current_attempt()
|
||||
.and_then(|attempt| attempt.diff_raw.as_ref())
|
||||
.map(|diff| !diff.is_empty())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn apply_selection_to_fields(&mut self) {
|
||||
let (diff_lines, text_lines, prompt) = if let Some(attempt) = self.current_attempt() {
|
||||
(
|
||||
attempt.diff_lines.clone(),
|
||||
attempt.text_lines.clone(),
|
||||
attempt.prompt.clone(),
|
||||
)
|
||||
} else {
|
||||
self.diff_lines.clear();
|
||||
self.text_lines.clear();
|
||||
self.prompt = None;
|
||||
self.sd.set_content(vec!["<loading attempt>".to_string()]);
|
||||
return;
|
||||
};
|
||||
|
||||
self.diff_lines = diff_lines.clone();
|
||||
self.text_lines = text_lines.clone();
|
||||
self.prompt = prompt;
|
||||
|
||||
match self.current_view {
|
||||
DetailView::Diff => {
|
||||
if diff_lines.is_empty() {
|
||||
self.sd.set_content(vec!["<no diff available>".to_string()]);
|
||||
} else {
|
||||
self.sd.set_content(diff_lines);
|
||||
}
|
||||
}
|
||||
DetailView::Prompt => {
|
||||
if text_lines.is_empty() {
|
||||
self.sd.set_content(vec!["<no output>".to_string()]);
|
||||
} else {
|
||||
self.sd.set_content(text_lines);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum DetailView {
|
||||
Diff,
|
||||
Prompt,
|
||||
}
|
||||
|
||||
/// Internal app events delivered from background tasks.
/// These let the UI event loop remain responsive and keep the spinner animating.
#[derive(Debug)]
pub enum AppEvent {
    TasksLoaded {
        env: Option<String>,
        result: anyhow::Result<Vec<TaskSummary>>,
    },
    // Background diff summary events were planned; removed for now to keep code minimal.
    /// Autodetection of a likely environment id finished
    EnvironmentAutodetected(anyhow::Result<crate::env_detect::AutodetectSelection>),
    /// Background completion of environment list fetch
    EnvironmentsLoaded(anyhow::Result<Vec<EnvironmentRow>>),
    DetailsDiffLoaded {
        id: TaskId,
        title: String,
        diff: String,
    },
    DetailsMessagesLoaded {
        id: TaskId,
        title: String,
        messages: Vec<String>,
        prompt: Option<String>,
        turn_id: Option<String>,
        sibling_turn_ids: Vec<String>,
        attempt_placement: Option<i64>,
        attempt_status: codex_cloud_tasks_client::AttemptStatus,
    },
    DetailsFailed {
        id: TaskId,
        title: String,
        error: String,
    },
    AttemptsLoaded {
        id: TaskId,
        attempts: Vec<codex_cloud_tasks_client::TurnAttempt>,
    },
    /// Background completion of new task submission
    NewTaskSubmitted(Result<codex_cloud_tasks_client::CreatedTask, String>),
    /// Background completion of apply preflight when opening modal or on demand
    ApplyPreflightFinished {
        id: TaskId,
        title: String,
        message: String,
        level: ApplyResultLevel,
        skipped: Vec<String>,
        conflicts: Vec<String>,
    },
    /// Background completion of apply action (actual patch application)
    ApplyFinished {
        id: TaskId,
        result: std::result::Result<codex_cloud_tasks_client::ApplyOutcome, String>,
    },
}
// Convenience aliases; currently unused.
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::Utc;

    struct FakeBackend {
        // maps env key to titles
        by_env: std::collections::HashMap<Option<String>, Vec<&'static str>>,
    }

    #[async_trait::async_trait]
    impl codex_cloud_tasks_client::CloudBackend for FakeBackend {
        async fn list_tasks(
            &self,
            env: Option<&str>,
        ) -> codex_cloud_tasks_client::Result<Vec<TaskSummary>> {
            let key = env.map(str::to_string);
            let titles = self
                .by_env
                .get(&key)
                .cloned()
                .unwrap_or_else(|| vec!["default-a", "default-b"]);
            let mut out = Vec::new();
            for (i, t) in titles.into_iter().enumerate() {
                out.push(TaskSummary {
                    id: TaskId(format!("T-{i}")),
                    title: t.to_string(),
                    status: codex_cloud_tasks_client::TaskStatus::Ready,
                    updated_at: Utc::now(),
                    environment_id: env.map(str::to_string),
                    environment_label: None,
                    summary: codex_cloud_tasks_client::DiffSummary::default(),
                    is_review: false,
                    attempt_total: Some(1),
                });
            }
            Ok(out)
        }

        async fn get_task_diff(
            &self,
            _id: TaskId,
        ) -> codex_cloud_tasks_client::Result<Option<String>> {
            Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
                "not used in test",
            ))
        }

        async fn get_task_messages(
            &self,
            _id: TaskId,
        ) -> codex_cloud_tasks_client::Result<Vec<String>> {
            Ok(vec![])
        }

        async fn get_task_text(
            &self,
            _id: TaskId,
        ) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::TaskText> {
            Ok(codex_cloud_tasks_client::TaskText {
                prompt: Some("Example prompt".to_string()),
                messages: Vec::new(),
                turn_id: Some("fake-turn".to_string()),
                sibling_turn_ids: Vec::new(),
                attempt_placement: Some(0),
                attempt_status: codex_cloud_tasks_client::AttemptStatus::Completed,
            })
        }

        async fn list_sibling_attempts(
            &self,
            _task: TaskId,
            _turn_id: String,
        ) -> codex_cloud_tasks_client::Result<Vec<codex_cloud_tasks_client::TurnAttempt>> {
            Ok(Vec::new())
        }

        async fn apply_task(
            &self,
            _id: TaskId,
            _diff_override: Option<String>,
        ) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::ApplyOutcome> {
            Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
                "not used in test",
            ))
        }

        async fn apply_task_preflight(
            &self,
            _id: TaskId,
            _diff_override: Option<String>,
        ) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::ApplyOutcome> {
            Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
                "not used in test",
            ))
        }

        async fn create_task(
            &self,
            _env_id: &str,
            _prompt: &str,
            _git_ref: &str,
            _qa_mode: bool,
            _best_of_n: usize,
        ) -> codex_cloud_tasks_client::Result<codex_cloud_tasks_client::CreatedTask> {
            Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented(
                "not used in test",
            ))
        }
    }

    #[tokio::test]
    async fn load_tasks_uses_env_parameter() {
        // Arrange: env-specific task titles
        let mut by_env = std::collections::HashMap::new();
        by_env.insert(None, vec!["root-1", "root-2"]);
        by_env.insert(Some("env-A".to_string()), vec!["A-1"]);
        by_env.insert(Some("env-B".to_string()), vec!["B-1", "B-2", "B-3"]);
        let backend = FakeBackend { by_env };

        // Act + Assert
        let root = load_tasks(&backend, None).await.unwrap();
        assert_eq!(root.len(), 2);
        assert_eq!(root[0].title, "root-1");

        let a = load_tasks(&backend, Some("env-A")).await.unwrap();
        assert_eq!(a.len(), 1);
        assert_eq!(a[0].title, "A-1");

        let b = load_tasks(&backend, Some("env-B")).await.unwrap();
        assert_eq!(b.len(), 3);
        assert_eq!(b[2].title, "B-3");
    }
}
9  codex-rs/cloud-tasks/src/cli.rs  Normal file
@@ -0,0 +1,9 @@
use clap::Parser;
use codex_common::CliConfigOverrides;

#[derive(Parser, Debug, Default)]
#[command(version)]
pub struct Cli {
    #[clap(skip)]
    pub config_overrides: CliConfigOverrides,
}
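// Illustrative sketch (editor's addition, not part of the original diff):
// `#[clap(skip)]` keeps `config_overrides` off the CLI surface, so parsing
// succeeds with no arguments and the skipped field comes from Default.
#[cfg(test)]
mod cli_sketch {
    use super::Cli;
    use clap::Parser as _;

    #[test]
    fn parses_with_no_args() {
        let cli = Cli::parse_from(["codex-cloud-tasks"]);
        let _ = cli.config_overrides; // populated elsewhere from -c overrides, not by clap
    }
}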
361  codex-rs/cloud-tasks/src/env_detect.rs  Normal file
@@ -0,0 +1,361 @@
use reqwest::header::CONTENT_TYPE;
use reqwest::header::HeaderMap;
use std::collections::HashMap;
use tracing::info;
use tracing::warn;

#[derive(Debug, Clone, serde::Deserialize)]
struct CodeEnvironment {
    id: String,
    #[serde(default)]
    label: Option<String>,
    #[serde(default)]
    is_pinned: Option<bool>,
    #[serde(default)]
    task_count: Option<i64>,
}

#[derive(Debug, Clone)]
pub struct AutodetectSelection {
    pub id: String,
    pub label: Option<String>,
}

pub async fn autodetect_environment_id(
    base_url: &str,
    headers: &HeaderMap,
    desired_label: Option<String>,
) -> anyhow::Result<AutodetectSelection> {
    // 1) Try repo-specific environments based on local git origins (GitHub only, like VSCode)
    let origins = get_git_origins();
    crate::append_error_log(format!("env: git origins: {origins:?}"));
    let mut by_repo_envs: Vec<CodeEnvironment> = Vec::new();
    for origin in &origins {
        if let Some((owner, repo)) = parse_owner_repo(origin) {
            let url = if base_url.contains("/backend-api") {
                format!(
                    "{}/wham/environments/by-repo/{}/{}/{}",
                    base_url, "github", owner, repo
                )
            } else {
                format!(
                    "{}/api/codex/environments/by-repo/{}/{}/{}",
                    base_url, "github", owner, repo
                )
            };
            crate::append_error_log(format!("env: GET {url}"));
            match get_json::<Vec<CodeEnvironment>>(&url, headers).await {
                Ok(mut list) => {
                    crate::append_error_log(format!(
                        "env: by-repo returned {} env(s) for {owner}/{repo}",
                        list.len(),
                    ));
                    by_repo_envs.append(&mut list);
                }
                Err(e) => crate::append_error_log(format!(
                    "env: by-repo fetch failed for {owner}/{repo}: {e}"
                )),
            }
        }
    }
    if let Some(env) = pick_environment_row(&by_repo_envs, desired_label.as_deref()) {
        return Ok(AutodetectSelection {
            id: env.id.clone(),
            label: env.label.as_deref().map(str::to_owned),
        });
    }

    // 2) Fallback to the full list
    let list_url = if base_url.contains("/backend-api") {
        format!("{base_url}/wham/environments")
    } else {
        format!("{base_url}/api/codex/environments")
    };
    crate::append_error_log(format!("env: GET {list_url}"));
    // Fetch and log the full environments JSON for debugging
    let http = reqwest::Client::builder().build()?;
    let res = http.get(&list_url).headers(headers.clone()).send().await?;
    let status = res.status();
    let ct = res
        .headers()
        .get(CONTENT_TYPE)
        .and_then(|v| v.to_str().ok())
        .unwrap_or("")
        .to_string();
    let body = res.text().await.unwrap_or_default();
    crate::append_error_log(format!("env: status={status} content-type={ct}"));
    match serde_json::from_str::<serde_json::Value>(&body) {
        Ok(v) => {
            let pretty = serde_json::to_string_pretty(&v).unwrap_or(body.clone());
            crate::append_error_log(format!("env: /environments JSON (pretty):\n{pretty}"));
        }
        Err(_) => crate::append_error_log(format!("env: /environments (raw):\n{body}")),
    }
    if !status.is_success() {
        anyhow::bail!("GET {list_url} failed: {status}; content-type={ct}; body={body}");
    }
    let all_envs: Vec<CodeEnvironment> = serde_json::from_str(&body).map_err(|e| {
        anyhow::anyhow!("Decode error for {list_url}: {e}; content-type={ct}; body={body}")
    })?;
    if let Some(env) = pick_environment_row(&all_envs, desired_label.as_deref()) {
        return Ok(AutodetectSelection {
            id: env.id.clone(),
            label: env.label.as_deref().map(str::to_owned),
        });
    }
    anyhow::bail!("no environments available")
}
fn pick_environment_row(
    envs: &[CodeEnvironment],
    desired_label: Option<&str>,
) -> Option<CodeEnvironment> {
    if envs.is_empty() {
        return None;
    }
    if let Some(label) = desired_label {
        let lc = label.to_lowercase();
        if let Some(e) = envs
            .iter()
            .find(|e| e.label.as_deref().unwrap_or("").to_lowercase() == lc)
        {
            crate::append_error_log(format!("env: matched by label: {label} -> {}", e.id));
            return Some(e.clone());
        }
    }
    if envs.len() == 1 {
        crate::append_error_log("env: single environment available; selecting it");
        return Some(envs[0].clone());
    }
    if let Some(e) = envs.iter().find(|e| e.is_pinned.unwrap_or(false)) {
        crate::append_error_log(format!("env: selecting pinned environment: {}", e.id));
        return Some(e.clone());
    }
    // Highest task_count as heuristic
    if let Some(e) = envs
        .iter()
        .max_by_key(|e| e.task_count.unwrap_or(0))
        .or_else(|| envs.first())
    {
        crate::append_error_log(format!("env: selecting by task_count/first: {}", e.id));
        return Some(e.clone());
    }
    None
}
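// Illustrative sketch (editor's addition, not part of the original diff).
// Selection precedence above is: explicit label match, then a single
// candidate, then a pinned environment, then the highest task_count.
#[cfg(test)]
mod pick_environment_row_sketch {
    use super::*;

    fn env(id: &str, label: Option<&str>, pinned: bool, tasks: i64) -> CodeEnvironment {
        CodeEnvironment {
            id: id.to_string(),
            label: label.map(str::to_string),
            is_pinned: Some(pinned),
            task_count: Some(tasks),
        }
    }

    #[test]
    fn pinned_beats_task_count() {
        let envs = vec![env("busy", None, false, 100), env("pinned", None, true, 1)];
        let picked = pick_environment_row(&envs, None).map(|e| e.id);
        assert_eq!(picked, Some("pinned".to_string()));
    }

    #[test]
    fn label_match_is_case_insensitive_and_wins() {
        let envs = vec![env("a", Some("Prod"), true, 100), env("b", Some("Dev"), false, 1)];
        let picked = pick_environment_row(&envs, Some("dev")).map(|e| e.id);
        assert_eq!(picked, Some("b".to_string()));
    }
}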
async fn get_json<T: serde::de::DeserializeOwned>(
    url: &str,
    headers: &HeaderMap,
) -> anyhow::Result<T> {
    let http = reqwest::Client::builder().build()?;
    let res = http.get(url).headers(headers.clone()).send().await?;
    let status = res.status();
    let ct = res
        .headers()
        .get(CONTENT_TYPE)
        .and_then(|v| v.to_str().ok())
        .unwrap_or("")
        .to_string();
    let body = res.text().await.unwrap_or_default();
    crate::append_error_log(format!("env: status={status} content-type={ct}"));
    if !status.is_success() {
        anyhow::bail!("GET {url} failed: {status}; content-type={ct}; body={body}");
    }
    let parsed = serde_json::from_str::<T>(&body).map_err(|e| {
        anyhow::anyhow!("Decode error for {url}: {e}; content-type={ct}; body={body}")
    })?;
    Ok(parsed)
}
fn get_git_origins() -> Vec<String> {
    // Prefer: git config --get-regexp remote\..*\.url
    let out = std::process::Command::new("git")
        .args(["config", "--get-regexp", "remote\\..*\\.url"])
        .output();
    if let Ok(ok) = out
        && ok.status.success()
    {
        let s = String::from_utf8_lossy(&ok.stdout);
        let mut urls = Vec::new();
        for line in s.lines() {
            if let Some((_, url)) = line.split_once(' ') {
                urls.push(url.trim().to_string());
            }
        }
        if !urls.is_empty() {
            return uniq(urls);
        }
    }
    // Fallback: git remote -v
    let out = std::process::Command::new("git")
        .args(["remote", "-v"])
        .output();
    if let Ok(ok) = out
        && ok.status.success()
    {
        let s = String::from_utf8_lossy(&ok.stdout);
        let mut urls = Vec::new();
        for line in s.lines() {
            let parts: Vec<&str> = line.split_whitespace().collect();
            if parts.len() >= 2 {
                urls.push(parts[1].to_string());
            }
        }
        if !urls.is_empty() {
            return uniq(urls);
        }
    }
    Vec::new()
}

fn uniq(mut v: Vec<String>) -> Vec<String> {
    v.sort();
    v.dedup();
    v
}
fn parse_owner_repo(url: &str) -> Option<(String, String)> {
    // Normalize common prefixes and handle multiple SSH/HTTPS variants.
    let mut s = url.trim().to_string();
    // Drop protocol scheme for ssh URLs
    if let Some(rest) = s.strip_prefix("ssh://") {
        s = rest.to_string();
    }
    // Accept any user before @github.com (e.g., git@, org-123@)
    if let Some(idx) = s.find("@github.com:") {
        let rest = &s[idx + "@github.com:".len()..];
        let rest = rest.trim_start_matches('/').trim_end_matches(".git");
        let mut parts = rest.splitn(2, '/');
        let owner = parts.next()?.to_string();
        let repo = parts.next()?.to_string();
        crate::append_error_log(format!("env: parsed SSH GitHub origin => {owner}/{repo}"));
        return Some((owner, repo));
    }
    // HTTPS or git protocol
    for prefix in [
        "https://github.com/",
        "http://github.com/",
        "git://github.com/",
        "github.com/",
    ] {
        if let Some(rest) = s.strip_prefix(prefix) {
            let rest = rest.trim_start_matches('/').trim_end_matches(".git");
            let mut parts = rest.splitn(2, '/');
            let owner = parts.next()?.to_string();
            let repo = parts.next()?.to_string();
            crate::append_error_log(format!("env: parsed HTTP GitHub origin => {owner}/{repo}"));
            return Some((owner, repo));
        }
    }
    None
}
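// Illustrative sketch (editor's addition, not part of the original diff):
// the origin formats the parser above accepts.
#[cfg(test)]
mod parse_owner_repo_sketch {
    use super::parse_owner_repo;

    #[test]
    fn parses_common_github_origins() {
        let expect = Some(("openai".to_string(), "codex".to_string()));
        assert_eq!(parse_owner_repo("git@github.com:openai/codex.git"), expect);
        assert_eq!(parse_owner_repo("ssh://org-123@github.com:openai/codex"), expect);
        assert_eq!(parse_owner_repo("https://github.com/openai/codex.git"), expect);
        assert_eq!(parse_owner_repo("git://github.com/openai/codex"), expect);
        assert_eq!(parse_owner_repo("https://gitlab.com/openai/codex"), None); // non-GitHub
    }
}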
/// List environments for the current repo(s) with a fallback to the global list.
/// Returns a de-duplicated, sorted set suitable for the TUI modal.
pub async fn list_environments(
    base_url: &str,
    headers: &HeaderMap,
) -> anyhow::Result<Vec<crate::app::EnvironmentRow>> {
    let mut map: HashMap<String, crate::app::EnvironmentRow> = HashMap::new();

    // 1) By-repo lookup for each parsed GitHub origin
    let origins = get_git_origins();
    for origin in &origins {
        if let Some((owner, repo)) = parse_owner_repo(origin) {
            let url = if base_url.contains("/backend-api") {
                format!(
                    "{}/wham/environments/by-repo/{}/{}/{}",
                    base_url, "github", owner, repo
                )
            } else {
                format!(
                    "{}/api/codex/environments/by-repo/{}/{}/{}",
                    base_url, "github", owner, repo
                )
            };
            match get_json::<Vec<CodeEnvironment>>(&url, headers).await {
                Ok(list) => {
                    info!("env_tui: by-repo {}:{} -> {} envs", owner, repo, list.len());
                    for e in list {
                        let entry =
                            map.entry(e.id.clone())
                                .or_insert_with(|| crate::app::EnvironmentRow {
                                    id: e.id.clone(),
                                    label: e.label.clone(),
                                    is_pinned: e.is_pinned.unwrap_or(false),
                                    repo_hints: Some(format!("{owner}/{repo}")),
                                });
                        // Merge: keep label if present, or use new; accumulate pinned flag
                        if entry.label.is_none() {
                            entry.label = e.label.clone();
                        }
                        entry.is_pinned = entry.is_pinned || e.is_pinned.unwrap_or(false);
                        if entry.repo_hints.is_none() {
                            entry.repo_hints = Some(format!("{owner}/{repo}"));
                        }
                    }
                }
                Err(e) => {
                    warn!(
                        "env_tui: by-repo fetch failed for {}/{}: {}",
                        owner, repo, e
                    );
                }
            }
        }
    }

    // 2) Fallback to the full list; on error return what we have if any.
    let list_url = if base_url.contains("/backend-api") {
        format!("{base_url}/wham/environments")
    } else {
        format!("{base_url}/api/codex/environments")
    };
    match get_json::<Vec<CodeEnvironment>>(&list_url, headers).await {
        Ok(list) => {
            info!("env_tui: global list -> {} envs", list.len());
            for e in list {
                let entry = map
                    .entry(e.id.clone())
                    .or_insert_with(|| crate::app::EnvironmentRow {
                        id: e.id.clone(),
                        label: e.label.clone(),
                        is_pinned: e.is_pinned.unwrap_or(false),
                        repo_hints: None,
                    });
                if entry.label.is_none() {
                    entry.label = e.label.clone();
                }
                entry.is_pinned = entry.is_pinned || e.is_pinned.unwrap_or(false);
            }
        }
        Err(e) => {
            if map.is_empty() {
                return Err(e);
            } else {
                warn!(
                    "env_tui: global list failed; using by-repo results only: {}",
                    e
                );
            }
        }
    }

    let mut rows: Vec<crate::app::EnvironmentRow> = map.into_values().collect();
    rows.sort_by(|a, b| {
        // pinned first
        let p = b.is_pinned.cmp(&a.is_pinned);
        if p != std::cmp::Ordering::Equal {
            return p;
        }
        // then label (case-insensitive), then id
        let al = a.label.as_deref().unwrap_or("").to_lowercase();
        let bl = b.label.as_deref().unwrap_or("").to_lowercase();
        let l = al.cmp(&bl);
        if l != std::cmp::Ordering::Equal {
            return l;
        }
        a.id.cmp(&b.id)
    });
    Ok(rows)
}
1631  codex-rs/cloud-tasks/src/lib.rs  Normal file
File diff suppressed because it is too large
35  codex-rs/cloud-tasks/src/new_task.rs  Normal file
@@ -0,0 +1,35 @@
use codex_tui::ComposerInput;

pub struct NewTaskPage {
    pub composer: ComposerInput,
    pub submitting: bool,
    pub env_id: Option<String>,
    pub best_of_n: usize,
}

impl NewTaskPage {
    pub fn new(env_id: Option<String>, best_of_n: usize) -> Self {
        let mut composer = ComposerInput::new();
        composer.set_hint_items(vec![
            ("⏎", "send"),
            ("Shift+⏎", "newline"),
            ("Ctrl+O", "env"),
            ("Ctrl+N", "attempts"),
            ("Ctrl+C", "quit"),
        ]);
        Self {
            composer,
            submitting: false,
            env_id,
            best_of_n,
        }
    }

    // Additional helpers can be added as usage evolves.
}

impl Default for NewTaskPage {
    fn default() -> Self {
        Self::new(None, 1)
    }
}
176  codex-rs/cloud-tasks/src/scrollable_diff.rs  Normal file
@@ -0,0 +1,176 @@
use unicode_width::UnicodeWidthChar;
use unicode_width::UnicodeWidthStr;

/// Scroll position and geometry for a vertical scroll view.
#[derive(Clone, Copy, Debug, Default)]
pub struct ScrollViewState {
    pub scroll: u16,
    pub viewport_h: u16,
    pub content_h: u16,
}

impl ScrollViewState {
    pub fn clamp(&mut self) {
        let max_scroll = self.content_h.saturating_sub(self.viewport_h);
        if self.scroll > max_scroll {
            self.scroll = max_scroll;
        }
    }
}

/// A simple, local scrollable view for diffs or message text.
///
/// Owns raw lines, caches wrapped lines for a given width, and maintains
/// a small scroll state that is clamped whenever geometry shrinks.
#[derive(Clone, Debug, Default)]
pub struct ScrollableDiff {
    raw: Vec<String>,
    wrapped: Vec<String>,
    wrapped_src_idx: Vec<usize>,
    wrap_cols: Option<u16>,
    pub state: ScrollViewState,
}

impl ScrollableDiff {
    pub fn new() -> Self {
        Self::default()
    }

    /// Replace the raw content lines. Does not rewrap immediately; call `set_width` next.
    pub fn set_content(&mut self, lines: Vec<String>) {
        self.raw = lines;
        self.wrapped.clear();
        self.wrapped_src_idx.clear();
        self.state.content_h = 0;
        // Force rewrap on next set_width even if width is unchanged
        self.wrap_cols = None;
    }

    /// Set the wrap width. If changed, rebuild wrapped lines and clamp scroll.
    pub fn set_width(&mut self, width: u16) {
        if self.wrap_cols == Some(width) {
            return;
        }
        self.wrap_cols = Some(width);
        self.rewrap(width);
        self.state.clamp();
    }

    /// Update viewport height and clamp scroll if needed.
    pub fn set_viewport(&mut self, height: u16) {
        self.state.viewport_h = height;
        self.state.clamp();
    }

    /// Return the cached wrapped lines. Call `set_width` first when area changes.
    pub fn wrapped_lines(&self) -> &[String] {
        &self.wrapped
    }

    pub fn wrapped_src_indices(&self) -> &[usize] {
        &self.wrapped_src_idx
    }

    pub fn raw_line_at(&self, idx: usize) -> &str {
        self.raw.get(idx).map(String::as_str).unwrap_or("")
    }

    /// Scroll by a signed delta; clamps to content.
    pub fn scroll_by(&mut self, delta: i16) {
        let s = self.state.scroll as i32 + delta as i32;
        self.state.scroll = s.clamp(0, self.max_scroll() as i32) as u16;
    }

    /// Page by a signed delta; typically viewport_h - 1.
    pub fn page_by(&mut self, delta: i16) {
        self.scroll_by(delta);
    }

    pub fn to_top(&mut self) {
        self.state.scroll = 0;
    }

    pub fn to_bottom(&mut self) {
        self.state.scroll = self.max_scroll();
    }

    /// Optional percent scrolled; None when not enough geometry is known.
    pub fn percent_scrolled(&self) -> Option<u8> {
        if self.state.content_h == 0 || self.state.viewport_h == 0 {
            return None;
        }
        if self.state.content_h <= self.state.viewport_h {
            return None;
        }
        let visible_bottom = self.state.scroll.saturating_add(self.state.viewport_h) as f32;
        let pct = (visible_bottom / self.state.content_h as f32 * 100.0).round();
        Some(pct.clamp(0.0, 100.0) as u8)
    }

    fn max_scroll(&self) -> u16 {
        self.state.content_h.saturating_sub(self.state.viewport_h)
    }

    fn rewrap(&mut self, width: u16) {
        if width == 0 {
            self.wrapped = self.raw.clone();
            self.state.content_h = self.wrapped.len() as u16;
            return;
        }
        let max_cols = width as usize;
        let mut out: Vec<String> = Vec::new();
        let mut out_idx: Vec<usize> = Vec::new();
        for (raw_idx, raw) in self.raw.iter().enumerate() {
            // Normalize tabs for width accounting (MVP: 4 spaces).
            let raw = raw.replace('\t', "    ");
            if raw.is_empty() {
                out.push(String::new());
                out_idx.push(raw_idx);
                continue;
            }
            let mut line = String::new();
            let mut line_cols = 0usize;
            let mut last_soft_idx: Option<usize> = None; // last whitespace or punctuation break
            for (_i, ch) in raw.char_indices() {
                if ch == '\n' {
                    out.push(std::mem::take(&mut line));
                    out_idx.push(raw_idx);
                    line_cols = 0;
                    last_soft_idx = None;
                    continue;
                }
                let w = UnicodeWidthChar::width(ch).unwrap_or(0);
                if line_cols.saturating_add(w) > max_cols {
                    if let Some(split) = last_soft_idx {
                        let (prefix, rest) = line.split_at(split);
                        out.push(prefix.trim_end().to_string());
                        out_idx.push(raw_idx);
                        line = rest.trim_start().to_string();
                        last_soft_idx = None;
                        // retry add current ch now that line may be shorter
                    } else if !line.is_empty() {
                        out.push(std::mem::take(&mut line));
                        out_idx.push(raw_idx);
                    }
                }
                if ch.is_whitespace()
                    || matches!(
                        ch,
                        ',' | ';' | '.' | ':' | ')' | ']' | '}' | '|' | '/' | '?' | '!' | '-' | '_'
                    )
                {
                    last_soft_idx = Some(line.len());
                }
                line.push(ch);
                line_cols = UnicodeWidthStr::width(line.as_str());
            }
            if !line.is_empty() {
                out.push(line);
                out_idx.push(raw_idx);
            }
        }
        self.wrapped = out;
        self.wrapped_src_idx = out_idx;
        self.state.content_h = self.wrapped.len() as u16;
    }
}
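// Illustrative sketch (editor's addition, not part of the original diff):
// typical call order is set_content -> set_width -> set_viewport, then scroll.
#[cfg(test)]
mod scrollable_diff_sketch {
    use super::ScrollableDiff;

    #[test]
    fn wraps_and_reports_percent() {
        let mut sd = ScrollableDiff::new();
        sd.set_content(vec!["alpha beta gamma delta".to_string()]);
        sd.set_width(11); // soft-wraps at whitespace/punctuation
        assert!(sd.wrapped_lines().len() > 1);
        // Every wrapped line maps back to raw line 0.
        assert!(sd.wrapped_src_indices().iter().all(|&i| i == 0));
        sd.set_viewport(1);
        sd.to_bottom();
        assert_eq!(sd.percent_scrolled(), Some(100));
    }
}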
1048  codex-rs/cloud-tasks/src/ui.rs  Normal file
File diff suppressed because it is too large
93  codex-rs/cloud-tasks/src/util.rs  Normal file
@@ -0,0 +1,93 @@
use base64::Engine as _;
use chrono::Utc;
use reqwest::header::HeaderMap;

pub fn set_user_agent_suffix(suffix: &str) {
    if let Ok(mut guard) = codex_core::default_client::USER_AGENT_SUFFIX.lock() {
        guard.replace(suffix.to_string());
    }
}

pub fn append_error_log(message: impl AsRef<str>) {
    let ts = Utc::now().to_rfc3339();
    if let Ok(mut f) = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open("error.log")
    {
        use std::io::Write as _;
        let _ = writeln!(f, "[{ts}] {}", message.as_ref());
    }
}
/// Normalize the configured base URL to a canonical form used by the backend client.
/// - trims trailing '/'
/// - appends '/backend-api' for ChatGPT hosts when missing
pub fn normalize_base_url(input: &str) -> String {
    let mut base_url = input.to_string();
    while base_url.ends_with('/') {
        base_url.pop();
    }
    if (base_url.starts_with("https://chatgpt.com")
        || base_url.starts_with("https://chat.openai.com"))
        && !base_url.contains("/backend-api")
    {
        base_url = format!("{base_url}/backend-api");
    }
    base_url
}
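// Illustrative sketch (editor's addition, not part of the original diff):
// the two normalization rules above, exercised on sample inputs.
#[cfg(test)]
mod normalize_base_url_sketch {
    use super::normalize_base_url;

    #[test]
    fn trims_and_appends_backend_api() {
        assert_eq!(
            normalize_base_url("https://chatgpt.com/"),
            "https://chatgpt.com/backend-api"
        );
        // Already-normalized URLs pass through unchanged.
        assert_eq!(
            normalize_base_url("https://chatgpt.com/backend-api"),
            "https://chatgpt.com/backend-api"
        );
        // Non-ChatGPT hosts only lose trailing slashes.
        assert_eq!(normalize_base_url("http://localhost:8080//"), "http://localhost:8080");
    }
}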
/// Extract the ChatGPT account id from a JWT token, when present.
pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
    let mut parts = token.split('.');
    let (_h, payload_b64, _s) = match (parts.next(), parts.next(), parts.next()) {
        (Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
        _ => return None,
    };
    let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD
        .decode(payload_b64)
        .ok()?;
    let v: serde_json::Value = serde_json::from_slice(&payload_bytes).ok()?;
    v.get("https://api.openai.com/auth")
        .and_then(|auth| auth.get("chatgpt_account_id"))
        .and_then(|id| id.as_str())
        .map(str::to_string)
}
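// Illustrative sketch (editor's addition, not part of the original diff):
// builds a fake three-part JWT whose payload carries the expected claim.
#[cfg(test)]
mod extract_account_id_sketch {
    use super::extract_chatgpt_account_id;
    use base64::Engine as _;

    #[test]
    fn reads_account_id_claim() {
        let enc = |s: &str| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(s);
        let payload = r#"{"https://api.openai.com/auth":{"chatgpt_account_id":"acct-123"}}"#;
        let token = format!("{}.{}.{}", enc("{}"), enc(payload), enc("sig"));
        assert_eq!(extract_chatgpt_account_id(&token), Some("acct-123".to_string()));
        assert_eq!(extract_chatgpt_account_id("not-a-jwt"), None);
    }
}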
/// Build headers for ChatGPT-backed requests: `User-Agent`, optional `Authorization`,
/// and optional `ChatGPT-Account-Id`.
pub async fn build_chatgpt_headers() -> HeaderMap {
    use reqwest::header::AUTHORIZATION;
    use reqwest::header::HeaderName;
    use reqwest::header::HeaderValue;
    use reqwest::header::USER_AGENT;

    set_user_agent_suffix("codex_cloud_tasks_tui");
    let ua = codex_core::default_client::get_codex_user_agent();
    let mut headers = HeaderMap::new();
    headers.insert(
        USER_AGENT,
        HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")),
    );
    if let Ok(home) = codex_core::config::find_codex_home() {
        let am = codex_login::AuthManager::new(home);
        if let Some(auth) = am.auth()
            && let Ok(tok) = auth.get_token().await
            && !tok.is_empty()
        {
            let v = format!("Bearer {tok}");
            if let Ok(hv) = HeaderValue::from_str(&v) {
                headers.insert(AUTHORIZATION, hv);
            }
            if let Some(acc) = auth
                .get_account_id()
                .or_else(|| extract_chatgpt_account_id(&tok))
                && let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
                && let Ok(hv) = HeaderValue::from_str(&acc)
            {
                headers.insert(name, hv);
            }
        }
    }
    headers
}
22  codex-rs/cloud-tasks/tests/env_filter.rs  Normal file
@@ -0,0 +1,22 @@
use codex_cloud_tasks_client::CloudBackend;
use codex_cloud_tasks_client::MockClient;

#[tokio::test]
async fn mock_backend_varies_by_env() {
    let client = MockClient;

    let root = CloudBackend::list_tasks(&client, None).await.unwrap();
    assert!(root.iter().any(|t| t.title.contains("Update README")));

    let a = CloudBackend::list_tasks(&client, Some("env-A"))
        .await
        .unwrap();
    assert_eq!(a.len(), 1);
    assert_eq!(a[0].title, "A: First");

    let b = CloudBackend::list_tasks(&client, Some("env-B"))
        .await
        .unwrap();
    assert_eq!(b.len(), 2);
    assert!(b[0].title.starts_with("B: "));
}
0  codex-rs/code  Normal file
17  codex-rs/codex-backend-openapi-models/Cargo.toml  Normal file
@@ -0,0 +1,17 @@
[package]
name = "codex-backend-openapi-models"
version = { workspace = true }
edition = "2024"

[lib]
name = "codex_backend_openapi_models"
path = "src/lib.rs"

# Important: generated code often violates our workspace lints.
# Allow unwrap/expect in this crate so the workspace builds cleanly
# after models are regenerated.
# Lint overrides are applied in src/lib.rs via crate attributes

[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
6  codex-rs/codex-backend-openapi-models/src/lib.rs  Normal file
@@ -0,0 +1,6 @@
#![allow(clippy::unwrap_used, clippy::expect_used)]

// Re-export generated OpenAPI models.
// The regen script populates `src/models/*.rs` and writes `src/models/mod.rs`.
// This module intentionally contains no hand-written types.
pub mod models;
codex-rs/codex-backend-openapi-models/src/models/code_task_details_response.rs
@@ -0,0 +1,42 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct CodeTaskDetailsResponse {
    #[serde(rename = "task")]
    pub task: Box<models::TaskResponse>,
    #[serde(rename = "current_user_turn", skip_serializing_if = "Option::is_none")]
    pub current_user_turn: Option<std::collections::HashMap<String, serde_json::Value>>,
    #[serde(
        rename = "current_assistant_turn",
        skip_serializing_if = "Option::is_none"
    )]
    pub current_assistant_turn: Option<std::collections::HashMap<String, serde_json::Value>>,
    #[serde(
        rename = "current_diff_task_turn",
        skip_serializing_if = "Option::is_none"
    )]
    pub current_diff_task_turn: Option<std::collections::HashMap<String, serde_json::Value>>,
}

impl CodeTaskDetailsResponse {
    pub fn new(task: models::TaskResponse) -> CodeTaskDetailsResponse {
        CodeTaskDetailsResponse {
            task: Box::new(task),
            current_user_turn: None,
            current_assistant_turn: None,
            current_diff_task_turn: None,
        }
    }
}
codex-rs/codex-backend-openapi-models/src/models/external_pull_request_response.rs
@@ -0,0 +1,40 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExternalPullRequestResponse {
    #[serde(rename = "id")]
    pub id: String,
    #[serde(rename = "assistant_turn_id")]
    pub assistant_turn_id: String,
    #[serde(rename = "pull_request")]
    pub pull_request: Box<models::GitPullRequest>,
    #[serde(rename = "codex_updated_sha", skip_serializing_if = "Option::is_none")]
    pub codex_updated_sha: Option<String>,
}

impl ExternalPullRequestResponse {
    pub fn new(
        id: String,
        assistant_turn_id: String,
        pull_request: models::GitPullRequest,
    ) -> ExternalPullRequestResponse {
        ExternalPullRequestResponse {
            id,
            assistant_turn_id,
            pull_request: Box::new(pull_request),
            codex_updated_sha: None,
        }
    }
}
codex-rs/codex-backend-openapi-models/src/models/git_pull_request.rs
@@ -0,0 +1,77 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct GitPullRequest {
    #[serde(rename = "number")]
    pub number: i32,
    #[serde(rename = "url")]
    pub url: String,
    #[serde(rename = "state")]
    pub state: String,
    #[serde(rename = "merged")]
    pub merged: bool,
    #[serde(rename = "mergeable")]
    pub mergeable: bool,
    #[serde(rename = "draft", skip_serializing_if = "Option::is_none")]
    pub draft: Option<bool>,
    #[serde(rename = "title", skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(rename = "body", skip_serializing_if = "Option::is_none")]
    pub body: Option<String>,
    #[serde(rename = "base", skip_serializing_if = "Option::is_none")]
    pub base: Option<String>,
    #[serde(rename = "head", skip_serializing_if = "Option::is_none")]
    pub head: Option<String>,
    #[serde(rename = "base_sha", skip_serializing_if = "Option::is_none")]
    pub base_sha: Option<String>,
    #[serde(rename = "head_sha", skip_serializing_if = "Option::is_none")]
    pub head_sha: Option<String>,
    #[serde(rename = "merge_commit_sha", skip_serializing_if = "Option::is_none")]
    pub merge_commit_sha: Option<String>,
    #[serde(rename = "comments", skip_serializing_if = "Option::is_none")]
    pub comments: Option<serde_json::Value>,
    #[serde(rename = "diff", skip_serializing_if = "Option::is_none")]
    pub diff: Option<serde_json::Value>,
    #[serde(rename = "user", skip_serializing_if = "Option::is_none")]
    pub user: Option<serde_json::Value>,
}

impl GitPullRequest {
    pub fn new(
        number: i32,
        url: String,
        state: String,
        merged: bool,
        mergeable: bool,
    ) -> GitPullRequest {
        GitPullRequest {
            number,
            url,
            state,
            merged,
            mergeable,
            draft: None,
            title: None,
            body: None,
            base: None,
            head: None,
            base_sha: None,
            head_sha: None,
            merge_commit_sha: None,
            comments: None,
            diff: None,
            user: None,
        }
    }
}
22  codex-rs/codex-backend-openapi-models/src/models/mod.rs  Normal file
@@ -0,0 +1,22 @@
// Curated minimal export list for current workspace usage.
// NOTE: This file was previously auto-generated by the OpenAPI generator.
// Currently export only the types referenced by the workspace
// The process for this will change

pub mod code_task_details_response;
pub use self::code_task_details_response::CodeTaskDetailsResponse;

pub mod task_response;
pub use self::task_response::TaskResponse;

pub mod external_pull_request_response;
pub use self::external_pull_request_response::ExternalPullRequestResponse;

pub mod git_pull_request;
pub use self::git_pull_request::GitPullRequest;

pub mod task_list_item;
pub use self::task_list_item::TaskListItem;

pub mod paginated_list_task_list_item_;
pub use self::paginated_list_task_list_item_::PaginatedListTaskListItem;
codex-rs/codex-backend-openapi-models/src/models/paginated_list_task_list_item_.rs
@@ -0,0 +1,30 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct PaginatedListTaskListItem {
    #[serde(rename = "items")]
    pub items: Vec<models::TaskListItem>,
    #[serde(rename = "cursor", skip_serializing_if = "Option::is_none")]
    pub cursor: Option<String>,
}

impl PaginatedListTaskListItem {
    pub fn new(items: Vec<models::TaskListItem>) -> PaginatedListTaskListItem {
        PaginatedListTaskListItem {
            items,
            cursor: None,
        }
    }
}
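// Illustrative sketch (editor's addition, not part of the original diff):
// `skip_serializing_if` keeps an absent cursor out of the JSON entirely.
#[cfg(test)]
mod paginated_list_sketch {
    use super::PaginatedListTaskListItem;

    #[test]
    fn cursor_is_omitted_when_none() {
        let page = PaginatedListTaskListItem::new(Vec::new());
        let json = serde_json::to_string(&page).unwrap();
        assert_eq!(json, r#"{"items":[]}"#);
    }
}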
codex-rs/codex-backend-openapi-models/src/models/task_list_item.rs
@@ -0,0 +1,63 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct TaskListItem {
    #[serde(rename = "id")]
    pub id: String,
    #[serde(rename = "title")]
    pub title: String,
    #[serde(
        rename = "has_generated_title",
        skip_serializing_if = "Option::is_none"
    )]
    pub has_generated_title: Option<bool>,
    #[serde(rename = "updated_at", skip_serializing_if = "Option::is_none")]
    pub updated_at: Option<f64>,
    #[serde(rename = "created_at", skip_serializing_if = "Option::is_none")]
    pub created_at: Option<f64>,
    #[serde(
        rename = "task_status_display",
        skip_serializing_if = "Option::is_none"
    )]
    pub task_status_display: Option<std::collections::HashMap<String, serde_json::Value>>,
    #[serde(rename = "archived")]
    pub archived: bool,
    #[serde(rename = "has_unread_turn")]
    pub has_unread_turn: bool,
    #[serde(rename = "pull_requests", skip_serializing_if = "Option::is_none")]
    pub pull_requests: Option<Vec<models::ExternalPullRequestResponse>>,
}

impl TaskListItem {
    pub fn new(
        id: String,
        title: String,
        has_generated_title: Option<bool>,
        archived: bool,
        has_unread_turn: bool,
    ) -> TaskListItem {
        TaskListItem {
            id,
            title,
            has_generated_title,
            updated_at: None,
            created_at: None,
            task_status_display: None,
            archived,
            has_unread_turn,
            pull_requests: None,
        }
    }
}
codex-rs/codex-backend-openapi-models/src/models/task_response.rs
@@ -0,0 +1,62 @@
/*
 * codex-backend
 *
 * codex-backend
 *
 * The version of the OpenAPI document: 0.0.1
 *
 * Generated by: https://openapi-generator.tech
 */

use crate::models;
use serde::Deserialize;
use serde::Serialize;

#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)]
pub struct TaskResponse {
    #[serde(rename = "id")]
    pub id: String,
    #[serde(rename = "created_at", skip_serializing_if = "Option::is_none")]
    pub created_at: Option<f64>,
    #[serde(rename = "title")]
    pub title: String,
    #[serde(
        rename = "has_generated_title",
        skip_serializing_if = "Option::is_none"
    )]
    pub has_generated_title: Option<bool>,
    #[serde(rename = "current_turn_id", skip_serializing_if = "Option::is_none")]
    pub current_turn_id: Option<String>,
    #[serde(rename = "has_unread_turn", skip_serializing_if = "Option::is_none")]
    pub has_unread_turn: Option<bool>,
    #[serde(
        rename = "denormalized_metadata",
        skip_serializing_if = "Option::is_none"
    )]
    pub denormalized_metadata: Option<std::collections::HashMap<String, serde_json::Value>>,
    #[serde(rename = "archived")]
    pub archived: bool,
    #[serde(rename = "external_pull_requests")]
    pub external_pull_requests: Vec<models::ExternalPullRequestResponse>,
}

impl TaskResponse {
    pub fn new(
        id: String,
        title: String,
        archived: bool,
        external_pull_requests: Vec<models::ExternalPullRequestResponse>,
    ) -> TaskResponse {
        TaskResponse {
            id,
            created_at: None,
            title,
            has_generated_title: None,
            current_turn_id: None,
            has_unread_turn: None,
            denormalized_metadata: None,
            archived,
            external_pull_requests,
        }
    }
}
@@ -7,11 +7,11 @@ version = { workspace = true }
 workspace = true
 
 [dependencies]
-clap = { version = "4", features = ["derive", "wrap_help"], optional = true }
-codex-core = { path = "../core" }
-codex-protocol = { path = "../protocol" }
-serde = { version = "1", optional = true }
-toml = { version = "0.9", optional = true }
+clap = { workspace = true, features = ["derive", "wrap_help"], optional = true }
+codex-core = { workspace = true }
+codex-protocol = { workspace = true }
+serde = { workspace = true, optional = true }
+toml = { workspace = true, optional = true }
 
 [features]
 # Separate feature so that `clap` is not a mandatory dependency.
Some files were not shown because too many files have changed in this diff