Mirror of https://github.com/openai/codex.git, synced 2026-02-02 15:03:38 +00:00

Compare commits: maxj/threa... → patch-1 (9 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5ebfafab97 | |
| | 4ce63c70a8 | |
| | e201a3ea55 | |
| | e9abecfb68 | |
| | 286cb2a021 | |
| | 8c3a2b1302 | |
| | 8258ad88a0 | |
| | 3de1e54474 | |
| | 2d3387169c | |
@@ -1,3 +0,0 @@
# Without this, Bazel will consider BUILD.bazel files in
# .git/sl/origbackups (which can be populated by Sapling SCM).
.git

46 .bazelrc
@@ -1,46 +0,0 @@
common --repo_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1

common --disk_cache=~/.cache/bazel-disk-cache
common --repo_contents_cache=~/.cache/bazel-repo-contents-cache
common --repository_cache=~/.cache/bazel-repo-cache
startup --experimental_remote_repo_contents_cache

common --experimental_platform_in_output_dir

common --enable_platform_specific_config
# TODO(zbarsky): We need to untangle these libc constraints to get linux remote builds working.
common:linux --host_platform=//:local
common --@rules_cc//cc/toolchains/args/archiver_flags:use_libtool_on_macos=False
common --@toolchains_llvm_bootstrapped//config:experimental_stub_libgcc_s

# We need to use the sh toolchain on windows so we don't send host bash paths to the linux executor.
common:windows --@rules_rust//rust/settings:experimental_use_sh_toolchain_for_bootstrap_process_wrapper

# TODO(zbarsky): rules_rust doesn't implement this flag properly with remote exec...
# common --@rules_rust//rust/settings:pipelined_compilation

common --incompatible_strict_action_env
# Not ideal, but we need to allow dotslash to be found.
common --test_env=PATH=/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin

common --test_output=errors
common --bes_results_url=https://app.buildbuddy.io/invocation/
common --bes_backend=grpcs://remote.buildbuddy.io
common --remote_cache=grpcs://remote.buildbuddy.io
common --remote_download_toplevel
common --nobuild_runfile_links
common --remote_timeout=3600
common --noexperimental_throttle_remote_action_building
common --experimental_remote_execution_keepalive
common --grpc_keepalive_time=30s

# This limits both in-flight executions and concurrent downloads. Even with a high number
# of jobs, execution will still be limited by CPU cores, so this just pays a bit of
# memory in exchange for higher download concurrency.
common --jobs=30

common:remote --extra_execution_platforms=//:rbe
common:remote --remote_executor=grpcs://remote.buildbuddy.io
common:remote --jobs=800
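Note: the common:remote lines above only define a named config; Bazel applies them when --config=remote is passed explicitly. A minimal sketch of such an invocation (the BuildBuddy API-key header mirrors the one used in .github/workflows/bazel.yml below; the key value itself is a placeholder):

    bazel test //... --config=remote \
      "--remote_header=x-buildbuddy-api-key=$BUILDBUDDY_API_KEY"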
@@ -1 +0,0 @@
9.0.0
@@ -1,3 +1 @@
iTerm
iTerm2
psuedo
@@ -1,6 +1,6 @@
[codespell]
# Ref: https://github.com/codespell-project/codespell#using-a-config-file
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt,*.snap,*.snap.new
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
check-hidden = true
ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
ignore-words-list = ratatui,ser,iTerm,iterm2,iterm
ignore-words-list = ratatui,ser

9 .github/ISSUE_TEMPLATE/2-bug-report.yml vendored
@@ -40,18 +40,11 @@ body:
      description: |
        For MacOS and Linux: copy the output of `uname -mprs`
        For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
  - type: input
    id: terminal
    attributes:
      label: What terminal emulator and version are you using (if applicable)?
      description: Also note any multiplexer in use (screen / tmux / zellij)
      description: |
        E.g., VSCode, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
  - type: textarea
    id: actual
    attributes:
      label: What issue are you seeing?
      description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
      description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
    validations:
      required: true
  - type: textarea

44 .github/actions/linux-code-sign/action.yml vendored
@@ -1,44 +0,0 @@
name: linux-code-sign
description: Sign Linux artifacts with cosign.
inputs:
  target:
    description: Target triple for the artifacts to sign.
    required: true
  artifacts-dir:
    description: Absolute path to the directory containing built binaries to sign.
    required: true

runs:
  using: composite
  steps:
    - name: Install cosign
      uses: sigstore/cosign-installer@v3.7.0

    - name: Cosign Linux artifacts
      shell: bash
      env:
        COSIGN_EXPERIMENTAL: "1"
        COSIGN_YES: "true"
        COSIGN_OIDC_CLIENT_ID: "sigstore"
        COSIGN_OIDC_ISSUER: "https://oauth2.sigstore.dev/auth"
      run: |
        set -euo pipefail

        dest="${{ inputs.artifacts-dir }}"
        if [[ ! -d "$dest" ]]; then
          echo "Destination $dest does not exist"
          exit 1
        fi

        for binary in codex codex-responses-api-proxy; do
          artifact="${dest}/${binary}"
          if [[ ! -f "$artifact" ]]; then
            echo "Binary $artifact not found"
            exit 1
          fi

          cosign sign-blob \
            --yes \
            --bundle "${artifact}.sigstore" \
            "$artifact"
        done
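The sign-blob call above performs keyless signing and writes a Sigstore bundle next to each binary. A hypothetical verification sketch with the cosign CLI (the identity flags are assumptions; the expected certificate identity depends on the OIDC session used at signing time):

    cosign verify-blob \
      --bundle codex.sigstore \
      --certificate-identity-regexp '.*' \
      --certificate-oidc-issuer 'https://oauth2.sigstore.dev/auth' \
      codex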

246 .github/actions/macos-code-sign/action.yml vendored
@@ -1,246 +0,0 @@
name: macos-code-sign
description: Configure, sign, notarize, and clean up macOS code signing artifacts.
inputs:
  target:
    description: Rust compilation target triple (e.g. aarch64-apple-darwin).
    required: true
  sign-binaries:
    description: Whether to sign and notarize the macOS binaries.
    required: false
    default: "true"
  sign-dmg:
    description: Whether to sign and notarize the macOS dmg.
    required: false
    default: "true"
  apple-certificate:
    description: Base64-encoded Apple signing certificate (P12).
    required: true
  apple-certificate-password:
    description: Password for the signing certificate.
    required: true
  apple-notarization-key-p8:
    description: Base64-encoded Apple notarization key (P8).
    required: true
  apple-notarization-key-id:
    description: Apple notarization key ID.
    required: true
  apple-notarization-issuer-id:
    description: Apple notarization issuer ID.
    required: true
runs:
  using: composite
  steps:
    - name: Configure Apple code signing
      shell: bash
      env:
        KEYCHAIN_PASSWORD: actions
        APPLE_CERTIFICATE: ${{ inputs.apple-certificate }}
        APPLE_CERTIFICATE_PASSWORD: ${{ inputs.apple-certificate-password }}
      run: |
        set -euo pipefail

        if [[ -z "${APPLE_CERTIFICATE:-}" ]]; then
          echo "APPLE_CERTIFICATE is required for macOS signing"
          exit 1
        fi

        if [[ -z "${APPLE_CERTIFICATE_PASSWORD:-}" ]]; then
          echo "APPLE_CERTIFICATE_PASSWORD is required for macOS signing"
          exit 1
        fi

        cert_path="${RUNNER_TEMP}/apple_signing_certificate.p12"
        echo "$APPLE_CERTIFICATE" | base64 -d > "$cert_path"

        keychain_path="${RUNNER_TEMP}/codex-signing.keychain-db"
        security create-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
        security set-keychain-settings -lut 21600 "$keychain_path"
        security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"

        keychain_args=()
        cleanup_keychain() {
          if ((${#keychain_args[@]} > 0)); then
            security list-keychains -s "${keychain_args[@]}" || true
            security default-keychain -s "${keychain_args[0]}" || true
          else
            security list-keychains -s || true
          fi
          if [[ -f "$keychain_path" ]]; then
            security delete-keychain "$keychain_path" || true
          fi
        }

        while IFS= read -r keychain; do
          [[ -n "$keychain" ]] && keychain_args+=("$keychain")
        done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')

        if ((${#keychain_args[@]} > 0)); then
          security list-keychains -s "$keychain_path" "${keychain_args[@]}"
        else
          security list-keychains -s "$keychain_path"
        fi

        security default-keychain -s "$keychain_path"
        security import "$cert_path" -k "$keychain_path" -P "$APPLE_CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
        security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" "$keychain_path" > /dev/null

        codesign_hashes=()
        while IFS= read -r hash; do
          [[ -n "$hash" ]] && codesign_hashes+=("$hash")
        done < <(security find-identity -v -p codesigning "$keychain_path" \
          | sed -n 's/.*\([0-9A-F]\{40\}\).*/\1/p' \
          | sort -u)

        if ((${#codesign_hashes[@]} == 0)); then
          echo "No signing identities found in $keychain_path"
          cleanup_keychain
          rm -f "$cert_path"
          exit 1
        fi

        if ((${#codesign_hashes[@]} > 1)); then
          echo "Multiple signing identities found in $keychain_path:"
          printf ' %s\n' "${codesign_hashes[@]}"
          cleanup_keychain
          rm -f "$cert_path"
          exit 1
        fi

        APPLE_CODESIGN_IDENTITY="${codesign_hashes[0]}"

        rm -f "$cert_path"

        echo "APPLE_CODESIGN_IDENTITY=$APPLE_CODESIGN_IDENTITY" >> "$GITHUB_ENV"
        echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
        echo "::add-mask::$APPLE_CODESIGN_IDENTITY"

    - name: Sign macOS binaries
      if: ${{ inputs.sign-binaries == 'true' }}
      shell: bash
      run: |
        set -euo pipefail

        if [[ -z "${APPLE_CODESIGN_IDENTITY:-}" ]]; then
          echo "APPLE_CODESIGN_IDENTITY is required for macOS signing"
          exit 1
        fi

        keychain_args=()
        if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
          keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
        fi

        for binary in codex codex-responses-api-proxy; do
          path="codex-rs/target/${{ inputs.target }}/release/${binary}"
          codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
        done

    - name: Notarize macOS binaries
      if: ${{ inputs.sign-binaries == 'true' }}
      shell: bash
      env:
        APPLE_NOTARIZATION_KEY_P8: ${{ inputs.apple-notarization-key-p8 }}
        APPLE_NOTARIZATION_KEY_ID: ${{ inputs.apple-notarization-key-id }}
        APPLE_NOTARIZATION_ISSUER_ID: ${{ inputs.apple-notarization-issuer-id }}
      run: |
        set -euo pipefail

        for var in APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
          if [[ -z "${!var:-}" ]]; then
            echo "$var is required for notarization"
            exit 1
          fi
        done

        notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
        echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
        cleanup_notary() {
          rm -f "$notary_key_path"
        }
        trap cleanup_notary EXIT

        source "$GITHUB_ACTION_PATH/notary_helpers.sh"

        notarize_binary() {
          local binary="$1"
          local source_path="codex-rs/target/${{ inputs.target }}/release/${binary}"
          local archive_path="${RUNNER_TEMP}/${binary}.zip"

          if [[ ! -f "$source_path" ]]; then
            echo "Binary $source_path not found"
            exit 1
          fi

          rm -f "$archive_path"
          ditto -c -k --keepParent "$source_path" "$archive_path"

          notarize_submission "$binary" "$archive_path" "$notary_key_path"
        }

        notarize_binary "codex"
        notarize_binary "codex-responses-api-proxy"

    - name: Sign and notarize macOS dmg
      if: ${{ inputs.sign-dmg == 'true' }}
      shell: bash
      env:
        APPLE_NOTARIZATION_KEY_P8: ${{ inputs.apple-notarization-key-p8 }}
        APPLE_NOTARIZATION_KEY_ID: ${{ inputs.apple-notarization-key-id }}
        APPLE_NOTARIZATION_ISSUER_ID: ${{ inputs.apple-notarization-issuer-id }}
      run: |
        set -euo pipefail

        for var in APPLE_CODESIGN_IDENTITY APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
          if [[ -z "${!var:-}" ]]; then
            echo "$var is required"
            exit 1
          fi
        done

        notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
        echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
        cleanup_notary() {
          rm -f "$notary_key_path"
        }
        trap cleanup_notary EXIT

        source "$GITHUB_ACTION_PATH/notary_helpers.sh"

        dmg_path="codex-rs/target/${{ inputs.target }}/release/codex-${{ inputs.target }}.dmg"

        if [[ ! -f "$dmg_path" ]]; then
          echo "dmg $dmg_path not found"
          exit 1
        fi

        keychain_args=()
        if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
          keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
        fi

        codesign --force --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$dmg_path"
        notarize_submission "codex-${{ inputs.target }}.dmg" "$dmg_path" "$notary_key_path"
        xcrun stapler staple "$dmg_path"

    - name: Remove signing keychain
      if: ${{ always() }}
      shell: bash
      env:
        APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
      run: |
        set -euo pipefail
        if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" ]]; then
          keychain_args=()
          while IFS= read -r keychain; do
            [[ "$keychain" == "$APPLE_CODESIGN_KEYCHAIN" ]] && continue
            [[ -n "$keychain" ]] && keychain_args+=("$keychain")
          done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
          if ((${#keychain_args[@]} > 0)); then
            security list-keychains -s "${keychain_args[@]}"
            security default-keychain -s "${keychain_args[0]}"
          fi

          if [[ -f "$APPLE_CODESIGN_KEYCHAIN" ]]; then
            security delete-keychain "$APPLE_CODESIGN_KEYCHAIN"
          fi
        fi
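After the signing step, a quick local sanity check is possible with standard codesign verification; this is not part of the workflow itself, and the path below assumes the aarch64 release target:

    codesign --verify --strict --verbose=2 codex-rs/target/aarch64-apple-darwin/release/codex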
@@ -1,46 +0,0 @@
#!/usr/bin/env bash

notarize_submission() {
  local label="$1"
  local path="$2"
  local notary_key_path="$3"

  if [[ -z "${APPLE_NOTARIZATION_KEY_ID:-}" || -z "${APPLE_NOTARIZATION_ISSUER_ID:-}" ]]; then
    echo "APPLE_NOTARIZATION_KEY_ID and APPLE_NOTARIZATION_ISSUER_ID are required for notarization"
    exit 1
  fi

  if [[ -z "$notary_key_path" || ! -f "$notary_key_path" ]]; then
    echo "Notary key file $notary_key_path not found"
    exit 1
  fi

  if [[ ! -f "$path" ]]; then
    echo "Notarization payload $path not found"
    exit 1
  fi

  local submission_json
  submission_json=$(xcrun notarytool submit "$path" \
    --key "$notary_key_path" \
    --key-id "$APPLE_NOTARIZATION_KEY_ID" \
    --issuer "$APPLE_NOTARIZATION_ISSUER_ID" \
    --output-format json \
    --wait)

  local status submission_id
  status=$(printf '%s\n' "$submission_json" | jq -r '.status // "Unknown"')
  submission_id=$(printf '%s\n' "$submission_json" | jq -r '.id // ""')

  if [[ -z "$submission_id" ]]; then
    echo "Failed to retrieve submission ID for $label"
    exit 1
  fi

  echo "::notice title=Notarization::$label submission ${submission_id} completed with status ${status}"

  if [[ "$status" != "Accepted" ]]; then
    echo "Notarization failed for ${label} (submission ${submission_id}, status ${status})"
    exit 1
  fi
}
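For context, this helper is meant to be sourced rather than executed. A minimal usage sketch matching how macos-code-sign/action.yml calls it (the key IDs and file paths here are placeholders):

    export APPLE_NOTARIZATION_KEY_ID="..." APPLE_NOTARIZATION_ISSUER_ID="..."
    source notary_helpers.sh
    notarize_submission "codex" "${RUNNER_TEMP}/codex.zip" "${RUNNER_TEMP}/notarytool.key.p8"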

57 .github/actions/windows-code-sign/action.yml vendored
@@ -1,57 +0,0 @@
name: windows-code-sign
description: Sign Windows binaries with Azure Trusted Signing.
inputs:
  target:
    description: Target triple for the artifacts to sign.
    required: true
  client-id:
    description: Azure Trusted Signing client ID.
    required: true
  tenant-id:
    description: Azure tenant ID for Trusted Signing.
    required: true
  subscription-id:
    description: Azure subscription ID for Trusted Signing.
    required: true
  endpoint:
    description: Azure Trusted Signing endpoint.
    required: true
  account-name:
    description: Azure Trusted Signing account name.
    required: true
  certificate-profile-name:
    description: Certificate profile name for signing.
    required: true

runs:
  using: composite
  steps:
    - name: Azure login for Trusted Signing (OIDC)
      uses: azure/login@v2
      with:
        client-id: ${{ inputs.client-id }}
        tenant-id: ${{ inputs.tenant-id }}
        subscription-id: ${{ inputs.subscription-id }}

    - name: Sign Windows binaries with Azure Trusted Signing
      uses: azure/trusted-signing-action@v0
      with:
        endpoint: ${{ inputs.endpoint }}
        trusted-signing-account-name: ${{ inputs.account-name }}
        certificate-profile-name: ${{ inputs.certificate-profile-name }}
        exclude-environment-credential: true
        exclude-workload-identity-credential: true
        exclude-managed-identity-credential: true
        exclude-shared-token-cache-credential: true
        exclude-visual-studio-credential: true
        exclude-visual-studio-code-credential: true
        exclude-azure-cli-credential: false
        exclude-azure-powershell-credential: true
        exclude-azure-developer-cli-credential: true
        exclude-interactive-browser-credential: true
        cache-dependencies: false
        files: |
          ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex.exe
          ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-responses-api-proxy.exe
          ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-windows-sandbox-setup.exe
          ${{ github.workspace }}/codex-rs/target/${{ inputs.target }}/release/codex-command-runner.exe

BIN .github/codex-cli-login.png vendored (new file, binary not shown; 2.9 MiB)
BIN .github/codex-cli-permissions.png vendored (new file, binary not shown; 408 KiB)
BIN .github/codex-cli-splash.png vendored (binary not shown; 818 KiB before, 3.1 MiB after)

2 .github/codex/home/config.toml vendored
@@ -1,3 +1,3 @@
model = "gpt-5.1"
model = "gpt-5"

# Consider setting [mcp_servers] here!

BIN .github/demo.gif vendored (new file, binary not shown; 19 MiB)

24 .github/dotslash-config.json vendored
@@ -55,30 +55,6 @@
        "path": "codex-responses-api-proxy.exe"
      }
    }
  },
  "codex-command-runner": {
    "platforms": {
      "windows-x86_64": {
        "regex": "^codex-command-runner-x86_64-pc-windows-msvc\\.exe\\.zst$",
        "path": "codex-command-runner.exe"
      },
      "windows-aarch64": {
        "regex": "^codex-command-runner-aarch64-pc-windows-msvc\\.exe\\.zst$",
        "path": "codex-command-runner.exe"
      }
    }
  },
  "codex-windows-sandbox-setup": {
    "platforms": {
      "windows-x86_64": {
        "regex": "^codex-windows-sandbox-setup-x86_64-pc-windows-msvc\\.exe\\.zst$",
        "path": "codex-windows-sandbox-setup.exe"
      },
      "windows-aarch64": {
        "regex": "^codex-windows-sandbox-setup-aarch64-pc-windows-msvc\\.exe\\.zst$",
        "path": "codex-windows-sandbox-setup.exe"
      }
    }
  }
}

163 .github/scripts/install-musl-build-tools.sh vendored
@@ -1,163 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

: "${TARGET:?TARGET environment variable is required}"
: "${GITHUB_ENV:?GITHUB_ENV environment variable is required}"

apt_update_args=()
if [[ -n "${APT_UPDATE_ARGS:-}" ]]; then
  # shellcheck disable=SC2206
  apt_update_args=(${APT_UPDATE_ARGS})
fi

apt_install_args=()
if [[ -n "${APT_INSTALL_ARGS:-}" ]]; then
  # shellcheck disable=SC2206
  apt_install_args=(${APT_INSTALL_ARGS})
fi

sudo apt-get update "${apt_update_args[@]}"
sudo apt-get install -y "${apt_install_args[@]}" musl-tools pkg-config g++ clang libc++-dev libc++abi-dev lld

case "${TARGET}" in
  x86_64-unknown-linux-musl)
    arch="x86_64"
    ;;
  aarch64-unknown-linux-musl)
    arch="aarch64"
    ;;
  *)
    echo "Unexpected musl target: ${TARGET}" >&2
    exit 1
    ;;
esac

# Use the musl toolchain as the Rust linker to avoid Zig injecting its own CRT.
if command -v "${arch}-linux-musl-gcc" >/dev/null; then
  musl_linker="$(command -v "${arch}-linux-musl-gcc")"
elif command -v musl-gcc >/dev/null; then
  musl_linker="$(command -v musl-gcc)"
else
  echo "musl gcc not found after install; arch=${arch}" >&2
  exit 1
fi

zig_target="${TARGET/-unknown-linux-musl/-linux-musl}"
runner_temp="${RUNNER_TEMP:-/tmp}"
tool_root="${runner_temp}/codex-musl-tools-${TARGET}"
mkdir -p "${tool_root}"

sysroot=""
if command -v zig >/dev/null; then
  zig_bin="$(command -v zig)"
  cc="${tool_root}/zigcc"
  cxx="${tool_root}/zigcxx"

  cat >"${cc}" <<EOF
#!/usr/bin/env bash
set -euo pipefail

args=()
skip_next=0
for arg in "\$@"; do
  if [[ "\${skip_next}" -eq 1 ]]; then
    skip_next=0
    continue
  fi
  case "\${arg}" in
    --target)
      skip_next=1
      continue
      ;;
    --target=*|-target=*|-target)
      # Drop any explicit --target/-target flags. Zig expects -target and
      # rejects Rust triples like *-unknown-linux-musl.
      if [[ "\${arg}" == "-target" ]]; then
        skip_next=1
      fi
      continue
      ;;
  esac
  args+=("\${arg}")
done

exec "${zig_bin}" cc -target "${zig_target}" "\${args[@]}"
EOF
  cat >"${cxx}" <<EOF
#!/usr/bin/env bash
set -euo pipefail

args=()
skip_next=0
for arg in "\$@"; do
  if [[ "\${skip_next}" -eq 1 ]]; then
    skip_next=0
    continue
  fi
  case "\${arg}" in
    --target)
      skip_next=1
      continue
      ;;
    --target=*|-target=*|-target)
      if [[ "\${arg}" == "-target" ]]; then
        skip_next=1
      fi
      continue
      ;;
  esac
  args+=("\${arg}")
done

exec "${zig_bin}" c++ -target "${zig_target}" "\${args[@]}"
EOF
  chmod +x "${cc}" "${cxx}"

  sysroot="$("${zig_bin}" cc -target "${zig_target}" -print-sysroot 2>/dev/null || true)"
else
  cc="${musl_linker}"

  if command -v "${arch}-linux-musl-g++" >/dev/null; then
    cxx="$(command -v "${arch}-linux-musl-g++")"
  elif command -v musl-g++ >/dev/null; then
    cxx="$(command -v musl-g++)"
  else
    cxx="${cc}"
  fi
fi

if [[ -n "${sysroot}" && "${sysroot}" != "/" ]]; then
  echo "BORING_BSSL_SYSROOT=${sysroot}" >> "$GITHUB_ENV"
  boring_sysroot_var="BORING_BSSL_SYSROOT_${TARGET}"
  boring_sysroot_var="${boring_sysroot_var//-/_}"
  echo "${boring_sysroot_var}=${sysroot}" >> "$GITHUB_ENV"
fi

cflags="-pthread"
cxxflags="-pthread"
if [[ "${TARGET}" == "aarch64-unknown-linux-musl" ]]; then
  # BoringSSL enables -Wframe-larger-than=25344 under clang and treats warnings as errors.
  cflags="${cflags} -Wno-error=frame-larger-than"
  cxxflags="${cxxflags} -Wno-error=frame-larger-than"
fi

echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
echo "CC=${cc}" >> "$GITHUB_ENV"
echo "TARGET_CC=${cc}" >> "$GITHUB_ENV"
target_cc_var="CC_${TARGET}"
target_cc_var="${target_cc_var//-/_}"
echo "${target_cc_var}=${cc}" >> "$GITHUB_ENV"
echo "CXX=${cxx}" >> "$GITHUB_ENV"
echo "TARGET_CXX=${cxx}" >> "$GITHUB_ENV"
target_cxx_var="CXX_${TARGET}"
target_cxx_var="${target_cxx_var//-/_}"
echo "${target_cxx_var}=${cxx}" >> "$GITHUB_ENV"

cargo_linker_var="CARGO_TARGET_${TARGET^^}_LINKER"
cargo_linker_var="${cargo_linker_var//-/_}"
echo "${cargo_linker_var}=${musl_linker}" >> "$GITHUB_ENV"

echo "CMAKE_C_COMPILER=${cc}" >> "$GITHUB_ENV"
echo "CMAKE_CXX_COMPILER=${cxx}" >> "$GITHUB_ENV"
echo "CMAKE_ARGS=-DCMAKE_HAVE_THREADS_LIBRARY=1 -DCMAKE_USE_PTHREADS_INIT=1 -DCMAKE_THREAD_LIBS_INIT=-pthread -DTHREADS_PREFER_PTHREAD_FLAG=ON" >> "$GITHUB_ENV"
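The script is driven entirely by environment variables, matching how rust-ci.yml invokes it. A hypothetical local dry run, using a scratch file in place of the real GITHUB_ENV so the exported toolchain settings can be inspected afterwards:

    TARGET=x86_64-unknown-linux-musl \
    GITHUB_ENV=/tmp/musl-env.txt \
    APT_UPDATE_ARGS='-o Acquire::Retries=3' \
    APT_INSTALL_ARGS='--no-install-recommends' \
    bash .github/scripts/install-musl-build-tools.sh
    cat /tmp/musl-env.txt  # CC/CXX/linker variables the script emitted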

20 .github/workflows/Dockerfile.bazel vendored
@@ -1,20 +0,0 @@
FROM ubuntu:24.04

# TODO(mbolin): Published to docker.io/mbolin491/codex-bazel:latest for
# initial debugging, but we should publish to a more proper location.
#
# docker buildx create --use
# docker buildx build --platform linux/amd64,linux/arm64 -f .github/workflows/Dockerfile.bazel -t mbolin491/codex-bazel:latest --push .

RUN apt-get update && \
    apt-get install -y --no-install-recommends \
      curl git python3 ca-certificates && \
    rm -rf /var/lib/apt/lists/*

# Install dotslash.
RUN curl -LSfs "https://github.com/facebook/dotslash/releases/download/v0.5.8/dotslash-ubuntu-22.04.$(uname -m).tar.gz" | tar fxz - -C /usr/local/bin

# Ubuntu 24.04 ships with user 'ubuntu' already created with UID 1000.
USER ubuntu

WORKDIR /workspace
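Assuming the image published by the buildx command in the comment above, a plausible way to run a containerized build from a checkout would be the following (the mount point matches the Dockerfile's WORKDIR; the tag is illustrative):

    docker run --rm -it -v "$PWD:/workspace" mbolin491/codex-bazel:latest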

110 .github/workflows/bazel.yml vendored
@@ -1,110 +0,0 @@
name: Bazel (experimental)

# Note this workflow was originally derived from:
# https://github.com/cerisier/toolchains_llvm_bootstrapped/blob/main/.github/workflows/ci.yaml

on:
  pull_request: {}
  push:
    branches:
      - main
  workflow_dispatch:

concurrency:
  # Cancel previous actions from the same PR or branch except 'main' branch.
  # See https://docs.github.com/en/actions/using-jobs/using-concurrency and https://docs.github.com/en/actions/learn-github-actions/contexts for more info.
  group: concurrency-group::${{ github.workflow }}::${{ github.event.pull_request.number > 0 && format('pr-{0}', github.event.pull_request.number) || github.ref_name }}${{ github.ref_name == 'main' && format('::{0}', github.run_id) || ''}}
  cancel-in-progress: ${{ github.ref_name != 'main' }}
jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        include:
          # macOS
          - os: macos-15-xlarge
            target: aarch64-apple-darwin
          - os: macos-15-xlarge
            target: x86_64-apple-darwin

          # Linux
          - os: ubuntu-24.04-arm
            target: aarch64-unknown-linux-gnu
          - os: ubuntu-24.04
            target: x86_64-unknown-linux-gnu
          - os: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
          - os: ubuntu-24.04
            target: x86_64-unknown-linux-musl
          # TODO: Enable Windows once we fix the toolchain issues there.
          #- os: windows-latest
          #  target: x86_64-pc-windows-gnullvm
    runs-on: ${{ matrix.os }}

    # Configure a human-readable name for each job
    name: Local Bazel build on ${{ matrix.os }} for ${{ matrix.target }}

    steps:
      - uses: actions/checkout@v6

      # Some integration tests rely on DotSlash being installed.
      # See https://github.com/openai/codex/pull/7617.
      - name: Install DotSlash
        uses: facebook/install-dotslash@v2

      - name: Make DotSlash available in PATH (Unix)
        if: runner.os != 'Windows'
        run: cp "$(which dotslash)" /usr/local/bin

      - name: Make DotSlash available in PATH (Windows)
        if: runner.os == 'Windows'
        shell: pwsh
        run: Copy-Item (Get-Command dotslash).Source -Destination "$env:LOCALAPPDATA\Microsoft\WindowsApps\dotslash.exe"

      # Install Bazel via Bazelisk
      - name: Set up Bazel
        uses: bazelbuild/setup-bazelisk@v3

      # TODO(mbolin): Bring this back once we have caching working. Currently,
      # we never seem to get a cache hit but we still end up paying the cost of
      # uploading at the end of the build, which takes over a minute!
      #
      # Cache build and external artifacts so that the next ci build is incremental.
      # Because github action caches cannot be updated after a build, we need to
      # store the contents of each build in a unique cache key, then fall back to loading
      # it on the next ci run. We use hashFiles(...) in the key and restore-keys with
      # the prefix to load the most recent cache for the branch on a cache miss. You
      # should customize the contents of hashFiles to capture any bazel input sources,
      # although this doesn't need to be perfect. If none of the input sources change
      # then a cache hit will load an existing cache and bazel won't have to do any work.
      # In the case of a cache miss, you want the fallback cache to contain most of the
      # previously built artifacts to minimize build time. The more precise you are with
      # hashFiles sources the less work bazel will have to do.
      # - name: Mount bazel caches
      #   uses: actions/cache@v5
      #   with:
      #     path: |
      #       ~/.cache/bazel-repo-cache
      #       ~/.cache/bazel-repo-contents-cache
      #     key: bazel-cache-${{ matrix.os }}-${{ hashFiles('**/BUILD.bazel', '**/*.bzl', 'MODULE.bazel') }}
      #     restore-keys: |
      #       bazel-cache-${{ matrix.os }}

      - name: Configure Bazel startup args (Windows)
        if: runner.os == 'Windows'
        shell: pwsh
        run: |
          # Use a very short path to reduce argv/path length issues.
          "BAZEL_STARTUP_ARGS=--output_user_root=C:\" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append

      - name: bazel test //...
        env:
          BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }}
        shell: bash
        run: |
          bazel $BAZEL_STARTUP_ARGS --bazelrc=.github/workflows/ci.bazelrc test //... \
            --build_metadata=REPO_URL=https://github.com/openai/codex.git \
            --build_metadata=COMMIT_SHA=$(git rev-parse HEAD) \
            --build_metadata=ROLE=CI \
            --build_metadata=VISIBILITY=PUBLIC \
            "--remote_header=x-buildbuddy-api-key=$BUILDBUDDY_API_KEY"

26 .github/workflows/cargo-deny.yml vendored
@@ -1,26 +0,0 @@
name: cargo-deny

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  cargo-deny:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./codex-rs
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable

      - name: Run cargo-deny
        uses: EmbarkStudios/cargo-deny-action@v2
        with:
          rust-version: stable
          manifest-path: ./codex-rs/Cargo.toml

20 .github/workflows/ci.bazelrc vendored
@@ -1,20 +0,0 @@
common --remote_download_minimal
common --nobuild_runfile_links
common --keep_going

# We prefer to run the build actions entirely remotely so we can dial up the concurrency.
# We have platform-specific tests, so we want to execute the tests on all platforms using the strongest sandboxing available on each platform.

# On linux, we can do a full remote build/test, by targeting the right (x86/arm) runners, so we have coverage of both.
# Linux crossbuilds don't work until we untangle the libc constraint mess.
common:linux --config=remote
common:linux --strategy=remote
common:linux --platforms=//:rbe

# On mac, we can run all the build actions remotely but test actions locally.
common:macos --config=remote
common:macos --strategy=remote
common:macos --strategy=TestRunner=darwin-sandbox,local

common:windows --strategy=TestRunner=local
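Because the repo-level .bazelrc sets --enable_platform_specific_config, the common:linux / common:macos / common:windows lines above take effect automatically on the matching host OS. This file is layered on top of the repo config with the --bazelrc startup flag, roughly as the bazel.yml workflow above does:

    bazel --bazelrc=.github/workflows/ci.bazelrc test //...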

9 .github/workflows/ci.yml vendored
@@ -12,7 +12,7 @@ jobs:
      NODE_OPTIONS: --max-old-space-size=4096
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v5

      - name: Setup pnpm
        uses: pnpm/action-setup@v4
@@ -20,7 +20,7 @@ jobs:
          run_install: false

      - name: Setup Node.js
        uses: actions/setup-node@v6
        uses: actions/setup-node@v5
        with:
          node-version: 22

@@ -36,8 +36,7 @@ jobs:
          GH_TOKEN: ${{ github.token }}
        run: |
          set -euo pipefail
          # Use a rust-release version that includes all native binaries.
          CODEX_VERSION=0.74.0
          CODEX_VERSION=0.40.0
          OUTPUT_DIR="${RUNNER_TEMP}"
          python3 ./scripts/stage_npm_packages.py \
            --release-version "$CODEX_VERSION" \
@@ -47,7 +46,7 @@ jobs:
          echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"

      - name: Upload staged npm package artifact
        uses: actions/upload-artifact@v6
        uses: actions/upload-artifact@v5
        with:
          name: codex-npm-staging
          path: ${{ steps.stage_npm_package.outputs.pack_output }}

5 .github/workflows/cla.yml vendored
@@ -13,9 +13,6 @@ permissions:

jobs:
  cla:
    # Only run the CLA assistant for the canonical openai repo so forks are not blocked
    # and contributors who signed previously do not receive duplicate CLA notifications.
    if: ${{ github.repository_owner == 'openai' }}
    runs-on: ubuntu-latest
    steps:
      - uses: contributor-assistant/github-action@v2.6.1
@@ -46,4 +43,4 @@ jobs:
          path-to-document: https://github.com/openai/codex/blob/main/docs/CLA.md
          path-to-signatures: signatures/cla.json
          branch: cla-signatures
          allowlist: codex,dependabot,dependabot[bot],github-actions[bot]
          allowlist: dependabot[bot]

107 .github/workflows/close-stale-contributor-prs.yml vendored
@@ -1,107 +0,0 @@
name: Close stale contributor PRs

on:
  workflow_dispatch:
  schedule:
    - cron: "0 6 * * *"

permissions:
  contents: read
  issues: write
  pull-requests: write

jobs:
  close-stale-contributor-prs:
    # Prevent scheduled runs on forks
    if: github.repository == 'openai/codex'
    runs-on: ubuntu-latest
    steps:
      - name: Close inactive PRs from contributors
        uses: actions/github-script@v8
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const DAYS_INACTIVE = 14;
            const cutoff = new Date(Date.now() - DAYS_INACTIVE * 24 * 60 * 60 * 1000);
            const { owner, repo } = context.repo;
            const dryRun = false;
            const stalePrs = [];

            core.info(`Dry run mode: ${dryRun}`);

            const prs = await github.paginate(github.rest.pulls.list, {
              owner,
              repo,
              state: "open",
              per_page: 100,
              sort: "updated",
              direction: "asc",
            });

            for (const pr of prs) {
              const lastUpdated = new Date(pr.updated_at);
              if (lastUpdated > cutoff) {
                core.info(`PR ${pr.number} is fresh`);
                continue;
              }

              if (!pr.user || pr.user.type !== "User") {
                core.info(`PR ${pr.number} wasn't created by a user`);
                continue;
              }

              let permission;
              try {
                const permissionResponse = await github.rest.repos.getCollaboratorPermissionLevel({
                  owner,
                  repo,
                  username: pr.user.login,
                });
                permission = permissionResponse.data.permission;
              } catch (error) {
                if (error.status === 404) {
                  core.info(`Author ${pr.user.login} is not a collaborator; skipping #${pr.number}`);
                  continue;
                }
                throw error;
              }

              const hasContributorAccess = ["admin", "maintain", "write"].includes(permission);
              if (!hasContributorAccess) {
                core.info(`Author ${pr.user.login} has ${permission} access; skipping #${pr.number}`);
                continue;
              }

              stalePrs.push(pr);
            }

            if (!stalePrs.length) {
              core.info("No stale contributor pull requests found.");
              return;
            }

            for (const pr of stalePrs) {
              const issue_number = pr.number;
              const closeComment = `Closing this pull request because it has had no updates for more than ${DAYS_INACTIVE} days. If you plan to continue working on it, feel free to reopen or open a new PR.`;

              if (dryRun) {
                core.info(`[dry-run] Would close contributor PR #${issue_number} from ${pr.user.login}`);
                continue;
              }

              await github.rest.issues.createComment({
                owner,
                repo,
                issue_number,
                body: closeComment,
              });

              await github.rest.pulls.update({
                owner,
                repo,
                pull_number: issue_number,
                state: "closed",
              });

              core.info(`Closed contributor PR #${issue_number} from ${pr.user.login}`);
            }

2 .github/workflows/codespell.yml vendored
@@ -18,7 +18,7 @@ jobs:

    steps:
      - name: Checkout
        uses: actions/checkout@v6
        uses: actions/checkout@v5
      - name: Annotate locations with typos
        uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
      - name: Codespell

6 .github/workflows/issue-deduplicator.yml vendored
@@ -9,15 +9,14 @@ on:
jobs:
  gather-duplicates:
    name: Identify potential duplicates
    # Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
    if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate'))
    if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate') }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
    outputs:
      codex_output: ${{ steps.codex.outputs.final-message }}
    steps:
      - uses: actions/checkout@v6
      - uses: actions/checkout@v5

      - name: Prepare Codex inputs
        env:
@@ -47,6 +46,7 @@ jobs:
        with:
          openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
          allow-users: "*"
          model: gpt-5
          prompt: |
            You are an assistant that triages new GitHub issues by identifying potential duplicates.


5 .github/workflows/issue-labeler.yml vendored
@@ -9,15 +9,14 @@ on:
jobs:
  gather-labels:
    name: Generate label suggestions
    # Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
    if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label'))
    if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label') }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
    outputs:
      codex_output: ${{ steps.codex.outputs.final-message }}
    steps:
      - uses: actions/checkout@v6
      - uses: actions/checkout@v5

      - id: codex
        uses: openai/codex-action@main

288 .github/workflows/rust-ci.yml vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
codex: ${{ steps.detect.outputs.codex }}
|
||||
workflows: ${{ steps.detect.outputs.workflows }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Detect changed paths (no external action)
|
||||
@@ -28,11 +28,9 @@ jobs:
|
||||
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
BASE_SHA='${{ github.event.pull_request.base.sha }}'
|
||||
HEAD_SHA='${{ github.event.pull_request.head.sha }}'
|
||||
echo "Base SHA: $BASE_SHA"
|
||||
echo "Head SHA: $HEAD_SHA"
|
||||
# List files changed between base and PR head
|
||||
mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA" "$HEAD_SHA")
|
||||
# List files changed between base and current HEAD (merge-base aware)
|
||||
mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA"...HEAD)
|
||||
else
|
||||
# On push / manual runs, default to running everything
|
||||
files=("codex-rs/force" ".github/force")
|
||||
@@ -58,8 +56,8 @@ jobs:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
with:
|
||||
components: rustfmt
|
||||
- name: cargo fmt
|
||||
@@ -76,8 +74,8 @@ jobs:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: cargo-shear
|
||||
@@ -88,7 +86,7 @@ jobs:
|
||||
# --- CI to validate on different os/targets --------------------------------
|
||||
lint_build:
|
||||
name: Lint/Build — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
|
||||
runs-on: ${{ matrix.runs_on || matrix.runner }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
timeout-minutes: 30
|
||||
needs: changed
|
||||
# Keep job-level if to avoid spinning up runners when not needed
|
||||
@@ -97,8 +95,8 @@ jobs:
|
||||
run:
|
||||
working-directory: codex-rs
|
||||
env:
|
||||
# Speed up repeated builds across CI runs by caching compiled objects (non-Windows).
|
||||
USE_SCCACHE: ${{ startsWith(matrix.runner, 'windows') && 'false' || 'true' }}
|
||||
# Speed up repeated builds across CI runs by caching compiled objects.
|
||||
RUSTC_WRAPPER: sccache
|
||||
CARGO_INCREMENTAL: "0"
|
||||
SCCACHE_CACHE_SIZE: 10G
|
||||
|
||||
@@ -106,140 +104,78 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: macos-15-xlarge
|
||||
- runner: macos-14
|
||||
target: aarch64-apple-darwin
|
||||
profile: dev
|
||||
- runner: macos-15-xlarge
|
||||
- runner: macos-14
|
||||
target: x86_64-apple-darwin
|
||||
profile: dev
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-x64
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-x64
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-musl
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-arm64
|
||||
- runner: ubuntu-24.04-arm
|
||||
target: aarch64-unknown-linux-gnu
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-arm64
|
||||
- runner: windows-x64
|
||||
- runner: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-x64
|
||||
- runner: windows-arm64
|
||||
- runner: windows-11-arm
|
||||
target: aarch64-pc-windows-msvc
|
||||
profile: dev
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-arm64
|
||||
|
||||
# Also run representative release builds on Mac and Linux because
|
||||
# there could be release-only build errors we want to catch.
|
||||
# Hopefully this also pre-populates the build cache to speed up
|
||||
# releases.
|
||||
- runner: macos-15-xlarge
|
||||
- runner: macos-14
|
||||
target: aarch64-apple-darwin
|
||||
profile: release
|
||||
- runner: ubuntu-24.04
|
||||
target: x86_64-unknown-linux-musl
|
||||
profile: release
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-linux-x64
|
||||
- runner: windows-x64
|
||||
- runner: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
profile: release
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-x64
|
||||
- runner: windows-arm64
|
||||
- runner: windows-11-arm
|
||||
target: aarch64-pc-windows-msvc
|
||||
profile: release
|
||||
runs_on:
|
||||
group: codex-runners
|
||||
labels: codex-windows-arm64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Install UBSan runtime (musl)
|
||||
if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
sudo apt-get update -y
|
||||
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
|
||||
fi
|
||||
- uses: dtolnay/rust-toolchain@1.92
|
||||
- uses: actions/checkout@v5
|
||||
- uses: dtolnay/rust-toolchain@1.90
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
components: clippy
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Use hermetic Cargo home (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
|
||||
mkdir -p "${cargo_home}/bin"
|
||||
echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
|
||||
echo "${cargo_home}/bin" >> "$GITHUB_PATH"
|
||||
: > "${cargo_home}/config.toml"
|
||||
|
||||
- name: Compute lockfile hash
|
||||
id: lockhash
|
||||
working-directory: codex-rs
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
|
||||
echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# Explicit cache restore: split cargo home vs target, so we can
|
||||
# avoid caching the large target dir on the gnu-dev job.
|
||||
- name: Restore cargo home cache
|
||||
id: cache_cargo_home_restore
|
||||
uses: actions/cache/restore@v5
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/
|
||||
~/.cargo/registry/index/
|
||||
~/.cargo/registry/cache/
|
||||
~/.cargo/git/db/
|
||||
${{ github.workspace }}/.cargo-home/bin/
|
||||
${{ github.workspace }}/.cargo-home/registry/index/
|
||||
${{ github.workspace }}/.cargo-home/registry/cache/
|
||||
${{ github.workspace }}/.cargo-home/git/db/
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
|
||||
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
|
||||
restore-keys: |
|
||||
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
# Install and restore sccache cache
|
||||
- name: Install sccache
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
|
||||
with:
|
||||
tool: sccache
|
||||
version: 0.7.5
|
||||
|
||||
- name: Configure sccache backend
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
@@ -252,30 +188,17 @@ jobs:
|
||||
echo "Using sccache local disk + actions/cache fallback"
|
||||
fi
|
||||
|
||||
- name: Enable sccache wrapper
|
||||
if: ${{ env.USE_SCCACHE == 'true' }}
|
||||
shell: bash
|
||||
run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Restore sccache cache (fallback)
|
||||
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
if: ${{ env.SCCACHE_GHA_ENABLED != 'true' }}
|
||||
id: cache_sccache_restore
|
||||
uses: actions/cache/restore@v5
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/.sccache/
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
|
||||
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
|
||||
restore-keys: |
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
|
||||
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Disable sccache wrapper (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "RUSTC_WRAPPER=" >> "$GITHUB_ENV"
|
||||
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Prepare APT cache directories (musl)
|
||||
shell: bash
|
||||
@@ -287,79 +210,21 @@ jobs:
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Restore APT cache (musl)
|
||||
id: cache_apt_restore
|
||||
uses: actions/cache/restore@v5
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: |
|
||||
/var/cache/apt
|
||||
key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Install Zig
|
||||
uses: mlugg/setup-zig@v2
|
||||
with:
|
||||
version: 0.14.0
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Install musl build tools
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
TARGET: ${{ matrix.target }}
|
||||
APT_UPDATE_ARGS: -o Acquire::Retries=3
|
||||
APT_INSTALL_ARGS: --no-install-recommends
|
||||
shell: bash
|
||||
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Configure rustc UBSan wrapper (musl host)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
ubsan=""
|
||||
if command -v ldconfig >/dev/null 2>&1; then
|
||||
ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
|
||||
fi
|
||||
wrapper_root="${RUNNER_TEMP:-/tmp}"
|
||||
wrapper="${wrapper_root}/rustc-ubsan-wrapper"
|
||||
cat > "${wrapper}" <<EOF
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
if [[ -n "${ubsan}" ]]; then
|
||||
export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
|
||||
fi
|
||||
exec "\$1" "\${@:2}"
|
||||
EOF
|
||||
chmod +x "${wrapper}"
|
||||
echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
|
||||
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
|
||||
|
||||
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
|
||||
name: Clear sanitizer flags (musl)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
|
||||
echo "RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
|
||||
# Override any runner-level Cargo config rustflags as well.
|
||||
echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
|
||||
|
||||
sanitize_flags() {
|
||||
local input="$1"
|
||||
input="${input//-fsanitize=undefined/}"
|
||||
input="${input//-fno-sanitize-recover=undefined/}"
|
||||
input="${input//-fno-sanitize-trap=undefined/}"
|
||||
echo "$input"
|
||||
}
|
||||
|
||||
cflags="$(sanitize_flags "${CFLAGS-}")"
|
||||
cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
|
||||
echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
|
||||
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
|
||||
sudo apt-get -y update -o Acquire::Retries=3
|
||||
sudo apt-get -y install --no-install-recommends musl-tools pkg-config
|
||||
|
||||
- name: Install cargo-chef
if: ${{ matrix.profile == 'release' }}
@@ -399,34 +264,30 @@ jobs:
- name: Save cargo home cache
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@v5
uses: actions/cache/save@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
${{ github.workspace }}/.cargo-home/bin/
${{ github.workspace }}/.cargo-home/registry/index/
${{ github.workspace }}/.cargo-home/registry/cache/
${{ github.workspace }}/.cargo-home/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}

- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
uses: actions/cache/save@v5
uses: actions/cache/save@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}

- name: sccache stats
if: always() && env.USE_SCCACHE == 'true'
if: always()
continue-on-error: true
run: sccache --show-stats || true

- name: sccache summary
if: always() && env.USE_SCCACHE == 'true'
if: always()
shell: bash
run: |
{
@@ -440,7 +301,7 @@ jobs:
- name: Save APT cache (musl)
if: always() && !cancelled() && (matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl') && steps.cache_apt_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@v5
uses: actions/cache/save@v4
with:
path: |
/var/cache/apt
@@ -457,7 +318,7 @@ jobs:

tests:
name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}
runs-on: ${{ matrix.runs_on || matrix.runner }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 30
needs: changed
if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
@@ -465,8 +326,7 @@ jobs:
run:
working-directory: codex-rs
env:
# Speed up repeated builds across CI runs by caching compiled objects (non-Windows).
USE_SCCACHE: ${{ startsWith(matrix.runner, 'windows') && 'false' || 'true' }}
RUSTC_WRAPPER: sccache
CARGO_INCREMENTAL: "0"
SCCACHE_CACHE_SIZE: 10G

@@ -474,77 +334,48 @@ jobs:
fail-fast: false
matrix:
include:
- runner: macos-15-xlarge
- runner: macos-14
target: aarch64-apple-darwin
profile: dev
- runner: ubuntu-24.04
target: x86_64-unknown-linux-gnu
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-x64
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-gnu
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-arm64
- runner: windows-x64
- runner: windows-latest
target: x86_64-pc-windows-msvc
profile: dev
runs_on:
group: codex-runners
labels: codex-windows-x64
- runner: windows-arm64
- runner: windows-11-arm
target: aarch64-pc-windows-msvc
profile: dev
runs_on:
group: codex-runners
labels: codex-windows-arm64

steps:
- uses: actions/checkout@v6

# Some integration tests rely on DotSlash being installed.
# See https://github.com/openai/codex/pull/7617.
- name: Install DotSlash
uses: facebook/install-dotslash@v2

- uses: dtolnay/rust-toolchain@1.92
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}

- name: Compute lockfile hash
id: lockhash
working-directory: codex-rs
shell: bash
run: |
set -euo pipefail
echo "hash=$(sha256sum Cargo.lock | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
echo "toolchain_hash=$(sha256sum rust-toolchain.toml | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
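
The two step outputs above exist so later cache steps can build one composite key that changes whenever either file changes. A sketch of how such a key comes together, with hypothetical matrix values substituted in:

    hash="$(sha256sum Cargo.lock | cut -d' ' -f1)"
    toolchain_hash="$(sha256sum rust-toolchain.toml | cut -d' ' -f1)"
    # Mirrors key: cargo-home-<runner>-<target>-<profile>-<hash>-<toolchain_hash>
    echo "cargo-home-ubuntu-24.04-x86_64-unknown-linux-gnu-dev-${hash}-${toolchain_hash}"
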
- name: Restore cargo home cache
id: cache_cargo_home_restore
uses: actions/cache/restore@v5
uses: actions/cache/restore@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}
restore-keys: |
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

- name: Install sccache
if: ${{ env.USE_SCCACHE == 'true' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: sccache
version: 0.7.5

- name: Configure sccache backend
if: ${{ env.USE_SCCACHE == 'true' }}
shell: bash
run: |
set -euo pipefail
@@ -557,20 +388,15 @@ jobs:
echo "Using sccache local disk + actions/cache fallback"
fi

- name: Enable sccache wrapper
if: ${{ env.USE_SCCACHE == 'true' }}
shell: bash
run: echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV"

- name: Restore sccache cache (fallback)
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
if: ${{ env.SCCACHE_GHA_ENABLED != 'true' }}
id: cache_sccache_restore
uses: actions/cache/restore@v5
uses: actions/cache/restore@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}
restore-keys: |
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
@@ -580,38 +406,38 @@ jobs:

- name: tests
id: test
continue-on-error: true
run: cargo nextest run --all-features --no-fail-fast --target ${{ matrix.target }} --cargo-profile ci-test
env:
RUST_BACKTRACE: 1
NEXTEST_STATUS_LEVEL: leak

- name: Save cargo home cache
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@v5
uses: actions/cache/save@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ hashFiles('codex-rs/rust-toolchain.toml') }}

- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
if: always() && !cancelled() && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
uses: actions/cache/save@v5
uses: actions/cache/save@v4
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ hashFiles('**/Cargo.lock') }}-${{ github.run_id }}

- name: sccache stats
if: always() && env.USE_SCCACHE == 'true'
if: always()
continue-on-error: true
run: sccache --show-stats || true

- name: sccache summary
if: always() && env.USE_SCCACHE == 'true'
if: always()
shell: bash
run: |
{

53 .github/workflows/rust-release-prepare.yml vendored
@@ -1,53 +0,0 @@
name: rust-release-prepare
on:
workflow_dispatch:
schedule:
- cron: "0 */4 * * *"

concurrency:
group: ${{ github.workflow }}
cancel-in-progress: false

permissions:
contents: write
pull-requests: write

jobs:
prepare:
# Prevent scheduled runs on forks (no secrets, wastes Actions minutes)
if: github.repository == 'openai/codex'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
with:
ref: main
fetch-depth: 0

- name: Update models.json
env:
OPENAI_API_KEY: ${{ secrets.CODEX_OPENAI_API_KEY }}
run: |
set -euo pipefail

client_version="99.99.99"
terminal_info="github-actions"
user_agent="codex_cli_rs/99.99.99 (Linux $(uname -r); $(uname -m)) ${terminal_info}"
base_url="${OPENAI_BASE_URL:-https://chatgpt.com/backend-api/codex}"

headers=(
-H "Authorization: Bearer ${OPENAI_API_KEY}"
-H "User-Agent: ${user_agent}"
)

url="${base_url%/}/models?client_version=${client_version}"
curl --http1.1 --fail --show-error --location "${headers[@]}" "${url}" | jq '.' > codex-rs/core/models.json

- name: Open pull request (if changed)
uses: peter-evans/create-pull-request@v8
with:
commit-message: "Update models.json"
title: "Update models.json"
body: "Automated update of models.json."
branch: "bot/update-models-json"
reviewers: "pakrym-oai,aibrahim-oai"
delete-branch: true

482 .github/workflows/rust-release.yml vendored
@@ -19,8 +19,8 @@ jobs:
tag-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.92
- uses: actions/checkout@v5

- name: Validate tag matches Cargo.toml version
shell: bash
run: |
@@ -45,23 +45,11 @@ jobs:
echo "✅ Tag and Cargo.toml agree (${tag_ver})"
echo "::endgroup::"

- name: Verify config schema fixture
shell: bash
working-directory: codex-rs
run: |
set -euo pipefail
echo "If this fails, run: just write-config-schema to overwrite fixture with intentional changes."
cargo run -p codex-core --bin codex-write-config-schema
git diff --exit-code core/config.schema.json

build:
needs: tag-check
name: Build - ${{ matrix.runner }} - ${{ matrix.target }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 60
permissions:
contents: read
id-token: write
timeout-minutes: 30
defaults:
run:
working-directory: codex-rs
@@ -88,208 +76,198 @@ jobs:
target: aarch64-pc-windows-msvc

steps:
- uses: actions/checkout@v6
- name: Install UBSan runtime (musl)
if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
shell: bash
run: |
set -euo pipefail
if command -v apt-get >/dev/null 2>&1; then
sudo apt-get update -y
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
fi
- uses: dtolnay/rust-toolchain@1.92
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Use hermetic Cargo home (musl)
shell: bash
run: |
set -euo pipefail
cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
mkdir -p "${cargo_home}/bin"
echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
echo "${cargo_home}/bin" >> "$GITHUB_PATH"
: > "${cargo_home}/config.toml"

- uses: actions/cache@v5
- uses: actions/cache@v4
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
${{ github.workspace }}/.cargo-home/bin/
${{ github.workspace }}/.cargo-home/registry/index/
${{ github.workspace }}/.cargo-home/registry/cache/
${{ github.workspace }}/.cargo-home/git/db/
${{ github.workspace }}/codex-rs/target/
key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install musl build tools
env:
TARGET: ${{ matrix.target }}
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Configure rustc UBSan wrapper (musl host)
shell: bash
run: |
set -euo pipefail
ubsan=""
if command -v ldconfig >/dev/null 2>&1; then
ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
fi
wrapper_root="${RUNNER_TEMP:-/tmp}"
wrapper="${wrapper_root}/rustc-ubsan-wrapper"
cat > "${wrapper}" <<EOF
#!/usr/bin/env bash
set -euo pipefail
if [[ -n "${ubsan}" ]]; then
export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
fi
exec "\$1" "\${@:2}"
EOF
chmod +x "${wrapper}"
echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"
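
To make the wrapper mechanism concrete: cargo invokes RUSTC_WRAPPER as `wrapper rustc <args...>`, so inside the generated script "$1" is the real rustc and "${@:2}" is everything after it. A simplified sketch of what the heredoc above expands to, with a hypothetical libubsan path baked in:

    #!/usr/bin/env bash
    set -euo pipefail
    # Preload the UBSan runtime so a sanitized host rustc can resolve its symbols.
    export LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libubsan.so.1${LD_PRELOAD:+:${LD_PRELOAD}}"
    exec "$1" "${@:2}"
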
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Clear sanitizer flags (musl)
shell: bash
run: |
set -euo pipefail
# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
echo "RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
# Override any runner-level Cargo config rustflags as well.
echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"

sanitize_flags() {
local input="$1"
input="${input//-fsanitize=undefined/}"
input="${input//-fno-sanitize-recover=undefined/}"
input="${input//-fno-sanitize-trap=undefined/}"
echo "$input"
}

cflags="$(sanitize_flags "${CFLAGS-}")"
cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
sudo apt-get update
sudo apt-get install -y musl-tools pkg-config

- name: Cargo build
run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy

- if: ${{ matrix.runner == 'macos-15-xlarge' }}
name: Configure Apple code signing
shell: bash
env:
KEYCHAIN_PASSWORD: actions
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE_P12 }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
run: |
if [[ "${{ contains(matrix.target, 'windows') }}" == 'true' ]]; then
cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy --bin codex-windows-sandbox-setup --bin codex-command-runner
else
cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy
set -euo pipefail

if [[ -z "${APPLE_CERTIFICATE:-}" ]]; then
echo "APPLE_CERTIFICATE is required for macOS signing"
exit 1
fi

- if: ${{ contains(matrix.target, 'linux') }}
name: Cosign Linux artifacts
uses: ./.github/actions/linux-code-sign
with:
target: ${{ matrix.target }}
artifacts-dir: ${{ github.workspace }}/codex-rs/target/${{ matrix.target }}/release
if [[ -z "${APPLE_CERTIFICATE_PASSWORD:-}" ]]; then
echo "APPLE_CERTIFICATE_PASSWORD is required for macOS signing"
exit 1
fi

- if: ${{ contains(matrix.target, 'windows') }}
name: Sign Windows binaries with Azure Trusted Signing
uses: ./.github/actions/windows-code-sign
with:
target: ${{ matrix.target }}
client-id: ${{ secrets.AZURE_TRUSTED_SIGNING_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TRUSTED_SIGNING_TENANT_ID }}
subscription-id: ${{ secrets.AZURE_TRUSTED_SIGNING_SUBSCRIPTION_ID }}
endpoint: ${{ secrets.AZURE_TRUSTED_SIGNING_ENDPOINT }}
account-name: ${{ secrets.AZURE_TRUSTED_SIGNING_ACCOUNT_NAME }}
certificate-profile-name: ${{ secrets.AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE_NAME }}
cert_path="${RUNNER_TEMP}/apple_signing_certificate.p12"
echo "$APPLE_CERTIFICATE" | base64 -d > "$cert_path"

- if: ${{ runner.os == 'macOS' }}
name: MacOS code signing (binaries)
uses: ./.github/actions/macos-code-sign
with:
target: ${{ matrix.target }}
sign-binaries: "true"
sign-dmg: "false"
apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }}
apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
apple-notarization-key-p8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
keychain_path="${RUNNER_TEMP}/codex-signing.keychain-db"
security create-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"
security set-keychain-settings -lut 21600 "$keychain_path"
security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$keychain_path"

- if: ${{ runner.os == 'macOS' }}
name: Build macOS dmg
keychain_args=()
cleanup_keychain() {
if ((${#keychain_args[@]} > 0)); then
security list-keychains -s "${keychain_args[@]}" || true
security default-keychain -s "${keychain_args[0]}" || true
else
security list-keychains -s || true
fi
if [[ -f "$keychain_path" ]]; then
security delete-keychain "$keychain_path" || true
fi
}

while IFS= read -r keychain; do
[[ -n "$keychain" ]] && keychain_args+=("$keychain")
done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')

if ((${#keychain_args[@]} > 0)); then
security list-keychains -s "$keychain_path" "${keychain_args[@]}"
else
security list-keychains -s "$keychain_path"
fi

security default-keychain -s "$keychain_path"
security import "$cert_path" -k "$keychain_path" -P "$APPLE_CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" "$keychain_path" > /dev/null

codesign_hashes=()
while IFS= read -r hash; do
[[ -n "$hash" ]] && codesign_hashes+=("$hash")
done < <(security find-identity -v -p codesigning "$keychain_path" \
| sed -n 's/.*\([0-9A-F]\{40\}\).*/\1/p' \
| sort -u)

if ((${#codesign_hashes[@]} == 0)); then
echo "No signing identities found in $keychain_path"
cleanup_keychain
rm -f "$cert_path"
exit 1
fi

if ((${#codesign_hashes[@]} > 1)); then
echo "Multiple signing identities found in $keychain_path:"
printf ' %s\n' "${codesign_hashes[@]}"
cleanup_keychain
rm -f "$cert_path"
exit 1
fi

APPLE_CODESIGN_IDENTITY="${codesign_hashes[0]}"

rm -f "$cert_path"

echo "APPLE_CODESIGN_IDENTITY=$APPLE_CODESIGN_IDENTITY" >> "$GITHUB_ENV"
echo "APPLE_CODESIGN_KEYCHAIN=$keychain_path" >> "$GITHUB_ENV"
echo "::add-mask::$APPLE_CODESIGN_IDENTITY"

- if: ${{ matrix.runner == 'macos-15-xlarge' }}
name: Sign macOS binaries
shell: bash
run: |
set -euo pipefail

target="${{ matrix.target }}"
release_dir="target/${target}/release"
dmg_root="${RUNNER_TEMP}/codex-dmg-root"
volname="Codex (${target})"
dmg_path="${release_dir}/codex-${target}.dmg"

# The previous "MacOS code signing (binaries)" step signs + notarizes the
# built artifacts in `${release_dir}`. This step packages *those same*
# signed binaries into a dmg.
codex_binary_path="${release_dir}/codex"
proxy_binary_path="${release_dir}/codex-responses-api-proxy"

rm -rf "$dmg_root"
mkdir -p "$dmg_root"

if [[ ! -f "$codex_binary_path" ]]; then
echo "Binary $codex_binary_path not found"
exit 1
fi
if [[ ! -f "$proxy_binary_path" ]]; then
echo "Binary $proxy_binary_path not found"
if [[ -z "${APPLE_CODESIGN_IDENTITY:-}" ]]; then
echo "APPLE_CODESIGN_IDENTITY is required for macOS signing"
exit 1
fi

ditto "$codex_binary_path" "${dmg_root}/codex"
ditto "$proxy_binary_path" "${dmg_root}/codex-responses-api-proxy"

rm -f "$dmg_path"
hdiutil create \
-volname "$volname" \
-srcfolder "$dmg_root" \
-format UDZO \
-ov \
"$dmg_path"

if [[ ! -f "$dmg_path" ]]; then
echo "dmg $dmg_path not found after build"
exit 1
keychain_args=()
if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" && -f "${APPLE_CODESIGN_KEYCHAIN}" ]]; then
keychain_args+=(--keychain "${APPLE_CODESIGN_KEYCHAIN}")
fi

- if: ${{ runner.os == 'macOS' }}
name: MacOS code signing (dmg)
uses: ./.github/actions/macos-code-sign
with:
target: ${{ matrix.target }}
sign-binaries: "false"
sign-dmg: "true"
apple-certificate: ${{ secrets.APPLE_CERTIFICATE_P12 }}
apple-certificate-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
apple-notarization-key-p8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
apple-notarization-key-id: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
apple-notarization-issuer-id: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
for binary in codex codex-responses-api-proxy; do
path="target/${{ matrix.target }}/release/${binary}"
codesign --force --options runtime --timestamp --sign "$APPLE_CODESIGN_IDENTITY" "${keychain_args[@]}" "$path"
done

- if: ${{ matrix.runner == 'macos-15-xlarge' }}
name: Notarize macOS binaries
shell: bash
env:
APPLE_NOTARIZATION_KEY_P8: ${{ secrets.APPLE_NOTARIZATION_KEY_P8 }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
run: |
set -euo pipefail

for var in APPLE_NOTARIZATION_KEY_P8 APPLE_NOTARIZATION_KEY_ID APPLE_NOTARIZATION_ISSUER_ID; do
if [[ -z "${!var:-}" ]]; then
echo "$var is required for notarization"
exit 1
fi
done

notary_key_path="${RUNNER_TEMP}/notarytool.key.p8"
echo "$APPLE_NOTARIZATION_KEY_P8" | base64 -d > "$notary_key_path"
cleanup_notary() {
rm -f "$notary_key_path"
}
trap cleanup_notary EXIT

notarize_binary() {
local binary="$1"
local source_path="target/${{ matrix.target }}/release/${binary}"
local archive_path="${RUNNER_TEMP}/${binary}.zip"

if [[ ! -f "$source_path" ]]; then
echo "Binary $source_path not found"
exit 1
fi

rm -f "$archive_path"
ditto -c -k --keepParent "$source_path" "$archive_path"

submission_json=$(xcrun notarytool submit "$archive_path" \
--key "$notary_key_path" \
--key-id "$APPLE_NOTARIZATION_KEY_ID" \
--issuer "$APPLE_NOTARIZATION_ISSUER_ID" \
--output-format json \
--wait)

status=$(printf '%s\n' "$submission_json" | jq -r '.status // "Unknown"')
submission_id=$(printf '%s\n' "$submission_json" | jq -r '.id // ""')

if [[ -z "$submission_id" ]]; then
echo "Failed to retrieve submission ID for $binary"
exit 1
fi

echo "::notice title=Notarization::$binary submission ${submission_id} completed with status ${status}"

if [[ "$status" != "Accepted" ]]; then
echo "Notarization failed for ${binary} (submission ${submission_id}, status ${status})"
exit 1
fi
}

notarize_binary "codex"
notarize_binary "codex-responses-api-proxy"
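
The status handling above hinges on the JSON that `notarytool --output-format json --wait` prints. A sketch of the same jq extraction, run against a hypothetical payload:

    submission_json='{"id":"12345678-aaaa-bbbb-cccc-1234567890ab","status":"Accepted"}'
    status=$(printf '%s\n' "$submission_json" | jq -r '.status // "Unknown"')
    submission_id=$(printf '%s\n' "$submission_json" | jq -r '.id // ""')
    echo "$submission_id -> $status"  # any status other than "Accepted" fails the step
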
- name: Stage artifacts
shell: bash
@@ -300,22 +278,11 @@ jobs:
if [[ "${{ matrix.runner }}" == windows* ]]; then
cp target/${{ matrix.target }}/release/codex.exe "$dest/codex-${{ matrix.target }}.exe"
cp target/${{ matrix.target }}/release/codex-responses-api-proxy.exe "$dest/codex-responses-api-proxy-${{ matrix.target }}.exe"
cp target/${{ matrix.target }}/release/codex-windows-sandbox-setup.exe "$dest/codex-windows-sandbox-setup-${{ matrix.target }}.exe"
cp target/${{ matrix.target }}/release/codex-command-runner.exe "$dest/codex-command-runner-${{ matrix.target }}.exe"
else
cp target/${{ matrix.target }}/release/codex "$dest/codex-${{ matrix.target }}"
cp target/${{ matrix.target }}/release/codex-responses-api-proxy "$dest/codex-responses-api-proxy-${{ matrix.target }}"
fi

if [[ "${{ matrix.target }}" == *linux* ]]; then
cp target/${{ matrix.target }}/release/codex.sigstore "$dest/codex-${{ matrix.target }}.sigstore"
cp target/${{ matrix.target }}/release/codex-responses-api-proxy.sigstore "$dest/codex-responses-api-proxy-${{ matrix.target }}.sigstore"
fi

if [[ "${{ matrix.target }}" == *apple-darwin ]]; then
cp target/${{ matrix.target }}/release/codex-${{ matrix.target }}.dmg "$dest/codex-${{ matrix.target }}.dmg"
fi

- if: ${{ matrix.runner == 'windows-11-arm' }}
name: Install zstd
shell: powershell
@@ -327,7 +294,6 @@ jobs:
# Path that contains the uncompressed binaries for the current
# ${{ matrix.target }}
dest="dist/${{ matrix.target }}"
repo_root=$PWD

# We want to ship the raw Windows executables in the GitHub Release
# in addition to the compressed archives. Keep the originals for
@@ -351,12 +317,7 @@ jobs:
base="$(basename "$f")"
# Skip files that are already archives (shouldn't happen, but be
# safe).
if [[ "$base" == *.tar.gz || "$base" == *.zip || "$base" == *.dmg ]]; then
continue
fi

# Don't try to compress signature bundles.
if [[ "$base" == *.sigstore ]]; then
if [[ "$base" == *.tar.gz || "$base" == *.zip ]]; then
continue
fi

@@ -367,30 +328,7 @@ jobs:
# Must run from inside the dest dir so 7z won't
# embed the directory path inside the zip.
if [[ "${{ matrix.runner }}" == windows* ]]; then
if [[ "$base" == "codex-${{ matrix.target }}.exe" ]]; then
# Bundle the sandbox helper binaries into the main codex zip so
# WinGet installs include the required helpers next to codex.exe.
# Fall back to the single-binary zip if the helpers are missing
# to avoid breaking releases.
bundle_dir="$(mktemp -d)"
runner_src="$dest/codex-command-runner-${{ matrix.target }}.exe"
setup_src="$dest/codex-windows-sandbox-setup-${{ matrix.target }}.exe"
if [[ -f "$runner_src" && -f "$setup_src" ]]; then
cp "$dest/$base" "$bundle_dir/$base"
cp "$runner_src" "$bundle_dir/codex-command-runner.exe"
cp "$setup_src" "$bundle_dir/codex-windows-sandbox-setup.exe"
# Use an absolute path so bundle zips land in the real dist
# dir even when 7z runs from a temp directory.
(cd "$bundle_dir" && 7z a "$repo_root/$dest/${base}.zip" .)
else
echo "warning: missing sandbox binaries; falling back to single-binary zip"
echo "warning: expected $runner_src and $setup_src"
(cd "$dest" && 7z a "${base}.zip" "$base")
fi
rm -rf "$bundle_dir"
else
(cd "$dest" && 7z a "${base}.zip" "$base")
fi
(cd "$dest" && 7z a "${base}.zip" "$base")
fi

# Also create .zst (existing behaviour) *and* remove the original
@@ -402,7 +340,30 @@ jobs:
zstd "${zstd_args[@]}" "$dest/$base"
done

- uses: actions/upload-artifact@v6
- name: Remove signing keychain
if: ${{ always() && matrix.runner == 'macos-15-xlarge' }}
shell: bash
env:
APPLE_CODESIGN_KEYCHAIN: ${{ env.APPLE_CODESIGN_KEYCHAIN }}
run: |
set -euo pipefail
if [[ -n "${APPLE_CODESIGN_KEYCHAIN:-}" ]]; then
keychain_args=()
while IFS= read -r keychain; do
[[ "$keychain" == "$APPLE_CODESIGN_KEYCHAIN" ]] && continue
[[ -n "$keychain" ]] && keychain_args+=("$keychain")
done < <(security list-keychains | sed 's/^[[:space:]]*//;s/[[:space:]]*$//;s/"//g')
if ((${#keychain_args[@]} > 0)); then
security list-keychains -s "${keychain_args[@]}"
security default-keychain -s "${keychain_args[0]}"
fi

if [[ -f "$APPLE_CODESIGN_KEYCHAIN" ]]; then
security delete-keychain "$APPLE_CODESIGN_KEYCHAIN"
fi
fi

- uses: actions/upload-artifact@v5
with:
name: ${{ matrix.target }}
# Upload the per-binary .zst files as well as the new .tar.gz
@@ -410,19 +371,8 @@ jobs:
path: |
codex-rs/dist/${{ matrix.target }}/*

shell-tool-mcp:
name: shell-tool-mcp
needs: tag-check
uses: ./.github/workflows/shell-tool-mcp.yml
with:
release-tag: ${{ github.ref_name }}
publish: true
secrets: inherit

release:
needs:
- build
- shell-tool-mcp
needs: build
name: release
runs-on: ubuntu-latest
permissions:
@@ -436,47 +386,15 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v6
uses: actions/checkout@v5

- name: Generate release notes from tag commit message
id: release_notes
shell: bash
run: |
set -euo pipefail

# On tag pushes, GITHUB_SHA may be a tag object for annotated tags;
# peel it to the underlying commit.
commit="$(git rev-parse "${GITHUB_SHA}^{commit}")"
notes_path="${RUNNER_TEMP}/release-notes.md"

# Use the commit message for the commit the tag points at (not the
# annotated tag message).
git log -1 --format=%B "${commit}" > "${notes_path}"
# Ensure trailing newline so GitHub's markdown renderer doesn't
# occasionally run the last line into subsequent content.
echo >> "${notes_path}"

echo "path=${notes_path}" >> "${GITHUB_OUTPUT}"

- uses: actions/download-artifact@v7
- uses: actions/download-artifact@v4
with:
path: dist

- name: List
run: ls -R dist/

# This is a temporary fix: we should modify shell-tool-mcp.yml so these
# files do not end up in dist/ in the first place.
- name: Delete entries from dist/ that should not go in the release
run: |
rm -rf dist/shell-tool-mcp*

ls -R dist/

- name: Add config schema release asset
run: |
cp codex-rs/core/config.schema.json dist/config-schema.json

- name: Define release name
id: release_name
run: |
@@ -510,7 +428,7 @@ jobs:
run_install: false

- name: Setup Node.js for npm packaging
uses: actions/setup-node@v6
uses: actions/setup-node@v5
with:
node-version: 22

@@ -534,7 +452,6 @@ jobs:
with:
name: ${{ steps.release_name.outputs.name }}
tag_name: ${{ github.ref_name }}
body_path: ${{ steps.release_notes.outputs.path }}
files: dist/**
# Mark as prerelease only when the version has a suffix after x.y.z
# (e.g. -alpha, -beta). Otherwise publish a normal release.
@@ -547,19 +464,6 @@ jobs:
tag: ${{ github.ref_name }}
config: .github/dotslash-config.json

- name: Trigger developers.openai.com deploy
# Only trigger the deploy if the release is not a pre-release.
# The deploy is used to update the developers.openai.com website with the new config schema json file.
if: ${{ !contains(steps.release_name.outputs.name, '-') }}
continue-on-error: true
env:
DEV_WEBSITE_VERCEL_DEPLOY_HOOK_URL: ${{ secrets.DEV_WEBSITE_VERCEL_DEPLOY_HOOK_URL }}
run: |
if ! curl -sS -f -o /dev/null -X POST "$DEV_WEBSITE_VERCEL_DEPLOY_HOOK_URL"; then
echo "::warning title=developers.openai.com deploy hook failed::Vercel deploy hook POST failed for ${GITHUB_REF_NAME}"
exit 1
fi

# Publish to npm using OIDC authentication.
# July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/
# npm docs: https://docs.npmjs.com/trusted-publishers
@@ -575,7 +479,7 @@ jobs:

steps:
- name: Setup Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@v5
with:
node-version: 22
registry-url: "https://registry.npmjs.org"

6 .github/workflows/sdk.yml vendored
@@ -11,7 +11,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Checkout repository
uses: actions/checkout@v6
uses: actions/checkout@v5

- name: Setup pnpm
uses: pnpm/action-setup@v4
@@ -19,12 +19,12 @@ jobs:
run_install: false

- name: Setup Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@v5
with:
node-version: 22
cache: pnpm

- uses: dtolnay/rust-toolchain@1.92
- uses: dtolnay/rust-toolchain@1.90

- name: build codex
run: cargo build --bin codex

48 .github/workflows/shell-tool-mcp-ci.yml vendored
@@ -1,48 +0,0 @@
name: shell-tool-mcp CI

on:
push:
paths:
- "shell-tool-mcp/**"
- ".github/workflows/shell-tool-mcp-ci.yml"
- "pnpm-lock.yaml"
- "pnpm-workspace.yaml"
pull_request:
paths:
- "shell-tool-mcp/**"
- ".github/workflows/shell-tool-mcp-ci.yml"
- "pnpm-lock.yaml"
- "pnpm-workspace.yaml"

env:
NODE_VERSION: 22

jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v6

- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
run_install: false

- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: ${{ env.NODE_VERSION }}
cache: "pnpm"

- name: Install dependencies
run: pnpm install --frozen-lockfile

- name: Format check
run: pnpm --filter @openai/codex-shell-tool-mcp run format

- name: Run tests
run: pnpm --filter @openai/codex-shell-tool-mcp test

- name: Build
run: pnpm --filter @openai/codex-shell-tool-mcp run build

411 .github/workflows/shell-tool-mcp.yml vendored
@@ -1,411 +0,0 @@
name: shell-tool-mcp

on:
workflow_call:
inputs:
release-version:
description: Version to publish (x.y.z or x.y.z-alpha.N). Defaults to GITHUB_REF_NAME when it starts with rust-v.
required: false
type: string
release-tag:
description: Tag name to use when downloading release artifacts (defaults to rust-v<version>).
required: false
type: string
publish:
description: Whether to publish to npm when the version is releasable.
required: false
default: true
type: boolean

env:
NODE_VERSION: 22

jobs:
metadata:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.compute.outputs.version }}
release_tag: ${{ steps.compute.outputs.release_tag }}
should_publish: ${{ steps.compute.outputs.should_publish }}
npm_tag: ${{ steps.compute.outputs.npm_tag }}
steps:
- name: Compute version and tags
id: compute
run: |
set -euo pipefail

version="${{ inputs.release-version }}"
release_tag="${{ inputs.release-tag }}"

if [[ -z "$version" ]]; then
if [[ -n "$release_tag" && "$release_tag" =~ ^rust-v.+ ]]; then
version="${release_tag#rust-v}"
elif [[ "${GITHUB_REF_NAME:-}" =~ ^rust-v.+ ]]; then
version="${GITHUB_REF_NAME#rust-v}"
release_tag="${GITHUB_REF_NAME}"
else
echo "release-version is required when GITHUB_REF_NAME is not a rust-v tag."
exit 1
fi
fi

if [[ -z "$release_tag" ]]; then
release_tag="rust-v${version}"
fi

npm_tag=""
should_publish="false"
if [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
should_publish="true"
elif [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
should_publish="true"
npm_tag="alpha"
fi

echo "version=${version}" >> "$GITHUB_OUTPUT"
echo "release_tag=${release_tag}" >> "$GITHUB_OUTPUT"
echo "npm_tag=${npm_tag}" >> "$GITHUB_OUTPUT"
echo "should_publish=${should_publish}" >> "$GITHUB_OUTPUT"
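
The two regexes above are the whole publish gate: a bare x.y.z goes to npm's default dist-tag, an x.y.z-alpha.N prerelease goes to the "alpha" tag, and anything else (an -rc build, say) is skipped. A standalone sketch with hypothetical versions:

    for version in 1.2.3 1.2.3-alpha.4 1.2.3-rc.1; do
      if [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
        echo "$version -> publish to default tag"
      elif [[ "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
        echo "$version -> publish under --tag alpha"
      else
        echo "$version -> skip publish"
      fi
    done
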
rust-binaries:
name: Build Rust - ${{ matrix.target }}
needs: metadata
runs-on: ${{ matrix.runner }}
timeout-minutes: 30
defaults:
run:
working-directory: codex-rs
strategy:
fail-fast: false
matrix:
include:
- runner: macos-15-xlarge
target: aarch64-apple-darwin
- runner: macos-15-xlarge
target: x86_64-apple-darwin
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
install_musl: true
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
install_musl: true
steps:
- name: Checkout repository
uses: actions/checkout@v6

- uses: dtolnay/rust-toolchain@1.92
with:
targets: ${{ matrix.target }}

- if: ${{ matrix.install_musl }}
name: Install Zig
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

- if: ${{ matrix.install_musl }}
name: Install musl build dependencies
env:
TARGET: ${{ matrix.target }}
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

- name: Build exec server binaries
run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper

- name: Stage exec server binaries
run: |
dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}"
mkdir -p "$dest"
cp "target/${{ matrix.target }}/release/codex-exec-mcp-server" "$dest/"
cp "target/${{ matrix.target }}/release/codex-execve-wrapper" "$dest/"

- uses: actions/upload-artifact@v6
with:
name: shell-tool-mcp-rust-${{ matrix.target }}
path: artifacts/**
if-no-files-found: error

bash-linux:
name: Build Bash (Linux) - ${{ matrix.variant }} - ${{ matrix.target }}
needs: metadata
runs-on: ${{ matrix.runner }}
timeout-minutes: 30
container:
image: ${{ matrix.image }}
strategy:
fail-fast: false
matrix:
include:
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
variant: ubuntu-24.04
image: ubuntu:24.04
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
variant: ubuntu-22.04
image: ubuntu:22.04
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
variant: debian-12
image: debian:12
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
variant: debian-11
image: debian:11
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
variant: centos-9
image: quay.io/centos/centos:stream9
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
variant: ubuntu-24.04
image: arm64v8/ubuntu:24.04
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
variant: ubuntu-22.04
image: arm64v8/ubuntu:22.04
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
variant: ubuntu-20.04
image: arm64v8/ubuntu:20.04
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
variant: debian-12
image: arm64v8/debian:12
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
variant: debian-11
image: arm64v8/debian:11
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
variant: centos-9
image: quay.io/centos/centos:stream9
steps:
- name: Install build prerequisites
shell: bash
run: |
set -euo pipefail
if command -v apt-get >/dev/null 2>&1; then
apt-get update
DEBIAN_FRONTEND=noninteractive apt-get install -y git build-essential bison autoconf gettext
elif command -v dnf >/dev/null 2>&1; then
dnf install -y git gcc gcc-c++ make bison autoconf gettext
elif command -v yum >/dev/null 2>&1; then
yum install -y git gcc gcc-c++ make bison autoconf gettext
else
echo "Unsupported package manager in container"
exit 1
fi

- name: Checkout repository
uses: actions/checkout@v6

- name: Build patched Bash
shell: bash
run: |
set -euo pipefail
git clone --depth 1 https://github.com/bolinfest/bash /tmp/bash
cd /tmp/bash
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
./configure --without-bash-malloc
cores="$(command -v nproc >/dev/null 2>&1 && nproc || getconf _NPROCESSORS_ONLN)"
make -j"${cores}"

dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
mkdir -p "$dest"
cp bash "$dest/bash"

- uses: actions/upload-artifact@v6
with:
name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
path: artifacts/**
if-no-files-found: error

bash-darwin:
name: Build Bash (macOS) - ${{ matrix.variant }} - ${{ matrix.target }}
needs: metadata
runs-on: ${{ matrix.runner }}
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
include:
- runner: macos-15-xlarge
target: aarch64-apple-darwin
variant: macos-15
- runner: macos-14
target: aarch64-apple-darwin
variant: macos-14
steps:
- name: Checkout repository
uses: actions/checkout@v6

- name: Build patched Bash
shell: bash
run: |
set -euo pipefail
git clone --depth 1 https://github.com/bolinfest/bash /tmp/bash
cd /tmp/bash
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
./configure --without-bash-malloc
cores="$(getconf _NPROCESSORS_ONLN)"
make -j"${cores}"

dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/bash/${{ matrix.variant }}"
mkdir -p "$dest"
cp bash "$dest/bash"

- uses: actions/upload-artifact@v6
with:
name: shell-tool-mcp-bash-${{ matrix.target }}-${{ matrix.variant }}
path: artifacts/**
if-no-files-found: error

package:
name: Package npm module
needs:
- metadata
- rust-binaries
- bash-linux
- bash-darwin
runs-on: ubuntu-latest
env:
PACKAGE_VERSION: ${{ needs.metadata.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@v6

- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 10.8.1
run_install: false

- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: ${{ env.NODE_VERSION }}

- name: Install JavaScript dependencies
run: pnpm install --frozen-lockfile

- name: Build (shell-tool-mcp)
run: pnpm --filter @openai/codex-shell-tool-mcp run build

- name: Download build artifacts
uses: actions/download-artifact@v7
with:
path: artifacts

- name: Assemble staging directory
id: staging
shell: bash
run: |
set -euo pipefail
staging="${STAGING_DIR}"
mkdir -p "$staging" "$staging/vendor"
cp shell-tool-mcp/README.md "$staging/"
cp shell-tool-mcp/package.json "$staging/"
cp -R shell-tool-mcp/bin "$staging/"

found_vendor="false"
shopt -s nullglob
for vendor_dir in artifacts/*/vendor; do
rsync -av "$vendor_dir/" "$staging/vendor/"
found_vendor="true"
done
if [[ "$found_vendor" == "false" ]]; then
echo "No vendor payloads were downloaded."
exit 1
fi

node - <<'NODE'
import fs from "node:fs";
import path from "node:path";

const stagingDir = process.env.STAGING_DIR;
const version = process.env.PACKAGE_VERSION;
const pkgPath = path.join(stagingDir, "package.json");
const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
pkg.version = version;
fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n");
NODE

echo "dir=$staging" >> "$GITHUB_OUTPUT"
env:
STAGING_DIR: ${{ runner.temp }}/shell-tool-mcp

- name: Ensure binaries are executable
run: |
set -euo pipefail
staging="${{ steps.staging.outputs.dir }}"
chmod +x \
"$staging"/vendor/*/codex-exec-mcp-server \
"$staging"/vendor/*/codex-execve-wrapper \
"$staging"/vendor/*/bash/*/bash

- name: Create npm tarball
shell: bash
run: |
set -euo pipefail
mkdir -p dist/npm
staging="${{ steps.staging.outputs.dir }}"
pack_info=$(cd "$staging" && npm pack --ignore-scripts --json --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
filename=$(PACK_INFO="$pack_info" node -e 'const data = JSON.parse(process.env.PACK_INFO); console.log(data[0].filename);')
mv "dist/npm/${filename}" "dist/npm/codex-shell-tool-mcp-npm-${PACKAGE_VERSION}.tgz"
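
Reading the tarball name back from `npm pack --json` (an array with one entry per packed package) avoids guessing how npm normalized the scoped package name. A sketch of the same parse, run against a hypothetical package directory:

    pack_info=$(npm pack --ignore-scripts --json --pack-destination /tmp)
    filename=$(PACK_INFO="$pack_info" node -e 'const data = JSON.parse(process.env.PACK_INFO); console.log(data[0].filename);')
    echo "npm wrote /tmp/${filename}"
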
- uses: actions/upload-artifact@v6
with:
name: codex-shell-tool-mcp-npm
path: dist/npm/codex-shell-tool-mcp-npm-${{ env.PACKAGE_VERSION }}.tgz
if-no-files-found: error

publish:
name: Publish npm package
needs:
- metadata
- package
if: ${{ inputs.publish && needs.metadata.outputs.should_publish == 'true' }}
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
steps:
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 10.8.1
run_install: false

- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: ${{ env.NODE_VERSION }}
registry-url: https://registry.npmjs.org
scope: "@openai"

- name: Update npm
run: npm install -g npm@latest

- name: Download npm tarball
uses: actions/download-artifact@v7
with:
name: codex-shell-tool-mcp-npm
path: dist/npm

- name: Publish to npm
env:
NPM_TAG: ${{ needs.metadata.outputs.npm_tag }}
VERSION: ${{ needs.metadata.outputs.version }}
shell: bash
run: |
set -euo pipefail
tag_args=()
if [[ -n "${NPM_TAG}" ]]; then
tag_args+=(--tag "${NPM_TAG}")
fi
npm publish "dist/npm/codex-shell-tool-mcp-npm-${VERSION}.tgz" "${tag_args[@]}"

9 .gitignore vendored
@@ -9,7 +9,6 @@ node_modules

# build
dist/
bazel-*
build/
out/
storybook-static/
@@ -65,9 +64,6 @@ apply_patch/
# coverage
coverage/

# personal files
personal/

# os
.DS_Store
Thumbs.db
@@ -86,8 +82,3 @@ CHANGELOG.ignore.md
# nix related
.direnv
.envrc

# Python bytecode files
__pycache__/
*.pyc

@@ -1,6 +0,0 @@
config:
MD013:
line_length: 100

globs:
- "docs/tui-chat-composer.md"

19 AGENTS.md
@@ -11,17 +11,15 @@ In the codex-rs folder where the rust code lives:
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
- When possible, make `match` statements exhaustive and avoid wildcard arms.
- Do not use unsigned integer even if the number cannot be negative.
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.

Run `just fmt` (in `codex-rs` directory) automatically after you have finished making Rust code changes; do not ask for approval to run it. Additionally, run the tests:
Run `just fmt` (in `codex-rs` directory) automatically after making Rust code changes; do not ask for approval to run it. Before finalizing a change to `codex-rs`, run `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates. Additionally, run the tests:

1. Run the test for the specific project that was changed. For example, if changes were made in `codex-rs/tui`, run `cargo test -p codex-tui`.
2. Once those pass, if any changes were made in common, core, or protocol, run the complete test suite with `cargo test --all-features`. project-specific or individual tests can be run without asking the user, but do ask the user before running the complete test suite.

Before finalizing a large change to `codex-rs`, run `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates.
2. Once those pass, if any changes were made in common, core, or protocol, run the complete test suite with `cargo test --all-features`.
When running interactively, ask the user before running `just fix` to finalize. `just fmt` does not require approval. project-specific or individual tests can be run without asking the user, but do ask the user before running the complete test suite.

## TUI style conventions

@@ -77,14 +75,6 @@ If you don’t have the tool:
### Test assertions

- Tests should use pretty_assertions::assert_eq for clearer diffs. Import this at the top of the test module if it isn't already.
- Prefer deep equals comparisons whenever possible. Perform `assert_eq!()` on entire objects, rather than individual fields.
- Avoid mutating process environment in tests; prefer passing environment-derived flags or dependencies from above.

### Spawning workspace binaries in tests (Cargo vs Bazel)

- Prefer `codex_utils_cargo_bin::cargo_bin("...")` over `assert_cmd::Command::cargo_bin(...)` or `escargot` when tests need to spawn first-party binaries.
- Under Bazel, binaries and resources may live under runfiles; use `codex_utils_cargo_bin::cargo_bin` to resolve absolute paths that remain stable after `chdir`.
- When locating fixture files or test resources under Bazel, avoid `env!("CARGO_MANIFEST_DIR")`. Prefer `codex_utils_cargo_bin::find_resource!` so paths resolve correctly under both Cargo and Bazel runfiles.

### Integration tests (core)

@@ -95,7 +85,6 @@ If you don’t have the tool:
- `ResponsesRequest` exposes helpers (`body_json`, `input`, `function_call_output`, `custom_tool_call_output`, `call_output`, `header`, `path`, `query_param`) so assertions can target structured payloads instead of manual JSON digging.
- Build SSE payloads with the provided `ev_*` constructors and the `sse(...)`.
- Prefer `wait_for_event` over `wait_for_event_with_timeout`.
- Prefer `mount_sse_once` over `mount_sse_once_match` or `mount_sse_sequence`

- Typical pattern:

19 BUILD.bazel
@@ -1,19 +0,0 @@
# We mark the local platform as glibc-compatible so that rust can grab a toolchain for us.
# TODO(zbarsky): Upstream a better libc constraint into rules_rust.
# We only enable this on linux though for sanity, and because it breaks remote execution.
platform(
name = "local",
constraint_values = [
"@toolchains_llvm_bootstrapped//constraints/libc:gnu.2.28",
],
parents = [
"@platforms//host",
],
)

alias(
name = "rbe",
actual = "@rbe_platform",
)

exports_files(["AGENTS.md"])

124 MODULE.bazel
@@ -1,124 +0,0 @@
bazel_dep(name = "platforms", version = "1.0.0")
bazel_dep(name = "toolchains_llvm_bootstrapped", version = "0.3.1")
archive_override(
module_name = "toolchains_llvm_bootstrapped",
integrity = "sha256-4/2h4tYSUSptxFVI9G50yJxWGOwHSeTeOGBlaLQBV8g=",
strip_prefix = "toolchains_llvm_bootstrapped-d20baf67e04d8e2887e3779022890d1dc5e6b948",
urls = ["https://github.com/cerisier/toolchains_llvm_bootstrapped/archive/d20baf67e04d8e2887e3779022890d1dc5e6b948.tar.gz"],
)

osx = use_extension("@toolchains_llvm_bootstrapped//toolchain/extension:osx.bzl", "osx")
osx.framework(name = "ApplicationServices")
osx.framework(name = "AppKit")
osx.framework(name = "ColorSync")
osx.framework(name = "CoreFoundation")
osx.framework(name = "CoreGraphics")
osx.framework(name = "CoreServices")
osx.framework(name = "CoreText")
osx.framework(name = "CFNetwork")
osx.framework(name = "Foundation")
osx.framework(name = "ImageIO")
osx.framework(name = "Kernel")
osx.framework(name = "OSLog")
osx.framework(name = "Security")
osx.framework(name = "SystemConfiguration")

register_toolchains(
"@toolchains_llvm_bootstrapped//toolchain:all",
)

bazel_dep(name = "rules_cc", version = "0.2.16")
bazel_dep(name = "rules_platform", version = "0.1.0")
bazel_dep(name = "rules_rust", version = "0.68.1")
single_version_override(
module_name = "rules_rust",
patch_strip = 1,
patches = [
"//patches:rules_rust.patch",
"//patches:rules_rust_windows_gnu.patch",
"//patches:rules_rust_musl.patch",
],
)

RUST_TRIPLES = [
"aarch64-unknown-linux-musl",
"aarch64-apple-darwin",
"aarch64-pc-windows-gnullvm",
"x86_64-unknown-linux-musl",
"x86_64-apple-darwin",
"x86_64-pc-windows-gnullvm",
]

rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(
edition = "2024",
extra_target_triples = RUST_TRIPLES,
versions = ["1.90.0"],
)
use_repo(rust, "rust_toolchains")

register_toolchains("@rust_toolchains//:all")

bazel_dep(name = "rules_rs", version = "0.0.23")

crate = use_extension("@rules_rs//rs:extensions.bzl", "crate")
crate.from_cargo(
cargo_lock = "//codex-rs:Cargo.lock",
cargo_toml = "//codex-rs:Cargo.toml",
platform_triples = RUST_TRIPLES,
)

bazel_dep(name = "openssl", version = "3.5.4.bcr.0")

crate.annotation(
build_script_data = [
"@openssl//:gen_dir",
],
build_script_env = {
"OPENSSL_DIR": "$(execpath @openssl//:gen_dir)",
"OPENSSL_NO_VENDOR": "1",
"OPENSSL_STATIC": "1",
},
crate = "openssl-sys",
data = ["@openssl//:gen_dir"],
)

inject_repo(crate, "openssl")

# Fix readme inclusions
crate.annotation(
crate = "windows-link",
patch_args = ["-p1"],
patches = [
"//patches:windows-link.patch",
],
)

WINDOWS_IMPORT_LIB = """
load("@rules_cc//cc:defs.bzl", "cc_import")

cc_import(
name = "windows_import_lib",
static_library = glob(["lib/*.a"])[0],
)
"""

crate.annotation(
additive_build_file_content = WINDOWS_IMPORT_LIB,
crate = "windows_x86_64_gnullvm",
gen_build_script = "off",
deps = [":windows_import_lib"],
)
crate.annotation(
additive_build_file_content = WINDOWS_IMPORT_LIB,
crate = "windows_aarch64_gnullvm",
gen_build_script = "off",
deps = [":windows_import_lib"],
)
use_repo(crate, "crates")

rbe_platform_repository = use_repo_rule("//:rbe.bzl", "rbe_platform_repository")

rbe_platform_repository(
name = "rbe_platform",
)
1315
MODULE.bazel.lock
generated
1315
MODULE.bazel.lock
generated
File diff suppressed because one or more lines are too long
6 PNPM.md
@@ -15,7 +15,7 @@ This project has been migrated from npm to pnpm to improve dependency management

```bash
# Global installation of pnpm
npm install -g pnpm@10.28.2
npm install -g pnpm@10.8.1

# Or with corepack (available with Node.js 22+)
corepack enable
@@ -59,12 +59,12 @@ codex/

## CI/CD

CI/CD workflows have been updated to use pnpm instead of npm. Make sure your CI environments use pnpm 10.28.2 or higher.
CI/CD workflows have been updated to use pnpm instead of npm. Make sure your CI environments use pnpm 10.8.1 or higher.

## Known issues

If you encounter issues with pnpm, try the following solutions:

1. Remove the `node_modules` folder and `pnpm-lock.yaml` file, then run `pnpm install`
2. Make sure you're using pnpm 10.28.2 or higher
2. Make sure you're using pnpm 10.8.1 or higher
3. Verify that Node.js 22 or higher is installed
74 README.md
@@ -1,11 +1,13 @@

<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install --cask codex</code></p>

<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.
</br>
</br>If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE</a>
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a></p>

<p align="center">
  <img src="./.github/codex-cli-splash.png" alt="Codex CLI splash" width="80%" />
</p>
</br>
If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE</a>.
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a>.</p>
</p>

---

@@ -13,19 +15,25 @@ If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="http

### Installing and running Codex CLI

Install globally with your preferred package manager:
Install globally with your preferred package manager. If you use npm:

```shell
# Install using npm
npm install -g @openai/codex
```

Alternatively, if you use Homebrew:

```shell
# Install using Homebrew
brew install --cask codex
```

Then simply run `codex` to get started.
Then simply run `codex` to get started:

```shell
codex
```

If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-upgrade-codex-isnt-upgrading-me).

<details>
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>
@@ -45,15 +53,57 @@ Each archive contains a single entry with the platform baked into the name (e.g.

### Using Codex with your ChatGPT plan

<p align="center">
  <img src="./.github/codex-cli-login.png" alt="Codex CLI login" width="80%" />
</p>

Run `codex` and select **Sign in with ChatGPT**. We recommend signing into your ChatGPT account to use Codex as part of your Plus, Pro, Team, Edu, or Enterprise plan. [Learn more about what's included in your ChatGPT plan](https://help.openai.com/en/articles/11369540-codex-in-chatgpt).

You can also use Codex with an API key, but this requires [additional setup](https://developers.openai.com/codex/auth#sign-in-with-an-api-key).
You can also use Codex with an API key, but this requires [additional setup](./docs/authentication.md#usage-based-billing-alternative-use-an-openai-api-key). If you previously used an API key for usage-based billing, see the [migration steps](./docs/authentication.md#migrating-from-usage-based-billing-api-key). If you're having trouble with login, please comment on [this issue](https://github.com/openai/codex/issues/1243).

## Docs
### Model Context Protocol (MCP)

- [**Codex Documentation**](https://developers.openai.com/codex)
Codex can access MCP servers. To configure them, refer to the [config docs](./docs/config.md#mcp_servers).

### Configuration

Codex CLI supports a rich set of configuration options, with preferences stored in `~/.codex/config.toml`. For full configuration options, see [Configuration](./docs/config.md).

---

### Docs & FAQ

- [**Getting started**](./docs/getting-started.md)
  - [CLI usage](./docs/getting-started.md#cli-usage)
  - [Slash Commands](./docs/slash_commands.md)
  - [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
  - [Example prompts](./docs/getting-started.md#example-prompts)
  - [Custom prompts](./docs/prompts.md)
  - [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
- [**Configuration**](./docs/config.md)
  - [Example config](./docs/example-config.md)
- [**Sandbox & approvals**](./docs/sandbox.md)
- [**Authentication**](./docs/authentication.md)
  - [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
  - [Login on a "Headless" machine](./docs/authentication.md#connecting-on-a-headless-machine)
- **Automating Codex**
  - [GitHub Action](https://github.com/openai/codex-action)
  - [TypeScript SDK](./sdk/typescript/README.md)
  - [Non-interactive mode (`codex exec`)](./docs/exec.md)
- [**Advanced**](./docs/advanced.md)
  - [Tracing / verbose logging](./docs/advanced.md#tracing--verbose-logging)
  - [Model Context Protocol (MCP)](./docs/advanced.md#model-context-protocol-mcp)
- [**Zero data retention (ZDR)**](./docs/zdr.md)
- [**Contributing**](./docs/contributing.md)
- [**Installing & building**](./docs/install.md)
- [**Install & build**](./docs/install.md)
  - [System Requirements](./docs/install.md#system-requirements)
  - [DotSlash](./docs/install.md#dotslash)
  - [Build from source](./docs/install.md#build-from-source)
- [**FAQ**](./docs/faq.md)
- [**Open source fund**](./docs/open-source-fund.md)

---

## License

This repository is licensed under the [Apache-2.0 License](LICENSE).
@@ -1,17 +0,0 @@
# Example announcement tips for Codex TUI.
# Each [[announcements]] entry is evaluated in order; the last matching one is shown.
# Dates are UTC, formatted as YYYY-MM-DD. The from_date is inclusive and the to_date is exclusive.
# version_regex matches against the CLI version (env!("CARGO_PKG_VERSION")); omit it to apply to all versions.
# target_app specifies which app should display the announcement (cli, vsce, ...).

[[announcements]]
content = "Welcome to Codex! Check out the new onboarding flow."
from_date = "2024-10-01"
to_date = "2024-10-15"
target_app = "cli"

# Test announcement, only for the local build version, until 2026-01-10 exclusive (now past)
[[announcements]]
content = "This is a test announcement"
version_regex = "^0\\.0\\.0$"
to_date = "2026-01-10"
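The comments above fully determine the selection rule (entries evaluated in order, last match wins, `from_date` inclusive, `to_date` exclusive). A minimal sketch of that rule with hypothetical types, ignoring `version_regex` and `target_app` for brevity:

```rust
use chrono::NaiveDate;

struct Announcement {
    content: String,
    from_date: Option<NaiveDate>, // inclusive
    to_date: Option<NaiveDate>,   // exclusive
}

fn pick(entries: &[Announcement], today: NaiveDate) -> Option<&Announcement> {
    entries
        .iter()
        .filter(|a| a.from_date.is_none_or(|d| today >= d))
        .filter(|a| a.to_date.is_none_or(|d| today < d))
        .last() // entries are evaluated in order; the last match is shown
}
```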
5 codex-cli/bin/codex.js (Executable file → Normal file)
@@ -96,8 +96,9 @@ function detectPackageManager() {
}

if (
  __dirname.includes(".bun/install/global") ||
  __dirname.includes(".bun\\install\\global")
  process.env.BUN_INSTALL ||
  process.env.BUN_INSTALL_GLOBAL_DIR ||
  process.env.BUN_INSTALL_BIN_DIR
) {
  return "bun";
}
@@ -20,14 +20,9 @@ PACKAGE_NATIVE_COMPONENTS: dict[str, list[str]] = {
    "codex-responses-api-proxy": ["codex-responses-api-proxy"],
    "codex-sdk": ["codex"],
}
WINDOWS_ONLY_COMPONENTS: dict[str, list[str]] = {
    "codex": ["codex-windows-sandbox-setup", "codex-command-runner"],
}
COMPONENT_DEST_DIR: dict[str, str] = {
    "codex": "codex",
    "codex-responses-api-proxy": "codex-responses-api-proxy",
    "codex-windows-sandbox-setup": "codex",
    "codex-command-runner": "codex",
    "rg": "path",
}

@@ -108,7 +103,7 @@ def main() -> int:
            "pointing to a directory containing pre-installed binaries."
        )

    copy_native_binaries(vendor_src, staging_dir, package, native_components)
    copy_native_binaries(vendor_src, staging_dir, native_components)

    if release_version:
        staging_dir_str = str(staging_dir)
@@ -237,12 +232,7 @@ def stage_codex_sdk_sources(staging_dir: Path) -> None:
    shutil.copy2(license_src, staging_dir / "LICENSE")


def copy_native_binaries(
    vendor_src: Path,
    staging_dir: Path,
    package: str,
    components: list[str],
) -> None:
def copy_native_binaries(vendor_src: Path, staging_dir: Path, components: list[str]) -> None:
    vendor_src = vendor_src.resolve()
    if not vendor_src.exists():
        raise RuntimeError(f"Vendor source directory not found: {vendor_src}")
@@ -260,9 +250,6 @@ def copy_native_binaries(
        if not target_dir.is_dir():
            continue

        if "windows" in target_dir.name:
            components_set.update(WINDOWS_ONLY_COMPONENTS.get(package, []))

        dest_target_dir = vendor_dest / target_dir.name
        dest_target_dir.mkdir(parents=True, exist_ok=True)
@@ -2,7 +2,6 @@
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""

import argparse
from contextlib import contextmanager
import json
import os
import shutil
@@ -13,7 +12,6 @@ import zipfile
from dataclasses import dataclass
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
import sys
from typing import Iterable, Sequence
from urllib.parse import urlparse
from urllib.request import urlopen
@@ -38,11 +36,8 @@ class BinaryComponent:
    artifact_prefix: str  # matches the artifact filename prefix (e.g. codex-<target>.zst)
    dest_dir: str  # directory under vendor/<target>/ where the binary is installed
    binary_basename: str  # executable name inside dest_dir (before optional .exe)
    targets: tuple[str, ...] | None = None  # limit installation to specific targets


WINDOWS_TARGETS = tuple(target for target in BINARY_TARGETS if "windows" in target)

BINARY_COMPONENTS = {
    "codex": BinaryComponent(
        artifact_prefix="codex",
@@ -54,18 +49,6 @@ BINARY_COMPONENTS = {
        dest_dir="codex-responses-api-proxy",
        binary_basename="codex-responses-api-proxy",
    ),
    "codex-windows-sandbox-setup": BinaryComponent(
        artifact_prefix="codex-windows-sandbox-setup",
        dest_dir="codex",
        binary_basename="codex-windows-sandbox-setup",
        targets=WINDOWS_TARGETS,
    ),
    "codex-command-runner": BinaryComponent(
        artifact_prefix="codex-command-runner",
        dest_dir="codex",
        binary_basename="codex-command-runner",
        targets=WINDOWS_TARGETS,
    ),
}

RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
@@ -79,45 +62,6 @@ RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]

# urllib.request.urlopen() defaults to no timeout (can hang indefinitely), which is painful in CI.
DOWNLOAD_TIMEOUT_SECS = 60


def _gha_enabled() -> bool:
    # GitHub Actions supports "workflow commands" (e.g. ::group:: / ::error::) that make logs
    # much easier to scan: groups collapse noisy sections and error annotations surface the
    # failure in the UI without changing the actual exception/traceback output.
    return os.environ.get("GITHUB_ACTIONS") == "true"


def _gha_escape(value: str) -> str:
    # Workflow commands require percent/newline escaping.
    return value.replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A")


def _gha_error(*, title: str, message: str) -> None:
    # Emit a GitHub Actions error annotation. This does not replace stdout/stderr logs; it just
    # adds a prominent summary line to the job UI so the root cause is easier to spot.
    if not _gha_enabled():
        return
    print(
        f"::error title={_gha_escape(title)}::{_gha_escape(message)}",
        flush=True,
    )


@contextmanager
def _gha_group(title: str):
    # Wrap a block in a collapsible log group on GitHub Actions. Outside of GHA this is a no-op
    # so local output remains unchanged.
    if _gha_enabled():
        print(f"::group::{_gha_escape(title)}", flush=True)
    try:
        yield
    finally:
        if _gha_enabled():
            print("::endgroup::", flush=True)


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Install native Codex binaries.")
@@ -135,8 +79,7 @@ def parse_args() -> argparse.Namespace:
        choices=tuple(list(BINARY_COMPONENTS) + ["rg"]),
        help=(
            "Limit installation to the specified components."
            " May be repeated. Defaults to codex, codex-windows-sandbox-setup,"
            " codex-command-runner, and rg."
            " May be repeated. Defaults to 'codex' and 'rg'."
        ),
    )
    parser.add_argument(
@@ -158,12 +101,7 @@ def main() -> int:
    vendor_dir = codex_cli_root / VENDOR_DIR_NAME
    vendor_dir.mkdir(parents=True, exist_ok=True)

    components = args.components or [
        "codex",
        "codex-windows-sandbox-setup",
        "codex-command-runner",
        "rg",
    ]
    components = args.components or ["codex", "rg"]

    workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
    if not workflow_url:
@@ -172,20 +110,19 @@ def main() -> int:
    workflow_id = workflow_url.rstrip("/").split("/")[-1]
    print(f"Downloading native artifacts from workflow {workflow_id}...")

    with _gha_group(f"Download native artifacts from workflow {workflow_id}"):
        with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
            artifacts_dir = Path(artifacts_dir_str)
            _download_artifacts(workflow_id, artifacts_dir)
            install_binary_components(
                artifacts_dir,
                vendor_dir,
                [BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS],
            )
    with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
        artifacts_dir = Path(artifacts_dir_str)
        _download_artifacts(workflow_id, artifacts_dir)
        install_binary_components(
            artifacts_dir,
            vendor_dir,
            BINARY_TARGETS,
            [name for name in components if name in BINARY_COMPONENTS],
        )

    if "rg" in components:
        with _gha_group("Fetch ripgrep binaries"):
            print("Fetching ripgrep binaries...")
            fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
        print("Fetching ripgrep binaries...")
        fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)

    print(f"Installed native dependencies into {vendor_dir}")
    return 0
@@ -246,14 +183,7 @@ def fetch_rg(

    for future in as_completed(future_map):
        target = future_map[future]
        try:
            results[target] = future.result()
        except Exception as exc:
            _gha_error(
                title="ripgrep install failed",
                message=f"target={target} error={exc!r}",
            )
            raise RuntimeError(f"Failed to install ripgrep for target {target}.") from exc
        results[target] = future.result()
        print(f"  installed ripgrep for {target}")

    return [results[target] for target in targets]
@@ -276,19 +206,23 @@ def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
def install_binary_components(
    artifacts_dir: Path,
    vendor_dir: Path,
    selected_components: Sequence[BinaryComponent],
    targets: Iterable[str],
    component_names: Sequence[str],
) -> None:
    selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS]
    if not selected_components:
        return

    for component in selected_components:
        component_targets = list(component.targets or BINARY_TARGETS)
    targets = list(targets)
    if not targets:
        return

    for component in selected_components:
        print(
            f"Installing {component.binary_basename} binaries for targets: "
            + ", ".join(component_targets)
            + ", ".join(targets)
        )
        max_workers = min(len(component_targets), max(1, (os.cpu_count() or 1)))
        max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = {
                executor.submit(
@@ -298,7 +232,7 @@ def install_binary_components(
                    target,
                    component,
                ): target
                for target in component_targets
                for target in targets
            }
            for future in as_completed(futures):
                installed_path = future.result()
@@ -351,8 +285,6 @@ def _fetch_single_rg(
    url = providers[0]["url"]
    archive_format = platform_info.get("format", "zst")
    archive_member = platform_info.get("path")
    digest = platform_info.get("digest")
    expected_size = platform_info.get("size")

    dest_dir = vendor_dir / target / "path"
    dest_dir.mkdir(parents=True, exist_ok=True)
@@ -365,32 +297,10 @@ def _fetch_single_rg(
        tmp_dir = Path(tmp_dir_str)
        archive_filename = os.path.basename(urlparse(url).path)
        download_path = tmp_dir / archive_filename
        print(
            f"  downloading ripgrep for {target} ({platform_key}) from {url}",
            flush=True,
        )
        try:
            _download_file(url, download_path)
        except Exception as exc:
            _gha_error(
                title="ripgrep download failed",
                message=f"target={target} platform={platform_key} url={url} error={exc!r}",
            )
            raise RuntimeError(
                "Failed to download ripgrep "
                f"(target={target}, platform={platform_key}, format={archive_format}, "
                f"expected_size={expected_size!r}, digest={digest!r}, url={url}, dest={download_path})."
            ) from exc
        _download_file(url, download_path)

        dest.unlink(missing_ok=True)
        try:
            extract_archive(download_path, archive_format, archive_member, dest)
        except Exception as exc:
            raise RuntimeError(
                "Failed to extract ripgrep "
                f"(target={target}, platform={platform_key}, format={archive_format}, "
                f"member={archive_member!r}, url={url}, archive={download_path})."
            ) from exc
        extract_archive(download_path, archive_format, archive_member, dest)

        if not is_windows:
            dest.chmod(0o755)
@@ -400,9 +310,7 @@ def _fetch_single_rg(

def _download_file(url: str, dest: Path) -> None:
    dest.parent.mkdir(parents=True, exist_ok=True)
    dest.unlink(missing_ok=True)

    with urlopen(url, timeout=DOWNLOAD_TIMEOUT_SECS) as response, open(dest, "wb") as out:
    with urlopen(url) as response, open(dest, "wb") as out:
        shutil.copyfileobj(response, out)
@@ -1,6 +0,0 @@
[advisories]
ignore = [
    "RUSTSEC-2024-0388", # derivative 2.2.0 via starlark; upstream crate is unmaintained
    "RUSTSEC-2025-0057", # fxhash 0.2.1 via starlark_map; upstream crate is unmaintained
    "RUSTSEC-2024-0436", # paste 1.0.15 via starlark/ratatui; upstream crate is unmaintained
]
@@ -1,13 +0,0 @@
[profile.default]
# Do not increase, fix your test instead
slow-timeout = { period = "15s", terminate-after = 2 }

[[profile.default.overrides]]
# Do not add new tests here
filter = 'test(rmcp_client) | test(humanlike_typing_1000_chars_appears_live_no_placeholder)'
slow-timeout = { period = "1m", terminate-after = 4 }

[[profile.default.overrides]]
filter = 'test(approval_matrix_covers_all_modes)'
slow-timeout = { period = "30s", terminate-after = 2 }
26 codex-rs/.github/workflows/cargo-audit.yml (vendored, deleted)
@@ -1,26 +0,0 @@
name: Cargo audit

on:
  pull_request:
  push:
    branches:
      - main

permissions:
  contents: read

jobs:
  audit:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: codex-rs
    steps:
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@stable
      - name: Install cargo-audit
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-audit
      - name: Run cargo audit
        run: cargo audit --deny warnings
@@ -1 +0,0 @@
2902 codex-rs/Cargo.lock (generated)
File diff suppressed because it is too large
@@ -5,8 +5,6 @@ members = [
    "async-utils",
    "app-server",
    "app-server-protocol",
    "app-server-test-client",
    "debug-client",
    "apply-patch",
    "arg0",
    "feedback",
@@ -17,17 +15,13 @@ members = [
    "common",
    "core",
    "exec",
    "exec-server",
    "execpolicy",
    "execpolicy-legacy",
    "keyring-store",
    "file-search",
    "linux-sandbox",
    "lmstudio",
    "login",
    "mcp-server",
    "mcp-types",
    "network-proxy",
    "ollama",
    "process-hardening",
    "protocol",
@@ -36,8 +30,6 @@ members = [
    "stdio-to-uds",
    "otel",
    "tui",
    "utils/absolute-path",
    "utils/cargo-bin",
    "utils/git",
    "utils/cache",
    "utils/image",
@@ -45,9 +37,7 @@ members = [
    "utils/pty",
    "utils/readiness",
    "utils/string",
    "codex-client",
    "codex-api",
    "state",
    "utils/tokenizer",
]
resolver = "2"

@@ -58,13 +48,11 @@ version = "0.0.0"
# crates created with `cargo new -w ...` automatically inherit the 2024
# edition.
edition = "2024"
license = "Apache-2.0"

[workspace.dependencies]
# Internal
app_test_support = { path = "app-server/tests/common" }
codex-ansi-escape = { path = "ansi-escape" }
codex-api = { path = "codex-api" }
codex-app-server = { path = "app-server" }
codex-app-server-protocol = { path = "app-server-protocol" }
codex-apply-patch = { path = "apply-patch" }
@@ -72,18 +60,14 @@ codex-arg0 = { path = "arg0" }
codex-async-utils = { path = "async-utils" }
codex-backend-client = { path = "backend-client" }
codex-chatgpt = { path = "chatgpt" }
codex-cli = { path = "cli" }
codex-client = { path = "codex-client" }
codex-common = { path = "common" }
codex-core = { path = "core" }
codex-exec = { path = "exec" }
codex-execpolicy = { path = "execpolicy" }
codex-feedback = { path = "feedback" }
codex-file-search = { path = "file-search" }
codex-git = { path = "utils/git" }
codex-keyring-store = { path = "keyring-store" }
codex-linux-sandbox = { path = "linux-sandbox" }
codex-lmstudio = { path = "lmstudio" }
codex-login = { path = "login" }
codex-mcp-server = { path = "mcp-server" }
codex-ollama = { path = "ollama" }
@@ -92,20 +76,17 @@ codex-process-hardening = { path = "process-hardening" }
codex-protocol = { path = "protocol" }
codex-responses-api-proxy = { path = "responses-api-proxy" }
codex-rmcp-client = { path = "rmcp-client" }
codex-state = { path = "state" }
codex-stdio-to-uds = { path = "stdio-to-uds" }
codex-tui = { path = "tui" }
codex-utils-absolute-path = { path = "utils/absolute-path" }
codex-utils-cache = { path = "utils/cache" }
codex-utils-cargo-bin = { path = "utils/cargo-bin" }
codex-utils-image = { path = "utils/image" }
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
codex-utils-pty = { path = "utils/pty" }
codex-utils-readiness = { path = "utils/readiness" }
codex-utils-string = { path = "utils/string" }
codex-utils-tokenizer = { path = "utils/tokenizer" }
codex-windows-sandbox = { path = "windows-sandbox-rs" }
core_test_support = { path = "core/tests/common" }
exec_server_test_support = { path = "exec-server/tests/common" }
mcp-types = { path = "mcp-types" }
mcp_test_support = { path = "mcp-server/tests/common" }

@@ -114,6 +95,7 @@ allocative = "0.3.3"
ansi-to-tui = "7.0.0"
anyhow = "1"
arboard = { version = "3", features = ["wayland-data-control"] }
askama = "0.14"
assert_cmd = "2"
assert_matches = "1.5.0"
async-channel = "2.3.1"
@@ -122,56 +104,51 @@ async-trait = "0.1.89"
axum = { version = "0.8", default-features = false }
base64 = "0.22.1"
bytes = "1.10.1"
chardetng = "0.1.17"
chrono = "0.4.43"
chrono = "0.4.42"
clap = "4"
clap_complete = "4"
color-eyre = "0.6.3"
crossterm = "0.28.1"
ctor = "0.6.3"
ctor = "0.5.0"
derive_more = "2"
diffy = "0.4.2"
dirs = "6"
dotenvy = "0.15.7"
dunce = "1.0.4"
encoding_rs = "0.8.35"
env-flags = "0.1.1"
env_logger = "0.11.5"
escargot = "0.5"
eventsource-stream = "0.2.3"
futures = { version = "0.3", default-features = false }
globset = "0.4"
http = "1.3.1"
icu_decimal = "2.1"
icu_locale_core = "2.1"
icu_provider = { version = "2.1", features = ["sync"] }
icu_locale_core = "2.1"
ignore = "0.4.23"
indoc = "2.0"
image = { version = "^0.25.9", default-features = false }
include_dir = "0.7.4"
image = { version = "^0.25.8", default-features = false }
indexmap = "2.12.0"
insta = "1.46.0"
insta = "1.43.2"
itertools = "0.14.0"
keyring = { version = "3.6", default-features = false }
landlock = "0.4.4"
keyring = "3.6"
landlock = "0.4.1"
lazy_static = "1"
libc = "0.2.177"
libc = "0.2.175"
log = "0.4"
lru = "0.16.3"
lru = "0.12.5"
maplit = "1.0.2"
mime_guess = "2.0.5"
multimap = "0.10.0"
notify = "8.2.0"
nucleo-matcher = "0.3.1"
once_cell = "1.20.2"
openssl-sys = "*"
opentelemetry = "0.31.0"
opentelemetry-appender-tracing = "0.31.0"
opentelemetry-otlp = "0.31.0"
opentelemetry-semantic-conventions = "0.31.0"
opentelemetry_sdk = "0.31.0"
tracing-opentelemetry = "0.32.0"
opentelemetry = "0.30.0"
opentelemetry-appender-tracing = "0.30.0"
opentelemetry-otlp = "0.30.0"
opentelemetry-semantic-conventions = "0.30.0"
opentelemetry_sdk = "0.30.0"
os_info = "3.12.0"
owo-colors = "4.2.0"
paste = "1.0.15"
path-absolutize = "3.1.1"
pathdiff = "0.2"
portable-pty = "0.9.0"
@@ -181,51 +158,44 @@ pulldown-cmark = "0.10"
rand = "0.9"
ratatui = "0.29.0"
ratatui-macros = "0.6.0"
regex = "1.12.2"
regex-lite = "0.1.8"
regex-lite = "0.1.7"
reqwest = "0.12"
rmcp = { version = "0.12.0", default-features = false }
rmcp = { version = "0.8.5", default-features = false }
schemars = "0.8.22"
seccompiler = "0.5.0"
sentry = "0.46.0"
sentry = "0.34.0"
serde = "1"
serde_json = "1"
serde_path_to_error = "0.1.20"
serde_with = "3.16"
serde_yaml = "0.9"
serde_with = "3.14"
serial_test = "3.2.0"
sha1 = "0.10.6"
sha2 = "0.10"
semver = "1.0"
shlex = "1.3.0"
similar = "2.7.0"
socket2 = "0.6.1"
sqlx = { version = "0.8.6", default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio-rustls", "sqlite", "time", "uuid"] }
starlark = "0.13.0"
strum = "0.27.2"
strum_macros = "0.27.2"
supports-color = "3.0.2"
sys-locale = "0.3.2"
tempfile = "3.23.0"
test-log = "0.2.19"
test-log = "0.2.18"
textwrap = "0.16.2"
thiserror = "2.0.17"
time = "0.3"
tiny_http = "0.12"
tokio = "1"
tokio-stream = "0.1.18"
tokio-stream = "0.1.17"
tokio-test = "0.4"
tokio-tungstenite = { version = "0.28.0", features = ["proxy", "rustls-tls-native-roots"] }
tokio-util = "0.7.18"
tokio-util = "0.7.16"
toml = "0.9.5"
toml_edit = "0.24.0"
tracing = "0.1.44"
toml_edit = "0.23.4"
tonic = "0.13.1"
tracing = "0.1.41"
tracing-appender = "0.2.3"
tracing-subscriber = "0.3.22"
tracing-subscriber = "0.3.20"
tracing-test = "0.2.5"
tree-sitter = "0.25.10"
tree-sitter-bash = "0.25"
zstd = "0.13"
tree-sitter-highlight = "0.25.10"
ts-rs = "11"
uds_windows = "1.1.0"
@@ -237,8 +207,8 @@ uuid = "1"
vt100 = "0.16.2"
walkdir = "2.5.0"
webbrowser = "1.0"
which = "8"
wildmatch = "2.6.1"
which = "6"
wildmatch = "2.5.0"

wiremock = "0.6"
zeroize = "1.8.2"
@@ -284,7 +254,12 @@ unwrap_used = "deny"
# cargo-shear cannot see the platform-specific openssl-sys usage, so we
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
ignored = ["icu_provider", "openssl-sys", "codex-utils-readiness"]
ignored = [
    "icu_provider",
    "openssl-sys",
    "codex-utils-readiness",
    "codex-utils-tokenizer",
]

[profile.release]
lto = "fat"
@@ -303,12 +278,8 @@ opt-level = 0
[patch.crates-io]
# Uncomment to debug local changes.
# ratatui = { path = "../../ratatui" }
crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }
tokio-tungstenite = { git = "https://github.com/JakkuSakura/tokio-tungstenite", rev = "2ae536b0de793f3ddf31fc2f22d445bf1ef2023d" }
crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }

# Uncomment to debug local changes.
# rmcp = { path = "../../rust-sdk/crates/rmcp" }

[patch."ssh://git@github.com/JakkuSakura/tungstenite-rs.git"]
tungstenite = { git = "https://github.com/JakkuSakura/tungstenite-rs", rev = "f514de8644821113e5d18a027d6d28a5c8cc0a6e" }
@@ -15,8 +15,8 @@ You can also install via Homebrew (`brew install --cask codex`) or download a pl

## Documentation quickstart

- First run with Codex? Start with [`docs/getting-started.md`](../docs/getting-started.md) (links to the walkthrough for prompts, keyboard shortcuts, and session management).
- Want deeper control? See [`docs/config.md`](../docs/config.md) and [`docs/install.md`](../docs/install.md).
- First run with Codex? Follow the walkthrough in [`docs/getting-started.md`](../docs/getting-started.md) for prompts, keyboard shortcuts, and session management.
- Already shipping with Codex and want deeper control? Jump to [`docs/advanced.md`](../docs/advanced.md) and the configuration reference at [`docs/config.md`](../docs/config.md).

## What's new in the Rust CLI

@@ -30,7 +30,7 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses

#### MCP client

Codex CLI functions as an MCP client, which lets the Codex CLI and IDE extension connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#connecting-to-mcp-servers) for details.
Codex CLI functions as an MCP client, which lets the Codex CLI and IDE extension connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#mcp_servers) for details.

#### MCP server (experimental)

@@ -46,7 +46,7 @@ Use `codex mcp` to add/list/get/remove MCP server launchers defined in `config.t

### Notifications

You can enable notifications by configuring a script that runs whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS. When Codex detects that it is running under WSL 2 inside Windows Terminal (`WT_SESSION` is set), the TUI automatically falls back to native Windows toast notifications so approval prompts and completed turns surface even though Windows Terminal does not implement OSC 9.
You can enable notifications by configuring a script that runs whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS.

### `codex exec` to run Codex programmatically/non-interactively
@@ -1,6 +0,0 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "ansi-escape",
    crate_name = "codex_ansi_escape",
)
@@ -1,8 +1,7 @@
[package]
edition = "2024"
name = "codex-ansi-escape"
version.workspace = true
edition.workspace = true
license.workspace = true
version = { workspace = true }

[lib]
name = "codex_ansi_escape"
@@ -1,6 +0,0 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "app-server-protocol",
    crate_name = "codex_app_server_protocol",
)
@@ -1,8 +1,7 @@
[package]
edition = "2024"
name = "codex-app-server-protocol"
version.workspace = true
edition.workspace = true
license.workspace = true
version = { workspace = true }

[lib]
name = "codex_app_server_protocol"
@@ -15,13 +14,12 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }
mcp-types = { workspace = true }
paste = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
strum_macros = { workspace = true }
thiserror = { workspace = true }
ts-rs = { workspace = true }
uuid = { workspace = true, features = ["serde", "v7"] }
@@ -13,7 +13,10 @@ use crate::export_server_responses;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::SandboxPolicy;
use schemars::JsonSchema;
use schemars::schema_for;
use serde::Serialize;
@@ -31,7 +34,6 @@ use std::process::Command;
use ts_rs::TS;

const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
const IGNORED_DEFINITIONS: &[&str] = &["Option<()>"];

#[derive(Clone)]
pub struct GeneratedSchema {
@@ -62,32 +64,7 @@ pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
    Ok(())
}

#[derive(Clone, Copy, Debug)]
pub struct GenerateTsOptions {
    pub generate_indices: bool,
    pub ensure_headers: bool,
    pub run_prettier: bool,
}

impl Default for GenerateTsOptions {
    fn default() -> Self {
        Self {
            generate_indices: true,
            ensure_headers: true,
            run_prettier: true,
        }
    }
}

pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
    generate_ts_with_options(out_dir, prettier, GenerateTsOptions::default())
}

pub fn generate_ts_with_options(
    out_dir: &Path,
    prettier: Option<&Path>,
    options: GenerateTsOptions,
) -> Result<()> {
    let v2_out_dir = out_dir.join("v2");
    ensure_dir(out_dir)?;
    ensure_dir(&v2_out_dir)?;
@@ -100,28 +77,17 @@ pub fn generate_ts_with_options(
    export_server_responses(out_dir)?;
    ServerNotification::export_all_to(out_dir)?;

    if options.generate_indices {
        generate_index_ts(out_dir)?;
        generate_index_ts(&v2_out_dir)?;
    }
    generate_index_ts(out_dir)?;
    generate_index_ts(&v2_out_dir)?;

    // Ensure our header is present on all TS files (root + subdirs like v2/).
    let mut ts_files = Vec::new();
    let should_collect_ts_files =
        options.ensure_headers || (options.run_prettier && prettier.is_some());
    if should_collect_ts_files {
        ts_files = ts_files_in_recursive(out_dir)?;
    }

    if options.ensure_headers {
        for file in &ts_files {
            prepend_header_if_missing(file)?;
        }
    let ts_files = ts_files_in_recursive(out_dir)?;
    for file in &ts_files {
        prepend_header_if_missing(file)?;
    }

    // Optionally run Prettier on all generated TS files.
    if options.run_prettier
        && let Some(prettier_bin) = prettier
    if let Some(prettier_bin) = prettier
        && !ts_files.is_empty()
    {
        let status = Command::new(prettier_bin)
@@ -154,6 +120,10 @@ pub fn generate_json(out_dir: &Path) -> Result<()> {
        |d| write_json_schema_with_return::<crate::ClientNotification>(d, "ClientNotification"),
        |d| write_json_schema_with_return::<crate::ServerNotification>(d, "ServerNotification"),
        |d| write_json_schema_with_return::<EventMsg>(d, "EventMsg"),
        |d| write_json_schema_with_return::<FileChange>(d, "FileChange"),
        |d| write_json_schema_with_return::<crate::protocol::v1::InputItem>(d, "InputItem"),
        |d| write_json_schema_with_return::<ParsedCommand>(d, "ParsedCommand"),
        |d| write_json_schema_with_return::<SandboxPolicy>(d, "SandboxPolicy"),
    ];

    let mut schemas: Vec<GeneratedSchema> = Vec::new();
@@ -182,6 +152,10 @@ fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
        "ClientNotification",
        "ClientRequest",
        "EventMsg",
        "FileChange",
        "InputItem",
        "ParsedCommand",
        "SandboxPolicy",
        "ServerNotification",
        "ServerRequest",
    ];
@@ -197,10 +171,6 @@ fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
        in_v1_dir,
    } = schema;

    if IGNORED_DEFINITIONS.contains(&logical_name.as_str()) {
        continue;
    }

    if let Some(ref ns) = namespace {
        rewrite_refs_to_namespace(&mut value, ns);
    }
@@ -211,9 +181,6 @@ fn build_schema_bundle(schemas: Vec<GeneratedSchema>) -> Result<Value> {
        && let Value::Object(defs_obj) = defs
    {
        for (def_name, mut def_schema) in defs_obj {
            if IGNORED_DEFINITIONS.contains(&def_name.as_str()) {
                continue;
            }
            if SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
                continue;
            }
@@ -304,11 +271,8 @@ where
        out_dir.join(format!("{file_stem}.json"))
    };

    if !IGNORED_DEFINITIONS.contains(&logical_name) {
        write_pretty_json(out_path, &schema_value)
            .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
    }

    write_pretty_json(out_path, &schema_value)
        .with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
    let namespace = match raw_namespace {
        Some("v1") | None => None,
        Some(ns) => Some(ns.to_string()),
@@ -422,6 +386,14 @@ fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
        });
    }

    if let Some(mode_literal) = literal_from_property(props, "mode") {
        let pascal = to_pascal_case(mode_literal);
        return Some(match base {
            "SandboxPolicy" => format!("{pascal}SandboxPolicy"),
            _ => format!("{pascal}{base}"),
        });
    }

    if props.len() == 1
        && let Some(key) = props.keys().next()
    {
@@ -762,13 +734,7 @@ mod tests {

    let _guard = TempDirGuard(output_dir.clone());

    // Avoid doing more work than necessary to keep the test from timing out.
    let options = GenerateTsOptions {
        generate_indices: false,
        ensure_headers: false,
        run_prettier: false,
    };
    generate_ts_with_options(&output_dir, None, options)?;
    generate_ts(&output_dir, None)?;

    let mut undefined_offenders = Vec::new();
    let mut optional_nullable_offenders = BTreeSet::new();

@@ -7,6 +7,5 @@ pub use export::generate_ts;
pub use export::generate_types;
pub use jsonrpc_lite::*;
pub use protocol::common::*;
pub use protocol::thread_history::*;
pub use protocol::v1::*;
pub use protocol::v2::*;
@@ -1,4 +1,6 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::JSONRPCNotification;
|
||||
use crate::JSONRPCRequest;
|
||||
@@ -7,6 +9,12 @@ use crate::export::GeneratedSchema;
|
||||
use crate::export::write_json_schema;
|
||||
use crate::protocol::v1;
|
||||
use crate::protocol::v2;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::parse_command::ParsedCommand;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
use codex_protocol::protocol::ReviewDecision;
|
||||
use codex_protocol::protocol::SandboxCommandAssessment;
|
||||
use paste::paste;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
@@ -109,45 +117,17 @@ client_request_definitions! {
|
||||
params: v2::ThreadResumeParams,
|
||||
response: v2::ThreadResumeResponse,
|
||||
},
|
||||
ThreadFork => "thread/fork" {
|
||||
params: v2::ThreadForkParams,
|
||||
response: v2::ThreadForkResponse,
|
||||
},
|
||||
ThreadArchive => "thread/archive" {
|
||||
params: v2::ThreadArchiveParams,
|
||||
response: v2::ThreadArchiveResponse,
|
||||
},
|
||||
ThreadUnarchive => "thread/unarchive" {
|
||||
params: v2::ThreadUnarchiveParams,
|
||||
response: v2::ThreadUnarchiveResponse,
|
||||
},
|
||||
ThreadRollback => "thread/rollback" {
|
||||
params: v2::ThreadRollbackParams,
|
||||
response: v2::ThreadRollbackResponse,
|
||||
},
|
||||
ThreadList => "thread/list" {
|
||||
params: v2::ThreadListParams,
|
||||
response: v2::ThreadListResponse,
|
||||
},
|
||||
ThreadLoadedList => "thread/loaded/list" {
|
||||
params: v2::ThreadLoadedListParams,
|
||||
response: v2::ThreadLoadedListResponse,
|
||||
},
|
||||
ThreadRead => "thread/read" {
|
||||
params: v2::ThreadReadParams,
|
||||
response: v2::ThreadReadResponse,
|
||||
},
|
||||
SkillsList => "skills/list" {
|
||||
params: v2::SkillsListParams,
|
||||
response: v2::SkillsListResponse,
|
||||
},
|
||||
AppsList => "app/list" {
|
||||
params: v2::AppsListParams,
|
||||
response: v2::AppsListResponse,
|
||||
},
|
||||
SkillsConfigWrite => "skills/config/write" {
|
||||
params: v2::SkillsConfigWriteParams,
|
||||
response: v2::SkillsConfigWriteResponse,
|
||||
ThreadCompact => "thread/compact" {
|
||||
params: v2::ThreadCompactParams,
|
||||
response: v2::ThreadCompactResponse,
|
||||
},
|
||||
TurnStart => "turn/start" {
|
||||
params: v2::TurnStartParams,
|
||||
@@ -157,35 +137,11 @@ client_request_definitions! {
|
||||
params: v2::TurnInterruptParams,
|
||||
response: v2::TurnInterruptResponse,
|
||||
},
|
||||
ReviewStart => "review/start" {
|
||||
params: v2::ReviewStartParams,
|
||||
response: v2::ReviewStartResponse,
|
||||
},
|
||||
|
||||
ModelList => "model/list" {
|
||||
params: v2::ModelListParams,
|
||||
response: v2::ModelListResponse,
|
||||
},
|
||||
/// EXPERIMENTAL - list collaboration mode presets.
|
||||
CollaborationModeList => "collaborationMode/list" {
|
||||
params: v2::CollaborationModeListParams,
|
||||
response: v2::CollaborationModeListResponse,
|
||||
},
|
||||
|
||||
McpServerOauthLogin => "mcpServer/oauth/login" {
|
||||
params: v2::McpServerOauthLoginParams,
|
||||
response: v2::McpServerOauthLoginResponse,
|
||||
},
|
||||
|
||||
McpServerRefresh => "config/mcpServer/reload" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::McpServerRefreshResponse,
|
||||
},
|
||||
|
||||
McpServerStatusList => "mcpServerStatus/list" {
|
||||
params: v2::ListMcpServerStatusParams,
|
||||
response: v2::ListMcpServerStatusResponse,
|
||||
},
|
||||
|
||||
LoginAccount => "account/login/start" {
|
||||
params: v2::LoginAccountParams,
|
||||
@@ -212,30 +168,6 @@ client_request_definitions! {
|
||||
response: v2::FeedbackUploadResponse,
|
||||
},
|
||||
|
||||
/// Execute a command (argv vector) under the server's sandbox.
|
||||
OneOffCommandExec => "command/exec" {
|
||||
params: v2::CommandExecParams,
|
||||
response: v2::CommandExecResponse,
|
||||
},
|
||||
|
||||
ConfigRead => "config/read" {
|
||||
params: v2::ConfigReadParams,
|
||||
response: v2::ConfigReadResponse,
|
||||
},
|
||||
ConfigValueWrite => "config/value/write" {
|
||||
params: v2::ConfigValueWriteParams,
|
||||
response: v2::ConfigWriteResponse,
|
||||
},
|
||||
ConfigBatchWrite => "config/batchWrite" {
|
||||
params: v2::ConfigBatchWriteParams,
|
||||
response: v2::ConfigWriteResponse,
|
||||
},
|
||||
|
||||
ConfigRequirementsRead => "configRequirements/read" {
|
||||
params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>,
|
||||
response: v2::ConfigRequirementsReadResponse,
|
||||
},
|
||||
|
||||
GetAccount => "account/read" {
|
||||
params: v2::GetAccountParams,
|
||||
response: v2::GetAccountResponse,
|
||||
@@ -260,11 +192,6 @@ client_request_definitions! {
|
||||
params: v1::ResumeConversationParams,
|
||||
response: v1::ResumeConversationResponse,
|
||||
},
|
||||
/// Fork a recorded Codex conversation into a new session.
|
||||
ForkConversation {
|
||||
params: v1::ForkConversationParams,
|
||||
response: v1::ForkConversationResponse,
|
||||
},
|
||||
ArchiveConversation {
|
||||
params: v1::ArchiveConversationParams,
|
||||
response: v1::ArchiveConversationResponse,
|
||||
@@ -350,36 +277,34 @@ macro_rules! server_request_definitions {
|
||||
(
|
||||
$(
|
||||
$(#[$variant_meta:meta])*
|
||||
$variant:ident $(=> $wire:literal)? {
|
||||
params: $params:ty,
|
||||
response: $response:ty,
|
||||
}
|
||||
$variant:ident
|
||||
),* $(,)?
|
||||
) => {
|
||||
/// Request initiated from the server and sent to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ServerRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$(#[serde(rename = $wire)] #[ts(rename = $wire)])?
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
params: $params,
|
||||
},
|
||||
)*
|
||||
}
|
||||
paste! {
|
||||
/// Request initiated from the server and sent to the client.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
|
||||
#[serde(tag = "method", rename_all = "camelCase")]
|
||||
pub enum ServerRequest {
|
||||
$(
|
||||
$(#[$variant_meta])*
|
||||
$variant {
|
||||
#[serde(rename = "id")]
|
||||
request_id: RequestId,
|
||||
params: [<$variant Params>],
|
||||
},
|
||||
)*
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, JsonSchema)]
|
||||
pub enum ServerRequestPayload {
|
||||
$( $variant($params), )*
|
||||
}
|
||||
#[derive(Debug, Clone, PartialEq, JsonSchema)]
|
||||
pub enum ServerRequestPayload {
|
||||
$( $variant([<$variant Params>]), )*
|
||||
}
|
||||
|
||||
impl ServerRequestPayload {
|
||||
pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
|
||||
match self {
|
||||
$(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
|
||||
impl ServerRequestPayload {
|
||||
pub fn request_with_id(self, request_id: RequestId) -> ServerRequest {
|
||||
match self {
|
||||
$(Self::$variant(params) => ServerRequest::$variant { request_id, params },)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -387,9 +312,9 @@ macro_rules! server_request_definitions {
|
||||
pub fn export_server_responses(
|
||||
out_dir: &::std::path::Path,
|
||||
) -> ::std::result::Result<(), ::ts_rs::ExportError> {
|
||||
$(
|
||||
<$response as ::ts_rs::TS>::export_all_to(out_dir)?;
|
||||
)*
|
||||
paste! {
|
||||
$(<[<$variant Response>] as ::ts_rs::TS>::export_all_to(out_dir)?;)*
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -398,12 +323,9 @@ macro_rules! server_request_definitions {
|
||||
out_dir: &Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(crate::export::write_json_schema::<$response>(
|
||||
out_dir,
|
||||
concat!(stringify!($variant), "Response"),
|
||||
)?);
|
||||
)*
|
||||
paste! {
|
||||
$(schemas.push(crate::export::write_json_schema::<[<$variant Response>]>(out_dir, stringify!([<$variant Response>]))?);)*
|
||||
}
|
||||
Ok(schemas)
|
||||
}
|
||||
|
||||
@@ -412,12 +334,9 @@ macro_rules! server_request_definitions {
|
||||
out_dir: &Path,
|
||||
) -> ::anyhow::Result<Vec<GeneratedSchema>> {
|
||||
let mut schemas = Vec::new();
|
||||
$(
|
||||
schemas.push(crate::export::write_json_schema::<$params>(
|
||||
out_dir,
|
||||
concat!(stringify!($variant), "Params"),
|
||||
)?);
|
||||
)*
|
||||
paste! {
|
||||
$(schemas.push(crate::export::write_json_schema::<[<$variant Params>]>(out_dir, stringify!([<$variant Params>]))?);)*
|
||||
}
|
||||
Ok(schemas)
|
||||
}
|
||||
};
|
||||
@@ -455,7 +374,7 @@ macro_rules! server_notification_definitions {
|
||||
impl TryFrom<JSONRPCNotification> for ServerNotification {
|
||||
type Error = serde_json::Error;
|
||||
|
||||
fn try_from(value: JSONRPCNotification) -> Result<Self, serde_json::Error> {
|
||||
fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
|
||||
serde_json::from_value(serde_json::to_value(value)?)
|
||||
}
|
||||
}
|
||||
@@ -507,46 +426,49 @@ impl TryFrom<JSONRPCRequest> for ServerRequest {
}

server_request_definitions! {
    /// NEW APIs
    /// Sent when approval is requested for a specific command execution.
    /// This request is used for Turns started via turn/start.
    CommandExecutionRequestApproval => "item/commandExecution/requestApproval" {
        params: v2::CommandExecutionRequestApprovalParams,
        response: v2::CommandExecutionRequestApprovalResponse,
    },

    /// Sent when approval is requested for a specific file change.
    /// This request is used for Turns started via turn/start.
    FileChangeRequestApproval => "item/fileChange/requestApproval" {
        params: v2::FileChangeRequestApprovalParams,
        response: v2::FileChangeRequestApprovalResponse,
    },

    /// EXPERIMENTAL - Request input from the user for a tool call.
    ToolRequestUserInput => "item/tool/requestUserInput" {
        params: v2::ToolRequestUserInputParams,
        response: v2::ToolRequestUserInputResponse,
    },

    /// Execute a dynamic tool call on the client.
    DynamicToolCall => "item/tool/call" {
        params: v2::DynamicToolCallParams,
        response: v2::DynamicToolCallResponse,
    },

    /// DEPRECATED APIs below
    /// Request to approve a patch.
    /// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
    ApplyPatchApproval {
        params: v1::ApplyPatchApprovalParams,
        response: v1::ApplyPatchApprovalResponse,
    },
    ApplyPatchApproval,
    /// Request to exec a command.
    /// This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).
    ExecCommandApproval {
        params: v1::ExecCommandApprovalParams,
        response: v1::ExecCommandApprovalResponse,
    },
    ExecCommandApproval,
}
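As a reading aid for the DSL above (an illustrative sketch, not the actual macro expansion): each `Variant => "method" { params, response }` entry plausibly expands to an enum variant carrying the request id plus its typed params, with the quoted string used as the JSON-RPC `method`. Roughly:

```rust
// Hypothetical expansion for one entry; the real macro output may differ.
pub enum ServerRequest {
    // CommandExecutionRequestApproval => "item/commandExecution/requestApproval" { ... }
    CommandExecutionRequestApproval {
        request_id: RequestId,
        params: v2::CommandExecutionRequestApprovalParams,
    },
    // ...one variant per entry...
}
```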

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
    /// and [codex_core::protocol::PatchApplyEndEvent].
    pub call_id: String,
    pub file_changes: HashMap<PathBuf, FileChange>,
    /// Optional explanatory reason (e.g. request for extra write access).
    pub reason: Option<String>,
    /// When set, the agent is asking the user to allow writes under this root
    /// for the remainder of the session (unclear if this is honored today).
    pub grant_root: Option<PathBuf>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecCommandApprovalParams {
    pub conversation_id: ConversationId,
    /// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
    /// and [codex_core::protocol::ExecCommandEndEvent].
    pub call_id: String,
    pub command: Vec<String>,
    pub cwd: PathBuf,
    pub reason: Option<String>,
    pub risk: Option<SandboxCommandAssessment>,
    pub parsed_cmd: Vec<ParsedCommand>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ExecCommandApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ApplyPatchApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -576,34 +498,16 @@ pub struct FuzzyFileSearchResponse {

server_notification_definitions! {
    /// NEW NOTIFICATIONS
    Error => "error" (v2::ErrorNotification),
    ThreadStarted => "thread/started" (v2::ThreadStartedNotification),
    ThreadTokenUsageUpdated => "thread/tokenUsage/updated" (v2::ThreadTokenUsageUpdatedNotification),
    TurnStarted => "turn/started" (v2::TurnStartedNotification),
    TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
    TurnDiffUpdated => "turn/diff/updated" (v2::TurnDiffUpdatedNotification),
    TurnPlanUpdated => "turn/plan/updated" (v2::TurnPlanUpdatedNotification),
    ItemStarted => "item/started" (v2::ItemStartedNotification),
    ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
    /// This event is internal-only. Used by Codex Cloud.
    RawResponseItemCompleted => "rawResponseItem/completed" (v2::RawResponseItemCompletedNotification),
    AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
    CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
    TerminalInteraction => "item/commandExecution/terminalInteraction" (v2::TerminalInteractionNotification),
    FileChangeOutputDelta => "item/fileChange/outputDelta" (v2::FileChangeOutputDeltaNotification),
    McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
    McpServerOauthLoginCompleted => "mcpServer/oauthLogin/completed" (v2::McpServerOauthLoginCompletedNotification),
    AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
    AccountRateLimitsUpdated => "account/rateLimits/updated" (v2::AccountRateLimitsUpdatedNotification),
    ReasoningSummaryTextDelta => "item/reasoning/summaryTextDelta" (v2::ReasoningSummaryTextDeltaNotification),
    ReasoningSummaryPartAdded => "item/reasoning/summaryPartAdded" (v2::ReasoningSummaryPartAddedNotification),
    ReasoningTextDelta => "item/reasoning/textDelta" (v2::ReasoningTextDeltaNotification),
    ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
    DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
    ConfigWarning => "configWarning" (v2::ConfigWarningNotification),

    /// Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.
    WindowsWorldWritableWarning => "windows/worldWritableWarning" (v2::WindowsWorldWritableWarningNotification),

    #[serde(rename = "account/login/completed")]
    #[ts(rename = "account/login/completed")]
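Each `Variant => "method" (PayloadType)` entry above binds a notification method string to its payload type. On the wire that plausibly looks like an ordinary JSON-RPC notification; a hedged sketch of one (the `params` fields are illustrative placeholders, the real shape is whatever the `v2` payload type defines):

```rust
fn main() {
    // Illustrative wire shape for `TurnCompleted => "turn/completed" (...)`.
    // "turn_id" is only a placeholder field name for this sketch.
    let notification = serde_json::json!({
        "jsonrpc": "2.0",
        "method": "turn/completed",
        "params": { "turn_id": "turn-1" }
    });
    println!("{notification}");
}
```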
@@ -626,20 +530,17 @@ client_notification_definitions! {
mod tests {
    use super::*;
    use anyhow::Result;
    use codex_protocol::ThreadId;
    use codex_protocol::account::PlanType;
    use codex_protocol::parse_command::ParsedCommand;
    use codex_protocol::protocol::AskForApproval;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use std::path::PathBuf;

    #[test]
    fn serialize_new_conversation() -> Result<()> {
        let request = ClientRequest::NewConversation {
            request_id: RequestId::Integer(42),
            params: v1::NewConversationParams {
                model: Some("gpt-5.1-codex-max".to_string()),
                model: Some("gpt-5-codex".to_string()),
                model_provider: None,
                profile: None,
                cwd: None,
@@ -657,7 +558,7 @@ mod tests {
            "method": "newConversation",
            "id": 42,
            "params": {
                "model": "gpt-5.1-codex-max",
                "model": "gpt-5-codex",
                "modelProvider": null,
                "profile": null,
                "cwd": null,
@@ -675,7 +576,7 @@ mod tests {

    #[test]
    fn conversation_id_serializes_as_plain_string() -> Result<()> {
        let id = ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
        let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;

        assert_eq!(
            json!("67e55044-10b1-426f-9247-bb680e5fe0c8"),
@@ -686,10 +587,11 @@ mod tests {

    #[test]
    fn conversation_id_deserializes_from_plain_string() -> Result<()> {
        let id: ThreadId = serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;
        let id: ConversationId =
            serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?;

        assert_eq!(
            ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
            ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?,
            id,
        );
        Ok(())
@@ -710,13 +612,14 @@ mod tests {

    #[test]
    fn serialize_server_request() -> Result<()> {
        let conversation_id = ThreadId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
        let params = v1::ExecCommandApprovalParams {
        let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?;
        let params = ExecCommandApprovalParams {
            conversation_id,
            call_id: "call-42".to_string(),
            command: vec!["echo".to_string(), "hello".to_string()],
            cwd: PathBuf::from("/tmp"),
            reason: Some("because tests".to_string()),
            risk: None,
            parsed_cmd: vec![ParsedCommand::Unknown {
                cmd: "echo hello".to_string(),
            }],
@@ -736,6 +639,7 @@ mod tests {
            "command": ["echo", "hello"],
            "cwd": "/tmp",
            "reason": "because tests",
            "risk": null,
            "parsedCmd": [
                {
                    "type": "unknown",
@@ -768,22 +672,6 @@ mod tests {
        Ok(())
    }

    #[test]
    fn serialize_config_requirements_read() -> Result<()> {
        let request = ClientRequest::ConfigRequirementsRead {
            request_id: RequestId::Integer(1),
            params: None,
        };
        assert_eq!(
            json!({
                "method": "configRequirements/read",
                "id": 1,
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }

    #[test]
    fn serialize_account_login_api_key() -> Result<()> {
        let request = ClientRequest::LoginAccount {
@@ -907,21 +795,4 @@ mod tests {
        );
        Ok(())
    }

    #[test]
    fn serialize_list_collaboration_modes() -> Result<()> {
        let request = ClientRequest::CollaborationModeList {
            request_id: RequestId::Integer(7),
            params: v2::CollaborationModeListParams::default(),
        };
        assert_eq!(
            json!({
                "method": "collaborationMode/list",
                "id": 7,
                "params": {}
            }),
            serde_json::to_value(&request)?,
        );
        Ok(())
    }
}

@@ -1,15 +0,0 @@
use crate::protocol::v1;
use crate::protocol::v2;

impl From<v1::ExecOneOffCommandParams> for v2::CommandExecParams {
    fn from(value: v1::ExecOneOffCommandParams) -> Self {
        Self {
            command: value.command,
            timeout_ms: value
                .timeout_ms
                .map(|timeout| i64::try_from(timeout).unwrap_or(60_000)),
            cwd: value.cwd,
            sandbox_policy: value.sandbox_policy.map(std::convert::Into::into),
        }
    }
}
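The mapper above is the whole v1-to-v2 bridge for one-off exec params: it narrows the `u64` timeout into the `i64` the v2 type uses, falling back to 60 000 ms when the value does not fit, and forwards everything else. A standalone sketch of just that narrowing rule, runnable without the crate:

```rust
fn main() {
    // The same narrowing the From impl applies to timeout_ms:
    // values that do not fit in i64 fall back to 60_000 ms.
    let narrow = |timeout: Option<u64>| -> Option<i64> {
        timeout.map(|t| i64::try_from(t).unwrap_or(60_000))
    };
    assert_eq!(narrow(Some(1_500)), Some(1_500));
    assert_eq!(narrow(Some(u64::MAX)), Some(60_000)); // overflow -> fallback
    assert_eq!(narrow(None), None);
}
```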
@@ -2,7 +2,5 @@
// Exposes protocol pieces used by `lib.rs` via `pub use protocol::common::*;`.

pub mod common;
mod mappers;
pub mod thread_history;
pub mod v1;
pub mod v2;

@@ -1,559 +0,0 @@
use crate::protocol::v2::ThreadItem;
use crate::protocol::v2::Turn;
use crate::protocol::v2::TurnError;
use crate::protocol::v2::TurnStatus;
use crate::protocol::v2::UserInput;
use codex_protocol::protocol::AgentReasoningEvent;
use codex_protocol::protocol::AgentReasoningRawContentEvent;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::ThreadRolledBackEvent;
use codex_protocol::protocol::TurnAbortedEvent;
use codex_protocol::protocol::UserMessageEvent;

/// Convert persisted [`EventMsg`] entries into a sequence of [`Turn`] values.
///
/// The purpose of this is to convert the EventMsgs persisted in a rollout file
/// into a sequence of Turns and ThreadItems, which allows the client to render
/// the historical messages when resuming a thread.
pub fn build_turns_from_event_msgs(events: &[EventMsg]) -> Vec<Turn> {
    let mut builder = ThreadHistoryBuilder::new();
    for event in events {
        builder.handle_event(event);
    }
    builder.finish()
}

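A minimal usage sketch for `build_turns_from_event_msgs` above, assuming the same types the `#[cfg(test)]` module further down has in scope (`load_rollout_events` is a hypothetical loader, not part of the crate):

```rust
// Sketch: replay a rollout's persisted events into renderable turns.
let events: Vec<EventMsg> = load_rollout_events(); // hypothetical loader
let turns = build_turns_from_event_msgs(&events);
for turn in &turns {
    // Each Turn carries synthetic ids ("turn-1", "item-1", ...) plus its
    // status, so a client can render history after resuming a thread.
    println!("{} ({:?}): {} items", turn.id, turn.status, turn.items.len());
}
```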
struct ThreadHistoryBuilder {
    turns: Vec<Turn>,
    current_turn: Option<PendingTurn>,
    next_turn_index: i64,
    next_item_index: i64,
}

impl ThreadHistoryBuilder {
    fn new() -> Self {
        Self {
            turns: Vec::new(),
            current_turn: None,
            next_turn_index: 1,
            next_item_index: 1,
        }
    }

    fn finish(mut self) -> Vec<Turn> {
        self.finish_current_turn();
        self.turns
    }

    /// This function should handle all EventMsg variants that can be persisted in a rollout file.
    /// See `should_persist_event_msg` in `codex-rs/core/rollout/policy.rs`.
    fn handle_event(&mut self, event: &EventMsg) {
        match event {
            EventMsg::UserMessage(payload) => self.handle_user_message(payload),
            EventMsg::AgentMessage(payload) => self.handle_agent_message(payload.message.clone()),
            EventMsg::AgentReasoning(payload) => self.handle_agent_reasoning(payload),
            EventMsg::AgentReasoningRawContent(payload) => {
                self.handle_agent_reasoning_raw_content(payload)
            }
            EventMsg::TokenCount(_) => {}
            EventMsg::EnteredReviewMode(_) => {}
            EventMsg::ExitedReviewMode(_) => {}
            EventMsg::ThreadRolledBack(payload) => self.handle_thread_rollback(payload),
            EventMsg::UndoCompleted(_) => {}
            EventMsg::TurnAborted(payload) => self.handle_turn_aborted(payload),
            _ => {}
        }
    }

    fn handle_user_message(&mut self, payload: &UserMessageEvent) {
        self.finish_current_turn();
        let mut turn = self.new_turn();
        let id = self.next_item_id();
        let content = self.build_user_inputs(payload);
        turn.items.push(ThreadItem::UserMessage { id, content });
        self.current_turn = Some(turn);
    }

    fn handle_agent_message(&mut self, text: String) {
        if text.is_empty() {
            return;
        }

        let id = self.next_item_id();
        self.ensure_turn()
            .items
            .push(ThreadItem::AgentMessage { id, text });
    }

    fn handle_agent_reasoning(&mut self, payload: &AgentReasoningEvent) {
        if payload.text.is_empty() {
            return;
        }

        // If the last item is a reasoning item, add the new text to the summary.
        if let Some(ThreadItem::Reasoning { summary, .. }) = self.ensure_turn().items.last_mut() {
            summary.push(payload.text.clone());
            return;
        }

        // Otherwise, create a new reasoning item.
        let id = self.next_item_id();
        self.ensure_turn().items.push(ThreadItem::Reasoning {
            id,
            summary: vec![payload.text.clone()],
            content: Vec::new(),
        });
    }

    fn handle_agent_reasoning_raw_content(&mut self, payload: &AgentReasoningRawContentEvent) {
        if payload.text.is_empty() {
            return;
        }

        // If the last item is a reasoning item, add the new text to the content.
        if let Some(ThreadItem::Reasoning { content, .. }) = self.ensure_turn().items.last_mut() {
            content.push(payload.text.clone());
            return;
        }

        // Otherwise, create a new reasoning item.
        let id = self.next_item_id();
        self.ensure_turn().items.push(ThreadItem::Reasoning {
            id,
            summary: Vec::new(),
            content: vec![payload.text.clone()],
        });
    }

    fn handle_turn_aborted(&mut self, _payload: &TurnAbortedEvent) {
        let Some(turn) = self.current_turn.as_mut() else {
            return;
        };
        turn.status = TurnStatus::Interrupted;
    }

    fn handle_thread_rollback(&mut self, payload: &ThreadRolledBackEvent) {
        self.finish_current_turn();

        let n = usize::try_from(payload.num_turns).unwrap_or(usize::MAX);
        if n >= self.turns.len() {
            self.turns.clear();
        } else {
            self.turns.truncate(self.turns.len().saturating_sub(n));
        }

        // Re-number subsequent synthetic ids so the pruned history is consistent.
        self.next_turn_index =
            i64::try_from(self.turns.len().saturating_add(1)).unwrap_or(i64::MAX);
        let item_count: usize = self.turns.iter().map(|t| t.items.len()).sum();
        self.next_item_index = i64::try_from(item_count.saturating_add(1)).unwrap_or(i64::MAX);
    }

    fn finish_current_turn(&mut self) {
        if let Some(turn) = self.current_turn.take() {
            if turn.items.is_empty() {
                return;
            }
            self.turns.push(turn.into());
        }
    }

    fn new_turn(&mut self) -> PendingTurn {
        PendingTurn {
            id: self.next_turn_id(),
            items: Vec::new(),
            error: None,
            status: TurnStatus::Completed,
        }
    }

    fn ensure_turn(&mut self) -> &mut PendingTurn {
        if self.current_turn.is_none() {
            let turn = self.new_turn();
            return self.current_turn.insert(turn);
        }

        if let Some(turn) = self.current_turn.as_mut() {
            return turn;
        }

        unreachable!("current turn must exist after initialization");
    }

    fn next_turn_id(&mut self) -> String {
        let id = format!("turn-{}", self.next_turn_index);
        self.next_turn_index += 1;
        id
    }

    fn next_item_id(&mut self) -> String {
        let id = format!("item-{}", self.next_item_index);
        self.next_item_index += 1;
        id
    }

    fn build_user_inputs(&self, payload: &UserMessageEvent) -> Vec<UserInput> {
        let mut content = Vec::new();
        if !payload.message.trim().is_empty() {
            content.push(UserInput::Text {
                text: payload.message.clone(),
                text_elements: payload
                    .text_elements
                    .iter()
                    .cloned()
                    .map(Into::into)
                    .collect(),
            });
        }
        if let Some(images) = &payload.images {
            for image in images {
                content.push(UserInput::Image { url: image.clone() });
            }
        }
        for path in &payload.local_images {
            content.push(UserInput::LocalImage { path: path.clone() });
        }
        content
    }
}

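A worked example of the renumbering in `handle_thread_rollback` above, as a standalone sketch: with two completed turns holding four items total, rolling back one turn leaves one turn and its two items, so the next synthetic ids become `turn-2` and `item-3`.

```rust
fn main() {
    // Mirror of the rollback arithmetic: turns = [2 items, 2 items], roll back 1.
    let turn_item_counts = vec![2usize, 2usize];
    let num_turns = 1usize;
    let remaining = &turn_item_counts[..turn_item_counts.len().saturating_sub(num_turns)];
    let next_turn_index = remaining.len() + 1;
    let next_item_index = remaining.iter().sum::<usize>() + 1;
    assert_eq!(next_turn_index, 2); // next id: "turn-2"
    assert_eq!(next_item_index, 3); // next id: "item-3"
}
```

This matches the `drops_last_turns_on_thread_rollback` test below, where the turn created after the rollback is `turn-2` and its items are `item-3` and `item-4`.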
struct PendingTurn {
    id: String,
    items: Vec<ThreadItem>,
    error: Option<TurnError>,
    status: TurnStatus,
}

impl From<PendingTurn> for Turn {
    fn from(value: PendingTurn) -> Self {
        Self {
            id: value.id,
            items: value.items,
            error: value.error,
            status: value.status,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_protocol::protocol::AgentMessageEvent;
    use codex_protocol::protocol::AgentReasoningEvent;
    use codex_protocol::protocol::AgentReasoningRawContentEvent;
    use codex_protocol::protocol::ThreadRolledBackEvent;
    use codex_protocol::protocol::TurnAbortReason;
    use codex_protocol::protocol::TurnAbortedEvent;
    use codex_protocol::protocol::UserMessageEvent;
    use pretty_assertions::assert_eq;

    #[test]
    fn builds_multiple_turns_with_reasoning_items() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "First turn".into(),
                images: Some(vec!["https://example.com/one.png".into()]),
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Hi there".into(),
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "thinking".into(),
            }),
            EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
                text: "full reasoning".into(),
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Second turn".into(),
                images: None,
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Reply two".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 2);

        let first = &turns[0];
        assert_eq!(first.id, "turn-1");
        assert_eq!(first.status, TurnStatus::Completed);
        assert_eq!(first.items.len(), 3);
        assert_eq!(
            first.items[0],
            ThreadItem::UserMessage {
                id: "item-1".into(),
                content: vec![
                    UserInput::Text {
                        text: "First turn".into(),
                        text_elements: Vec::new(),
                    },
                    UserInput::Image {
                        url: "https://example.com/one.png".into(),
                    }
                ],
            }
        );
        assert_eq!(
            first.items[1],
            ThreadItem::AgentMessage {
                id: "item-2".into(),
                text: "Hi there".into(),
            }
        );
        assert_eq!(
            first.items[2],
            ThreadItem::Reasoning {
                id: "item-3".into(),
                summary: vec!["thinking".into()],
                content: vec!["full reasoning".into()],
            }
        );

        let second = &turns[1];
        assert_eq!(second.id, "turn-2");
        assert_eq!(second.items.len(), 2);
        assert_eq!(
            second.items[0],
            ThreadItem::UserMessage {
                id: "item-4".into(),
                content: vec![UserInput::Text {
                    text: "Second turn".into(),
                    text_elements: Vec::new(),
                }],
            }
        );
        assert_eq!(
            second.items[1],
            ThreadItem::AgentMessage {
                id: "item-5".into(),
                text: "Reply two".into(),
            }
        );
    }

    #[test]
    fn splits_reasoning_when_interleaved() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "Turn start".into(),
                images: None,
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "first summary".into(),
            }),
            EventMsg::AgentReasoningRawContent(AgentReasoningRawContentEvent {
                text: "first content".into(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "interlude".into(),
            }),
            EventMsg::AgentReasoning(AgentReasoningEvent {
                text: "second summary".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 1);
        let turn = &turns[0];
        assert_eq!(turn.items.len(), 4);

        assert_eq!(
            turn.items[1],
            ThreadItem::Reasoning {
                id: "item-2".into(),
                summary: vec!["first summary".into()],
                content: vec!["first content".into()],
            }
        );
        assert_eq!(
            turn.items[3],
            ThreadItem::Reasoning {
                id: "item-4".into(),
                summary: vec!["second summary".into()],
                content: Vec::new(),
            }
        );
    }

    #[test]
    fn marks_turn_as_interrupted_when_aborted() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "Please do the thing".into(),
                images: None,
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Working...".into(),
            }),
            EventMsg::TurnAborted(TurnAbortedEvent {
                reason: TurnAbortReason::Replaced,
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Let's try again".into(),
                images: None,
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "Second attempt complete.".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns.len(), 2);

        let first_turn = &turns[0];
        assert_eq!(first_turn.status, TurnStatus::Interrupted);
        assert_eq!(first_turn.items.len(), 2);
        assert_eq!(
            first_turn.items[0],
            ThreadItem::UserMessage {
                id: "item-1".into(),
                content: vec![UserInput::Text {
                    text: "Please do the thing".into(),
                    text_elements: Vec::new(),
                }],
            }
        );
        assert_eq!(
            first_turn.items[1],
            ThreadItem::AgentMessage {
                id: "item-2".into(),
                text: "Working...".into(),
            }
        );

        let second_turn = &turns[1];
        assert_eq!(second_turn.status, TurnStatus::Completed);
        assert_eq!(second_turn.items.len(), 2);
        assert_eq!(
            second_turn.items[0],
            ThreadItem::UserMessage {
                id: "item-3".into(),
                content: vec![UserInput::Text {
                    text: "Let's try again".into(),
                    text_elements: Vec::new(),
                }],
            }
        );
        assert_eq!(
            second_turn.items[1],
            ThreadItem::AgentMessage {
                id: "item-4".into(),
                text: "Second attempt complete.".into(),
            }
        );
    }

    #[test]
    fn drops_last_turns_on_thread_rollback() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "First".into(),
                images: None,
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "A1".into(),
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Second".into(),
                images: None,
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "A2".into(),
            }),
            EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 1 }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Third".into(),
                images: None,
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "A3".into(),
            }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        let expected = vec![
            Turn {
                id: "turn-1".into(),
                status: TurnStatus::Completed,
                error: None,
                items: vec![
                    ThreadItem::UserMessage {
                        id: "item-1".into(),
                        content: vec![UserInput::Text {
                            text: "First".into(),
                            text_elements: Vec::new(),
                        }],
                    },
                    ThreadItem::AgentMessage {
                        id: "item-2".into(),
                        text: "A1".into(),
                    },
                ],
            },
            Turn {
                id: "turn-2".into(),
                status: TurnStatus::Completed,
                error: None,
                items: vec![
                    ThreadItem::UserMessage {
                        id: "item-3".into(),
                        content: vec![UserInput::Text {
                            text: "Third".into(),
                            text_elements: Vec::new(),
                        }],
                    },
                    ThreadItem::AgentMessage {
                        id: "item-4".into(),
                        text: "A3".into(),
                    },
                ],
            },
        ];
        assert_eq!(turns, expected);
    }

    #[test]
    fn thread_rollback_clears_all_turns_when_num_turns_exceeds_history() {
        let events = vec![
            EventMsg::UserMessage(UserMessageEvent {
                message: "One".into(),
                images: None,
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "A1".into(),
            }),
            EventMsg::UserMessage(UserMessageEvent {
                message: "Two".into(),
                images: None,
                text_elements: Vec::new(),
                local_images: Vec::new(),
            }),
            EventMsg::AgentMessage(AgentMessageEvent {
                message: "A2".into(),
            }),
            EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 99 }),
        ];

        let turns = build_turns_from_event_msgs(&events);
        assert_eq!(turns, Vec::<Turn>::new());
    }
}
@@ -1,24 +1,18 @@
use std::collections::HashMap;
use std::path::PathBuf;

use codex_protocol::ThreadId;
use codex_protocol::ConversationId;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::models::ResponseItem;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::SandboxPolicy;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::TurnAbortReason;
use codex_protocol::user_input::ByteRange as CoreByteRange;
use codex_protocol::user_input::TextElement as CoreTextElement;
use codex_utils_absolute_path::AbsolutePathBuf;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
@@ -70,7 +64,7 @@ pub struct NewConversationParams {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct NewConversationResponse {
    pub conversation_id: ThreadId,
    pub conversation_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub rollout_path: PathBuf,
@@ -79,16 +73,7 @@ pub struct NewConversationResponse {
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationResponse {
    pub conversation_id: ThreadId,
    pub model: String,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ForkConversationResponse {
    pub conversation_id: ThreadId,
    pub conversation_id: ConversationId,
    pub model: String,
    pub initial_messages: Option<Vec<EventMsg>>,
    pub rollout_path: PathBuf,
@@ -101,9 +86,9 @@ pub enum GetConversationSummaryParams {
        #[serde(rename = "rolloutPath")]
        rollout_path: PathBuf,
    },
    ThreadId {
    ConversationId {
        #[serde(rename = "conversationId")]
        conversation_id: ThreadId,
        conversation_id: ConversationId,
    },
}

@@ -124,11 +109,10 @@ pub struct ListConversationsParams {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ConversationSummary {
    pub conversation_id: ThreadId,
    pub conversation_id: ConversationId,
    pub path: PathBuf,
    pub preview: String,
    pub timestamp: Option<String>,
    pub updated_at: Option<String>,
    pub model_provider: String,
    pub cwd: PathBuf,
    pub cli_version: String,
@@ -155,19 +139,11 @@ pub struct ListConversationsResponse {
#[serde(rename_all = "camelCase")]
pub struct ResumeConversationParams {
    pub path: Option<PathBuf>,
    pub conversation_id: Option<ThreadId>,
    pub conversation_id: Option<ConversationId>,
    pub history: Option<Vec<ResponseItem>>,
    pub overrides: Option<NewConversationParams>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ForkConversationParams {
    pub path: Option<PathBuf>,
    pub conversation_id: Option<ThreadId>,
    pub overrides: Option<NewConversationParams>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationSubscriptionResponse {
@@ -178,7 +154,7 @@ pub struct AddConversationSubscriptionResponse {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ArchiveConversationParams {
    pub conversation_id: ThreadId,
    pub conversation_id: ConversationId,
    pub rollout_path: PathBuf,
}

@@ -215,45 +191,6 @@ pub struct GitDiffToRemoteResponse {
    pub diff: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalParams {
    pub conversation_id: ThreadId,
    /// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent]
    /// and [codex_core::protocol::PatchApplyEndEvent].
    pub call_id: String,
    pub file_changes: HashMap<PathBuf, FileChange>,
    /// Optional explanatory reason (e.g. request for extra write access).
    pub reason: Option<String>,
    /// When set, the agent is asking the user to allow writes under this root
    /// for the remainder of the session (unclear if this is honored today).
    pub grant_root: Option<PathBuf>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ApplyPatchApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ExecCommandApprovalParams {
    pub conversation_id: ThreadId,
    /// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent]
    /// and [codex_core::protocol::ExecCommandEndEvent].
    pub call_id: String,
    pub command: Vec<String>,
    pub cwd: PathBuf,
    pub reason: Option<String>,
    pub parsed_cmd: Vec<ParsedCommand>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
pub struct ExecCommandApprovalResponse {
    pub decision: ReviewDecision,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct CancelLoginChatGptParams {
@@ -380,7 +317,7 @@ pub struct Tools {
#[serde(rename_all = "camelCase")]
pub struct SandboxSettings {
    #[serde(default)]
    pub writable_roots: Vec<AbsolutePathBuf>,
    pub writable_roots: Vec<PathBuf>,
    pub network_access: Option<bool>,
    pub exclude_tmpdir_env_var: Option<bool>,
    pub exclude_slash_tmp: Option<bool>,
@@ -389,14 +326,14 @@ pub struct SandboxSettings {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserMessageParams {
    pub conversation_id: ThreadId,
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SendUserTurnParams {
    pub conversation_id: ThreadId,
    pub conversation_id: ConversationId,
    pub items: Vec<InputItem>,
    pub cwd: PathBuf,
    pub approval_policy: AskForApproval,
@@ -404,8 +341,6 @@ pub struct SendUserTurnParams {
    pub model: String,
    pub effort: Option<ReasoningEffort>,
    pub summary: ReasoningSummary,
    /// Optional JSON Schema used to constrain the final assistant message for this turn.
    pub output_schema: Option<serde_json::Value>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -415,7 +350,7 @@ pub struct SendUserTurnResponse {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct InterruptConversationParams {
    pub conversation_id: ThreadId,
    pub conversation_id: ConversationId,
}

#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
@@ -431,7 +366,7 @@ pub struct SendUserMessageResponse {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct AddConversationListenerParams {
    pub conversation_id: ThreadId,
    pub conversation_id: ConversationId,
    #[serde(default)]
    pub experimental_raw_events: bool,
}
@@ -447,71 +382,9 @@ pub struct RemoveConversationListenerParams {
#[serde(rename_all = "camelCase")]
#[serde(tag = "type", content = "data")]
pub enum InputItem {
    Text {
        text: String,
        /// UI-defined spans within `text` used to render or persist special elements.
        #[serde(default)]
        text_elements: Vec<V1TextElement>,
    },
    Image {
        image_url: String,
    },
    LocalImage {
        path: PathBuf,
    },
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename = "ByteRange")]
pub struct V1ByteRange {
    /// Start byte offset (inclusive) within the UTF-8 text buffer.
    pub start: usize,
    /// End byte offset (exclusive) within the UTF-8 text buffer.
    pub end: usize,
}

impl From<CoreByteRange> for V1ByteRange {
    fn from(value: CoreByteRange) -> Self {
        Self {
            start: value.start,
            end: value.end,
        }
    }
}

impl From<V1ByteRange> for CoreByteRange {
    fn from(value: V1ByteRange) -> Self {
        Self {
            start: value.start,
            end: value.end,
        }
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename = "TextElement")]
pub struct V1TextElement {
    /// Byte range in the parent `text` buffer that this element occupies.
    pub byte_range: V1ByteRange,
    /// Optional human-readable placeholder for the element, displayed in the UI.
    pub placeholder: Option<String>,
}

impl From<CoreTextElement> for V1TextElement {
    fn from(value: CoreTextElement) -> Self {
        Self {
            byte_range: value.byte_range.into(),
            placeholder: value._placeholder_for_conversion_only().map(str::to_string),
        }
    }
}

impl From<V1TextElement> for CoreTextElement {
    fn from(value: V1TextElement) -> Self {
        Self::new(value.byte_range.into(), value.placeholder)
    }
    Text { text: String },
    Image { image_url: String },
    LocalImage { path: PathBuf },
}

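The `ByteRange` docs above specify half-open byte offsets into the UTF-8 text buffer. Byte offsets, not char indices, are what matter once multi-byte characters appear; a small standalone sketch (the struct here is a stand-in for the protocol type):

```rust
fn main() {
    // Stand-in for the protocol's ByteRange: [start, end) byte offsets.
    struct ByteRange {
        start: usize,
        end: usize,
    }

    let text = "héllo @file";
    // 'é' is 2 bytes in UTF-8, so "@file" starts at byte 7, not char 6.
    let start = text.find('@').expect("placeholder present");
    let range = ByteRange { start, end: text.len() };
    assert_eq!(&text[range.start..range.end], "@file");
}
```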
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -527,7 +400,7 @@ pub struct LoginChatGptCompleteNotification {
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct SessionConfiguredNotification {
    pub session_id: ThreadId,
    pub session_id: ConversationId,
    pub model: String,
    pub reasoning_effort: Option<ReasoningEffort>,
    pub history_log_id: u64,

File diff suppressed because it is too large
@@ -1,6 +0,0 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "codex-app-server-test-client",
    crate_name = "codex_app_server_test_client",
)
codex-rs/app-server-test-client/Cargo.lock (generated, 1298 lines)
File diff suppressed because it is too large
@@ -1,17 +0,0 @@
[package]
name = "codex-app-server-test-client"
version.workspace = true
edition.workspace = true
license.workspace = true

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive", "env"] }
codex-app-server-protocol = { workspace = true }
codex-protocol = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
uuid = { workspace = true, features = ["v4"] }
@@ -1,2 +0,0 @@
# App Server Test Client
Exercises simple `codex app-server` flows end-to-end, logging JSON-RPC messages sent between client and server to stdout.
@@ -1,964 +0,0 @@
use std::collections::VecDeque;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Write;
use std::process::Child;
use std::process::ChildStdin;
use std::process::ChildStdout;
use std::process::Command;
use std::process::Stdio;
use std::thread;
use std::time::Duration;

use anyhow::Context;
use anyhow::Result;
use anyhow::bail;
use clap::ArgAction;
use clap::Parser;
use clap::Subcommand;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AddConversationSubscriptionResponse;
use codex_app_server_protocol::AskForApproval;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::CommandExecutionApprovalDecision;
use codex_app_server_protocol::CommandExecutionRequestApprovalParams;
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
use codex_app_server_protocol::FileChangeApprovalDecision;
use codex_app_server_protocol::FileChangeRequestApprovalParams;
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
use codex_app_server_protocol::GetAccountRateLimitsResponse;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::InitializeResponse;
use codex_app_server_protocol::InputItem;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCRequest;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginChatGptCompleteNotification;
use codex_app_server_protocol::LoginChatGptResponse;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::ModelListResponse;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SandboxPolicy;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_protocol::ThreadId;
use codex_protocol::protocol::Event;
use codex_protocol::protocol::EventMsg;
use serde::Serialize;
use serde::de::DeserializeOwned;
use serde_json::Value;
use uuid::Uuid;

/// Minimal launcher that initializes the Codex app-server and logs the handshake.
#[derive(Parser)]
#[command(author = "Codex", version, about = "Bootstrap Codex app-server", long_about = None)]
struct Cli {
    /// Path to the `codex` CLI binary.
    #[arg(long, env = "CODEX_BIN", default_value = "codex")]
    codex_bin: String,

    /// Forwarded to the `codex` CLI as `--config key=value`. Repeatable.
    ///
    /// Example:
    /// `--config 'model_providers.mock.base_url="http://localhost:4010/v2"'`
    #[arg(
        short = 'c',
        long = "config",
        value_name = "key=value",
        action = ArgAction::Append,
        global = true
    )]
    config_overrides: Vec<String>,

    #[command(subcommand)]
    command: CliCommand,
}

#[derive(Subcommand)]
enum CliCommand {
    /// Send a user message through the Codex app-server.
    SendMessage {
        /// User message to send to Codex.
        #[arg()]
        user_message: String,
    },
    /// Send a user message through the app-server V2 thread/turn APIs.
    SendMessageV2 {
        /// User message to send to Codex.
        #[arg()]
        user_message: String,
    },
    /// Start a V2 turn that elicits an ExecCommand approval.
    #[command(name = "trigger-cmd-approval")]
    TriggerCmdApproval {
        /// Optional prompt; defaults to a simple python command.
        #[arg()]
        user_message: Option<String>,
    },
    /// Start a V2 turn that elicits an ApplyPatch approval.
    #[command(name = "trigger-patch-approval")]
    TriggerPatchApproval {
        /// Optional prompt; defaults to creating a file via apply_patch.
        #[arg()]
        user_message: Option<String>,
    },
    /// Start a V2 turn that should not elicit an ExecCommand approval.
    #[command(name = "no-trigger-cmd-approval")]
    NoTriggerCmdApproval,
    /// Send two sequential V2 turns in the same thread to test follow-up behavior.
    SendFollowUpV2 {
        /// Initial user message for the first turn.
        #[arg()]
        first_message: String,
        /// Follow-up user message for the second turn.
        #[arg()]
        follow_up_message: String,
    },
    /// Trigger the ChatGPT login flow and wait for completion.
    TestLogin,
    /// Fetch the current account rate limits from the Codex app-server.
    GetAccountRateLimits,
    /// List the available models from the Codex app-server.
    #[command(name = "model-list")]
    ModelList,
}

fn main() -> Result<()> {
    let Cli {
        codex_bin,
        config_overrides,
        command,
    } = Cli::parse();

    match command {
        CliCommand::SendMessage { user_message } => {
            send_message(&codex_bin, &config_overrides, user_message)
        }
        CliCommand::SendMessageV2 { user_message } => {
            send_message_v2(&codex_bin, &config_overrides, user_message)
        }
        CliCommand::TriggerCmdApproval { user_message } => {
            trigger_cmd_approval(&codex_bin, &config_overrides, user_message)
        }
        CliCommand::TriggerPatchApproval { user_message } => {
            trigger_patch_approval(&codex_bin, &config_overrides, user_message)
        }
        CliCommand::NoTriggerCmdApproval => no_trigger_cmd_approval(&codex_bin, &config_overrides),
        CliCommand::SendFollowUpV2 {
            first_message,
            follow_up_message,
        } => send_follow_up_v2(
            &codex_bin,
            &config_overrides,
            first_message,
            follow_up_message,
        ),
        CliCommand::TestLogin => test_login(&codex_bin, &config_overrides),
        CliCommand::GetAccountRateLimits => get_account_rate_limits(&codex_bin, &config_overrides),
        CliCommand::ModelList => model_list(&codex_bin, &config_overrides),
    }
}

fn send_message(codex_bin: &str, config_overrides: &[String], user_message: String) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let conversation = client.start_thread()?;
    println!("< newConversation response: {conversation:?}");

    let subscription = client.add_conversation_listener(&conversation.conversation_id)?;
    println!("< addConversationListener response: {subscription:?}");

    let send_response = client.send_user_message(&conversation.conversation_id, &user_message)?;
    println!("< sendUserMessage response: {send_response:?}");

    client.stream_conversation(&conversation.conversation_id)?;

    client.remove_thread_listener(subscription.subscription_id)?;

    Ok(())
}

fn send_message_v2(
    codex_bin: &str,
    config_overrides: &[String],
    user_message: String,
) -> Result<()> {
    send_message_v2_with_policies(codex_bin, config_overrides, user_message, None, None)
}

fn trigger_cmd_approval(
    codex_bin: &str,
    config_overrides: &[String],
    user_message: Option<String>,
) -> Result<()> {
    let default_prompt =
        "Run `touch /tmp/should-trigger-approval` so I can confirm the file exists.";
    let message = user_message.unwrap_or_else(|| default_prompt.to_string());
    send_message_v2_with_policies(
        codex_bin,
        config_overrides,
        message,
        Some(AskForApproval::OnRequest),
        Some(SandboxPolicy::ReadOnly),
    )
}

fn trigger_patch_approval(
    codex_bin: &str,
    config_overrides: &[String],
    user_message: Option<String>,
) -> Result<()> {
    let default_prompt =
        "Create a file named APPROVAL_DEMO.txt containing a short hello message using apply_patch.";
    let message = user_message.unwrap_or_else(|| default_prompt.to_string());
    send_message_v2_with_policies(
        codex_bin,
        config_overrides,
        message,
        Some(AskForApproval::OnRequest),
        Some(SandboxPolicy::ReadOnly),
    )
}

fn no_trigger_cmd_approval(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
    let prompt = "Run `touch should_not_trigger_approval.txt`";
    send_message_v2_with_policies(codex_bin, config_overrides, prompt.to_string(), None, None)
}

fn send_message_v2_with_policies(
    codex_bin: &str,
    config_overrides: &[String],
    user_message: String,
    approval_policy: Option<AskForApproval>,
    sandbox_policy: Option<SandboxPolicy>,
) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let thread_response = client.thread_start(ThreadStartParams::default())?;
    println!("< thread/start response: {thread_response:?}");
    let mut turn_params = TurnStartParams {
        thread_id: thread_response.thread.id.clone(),
        input: vec![V2UserInput::Text {
            text: user_message,
            // Test client sends plain text without UI element ranges.
            text_elements: Vec::new(),
        }],
        ..Default::default()
    };
    turn_params.approval_policy = approval_policy;
    turn_params.sandbox_policy = sandbox_policy;

    let turn_response = client.turn_start(turn_params)?;
    println!("< turn/start response: {turn_response:?}");

    client.stream_turn(&thread_response.thread.id, &turn_response.turn.id)?;

    Ok(())
}

fn send_follow_up_v2(
    codex_bin: &str,
    config_overrides: &[String],
    first_message: String,
    follow_up_message: String,
) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let thread_response = client.thread_start(ThreadStartParams::default())?;
    println!("< thread/start response: {thread_response:?}");

    let first_turn_params = TurnStartParams {
        thread_id: thread_response.thread.id.clone(),
        input: vec![V2UserInput::Text {
            text: first_message,
            // Test client sends plain text without UI element ranges.
            text_elements: Vec::new(),
        }],
        ..Default::default()
    };
    let first_turn_response = client.turn_start(first_turn_params)?;
    println!("< turn/start response (initial): {first_turn_response:?}");
    client.stream_turn(&thread_response.thread.id, &first_turn_response.turn.id)?;

    let follow_up_params = TurnStartParams {
        thread_id: thread_response.thread.id.clone(),
        input: vec![V2UserInput::Text {
            text: follow_up_message,
            // Test client sends plain text without UI element ranges.
            text_elements: Vec::new(),
        }],
        ..Default::default()
    };
    let follow_up_response = client.turn_start(follow_up_params)?;
    println!("< turn/start response (follow-up): {follow_up_response:?}");
    client.stream_turn(&thread_response.thread.id, &follow_up_response.turn.id)?;

    Ok(())
}

fn test_login(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let login_response = client.login_chat_gpt()?;
    println!("< loginChatGpt response: {login_response:?}");
    println!(
        "Open the following URL in your browser to continue:\n{}",
        login_response.auth_url
    );

    let completion = client.wait_for_login_completion(&login_response.login_id)?;
    println!("< loginChatGptComplete notification: {completion:?}");

    if completion.success {
        println!("Login succeeded.");
        Ok(())
    } else {
        bail!(
            "login failed: {}",
            completion
                .error
                .as_deref()
                .unwrap_or("unknown error from loginChatGptComplete")
        );
    }
}

fn get_account_rate_limits(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let response = client.get_account_rate_limits()?;
    println!("< account/rateLimits/read response: {response:?}");

    Ok(())
}

fn model_list(codex_bin: &str, config_overrides: &[String]) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let response = client.model_list(ModelListParams::default())?;
    println!("< model/list response: {response:?}");

    Ok(())
}

struct CodexClient {
    child: Child,
    stdin: Option<ChildStdin>,
    stdout: BufReader<ChildStdout>,
    pending_notifications: VecDeque<JSONRPCNotification>,
}

impl CodexClient {
    fn spawn(codex_bin: &str, config_overrides: &[String]) -> Result<Self> {
        let mut cmd = Command::new(codex_bin);
        for override_kv in config_overrides {
            cmd.arg("--config").arg(override_kv);
        }
        let mut codex_app_server = cmd
            .arg("app-server")
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::inherit())
            .spawn()
            .with_context(|| format!("failed to start `{codex_bin}` app-server"))?;

        let stdin = codex_app_server
            .stdin
            .take()
            .context("codex app-server stdin unavailable")?;
        let stdout = codex_app_server
            .stdout
            .take()
            .context("codex app-server stdout unavailable")?;

        Ok(Self {
            child: codex_app_server,
            stdin: Some(stdin),
            stdout: BufReader::new(stdout),
            pending_notifications: VecDeque::new(),
        })
    }

    fn initialize(&mut self) -> Result<InitializeResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::Initialize {
            request_id: request_id.clone(),
            params: InitializeParams {
                client_info: ClientInfo {
                    name: "codex-toy-app-server".to_string(),
                    title: Some("Codex Toy App Server".to_string()),
                    version: env!("CARGO_PKG_VERSION").to_string(),
                },
            },
        };

        self.send_request(request, request_id, "initialize")
    }

    fn start_thread(&mut self) -> Result<NewConversationResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::NewConversation {
            request_id: request_id.clone(),
            params: NewConversationParams::default(),
        };

        self.send_request(request, request_id, "newConversation")
    }

    fn add_conversation_listener(
        &mut self,
        conversation_id: &ThreadId,
    ) -> Result<AddConversationSubscriptionResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::AddConversationListener {
            request_id: request_id.clone(),
            params: AddConversationListenerParams {
                conversation_id: *conversation_id,
                experimental_raw_events: false,
            },
        };

        self.send_request(request, request_id, "addConversationListener")
    }

    fn remove_thread_listener(&mut self, subscription_id: Uuid) -> Result<()> {
        let request_id = self.request_id();
        let request = ClientRequest::RemoveConversationListener {
            request_id: request_id.clone(),
            params: codex_app_server_protocol::RemoveConversationListenerParams { subscription_id },
        };

        self.send_request::<codex_app_server_protocol::RemoveConversationSubscriptionResponse>(
            request,
            request_id,
            "removeConversationListener",
        )?;

        Ok(())
    }

    fn send_user_message(
        &mut self,
        conversation_id: &ThreadId,
        message: &str,
    ) -> Result<SendUserMessageResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::SendUserMessage {
            request_id: request_id.clone(),
            params: SendUserMessageParams {
                conversation_id: *conversation_id,
                items: vec![InputItem::Text {
                    text: message.to_string(),
                    // Test client sends plain text without UI element ranges.
                    text_elements: Vec::new(),
                }],
            },
        };

        self.send_request(request, request_id, "sendUserMessage")
    }

    fn thread_start(&mut self, params: ThreadStartParams) -> Result<ThreadStartResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::ThreadStart {
            request_id: request_id.clone(),
            params,
        };

        self.send_request(request, request_id, "thread/start")
    }

    fn turn_start(&mut self, params: TurnStartParams) -> Result<TurnStartResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::TurnStart {
            request_id: request_id.clone(),
            params,
        };

        self.send_request(request, request_id, "turn/start")
    }

    fn login_chat_gpt(&mut self) -> Result<LoginChatGptResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::LoginChatGpt {
            request_id: request_id.clone(),
            params: None,
        };

        self.send_request(request, request_id, "loginChatGpt")
    }

    fn get_account_rate_limits(&mut self) -> Result<GetAccountRateLimitsResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::GetAccountRateLimits {
            request_id: request_id.clone(),
            params: None,
        };

        self.send_request(request, request_id, "account/rateLimits/read")
    }

    fn model_list(&mut self, params: ModelListParams) -> Result<ModelListResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::ModelList {
            request_id: request_id.clone(),
            params,
        };

        self.send_request(request, request_id, "model/list")
    }

    fn stream_conversation(&mut self, conversation_id: &ThreadId) -> Result<()> {
        loop {
            let notification = self.next_notification()?;

            if !notification.method.starts_with("codex/event/") {
                continue;
            }

            if let Some(event) = self.extract_event(notification, conversation_id)? {
                match &event.msg {
                    EventMsg::AgentMessage(event) => {
                        println!("{}", event.message);
                    }
                    EventMsg::AgentMessageDelta(event) => {
                        print!("{}", event.delta);
                        std::io::stdout().flush().ok();
                    }
                    EventMsg::TurnComplete(event) => {
                        println!("\n[task complete: {event:?}]");
                        break;
                    }
                    EventMsg::TurnAborted(event) => {
                        println!("\n[turn aborted: {:?}]", event.reason);
                        break;
                    }
                    EventMsg::Error(event) => {
                        println!("[error] {event:?}");
                    }
                    _ => {
                        println!("[UNKNOWN EVENT] {:?}", event.msg);
                    }
                }
            }
        }

        Ok(())
    }

    fn wait_for_login_completion(
        &mut self,
        expected_login_id: &Uuid,
    ) -> Result<LoginChatGptCompleteNotification> {
        loop {
            let notification = self.next_notification()?;

            if let Ok(server_notification) = ServerNotification::try_from(notification) {
|
||||
match server_notification {
|
||||
ServerNotification::LoginChatGptComplete(completion) => {
|
||||
if &completion.login_id == expected_login_id {
|
||||
return Ok(completion);
|
||||
}
|
||||
|
||||
println!(
|
||||
"[ignoring loginChatGptComplete for unexpected login_id: {}]",
|
||||
completion.login_id
|
||||
);
|
||||
}
|
||||
ServerNotification::AuthStatusChange(status) => {
|
||||
println!("< authStatusChange notification: {status:?}");
|
||||
}
|
||||
ServerNotification::AccountRateLimitsUpdated(snapshot) => {
|
||||
println!("< accountRateLimitsUpdated notification: {snapshot:?}");
|
||||
}
|
||||
ServerNotification::SessionConfigured(_) => {
|
||||
// SessionConfigured notifications are unrelated to login; skip.
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Not a server notification (likely a conversation event); keep waiting.
|
||||
}
|
||||
}
|
||||
|
||||
fn stream_turn(&mut self, thread_id: &str, turn_id: &str) -> Result<()> {
|
||||
loop {
|
||||
let notification = self.next_notification()?;
|
||||
|
||||
let Ok(server_notification) = ServerNotification::try_from(notification) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
match server_notification {
|
||||
ServerNotification::ThreadStarted(payload) => {
|
||||
if payload.thread.id == thread_id {
|
||||
println!("< thread/started notification: {:?}", payload.thread);
|
||||
}
|
||||
}
|
||||
ServerNotification::TurnStarted(payload) => {
|
||||
if payload.turn.id == turn_id {
|
||||
println!("< turn/started notification: {:?}", payload.turn.status);
|
||||
}
|
||||
}
|
||||
ServerNotification::AgentMessageDelta(delta) => {
|
||||
print!("{}", delta.delta);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
ServerNotification::CommandExecutionOutputDelta(delta) => {
|
||||
print!("{}", delta.delta);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
ServerNotification::TerminalInteraction(delta) => {
|
||||
println!("[stdin sent: {}]", delta.stdin);
|
||||
std::io::stdout().flush().ok();
|
||||
}
|
||||
ServerNotification::ItemStarted(payload) => {
|
||||
println!("\n< item started: {:?}", payload.item);
|
||||
}
|
||||
ServerNotification::ItemCompleted(payload) => {
|
||||
println!("< item completed: {:?}", payload.item);
|
||||
}
|
||||
ServerNotification::TurnCompleted(payload) => {
|
||||
if payload.turn.id == turn_id {
|
||||
println!("\n< turn/completed notification: {:?}", payload.turn.status);
|
||||
if payload.turn.status == TurnStatus::Failed
|
||||
&& let Some(error) = payload.turn.error
|
||||
{
|
||||
println!("[turn error] {}", error.message);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
ServerNotification::McpToolCallProgress(payload) => {
|
||||
println!("< MCP tool progress: {}", payload.message);
|
||||
}
|
||||
_ => {
|
||||
println!("[UNKNOWN SERVER NOTIFICATION] {server_notification:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn extract_event(
|
||||
&self,
|
||||
notification: JSONRPCNotification,
|
||||
conversation_id: &ThreadId,
|
||||
) -> Result<Option<Event>> {
|
||||
let params = notification
|
||||
.params
|
||||
.context("event notification missing params")?;
|
||||
|
||||
let mut map = match params {
|
||||
Value::Object(map) => map,
|
||||
other => bail!("unexpected params shape: {other:?}"),
|
||||
};
|
||||
|
||||
let conversation_value = map
|
||||
.remove("conversationId")
|
||||
.context("event missing conversationId")?;
|
||||
let notification_conversation: ThreadId = serde_json::from_value(conversation_value)
|
||||
.context("conversationId was not a valid UUID")?;
|
||||
|
||||
if ¬ification_conversation != conversation_id {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let event_value = Value::Object(map);
|
||||
let event: Event =
|
||||
serde_json::from_value(event_value).context("failed to decode event payload")?;
|
||||
Ok(Some(event))
|
||||
}
|
||||
|
||||
fn send_request<T>(
|
||||
&mut self,
|
||||
request: ClientRequest,
|
||||
request_id: RequestId,
|
||||
method: &str,
|
||||
) -> Result<T>
|
||||
where
|
||||
T: DeserializeOwned,
|
||||
{
|
||||
self.write_request(&request)?;
|
||||
self.wait_for_response(request_id, method)
|
||||
}
|
||||
|
||||
fn write_request(&mut self, request: &ClientRequest) -> Result<()> {
|
||||
let request_json = serde_json::to_string(request)?;
|
||||
let request_pretty = serde_json::to_string_pretty(request)?;
|
||||
print_multiline_with_prefix("> ", &request_pretty);
|
||||
|
||||
if let Some(stdin) = self.stdin.as_mut() {
|
||||
writeln!(stdin, "{request_json}")?;
|
||||
stdin
|
||||
.flush()
|
||||
.context("failed to flush request to codex app-server")?;
|
||||
} else {
|
||||
bail!("codex app-server stdin closed");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn wait_for_response<T>(&mut self, request_id: RequestId, method: &str) -> Result<T>
|
||||
where
|
||||
T: DeserializeOwned,
|
||||
{
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message()?;
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Response(JSONRPCResponse { id, result }) => {
|
||||
if id == request_id {
|
||||
return serde_json::from_value(result)
|
||||
.with_context(|| format!("{method} response missing payload"));
|
||||
}
|
||||
}
|
||||
JSONRPCMessage::Error(err) => {
|
||||
if err.id == request_id {
|
||||
bail!("{method} failed: {err:?}");
|
||||
}
|
||||
}
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
self.pending_notifications.push_back(notification);
|
||||
}
|
||||
JSONRPCMessage::Request(request) => {
|
||||
self.handle_server_request(request)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn next_notification(&mut self) -> Result<JSONRPCNotification> {
|
||||
if let Some(notification) = self.pending_notifications.pop_front() {
|
||||
return Ok(notification);
|
||||
}
|
||||
|
||||
loop {
|
||||
let message = self.read_jsonrpc_message()?;
|
||||
|
||||
match message {
|
||||
JSONRPCMessage::Notification(notification) => return Ok(notification),
|
||||
JSONRPCMessage::Response(_) | JSONRPCMessage::Error(_) => {
|
||||
// No outstanding requests, so ignore stray responses/errors for now.
|
||||
continue;
|
||||
}
|
||||
JSONRPCMessage::Request(request) => {
|
||||
self.handle_server_request(request)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_jsonrpc_message(&mut self) -> Result<JSONRPCMessage> {
|
||||
loop {
|
||||
let mut response_line = String::new();
|
||||
let bytes = self
|
||||
.stdout
|
||||
.read_line(&mut response_line)
|
||||
.context("failed to read from codex app-server")?;
|
||||
|
||||
if bytes == 0 {
|
||||
bail!("codex app-server closed stdout");
|
||||
}
|
||||
|
||||
let trimmed = response_line.trim();
|
||||
if trimmed.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let parsed: Value =
|
||||
serde_json::from_str(trimmed).context("response was not valid JSON-RPC")?;
|
||||
let pretty = serde_json::to_string_pretty(&parsed)?;
|
||||
print_multiline_with_prefix("< ", &pretty);
|
||||
let message: JSONRPCMessage = serde_json::from_value(parsed)
|
||||
.context("response was not a valid JSON-RPC message")?;
|
||||
return Ok(message);
|
||||
}
|
||||
}
|
||||
|
||||
fn request_id(&self) -> RequestId {
|
||||
RequestId::String(Uuid::new_v4().to_string())
|
||||
}
|
||||
|
||||
fn handle_server_request(&mut self, request: JSONRPCRequest) -> Result<()> {
|
||||
let server_request = ServerRequest::try_from(request)
|
||||
.context("failed to deserialize ServerRequest from JSONRPCRequest")?;
|
||||
|
||||
match server_request {
|
||||
ServerRequest::CommandExecutionRequestApproval { request_id, params } => {
|
||||
self.handle_command_execution_request_approval(request_id, params)?;
|
||||
}
|
||||
ServerRequest::FileChangeRequestApproval { request_id, params } => {
|
||||
self.approve_file_change_request(request_id, params)?;
|
||||
}
|
||||
other => {
|
||||
bail!("received unsupported server request: {other:?}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn handle_command_execution_request_approval(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
params: CommandExecutionRequestApprovalParams,
|
||||
) -> Result<()> {
|
||||
let CommandExecutionRequestApprovalParams {
|
||||
thread_id,
|
||||
turn_id,
|
||||
item_id,
|
||||
reason,
|
||||
command,
|
||||
cwd,
|
||||
command_actions,
|
||||
proposed_execpolicy_amendment,
|
||||
} = params;
|
||||
|
||||
println!(
|
||||
"\n< commandExecution approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
|
||||
);
|
||||
if let Some(reason) = reason.as_deref() {
|
||||
println!("< reason: {reason}");
|
||||
}
|
||||
if let Some(command) = command.as_deref() {
|
||||
println!("< command: {command}");
|
||||
}
|
||||
if let Some(cwd) = cwd.as_ref() {
|
||||
println!("< cwd: {}", cwd.display());
|
||||
}
|
||||
if let Some(command_actions) = command_actions.as_ref()
|
||||
&& !command_actions.is_empty()
|
||||
{
|
||||
println!("< command actions: {command_actions:?}");
|
||||
}
|
||||
if let Some(execpolicy_amendment) = proposed_execpolicy_amendment.as_ref() {
|
||||
println!("< proposed execpolicy amendment: {execpolicy_amendment:?}");
|
||||
}
|
||||
|
||||
let response = CommandExecutionRequestApprovalResponse {
|
||||
decision: CommandExecutionApprovalDecision::Accept,
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved commandExecution request for item {item_id}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn approve_file_change_request(
|
||||
&mut self,
|
||||
request_id: RequestId,
|
||||
params: FileChangeRequestApprovalParams,
|
||||
) -> Result<()> {
|
||||
let FileChangeRequestApprovalParams {
|
||||
thread_id,
|
||||
turn_id,
|
||||
item_id,
|
||||
reason,
|
||||
grant_root,
|
||||
} = params;
|
||||
|
||||
println!(
|
||||
"\n< fileChange approval requested for thread {thread_id}, turn {turn_id}, item {item_id}"
|
||||
);
|
||||
if let Some(reason) = reason.as_deref() {
|
||||
println!("< reason: {reason}");
|
||||
}
|
||||
if let Some(grant_root) = grant_root.as_deref() {
|
||||
println!("< grant root: {}", grant_root.display());
|
||||
}
|
||||
|
||||
let response = FileChangeRequestApprovalResponse {
|
||||
decision: FileChangeApprovalDecision::Accept,
|
||||
};
|
||||
self.send_server_request_response(request_id, &response)?;
|
||||
println!("< approved fileChange request for item {item_id}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn send_server_request_response<T>(&mut self, request_id: RequestId, response: &T) -> Result<()>
|
||||
where
|
||||
T: Serialize,
|
||||
{
|
||||
let message = JSONRPCMessage::Response(JSONRPCResponse {
|
||||
id: request_id,
|
||||
result: serde_json::to_value(response)?,
|
||||
});
|
||||
self.write_jsonrpc_message(message)
|
||||
}
|
||||
|
||||
fn write_jsonrpc_message(&mut self, message: JSONRPCMessage) -> Result<()> {
|
||||
let payload = serde_json::to_string(&message)?;
|
||||
let pretty = serde_json::to_string_pretty(&message)?;
|
||||
print_multiline_with_prefix("> ", &pretty);
|
||||
|
||||
if let Some(stdin) = self.stdin.as_mut() {
|
||||
writeln!(stdin, "{payload}")?;
|
||||
stdin
|
||||
.flush()
|
||||
.context("failed to flush response to codex app-server")?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
bail!("codex app-server stdin closed")
|
||||
}
|
||||
}
|
||||
|
||||
fn print_multiline_with_prefix(prefix: &str, payload: &str) {
|
||||
for line in payload.lines() {
|
||||
println!("{prefix}{line}");
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for CodexClient {
|
||||
fn drop(&mut self) {
|
||||
let _ = self.stdin.take();
|
||||
|
||||
if let Ok(Some(status)) = self.child.try_wait() {
|
||||
println!("[codex app-server exited: {status}]");
|
||||
return;
|
||||
}
|
||||
|
||||
thread::sleep(Duration::from_millis(100));
|
||||
|
||||
if let Ok(Some(status)) = self.child.try_wait() {
|
||||
println!("[codex app-server exited: {status}]");
|
||||
return;
|
||||
}
|
||||
|
||||
let _ = self.child.kill();
|
||||
let _ = self.child.wait();
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "app-server",
    crate_name = "codex_app_server",
    integration_deps_extra = ["//codex-rs/app-server/tests/common:common"],
    test_tags = ["no-sandbox"],
)
@@ -1,8 +1,7 @@
[package]
edition = "2024"
name = "codex-app-server"
version.workspace = true
edition.workspace = true
license.workspace = true
version = { workspace = true }

[[bin]]
name = "codex-app-server"
@@ -22,21 +21,14 @@ codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-backend-client = { workspace = true }
codex-file-search = { workspace = true }
codex-chatgpt = { workspace = true }
codex-login = { workspace = true }
codex-protocol = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-feedback = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-json-to-toml = { workspace = true }
chrono = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
mcp-types = { workspace = true }
tempfile = { workspace = true }
time = { workspace = true }
toml = { workspace = true }
tokio = { workspace = true, features = [
    "io-std",
    "macros",
@@ -46,25 +38,17 @@ tokio = { workspace = true, features = [
] }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
opentelemetry-appender-tracing = { workspace = true }
uuid = { workspace = true, features = ["serde", "v7"] }

[dev-dependencies]
app_test_support = { workspace = true }
axum = { workspace = true, default-features = false, features = [
    "http1",
    "json",
    "tokio",
] }
assert_cmd = { workspace = true }
base64 = { workspace = true }
codex-execpolicy = { workspace = true }
core_test_support = { workspace = true }
mcp-types = { workspace = true }
os_info = { workspace = true }
pretty_assertions = { workspace = true }
rmcp = { workspace = true, default-features = false, features = [
    "server",
    "transport-streamable-http-server",
] }
serial_test = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }
wiremock = { workspace = true }
shlex = { workspace = true }
@@ -1,19 +1,6 @@
# codex-app-server

`codex app-server` is the interface Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt).

## Table of Contents

- [Protocol](#protocol)
- [Message Schema](#message-schema)
- [Core Primitives](#core-primitives)
- [Lifecycle Overview](#lifecycle-overview)
- [Initialization](#initialization)
- [API Overview](#api-overview)
- [Events](#events)
- [Approvals](#approvals)
- [Skills](#skills)
- [Auth endpoints](#auth-endpoints)

`codex app-server` is the interface Codex uses to power rich interfaces such as the [Codex VS Code extension](https://marketplace.visualstudio.com/items?itemName=openai.chatgpt). The message schema is currently unstable, but those who wish to build experimental UIs on top of Codex may find it valuable.

## Protocol

@@ -28,82 +15,40 @@ codex app-server generate-ts --out DIR
codex app-server generate-json-schema --out DIR
```

## Core Primitives

The API exposes three top-level primitives representing an interaction between a user and Codex:

- **Thread**: A conversation between a user and the Codex agent. Each thread contains multiple turns.
- **Turn**: One turn of the conversation, typically starting with a user message and finishing with an agent message. Each turn contains multiple items.
- **Item**: Represents user inputs and agent outputs as part of the turn, persisted and used as the context for future conversations. Example items include user message, agent reasoning, agent message, shell command, file edit, etc.

Use the thread APIs to create, list, or archive conversations. Drive a conversation with turn APIs and stream progress via turn notifications.

## Lifecycle Overview

- Initialize once: Immediately after launching the codex app-server process, send an `initialize` request with your client metadata, then emit an `initialized` notification. Any other request before this handshake gets rejected.
- Start (or resume) a thread: Call `thread/start` to open a fresh conversation. The response returns the thread object and you’ll also get a `thread/started` notification. If you’re continuing an existing conversation, call `thread/resume` with its ID instead. If you want to branch from an existing conversation, call `thread/fork` to create a new thread id with copied history.
- Begin a turn: To send user input, call `turn/start` with the target `threadId` and the user's input. Optional fields let you override model, cwd, sandbox policy, etc. This immediately returns the new turn object and triggers a `turn/started` notification.
- Stream events: After `turn/start`, keep reading JSON-RPC notifications on stdout. You’ll see `item/started`, `item/completed`, deltas like `item/agentMessage/delta`, tool progress, etc. These represent streaming model output plus any side effects (commands, tool calls, reasoning notes).
- Finish the turn: When the model is done (or the turn is interrupted via the `turn/interrupt` call), the server sends `turn/completed` with the final turn state and token usage.
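
Putting the lifecycle together, a minimal end-to-end transcript looks roughly like this (ids and payloads are illustrative; the exact shapes are documented in the sections below):

```json
{ "method": "initialize", "id": 0, "params": { "clientInfo": { "name": "my-client", "title": "My Client", "version": "0.1.0" } } }
{ "id": 0, "result": { "userAgent": "codex-app-server/0.1.0 my-client/0.1.0" } }
{ "method": "initialized" }
{ "method": "thread/start", "id": 1, "params": {} }
{ "id": 1, "result": { "thread": { "id": "thr_123", … } } }
{ "method": "thread/started", "params": { "thread": { … } } }
{ "method": "turn/start", "id": 2, "params": { "threadId": "thr_123", "input": [ { "type": "text", "text": "Hello" } ] } }
{ "id": 2, "result": { "turn": { "id": "turn_1", "status": "inProgress", … } } }
{ "method": "turn/started", "params": { "turn": { … } } }
{ "method": "item/agentMessage/delta", "params": { "itemId": "item_0", "delta": "Hi!" } }
{ "method": "item/completed", "params": { "item": { "type": "agentMessage", "id": "item_0", "text": "Hi!" } } }
{ "method": "turn/completed", "params": { "turn": { "id": "turn_1", "status": "completed", … } } }
```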

## Initialization

Clients must send a single `initialize` request before invoking any other method, then acknowledge with an `initialized` notification. The server returns the user agent string it will present to upstream services; subsequent requests issued before initialization receive a `"Not initialized"` error, and repeated `initialize` calls receive an `"Already initialized"` error.

Applications building on top of `codex app-server` should identify themselves via the `clientInfo` parameter.

**Important**: `clientInfo.name` is used to identify the client for the OpenAI Compliance Logs Platform. If you are developing a new Codex integration that is intended for enterprise use, please contact us to get it added to a known clients list. For more context: https://chatgpt.com/admin/api-reference#tag/Logs:-Codex

Example (from OpenAI's official VSCode extension):
Example:

```json
{
  "method": "initialize",
  "id": 0,
  "params": {
    "clientInfo": {
      "name": "codex_vscode",
      "title": "Codex VS Code Extension",
      "version": "0.1.0"
    }
  }
}
{ "method": "initialize", "id": 0, "params": {
  "clientInfo": { "name": "codex-vscode", "title": "Codex VS Code Extension", "version": "0.1.0" }
} }
{ "id": 0, "result": { "userAgent": "codex-app-server/0.1.0 codex-vscode/0.1.0" } }
{ "method": "initialized" }
```

## API Overview
## Core primitives

We have 3 top level primitives:

- Thread - a conversation between the Codex agent and a user. Each thread contains multiple turns.
- Turn - one turn of the conversation, typically starting with a user message and finishing with an agent message. Each turn contains multiple items.
- Item - represents user inputs and agent outputs as part of the turn, persisted and used as the context for future conversations.

## Thread & turn endpoints

The JSON-RPC API exposes dedicated methods for managing Codex conversations. Threads store long-lived conversation metadata, and turns store the per-message exchange (input → Codex output, including streamed items). Use the thread APIs to create, list, or archive sessions, then drive the conversation with turn APIs and notifications.

### Quick reference

- `thread/start` — create a new thread; emits `thread/started` and auto-subscribes you to turn/item events for that thread.
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
- `thread/fork` — fork an existing thread into a new thread id by copying the stored history; emits `thread/started` and auto-subscribes you to turn/item events for the new thread.
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
- `thread/loaded/list` — list the thread ids currently loaded in memory.
- `thread/read` — read a stored thread by id without resuming it; optionally include turns via `includeTurns`.
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
- `thread/unarchive` — move an archived rollout file back into the sessions directory; returns the restored `thread` on success.
- `thread/rollback` — drop the last N turns from the agent’s in-memory context and persist a rollback marker in the rollout so future resumes see the pruned history; returns the updated `thread` (with `turns` populated) on success.
- `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
- `review/start` — kick off Codex’s automated reviewer for a thread; responds like `turn/start` and emits `item/started`/`item/completed` notifications with `enteredReviewMode` and `exitedReviewMode` items, plus a final assistant `agentMessage` containing the review.
- `command/exec` — run a single command under the server sandbox without starting a thread/turn (handy for utilities and validation).
- `model/list` — list available models (with reasoning effort options).
- `collaborationMode/list` — list available collaboration mode presets (experimental, no pagination).
- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
- `app/list` — list available apps.
- `skills/config/write` — write user-level skill config by path.
- `mcpServer/oauth/login` — start an OAuth login for a configured MCP server; returns an `authorization_url` and later emits `mcpServer/oauthLogin/completed` once the browser flow finishes.
- `tool/requestUserInput` — prompt the user with 1–3 short questions for a tool call and return their answers (experimental).
- `config/mcpServer/reload` — reload MCP server config from disk and queue a refresh for loaded threads (applied on each thread's next active turn); returns `{}`. Use this after editing `config.toml` without restarting the server.
- `mcpServerStatus/list` — enumerate configured MCP servers with their tools, resources, resource templates, and auth status; supports cursor+limit pagination.
- `feedback/upload` — submit a feedback report (classification + optional reason/logs and conversation_id); returns the tracking thread id.
- `config/read` — fetch the effective config on disk after resolving config layering.
- `config/value/write` — write a single config key/value to the user's config.toml on disk.
- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk.
- `configRequirements/read` — fetch the loaded requirements allow-lists from `requirements.toml` and/or MDM (or `null` if none are configured).

### Example: Start or resume a thread
### 1) Start or resume a thread

Start a fresh thread when you need a new Codex conversation.

@@ -111,24 +56,10 @@ Start a fresh thread when you need a new Codex conversation.
{ "method": "thread/start", "id": 10, "params": {
  // Optionally set config settings. If not specified, will use the user's
  // current config settings.
  "model": "gpt-5.1-codex",
  "model": "gpt-5-codex",
  "cwd": "/Users/me/project",
  "approvalPolicy": "never",
  "sandbox": "workspaceWrite",
  "personality": "friendly",
  "dynamicTools": [
    {
      "name": "lookup_ticket",
      "description": "Fetch a ticket by id",
      "inputSchema": {
        "type": "object",
        "properties": {
          "id": { "type": "string" }
        },
        "required": ["id"]
      }
    }
  ]
} }
{ "id": 10, "result": {
  "thread": {
@@ -141,34 +72,19 @@ Start a fresh thread when you need a new Codex conversation.
{ "method": "thread/started", "params": { "thread": { … } } }
```

To continue a stored session, call `thread/resume` with the `thread.id` you previously recorded. The response shape matches `thread/start`, and no additional notifications are emitted. You can also pass the same configuration overrides supported by `thread/start`, such as `personality`:
To continue a stored session, call `thread/resume` with the `thread.id` you previously recorded. The response shape matches `thread/start`, and no additional notifications are emitted:

```json
{ "method": "thread/resume", "id": 11, "params": {
  "threadId": "thr_123",
  "personality": "friendly"
} }
{ "method": "thread/resume", "id": 11, "params": { "threadId": "thr_123" } }
{ "id": 11, "result": { "thread": { "id": "thr_123", … } } }
```

To branch from a stored session, call `thread/fork` with the `thread.id`. This creates a new thread id and emits a `thread/started` notification for it:

```json
{ "method": "thread/fork", "id": 12, "params": { "threadId": "thr_123" } }
{ "id": 12, "result": { "thread": { "id": "thr_456", … } } }
{ "method": "thread/started", "params": { "thread": { … } } }
```

### Example: List threads (with pagination & filters)

`thread/list` lets you render a history UI. Results default to `createdAt` descending (newest first). Pass any combination of:
### 2) List threads (pagination & filters)

`thread/list` lets you render a history UI. Pass any combination of:

- `cursor` — opaque string from a prior response; omit for the first page.
- `limit` — server defaults to a reasonable page size if unset.
- `sortKey` — `created_at` (default) or `updated_at`.
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.
- `sourceKinds` — restrict results to specific sources; omit or pass `[]` for interactive sessions only (`cli`, `vscode`).
- `archived` — when `true`, list archived threads only. When `false` or `null`, list non-archived threads (default).

Example:

@@ -176,12 +92,11 @@ Example:
{ "method": "thread/list", "id": 20, "params": {
  "cursor": null,
  "limit": 25,
  "sortKey": "created_at"
} }
{ "id": 20, "result": {
  "data": [
    { "id": "thr_a", "preview": "Create a TUI", "modelProvider": "openai", "createdAt": 1730831111, "updatedAt": 1730831111 },
    { "id": "thr_b", "preview": "Fix tests", "modelProvider": "openai", "createdAt": 1730750000, "updatedAt": 1730750000 }
    { "id": "thr_a", "preview": "Create a TUI", "modelProvider": "openai", "createdAt": 1730831111 },
    { "id": "thr_b", "preview": "Fix tests", "modelProvider": "openai", "createdAt": 1730750000 }
  ],
  "nextCursor": "opaque-token-or-null"
} }
@@ -189,32 +104,7 @@

When `nextCursor` is `null`, you’ve reached the final page.

### Example: List loaded threads

`thread/loaded/list` returns thread ids currently loaded in memory. This is useful when you want to check which sessions are active without scanning rollouts on disk.

```json
{ "method": "thread/loaded/list", "id": 21 }
{ "id": 21, "result": {
  "data": ["thr_123", "thr_456"]
} }
```

### Example: Read a thread

Use `thread/read` to fetch a stored thread by id without resuming it. Pass `includeTurns` when you want the rollout history loaded into `thread.turns`.

```json
{ "method": "thread/read", "id": 22, "params": { "threadId": "thr_123" } }
{ "id": 22, "result": { "thread": { "id": "thr_123", "turns": [] } } }
```

```json
{ "method": "thread/read", "id": 23, "params": { "threadId": "thr_123", "includeTurns": true } }
{ "id": 23, "result": { "thread": { "id": "thr_123", "turns": [ ... ] } } }
```

### Example: Archive a thread
### 3) Archive a thread

Use `thread/archive` to move the persisted rollout (stored as a JSONL file on disk) into the archived sessions directory.

@@ -223,18 +113,9 @@ Use `thread/archive` to move the persisted rollout (stored as a JSONL file on di
{ "id": 21, "result": {} }
```

An archived thread will not appear in `thread/list` unless `archived` is set to `true`.
An archived thread will not appear in future calls to `thread/list`.

### Example: Unarchive a thread

Use `thread/unarchive` to move an archived rollout back into the sessions directory.

```json
{ "method": "thread/unarchive", "id": 24, "params": { "threadId": "thr_b" } }
{ "id": 24, "result": { "thread": { "id": "thr_b" } } }
```
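
### Example: Roll back a thread

`thread/rollback` drops the most recent turns from the in-memory context and persists a rollback marker. A sketch of the exchange (the turn-count parameter name below is illustrative; check the generated schema for the exact field name):

```json
{ "method": "thread/rollback", "id": 25, "params": { "threadId": "thr_123", "numTurns": 1 } }
{ "id": 25, "result": { "thread": { "id": "thr_123", "turns": [ … ] } } }
```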

### Example: Start a turn (send user input)
### 4) Start a turn (send user input)

Turns attach user input (text or images) to a thread and trigger Codex generation. The `input` field is a list of discriminated unions:

@@ -242,7 +123,7 @@ Turns attach user input (text or images) to a thread and trigger Codex generatio
- `{"type":"image","url":"https://…png"}`
- `{"type":"localImage","path":"/tmp/screenshot.png"}`

You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread. `outputSchema` applies only to the current turn.
You can optionally specify config overrides on the new turn. If specified, these settings become the default for subsequent turns on the same thread.

```json
{ "method": "turn/start", "id": 30, "params": {
@@ -252,21 +133,13 @@ You can optionally specify config overrides on the new turn. If specified, these
  "cwd": "/Users/me/project",
  "approvalPolicy": "unlessTrusted",
  "sandboxPolicy": {
    "type": "workspaceWrite",
    "mode": "workspaceWrite",
    "writableRoots": ["/Users/me/project"],
    "networkAccess": true
  },
  "model": "gpt-5.1-codex",
  "model": "gpt-5-codex",
  "effort": "medium",
  "summary": "concise",
  "personality": "friendly",
  // Optional JSON Schema to constrain the final assistant message for this turn.
  "outputSchema": {
    "type": "object",
    "properties": { "answer": { "type": "string" } },
    "required": ["answer"],
    "additionalProperties": false
  }
  "summary": "concise"
} }
{ "id": 30, "result": { "turn": {
  "id": "turn_456",
@@ -276,27 +149,7 @@ You can optionally specify config overrides on the new turn. If specified, these
} } }
```

### Example: Start a turn (invoke a skill)

Invoke a skill explicitly by including `$<skill-name>` in the text input and adding a `skill` input item alongside it.

```json
{ "method": "turn/start", "id": 33, "params": {
  "threadId": "thr_123",
  "input": [
    { "type": "text", "text": "$skill-creator Add a new skill for triaging flaky CI and include step-by-step usage." },
    { "type": "skill", "name": "skill-creator", "path": "/Users/me/.codex/skills/skill-creator/SKILL.md" }
  ]
} }
{ "id": 33, "result": { "turn": {
  "id": "turn_457",
  "status": "inProgress",
  "items": [],
  "error": null
} } }
```

### Example: Interrupt an active turn
### 5) Interrupt an active turn

You can cancel a running Turn with `turn/interrupt`.

@@ -310,284 +163,11 @@

The server requests cancellations for running subprocesses, then emits a `turn/completed` event with `status: "interrupted"`. Rely on the `turn/completed` event to know when Codex-side cleanup is done.

### Example: Request a code review

Use `review/start` to run Codex’s reviewer on the currently checked-out project. The request takes the thread id plus a `target` describing what should be reviewed:

- `{"type":"uncommittedChanges"}` — staged, unstaged, and untracked files.
- `{"type":"baseBranch","branch":"main"}` — diff against the provided branch’s upstream (see prompt for the exact `git merge-base`/`git diff` instructions Codex will run).
- `{"type":"commit","sha":"abc1234","title":"Optional subject"}` — review a specific commit.
- `{"type":"custom","instructions":"Free-form reviewer instructions"}` — fallback prompt equivalent to the legacy manual review request.
- `delivery` (`"inline"` or `"detached"`, default `"inline"`) — where the review runs:
  - `"inline"`: run the review as a new turn on the existing thread. The response’s `reviewThreadId` equals the original `threadId`, and no new `thread/started` notification is emitted.
  - `"detached"`: fork a new review thread from the parent conversation and run the review there. The response’s `reviewThreadId` is the id of this new review thread, and the server emits a `thread/started` notification for it before streaming review items.

Example request/response:

```json
{ "method": "review/start", "id": 40, "params": {
  "threadId": "thr_123",
  "delivery": "inline",
  "target": { "type": "commit", "sha": "1234567deadbeef", "title": "Polish tui colors" }
} }
{ "id": 40, "result": {
  "turn": {
    "id": "turn_900",
    "status": "inProgress",
    "items": [
      { "type": "userMessage", "id": "turn_900", "content": [ { "type": "text", "text": "Review commit 1234567: Polish tui colors" } ] }
    ],
    "error": null
  },
  "reviewThreadId": "thr_123"
} }
```

For a detached review, use `"delivery": "detached"`. The response is the same shape, but `reviewThreadId` will be the id of the new review thread (different from the original `threadId`). The server also emits a `thread/started` notification for that new thread before streaming the review turn.

Codex streams the usual `turn/started` notification followed by an `item/started` with an `enteredReviewMode` item so clients can show progress:

```json
{
  "method": "item/started",
  "params": {
    "item": {
      "type": "enteredReviewMode",
      "id": "turn_900",
      "review": "current changes"
    }
  }
}
```

When the reviewer finishes, the server emits `item/started` and `item/completed` containing an `exitedReviewMode` item with the final review text:

```json
{
  "method": "item/completed",
  "params": {
    "item": {
      "type": "exitedReviewMode",
      "id": "turn_900",
      "review": "Looks solid overall...\n\n- Prefer Stylize helpers — app.rs:10-20\n  ..."
    }
  }
}
```

The `review` string is plain text that already bundles the overall explanation plus a bullet list for each structured finding (matching `ThreadItem::ExitedReviewMode` in the generated schema). Use this notification to render the reviewer output in your client.

### Example: One-off command execution

Run a standalone command (argv vector) in the server’s sandbox without creating a thread or turn:

```json
{ "method": "command/exec", "id": 32, "params": {
  "command": ["ls", "-la"],
  "cwd": "/Users/me/project", // optional; defaults to server cwd
  "sandboxPolicy": { "type": "workspaceWrite" }, // optional; defaults to user config
  "timeoutMs": 10000 // optional; ms timeout; defaults to server timeout
} }
{ "id": 32, "result": { "exitCode": 0, "stdout": "...", "stderr": "" } }
```

- For clients that are already sandboxed externally, set `sandboxPolicy` to `{"type":"externalSandbox","networkAccess":"enabled"}` (or omit `networkAccess` to keep it restricted). Codex will not enforce its own sandbox in this mode; it tells the model it has full file-system access and passes the `networkAccess` state through `environment_context`.

Notes:

- Empty `command` arrays are rejected.
- `sandboxPolicy` accepts the same shape used by `turn/start` (e.g., `dangerFullAccess`, `readOnly`, `workspaceWrite` with flags, `externalSandbox` with `networkAccess` `restricted|enabled`).
- When omitted, `timeoutMs` falls back to the server default.

## Events

Event notifications are the server-initiated event stream for thread lifecycles, turn lifecycles, and the items within them. After you start or resume a thread, keep reading stdout for `thread/started`, `turn/*`, and `item/*` notifications.

### Turn events

The app-server streams JSON-RPC notifications while a turn is running. Each turn starts with `turn/started` (initial `turn`) and ends with `turn/completed` (final `turn` status). Token usage events stream separately via `thread/tokenUsage/updated`. Clients subscribe to the events they care about, rendering each item incrementally as updates arrive. The per-item lifecycle is always: `item/started` → zero or more item-specific deltas → `item/completed`.

- `turn/started` — `{ turn }` with the turn id, empty `items`, and `status: "inProgress"`.
- `turn/completed` — `{ turn }` where `turn.status` is `completed`, `interrupted`, or `failed`; failures carry `{ error: { message, codexErrorInfo?, additionalDetails? } }`.
- `turn/diff/updated` — `{ threadId, turnId, diff }` represents the up-to-date snapshot of the turn-level unified diff, emitted after every FileChange item. `diff` is the latest aggregated unified diff across every file change in the turn. UIs can render this to show the full "what changed" view without stitching individual `fileChange` items.
- `turn/plan/updated` — `{ turnId, explanation?, plan }` whenever the agent shares or changes its plan; each `plan` entry is `{ step, status }` with `status` in `pending`, `inProgress`, or `completed`.

Today both `turn/started` and `turn/completed` carry an empty `items` array even when item events were streamed; rely on `item/*` notifications for the canonical item list until this is fixed.
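
For example, a `turn/plan/updated` notification carries the full plan snapshot each time it fires (values illustrative):

```json
{ "method": "turn/plan/updated", "params": {
  "turnId": "turn_456",
  "explanation": "Fix the failing test first",
  "plan": [
    { "step": "Reproduce the failure", "status": "completed" },
    { "step": "Patch the parser", "status": "inProgress" },
    { "step": "Run the full test suite", "status": "pending" }
  ]
} }
```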

#### Items

`ThreadItem` is the tagged union carried in turn responses and `item/*` notifications. Currently we support events for the following items:

- `userMessage` — `{id, content}` where `content` is a list of user inputs (`text`, `image`, or `localImage`).
- `agentMessage` — `{id, text}` containing the accumulated agent reply.
- `reasoning` — `{id, summary, content}` where `summary` holds streamed reasoning summaries (applicable for most OpenAI models) and `content` holds raw reasoning blocks (applicable for e.g. open source models).
- `commandExecution` — `{id, command, cwd, status, commandActions, aggregatedOutput?, exitCode?, durationMs?}` for sandboxed commands; `status` is `inProgress`, `completed`, `failed`, or `declined`.
- `fileChange` — `{id, changes, status}` describing proposed edits; `changes` list `{path, kind, diff}` and `status` is `inProgress`, `completed`, `failed`, or `declined`.
- `mcpToolCall` — `{id, server, tool, status, arguments, result?, error?}` describing MCP calls; `status` is `inProgress`, `completed`, or `failed`.
- `collabToolCall` — `{id, tool, status, senderThreadId, receiverThreadId?, newThreadId?, prompt?, agentStatus?}` describing collab tool calls (`spawn_agent`, `send_input`, `wait`, `close_agent`); `status` is `inProgress`, `completed`, or `failed`.
- `webSearch` — `{id, query}` for a web search request issued by the agent.
- `imageView` — `{id, path}` emitted when the agent invokes the image viewer tool.
- `enteredReviewMode` — `{id, review}` sent when the reviewer starts; `review` is a short user-facing label such as `"current changes"` or the requested target description.
- `exitedReviewMode` — `{id, review}` emitted when the reviewer finishes; `review` is the full plain-text review (usually, overall notes plus bullet point findings).
- `compacted` — `{threadId, turnId}` when Codex compacts the conversation history. This can happen automatically.

All items emit two shared lifecycle events:

- `item/started` — emits the full `item` when a new unit of work begins so the UI can render it immediately; the `item.id` in this payload matches the `itemId` used by deltas.
- `item/completed` — sends the final `item` once that work finishes (e.g., after a tool call or message completes); treat this as the authoritative state.
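
For instance, an `agentMessage` item passes through the shared lifecycle like this (payloads illustrative):

```json
{ "method": "item/started", "params": { "item": { "type": "agentMessage", "id": "item_0", "text": "" } } }
{ "method": "item/completed", "params": { "item": { "type": "agentMessage", "id": "item_0", "text": "All tests pass." } } }
```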

There are additional item-specific events:

#### agentMessage

- `item/agentMessage/delta` — appends streamed text for the agent message; concatenate `delta` values for the same `itemId` in order to reconstruct the full reply.
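
A sketch of reassembling a reply from deltas (payloads illustrative):

```json
{ "method": "item/agentMessage/delta", "params": { "itemId": "item_0", "delta": "All tests" } }
{ "method": "item/agentMessage/delta", "params": { "itemId": "item_0", "delta": " pass." } }
```

Concatenated in arrival order, these deltas reproduce the final `text` of the `agentMessage` item.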

#### reasoning

- `item/reasoning/summaryTextDelta` — streams readable reasoning summaries; `summaryIndex` increments when a new summary section opens.
- `item/reasoning/summaryPartAdded` — marks the boundary between reasoning summary sections for an `itemId`; subsequent `summaryTextDelta` entries share the same `summaryIndex`.
- `item/reasoning/textDelta` — streams raw reasoning text (only applicable for e.g. open source models); use `contentIndex` to group deltas that belong together before showing them in the UI.

#### commandExecution

- `item/commandExecution/outputDelta` — streams stdout/stderr for the command; append deltas in order to render live output alongside `aggregatedOutput` in the final item.

Final `commandExecution` items include parsed `commandActions`, `status`, `exitCode`, and `durationMs` so the UI can summarize what ran and whether it succeeded.

#### fileChange

- `item/fileChange/outputDelta` — contains the tool call response of the underlying `apply_patch` tool call.

### Errors

The `error` event is emitted whenever the server hits an error mid-turn (for example, upstream model errors or quota limits). It carries the same `{ error: { message, codexErrorInfo?, additionalDetails? } }` payload as `turn.status: "failed"` and may precede that terminal notification.

`codexErrorInfo` maps to the `CodexErrorInfo` enum. Common values:

- `ContextWindowExceeded`
- `UsageLimitExceeded`
- `HttpConnectionFailed { httpStatusCode? }`: upstream HTTP failures, including 4xx/5xx
- `ResponseStreamConnectionFailed { httpStatusCode? }`: failure to connect to the response SSE stream
- `ResponseStreamDisconnected { httpStatusCode? }`: the response SSE stream disconnected mid-turn, before the response completed
- `ResponseTooManyFailedAttempts { httpStatusCode? }`
- `BadRequest`
- `Unauthorized`
- `SandboxError`
- `InternalServerError`
- `Other`: all unclassified errors

When an upstream HTTP status is available (for example, from the Responses API or a provider), it is forwarded in `httpStatusCode` on the relevant `codexErrorInfo` variant.
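
As a sketch, a mid-turn quota failure could surface like this (the method name and the exact serialization of `codexErrorInfo` are illustrative; consult the generated schema for the authoritative shapes):

```json
{ "method": "error", "params": { "error": {
  "message": "Usage limit exceeded",
  "codexErrorInfo": { "type": "usageLimitExceeded" }
} } }
```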

## Approvals

Certain actions (running shell commands or modifying files) may require explicit user approval depending on the user's config. When `turn/start` is used, the app-server drives an approval flow by sending a server-initiated JSON-RPC request to the client. The client must respond to tell Codex whether to proceed. UIs should present these requests inline with the active turn so users can review the proposed command or diff before choosing.

- Requests include `threadId` and `turnId`—use them to scope UI state to the active conversation.
- Respond with a single `{ "decision": "accept" | "decline" }` payload (plus optional `acceptSettings` on command executions). The server resumes or declines the work and ends the item with `item/completed`.

### Command execution approvals

Order of messages:

1. `item/started` — shows the pending `commandExecution` item with `command`, `cwd`, and other fields so you can render the proposed action.
2. `item/commandExecution/requestApproval` (request) — carries the same `itemId`, `threadId`, `turnId`, optionally `reason`, plus `command`, `cwd`, and `commandActions` for friendly display.
3. Client response — `{ "decision": "accept", "acceptSettings": { "forSession": false } }` or `{ "decision": "decline" }`.
4. `item/completed` — final `commandExecution` item with `status: "completed" | "failed" | "declined"` and execution output. Render this as the authoritative result.
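
A minimal approval exchange might look like this (ids and field values illustrative; the server-initiated request carries its own `id`, which your response must echo):

```json
{ "method": "item/commandExecution/requestApproval", "id": 7, "params": {
  "threadId": "thr_123",
  "turnId": "turn_456",
  "itemId": "item_2",
  "command": "cargo test",
  "cwd": "/Users/me/project"
} }
{ "id": 7, "result": { "decision": "accept", "acceptSettings": { "forSession": false } } }
```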

### File change approvals

Order of messages:

1. `item/started` — emits a `fileChange` item with `changes` (diff chunk summaries) and `status: "inProgress"`. Show the proposed edits and paths to the user.
2. `item/fileChange/requestApproval` (request) — includes `itemId`, `threadId`, `turnId`, and an optional `reason`.
3. Client response — `{ "decision": "accept" }` or `{ "decision": "decline" }`.
4. `item/completed` — returns the same `fileChange` item with `status` updated to `completed`, `failed`, or `declined` after the patch attempt. Rely on this to show success/failure and finalize the diff state in your UI.
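
The corresponding file-change exchange follows the same pattern (values illustrative):

```json
{ "method": "item/fileChange/requestApproval", "id": 8, "params": {
  "threadId": "thr_123",
  "turnId": "turn_456",
  "itemId": "item_3",
  "reason": "Edits files outside the workspace"
} }
{ "id": 8, "result": { "decision": "decline" } }
```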

UI guidance for IDEs: surface an approval dialog as soon as the request arrives. The turn will proceed after the server receives a response to the approval request. The terminal `item/completed` notification will be sent with the appropriate status.

## Skills

Invoke a skill by including `$<skill-name>` in the text input. Add a `skill` input item (recommended) so the backend injects full skill instructions instead of relying on the model to resolve the name.

```json
{
  "method": "turn/start",
  "id": 101,
  "params": {
    "threadId": "thread-1",
    "input": [
      {
        "type": "text",
        "text": "$skill-creator Add a new skill for triaging flaky CI."
      },
      {
        "type": "skill",
        "name": "skill-creator",
        "path": "/Users/me/.codex/skills/skill-creator/SKILL.md"
      }
    ]
  }
}
```

If you omit the `skill` item, the model will still parse the `$<skill-name>` marker and try to locate the skill, which can add latency.

Example:

```
$skill-creator Add a new skill for triaging flaky CI and include step-by-step usage.
```

Use `skills/list` to fetch the available skills (optionally scoped by `cwds`, with `forceReload`).

```json
{ "method": "skills/list", "id": 25, "params": {
  "cwds": ["/Users/me/project"],
  "forceReload": false
} }
{ "id": 25, "result": {
  "data": [{
    "cwd": "/Users/me/project",
    "skills": [
      {
        "name": "skill-creator",
        "description": "Create or update a Codex skill",
        "enabled": true,
        "interface": {
          "displayName": "Skill Creator",
          "shortDescription": "Create or update a Codex skill",
          "iconSmall": "icon.svg",
          "iconLarge": "icon-large.svg",
          "brandColor": "#111111",
          "defaultPrompt": "Add a new skill for triaging flaky CI."
        }
      }
    ],
    "errors": []
  }]
} }
```

To enable or disable a skill by path:

```json
{
  "method": "skills/config/write",
  "id": 26,
  "params": {
    "path": "/Users/me/.codex/skills/skill-creator/SKILL.md",
    "enabled": false
  }
}
```

## Auth endpoints

The JSON-RPC auth/account surface exposes request/response methods plus server-initiated notifications (no `id`). Use these to determine auth state, start or cancel logins, log out, and inspect ChatGPT rate limits.

### API Overview

### Quick reference

- `account/read` — fetch current account info; optionally refresh tokens.
- `account/login/start` — begin login (`apiKey` or `chatgpt`).
- `account/login/completed` (notify) — emitted when a login attempt finishes (success or error).
@@ -595,19 +175,15 @@ The JSON-RPC auth/account surface exposes request/response methods plus server-i
- `account/logout` — sign out; triggers `account/updated`.
- `account/updated` (notify) — emitted whenever auth mode changes (`authMode`: `apikey`, `chatgpt`, or `null`).
- `account/rateLimits/read` — fetch ChatGPT rate limits; updates arrive via `account/rateLimits/updated` (notify).
- `account/rateLimits/updated` (notify) — emitted whenever a user's ChatGPT rate limits change.
- `mcpServer/oauthLogin/completed` (notify) — emitted after a `mcpServer/oauth/login` flow finishes for a server; payload includes `{ name, success, error? }`.

### 1) Check auth state

Request:

```json
{ "method": "account/read", "id": 1, "params": { "refreshToken": false } }
```

Response examples:

```json
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": false } } // No OpenAI auth needed (e.g., OSS/local models)
{ "id": 1, "result": { "account": null, "requiresOpenaiAuth": true } } // OpenAI auth required (typical for OpenAI-hosted models)
@@ -616,7 +192,6 @@ Response examples:
```

Field notes:

- `refreshToken` (bool): set `true` to force a token refresh.
- `requiresOpenaiAuth` reflects the active provider; when `false`, Codex can run without OpenAI credentials.

@@ -624,11 +199,7 @@ Field notes:

1. Send:
   ```json
   {
     "method": "account/login/start",
     "id": 2,
     "params": { "type": "apiKey", "apiKey": "sk-…" }
   }
   { "method": "account/login/start", "id": 2, "params": { "type": "apiKey", "apiKey": "sk-…" } }
   ```
2. Expect:
   ```json
@@ -678,7 +249,12 @@
   ```

Field notes:

- `usedPercent` is current usage within the OpenAI quota window.
- `windowDurationMins` is the quota window length.
- `resetsAt` is a Unix timestamp (seconds) for the next reset.

### Dev notes

- `codex app-server generate-ts --out <dir>` emits v2 types under `v2/`.
- `codex app-server generate-json-schema --out <dir>` outputs `codex_app_server_protocol.schemas.json`.
- See [“Authentication and authorization” in the config docs](../../docs/config.md#authentication-and-authorization) for configuration knobs.

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,155 +0,0 @@
use crate::error_code::INTERNAL_ERROR_CODE;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use codex_app_server_protocol::ConfigBatchWriteParams;
use codex_app_server_protocol::ConfigReadParams;
use codex_app_server_protocol::ConfigReadResponse;
use codex_app_server_protocol::ConfigRequirements;
use codex_app_server_protocol::ConfigRequirementsReadResponse;
use codex_app_server_protocol::ConfigValueWriteParams;
use codex_app_server_protocol::ConfigWriteErrorCode;
use codex_app_server_protocol::ConfigWriteResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::SandboxMode;
use codex_core::config::ConfigService;
use codex_core::config::ConfigServiceError;
use codex_core::config_loader::ConfigRequirementsToml;
use codex_core::config_loader::LoaderOverrides;
use codex_core::config_loader::SandboxModeRequirement as CoreSandboxModeRequirement;
use serde_json::json;
use std::path::PathBuf;
use toml::Value as TomlValue;

#[derive(Clone)]
pub(crate) struct ConfigApi {
    service: ConfigService,
}

impl ConfigApi {
    pub(crate) fn new(
        codex_home: PathBuf,
        cli_overrides: Vec<(String, TomlValue)>,
        loader_overrides: LoaderOverrides,
    ) -> Self {
        Self {
            service: ConfigService::new(codex_home, cli_overrides, loader_overrides),
        }
    }

    pub(crate) async fn read(
        &self,
        params: ConfigReadParams,
    ) -> Result<ConfigReadResponse, JSONRPCErrorError> {
        self.service.read(params).await.map_err(map_error)
    }

    pub(crate) async fn config_requirements_read(
        &self,
    ) -> Result<ConfigRequirementsReadResponse, JSONRPCErrorError> {
        let requirements = self
            .service
            .read_requirements()
            .await
            .map_err(map_error)?
            .map(map_requirements_toml_to_api);

        Ok(ConfigRequirementsReadResponse { requirements })
    }

    pub(crate) async fn write_value(
        &self,
        params: ConfigValueWriteParams,
    ) -> Result<ConfigWriteResponse, JSONRPCErrorError> {
        self.service.write_value(params).await.map_err(map_error)
    }

    pub(crate) async fn batch_write(
        &self,
        params: ConfigBatchWriteParams,
    ) -> Result<ConfigWriteResponse, JSONRPCErrorError> {
        self.service.batch_write(params).await.map_err(map_error)
    }
}

fn map_requirements_toml_to_api(requirements: ConfigRequirementsToml) -> ConfigRequirements {
    ConfigRequirements {
        allowed_approval_policies: requirements.allowed_approval_policies.map(|policies| {
            policies
                .into_iter()
                .map(codex_app_server_protocol::AskForApproval::from)
                .collect()
        }),
        allowed_sandbox_modes: requirements.allowed_sandbox_modes.map(|modes| {
            modes
                .into_iter()
                .filter_map(map_sandbox_mode_requirement_to_api)
                .collect()
        }),
    }
}

fn map_sandbox_mode_requirement_to_api(mode: CoreSandboxModeRequirement) -> Option<SandboxMode> {
    match mode {
        CoreSandboxModeRequirement::ReadOnly => Some(SandboxMode::ReadOnly),
        CoreSandboxModeRequirement::WorkspaceWrite => Some(SandboxMode::WorkspaceWrite),
        CoreSandboxModeRequirement::DangerFullAccess => Some(SandboxMode::DangerFullAccess),
        CoreSandboxModeRequirement::ExternalSandbox => None,
    }
}

fn map_error(err: ConfigServiceError) -> JSONRPCErrorError {
    if let Some(code) = err.write_error_code() {
        return config_write_error(code, err.to_string());
    }

    JSONRPCErrorError {
        code: INTERNAL_ERROR_CODE,
        message: err.to_string(),
        data: None,
    }
}

fn config_write_error(code: ConfigWriteErrorCode, message: impl Into<String>) -> JSONRPCErrorError {
    JSONRPCErrorError {
        code: INVALID_REQUEST_ERROR_CODE,
        message: message.into(),
        data: Some(json!({
            "config_write_error_code": code,
        })),
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
    use pretty_assertions::assert_eq;

    #[test]
    fn map_requirements_toml_to_api_converts_core_enums() {
        let requirements = ConfigRequirementsToml {
            allowed_approval_policies: Some(vec![
                CoreAskForApproval::Never,
                CoreAskForApproval::OnRequest,
            ]),
            allowed_sandbox_modes: Some(vec![
                CoreSandboxModeRequirement::ReadOnly,
                CoreSandboxModeRequirement::ExternalSandbox,
            ]),
            mcp_servers: None,
        };

        let mapped = map_requirements_toml_to_api(requirements);

        assert_eq!(
            mapped.allowed_approval_policies,
            Some(vec![
                codex_app_server_protocol::AskForApproval::Never,
                codex_app_server_protocol::AskForApproval::OnRequest,
            ])
        );
        assert_eq!(
            mapped.allowed_sandbox_modes,
            Some(vec![SandboxMode::ReadOnly]),
        );
    }
}

@@ -1,58 +0,0 @@
use codex_app_server_protocol::DynamicToolCallResponse;
use codex_core::CodexThread;
use codex_protocol::dynamic_tools::DynamicToolResponse as CoreDynamicToolResponse;
use codex_protocol::protocol::Op;
use std::sync::Arc;
use tokio::sync::oneshot;
use tracing::error;

pub(crate) async fn on_call_response(
    call_id: String,
    receiver: oneshot::Receiver<serde_json::Value>,
    conversation: Arc<CodexThread>,
) {
    let response = receiver.await;
    let value = match response {
        Ok(value) => value,
        Err(err) => {
            error!("request failed: {err:?}");
            let fallback = CoreDynamicToolResponse {
                call_id: call_id.clone(),
                output: "dynamic tool request failed".to_string(),
                success: false,
            };
            if let Err(err) = conversation
                .submit(Op::DynamicToolResponse {
                    id: call_id.clone(),
                    response: fallback,
                })
                .await
            {
                error!("failed to submit DynamicToolResponse: {err}");
            }
            return;
        }
    };

    let response = serde_json::from_value::<DynamicToolCallResponse>(value).unwrap_or_else(|err| {
        error!("failed to deserialize DynamicToolCallResponse: {err}");
        DynamicToolCallResponse {
            output: "dynamic tool response was invalid".to_string(),
            success: false,
        }
    });
    let response = CoreDynamicToolResponse {
        call_id: call_id.clone(),
        output: response.output,
        success: response.success,
    };
    if let Err(err) = conversation
        .submit(Op::DynamicToolResponse {
            id: call_id,
            response,
        })
        .await
    {
        error!("failed to submit DynamicToolResponse: {err}");
    }
}

@@ -1,155 +0,0 @@
use codex_app_server_protocol::ThreadSourceKind;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;

pub(crate) fn compute_source_filters(
    source_kinds: Option<Vec<ThreadSourceKind>>,
) -> (Vec<CoreSessionSource>, Option<Vec<ThreadSourceKind>>) {
    let Some(source_kinds) = source_kinds else {
        return (INTERACTIVE_SESSION_SOURCES.to_vec(), None);
    };

    if source_kinds.is_empty() {
        return (INTERACTIVE_SESSION_SOURCES.to_vec(), None);
    }

    let requires_post_filter = source_kinds.iter().any(|kind| {
        matches!(
            kind,
            ThreadSourceKind::Exec
                | ThreadSourceKind::AppServer
                | ThreadSourceKind::SubAgent
                | ThreadSourceKind::SubAgentReview
                | ThreadSourceKind::SubAgentCompact
                | ThreadSourceKind::SubAgentThreadSpawn
                | ThreadSourceKind::SubAgentOther
                | ThreadSourceKind::Unknown
        )
    });

    if requires_post_filter {
        (Vec::new(), Some(source_kinds))
    } else {
        let interactive_sources = source_kinds
            .iter()
            .filter_map(|kind| match kind {
                ThreadSourceKind::Cli => Some(CoreSessionSource::Cli),
                ThreadSourceKind::VsCode => Some(CoreSessionSource::VSCode),
                ThreadSourceKind::Exec
                | ThreadSourceKind::AppServer
                | ThreadSourceKind::SubAgent
                | ThreadSourceKind::SubAgentReview
                | ThreadSourceKind::SubAgentCompact
                | ThreadSourceKind::SubAgentThreadSpawn
                | ThreadSourceKind::SubAgentOther
                | ThreadSourceKind::Unknown => None,
            })
            .collect::<Vec<_>>();
        (interactive_sources, Some(source_kinds))
    }
}

pub(crate) fn source_kind_matches(source: &CoreSessionSource, filter: &[ThreadSourceKind]) -> bool {
    filter.iter().any(|kind| match kind {
        ThreadSourceKind::Cli => matches!(source, CoreSessionSource::Cli),
        ThreadSourceKind::VsCode => matches!(source, CoreSessionSource::VSCode),
        ThreadSourceKind::Exec => matches!(source, CoreSessionSource::Exec),
        ThreadSourceKind::AppServer => matches!(source, CoreSessionSource::Mcp),
        ThreadSourceKind::SubAgent => matches!(source, CoreSessionSource::SubAgent(_)),
        ThreadSourceKind::SubAgentReview => {
            matches!(
                source,
                CoreSessionSource::SubAgent(CoreSubAgentSource::Review)
            )
        }
        ThreadSourceKind::SubAgentCompact => {
            matches!(
                source,
                CoreSessionSource::SubAgent(CoreSubAgentSource::Compact)
            )
        }
        ThreadSourceKind::SubAgentThreadSpawn => matches!(
            source,
            CoreSessionSource::SubAgent(CoreSubAgentSource::ThreadSpawn { .. })
        ),
        ThreadSourceKind::SubAgentOther => matches!(
            source,
            CoreSessionSource::SubAgent(CoreSubAgentSource::Other(_))
        ),
        ThreadSourceKind::Unknown => matches!(source, CoreSessionSource::Unknown),
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    use codex_protocol::ThreadId;
    use pretty_assertions::assert_eq;
    use uuid::Uuid;

    #[test]
    fn compute_source_filters_defaults_to_interactive_sources() {
        let (allowed_sources, filter) = compute_source_filters(None);

        assert_eq!(allowed_sources, INTERACTIVE_SESSION_SOURCES.to_vec());
        assert_eq!(filter, None);
    }

    #[test]
    fn compute_source_filters_empty_means_interactive_sources() {
        let (allowed_sources, filter) = compute_source_filters(Some(Vec::new()));

        assert_eq!(allowed_sources, INTERACTIVE_SESSION_SOURCES.to_vec());
        assert_eq!(filter, None);
    }

    #[test]
    fn compute_source_filters_interactive_only_skips_post_filtering() {
        let source_kinds = vec![ThreadSourceKind::Cli, ThreadSourceKind::VsCode];
        let (allowed_sources, filter) = compute_source_filters(Some(source_kinds.clone()));

        assert_eq!(
            allowed_sources,
            vec![CoreSessionSource::Cli, CoreSessionSource::VSCode]
        );
        assert_eq!(filter, Some(source_kinds));
    }

    #[test]
    fn compute_source_filters_subagent_variant_requires_post_filtering() {
        let source_kinds = vec![ThreadSourceKind::SubAgentReview];
        let (allowed_sources, filter) = compute_source_filters(Some(source_kinds.clone()));

        assert_eq!(allowed_sources, Vec::new());
        assert_eq!(filter, Some(source_kinds));
    }

    #[test]
    fn source_kind_matches_distinguishes_subagent_variants() {
        let parent_thread_id =
            ThreadId::from_string(&Uuid::new_v4().to_string()).expect("valid thread id");
        let review = CoreSessionSource::SubAgent(CoreSubAgentSource::Review);
        let spawn = CoreSessionSource::SubAgent(CoreSubAgentSource::ThreadSpawn {
            parent_thread_id,
            depth: 1,
        });

        assert!(source_kind_matches(
            &review,
            &[ThreadSourceKind::SubAgentReview]
        ));
        assert!(!source_kind_matches(
            &review,
            &[ThreadSourceKind::SubAgentThreadSpawn]
        ));
        assert!(source_kind_matches(
            &spawn,
            &[ThreadSourceKind::SubAgentThreadSpawn]
        ));
        assert!(!source_kind_matches(
            &spawn,
            &[ThreadSourceKind::SubAgentReview]
        ));
    }
}

@@ -1,5 +1,6 @@
use std::num::NonZero;
use std::num::NonZeroUsize;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
@@ -18,10 +19,6 @@ pub(crate) async fn run_fuzzy_file_search(
    roots: Vec<String>,
    cancellation_flag: Arc<AtomicBool>,
) -> Vec<FuzzyFileSearchResult> {
    if roots.is_empty() {
        return Vec::new();
    }

    #[expect(clippy::expect_used)]
    let limit_per_root =
        NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize");
@@ -62,7 +59,11 @@ pub(crate) async fn run_fuzzy_file_search(
            Ok(Ok((root, res))) => {
                for m in res.matches {
                    let path = m.path;
                    let file_name = file_search::file_name_from_path(&path);
                    // TODO(shijie): Move file name generation to file_search lib.
                    let file_name = Path::new(&path)
                        .file_name()
                        .map(|name| name.to_string_lossy().into_owned())
                        .unwrap_or_else(|| path.clone());
                    let result = FuzzyFileSearchResult {
                        root: root.clone(),
                        path,

@@ -2,9 +2,8 @@

use codex_common::CliConfigOverrides;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config_loader::ConfigLayerStackOrdering;
use codex_core::config_loader::LoaderOverrides;
use codex_core::config::ConfigOverrides;
use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge;
use std::io::ErrorKind;
use std::io::Result as IoResult;
use std::path::PathBuf;
@@ -12,37 +11,25 @@ use std::path::PathBuf;
use crate::message_processor::MessageProcessor;
use crate::outgoing_message::OutgoingMessage;
use crate::outgoing_message::OutgoingMessageSender;
use codex_app_server_protocol::ConfigLayerSource;
use codex_app_server_protocol::ConfigWarningNotification;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::TextPosition as AppTextPosition;
use codex_app_server_protocol::TextRange as AppTextRange;
use codex_core::ExecPolicyError;
use codex_core::check_execpolicy_for_warnings;
use codex_core::config_loader::ConfigLoadError;
use codex_core::config_loader::TextRange as CoreTextRange;
use codex_feedback::CodexFeedback;
use tokio::io::AsyncBufReadExt;
use tokio::io::AsyncWriteExt;
use tokio::io::BufReader;
use tokio::io::{self};
use tokio::sync::mpsc;
use toml::Value as TomlValue;
use tracing::Level;
use tracing::debug;
use tracing::error;
use tracing::info;
use tracing::warn;
use tracing_subscriber::EnvFilter;
use tracing_subscriber::Layer;
use tracing_subscriber::filter::Targets;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;

mod bespoke_event_handling;
mod codex_message_processor;
mod config_api;
mod dynamic_tools;
mod error_code;
mod filters;
mod fuzzy_file_search;
mod message_processor;
mod models;
@@ -53,125 +40,13 @@ mod outgoing_message;
/// plenty for an interactive CLI.
const CHANNEL_CAPACITY: usize = 128;

fn config_warning_from_error(
    summary: impl Into<String>,
    err: &std::io::Error,
) -> ConfigWarningNotification {
    let (path, range) = match config_error_location(err) {
        Some((path, range)) => (Some(path), Some(range)),
        None => (None, None),
    };
    ConfigWarningNotification {
        summary: summary.into(),
        details: Some(err.to_string()),
        path,
        range,
    }
}

fn config_error_location(err: &std::io::Error) -> Option<(String, AppTextRange)> {
    err.get_ref()
        .and_then(|err| err.downcast_ref::<ConfigLoadError>())
        .map(|err| {
            let config_error = err.config_error();
            (
                config_error.path.to_string_lossy().to_string(),
                app_text_range(&config_error.range),
            )
        })
}

fn exec_policy_warning_location(err: &ExecPolicyError) -> (Option<String>, Option<AppTextRange>) {
    match err {
        ExecPolicyError::ParsePolicy { path, source } => {
            if let Some(location) = source.location() {
                let range = AppTextRange {
                    start: AppTextPosition {
                        line: location.range.start.line,
                        column: location.range.start.column,
                    },
                    end: AppTextPosition {
                        line: location.range.end.line,
                        column: location.range.end.column,
                    },
                };
                return (Some(location.path), Some(range));
            }
            (Some(path.clone()), None)
        }
        _ => (None, None),
    }
}

fn app_text_range(range: &CoreTextRange) -> AppTextRange {
    AppTextRange {
        start: AppTextPosition {
            line: range.start.line,
            column: range.start.column,
        },
        end: AppTextPosition {
            line: range.end.line,
            column: range.end.column,
        },
    }
}

fn project_config_warning(config: &Config) -> Option<ConfigWarningNotification> {
    let mut disabled_folders = Vec::new();

    for layer in config
        .config_layer_stack
        .get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst, true)
    {
        if !matches!(layer.name, ConfigLayerSource::Project { .. })
            || layer.disabled_reason.is_none()
        {
            continue;
        }
        if let ConfigLayerSource::Project { dot_codex_folder } = &layer.name {
            disabled_folders.push((
                dot_codex_folder.as_path().display().to_string(),
                layer
                    .disabled_reason
                    .as_ref()
                    .map(ToString::to_string)
                    .unwrap_or_else(|| "config.toml is disabled.".to_string()),
            ));
        }
    }

    if disabled_folders.is_empty() {
        return None;
    }

    let mut message = concat!(
        "Project config.toml files are disabled in the following folders. ",
        "Settings in those files are ignored, but skills and exec policies still load.\n",
    )
    .to_string();
    for (index, (folder, reason)) in disabled_folders.iter().enumerate() {
        let display_index = index + 1;
        message.push_str(&format!(" {display_index}. {folder}\n"));
        message.push_str(&format!(" {reason}\n"));
    }

    Some(ConfigWarningNotification {
        summary: message,
        details: None,
        path: None,
        range: None,
    })
}

pub async fn run_main(
    codex_linux_sandbox_exe: Option<PathBuf>,
    cli_config_overrides: CliConfigOverrides,
    loader_overrides: LoaderOverrides,
    default_analytics_enabled: bool,
) -> IoResult<()> {
    // Set up channels.
    let (incoming_tx, mut incoming_rx) = mpsc::channel::<JSONRPCMessage>(CHANNEL_CAPACITY);
    let (outgoing_tx, mut outgoing_rx) = mpsc::channel::<OutgoingMessage>(CHANNEL_CAPACITY);
    let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded_channel::<OutgoingMessage>();

    // Task: read from stdin, push to `incoming_tx`.
    let stdin_reader_handle = tokio::spawn({
@@ -204,134 +79,60 @@ pub async fn run_main(
                format!("error parsing -c overrides: {e}"),
            )
        })?;
    let loader_overrides_for_config_api = loader_overrides.clone();
    let mut config_warnings = Vec::new();
    let config = match ConfigBuilder::default()
        .cli_overrides(cli_kv_overrides.clone())
        .loader_overrides(loader_overrides)
        .build()
    let config = Config::load_with_cli_overrides(cli_kv_overrides, ConfigOverrides::default())
        .await
    {
        Ok(config) => config,
        Err(err) => {
            let message = config_warning_from_error("Invalid configuration; using defaults.", &err);
            config_warnings.push(message);
            Config::load_default_with_cli_overrides(cli_kv_overrides.clone()).map_err(|e| {
                std::io::Error::new(
                    ErrorKind::InvalidData,
                    format!("error loading default config after config error: {e}"),
                )
            })?
        }
    };

    if let Ok(Some(err)) =
        check_execpolicy_for_warnings(&config.features, &config.config_layer_stack).await
    {
        let (path, range) = exec_policy_warning_location(&err);
        let message = ConfigWarningNotification {
            summary: "Error parsing rules; custom rules not applied.".to_string(),
            details: Some(err.to_string()),
            path,
            range,
        };
        config_warnings.push(message);
    }

    if let Some(warning) = project_config_warning(&config) {
        config_warnings.push(warning);
    }
        .map_err(|e| {
            std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}"))
        })?;

    let feedback = CodexFeedback::new();

    let otel = codex_core::otel_init::build_provider(
        &config,
        env!("CARGO_PKG_VERSION"),
        Some("codex_app_server"),
        default_analytics_enabled,
    )
    .map_err(|e| {
        std::io::Error::new(
            ErrorKind::InvalidData,
            format!("error loading otel config: {e}"),
        )
    })?;
    let otel =
        codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION")).map_err(|e| {
            std::io::Error::new(
                ErrorKind::InvalidData,
                format!("error loading otel config: {e}"),
            )
        })?;

    // Install a simple subscriber so `tracing` output is visible. Users can
    // control the log level with `RUST_LOG`.
    let stderr_fmt = tracing_subscriber::fmt::layer()
        .with_writer(std::io::stderr)
        .with_span_events(tracing_subscriber::fmt::format::FmtSpan::FULL)
        .with_filter(EnvFilter::from_default_env());

    let feedback_layer = feedback.logger_layer();
    let feedback_metadata_layer = feedback.metadata_layer();

    let otel_logger_layer = otel.as_ref().and_then(|o| o.logger_layer());

    let otel_tracing_layer = otel.as_ref().and_then(|o| o.tracing_layer());
    let feedback_layer = tracing_subscriber::fmt::layer()
        .with_writer(feedback.make_writer())
        .with_ansi(false)
        .with_target(false)
        .with_filter(Targets::new().with_default(Level::TRACE));

    let _ = tracing_subscriber::registry()
        .with(stderr_fmt)
        .with(feedback_layer)
        .with(feedback_metadata_layer)
        .with(otel_logger_layer)
        .with(otel_tracing_layer)
        .with(otel.as_ref().map(|provider| {
            OpenTelemetryTracingBridge::new(&provider.logger).with_filter(
                tracing_subscriber::filter::filter_fn(codex_core::otel_init::codex_export_filter),
            )
        }))
        .try_init();
    for warning in &config_warnings {
        match &warning.details {
            Some(details) => error!("{} {}", warning.summary, details),
            None => error!("{}", warning.summary),
        }
    }

    // Task: process incoming messages.
    let processor_handle = tokio::spawn({
        let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
        let cli_overrides: Vec<(String, TomlValue)> = cli_kv_overrides.clone();
        let loader_overrides = loader_overrides_for_config_api;
        let mut processor = MessageProcessor::new(
            outgoing_message_sender,
            codex_linux_sandbox_exe,
            std::sync::Arc::new(config),
            cli_overrides,
            loader_overrides,
            feedback.clone(),
            config_warnings,
        );
        let mut thread_created_rx = processor.thread_created_receiver();
        async move {
            let mut listen_for_threads = true;
            loop {
                tokio::select! {
                    msg = incoming_rx.recv() => {
                        let Some(msg) = msg else {
                            break;
                        };
                        match msg {
                            JSONRPCMessage::Request(r) => processor.process_request(r).await,
                            JSONRPCMessage::Response(r) => processor.process_response(r).await,
                            JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
                            JSONRPCMessage::Error(e) => processor.process_error(e),
                        }
                    }
                    created = thread_created_rx.recv(), if listen_for_threads => {
                        match created {
                            Ok(thread_id) => {
                                processor.try_attach_thread_listener(thread_id).await;
                            }
                            Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
                                // TODO(jif) handle lag.
                                // Assumes thread creation volume is low enough that lag never happens.
                                // If it does, we log and continue without resyncing to avoid attaching
                                // listeners for threads that should remain unsubscribed.
                                warn!("thread_created receiver lagged; skipping resync");
                            }
                            Err(tokio::sync::broadcast::error::RecvError::Closed) => {
                                listen_for_threads = false;
                            }
                        }
                    }
            while let Some(msg) = incoming_rx.recv().await {
                match msg {
                    JSONRPCMessage::Request(r) => processor.process_request(r).await,
                    JSONRPCMessage::Response(r) => processor.process_response(r).await,
                    JSONRPCMessage::Notification(n) => processor.process_notification(n).await,
                    JSONRPCMessage::Error(e) => processor.process_error(e),
                }
            }

@@ -1,43 +1,10 @@
use codex_app_server::run_main;
use codex_arg0::arg0_dispatch_or_else;
use codex_common::CliConfigOverrides;
use codex_core::config_loader::LoaderOverrides;
use std::path::PathBuf;

// Debug-only test hook: lets integration tests point the server at a temporary
// managed config file without writing to /etc.
const MANAGED_CONFIG_PATH_ENV_VAR: &str = "CODEX_APP_SERVER_MANAGED_CONFIG_PATH";

fn main() -> anyhow::Result<()> {
    arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move {
        let managed_config_path = managed_config_path_from_debug_env();
        let loader_overrides = LoaderOverrides {
            managed_config_path,
            ..Default::default()
        };

        run_main(
            codex_linux_sandbox_exe,
            CliConfigOverrides::default(),
            loader_overrides,
            false,
        )
        .await?;
        run_main(codex_linux_sandbox_exe, CliConfigOverrides::default()).await?;
        Ok(())
    })
}

fn managed_config_path_from_debug_env() -> Option<PathBuf> {
    #[cfg(debug_assertions)]
    {
        if let Ok(value) = std::env::var(MANAGED_CONFIG_PATH_ENV_VAR) {
            return if value.is_empty() {
                None
            } else {
                Some(PathBuf::from(value))
            };
        }
    }

    None
}

@@ -1,44 +1,30 @@
use std::path::PathBuf;
use std::sync::Arc;

use crate::codex_message_processor::CodexMessageProcessor;
use crate::config_api::ConfigApi;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::OutgoingMessageSender;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::ClientRequest;
use codex_app_server_protocol::ConfigBatchWriteParams;
use codex_app_server_protocol::ConfigReadParams;
use codex_app_server_protocol::ConfigValueWriteParams;
use codex_app_server_protocol::ConfigWarningNotification;
use codex_app_server_protocol::InitializeResponse;

use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCRequest;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerNotification;
use codex_core::AuthManager;
use codex_core::ThreadManager;
use codex_core::ConversationManager;
use codex_core::config::Config;
use codex_core::config_loader::LoaderOverrides;
use codex_core::default_client::SetOriginatorError;
use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
use codex_core::default_client::set_default_originator;
use codex_feedback::CodexFeedback;
use codex_protocol::ThreadId;
use codex_protocol::protocol::SessionSource;
use tokio::sync::broadcast;
use toml::Value as TomlValue;
use std::sync::Arc;

pub(crate) struct MessageProcessor {
    outgoing: Arc<OutgoingMessageSender>,
    codex_message_processor: CodexMessageProcessor,
    config_api: ConfigApi,
    initialized: bool,
    config_warnings: Vec<ConfigWarningNotification>,
}

impl MessageProcessor {
@@ -48,10 +34,7 @@ impl MessageProcessor {
        outgoing: OutgoingMessageSender,
        codex_linux_sandbox_exe: Option<PathBuf>,
        config: Arc<Config>,
        cli_overrides: Vec<(String, TomlValue)>,
        loader_overrides: LoaderOverrides,
        feedback: CodexFeedback,
        config_warnings: Vec<ConfigWarningNotification>,
    ) -> Self {
        let outgoing = Arc::new(outgoing);
        let auth_manager = AuthManager::shared(
@@ -59,28 +42,23 @@ impl MessageProcessor {
            false,
            config.cli_auth_credentials_store_mode,
        );
        let thread_manager = Arc::new(ThreadManager::new(
            config.codex_home.clone(),
        let conversation_manager = Arc::new(ConversationManager::new(
            auth_manager.clone(),
            SessionSource::VSCode,
        ));
        let codex_message_processor = CodexMessageProcessor::new(
            auth_manager,
            thread_manager,
            conversation_manager,
            outgoing.clone(),
            codex_linux_sandbox_exe,
            Arc::clone(&config),
            cli_overrides.clone(),
            config,
            feedback,
        );
        let config_api = ConfigApi::new(config.codex_home.clone(), cli_overrides, loader_overrides);

        Self {
            outgoing,
            codex_message_processor,
            config_api,
            initialized: false,
            config_warnings,
        }
    }

@@ -130,27 +108,6 @@ impl MessageProcessor {
            title: _title,
            version,
        } = params.client_info;
        if let Err(error) = set_default_originator(name.clone()) {
            match error {
                SetOriginatorError::InvalidHeaderValue => {
                    let error = JSONRPCErrorError {
                        code: INVALID_REQUEST_ERROR_CODE,
                        message: format!(
                            "Invalid clientInfo.name: '{name}'. Must be a valid HTTP header value."
                        ),
                        data: None,
                    };
                    self.outgoing.send_error(request_id, error).await;
                    return;
                }
                SetOriginatorError::AlreadyInitialized => {
                    // No-op. This is expected to happen if the originator is already set via env var.
                    // TODO(owen): Once we remove support for CODEX_INTERNAL_ORIGINATOR_OVERRIDE,
                    // this will be an unexpected state and we can return a JSON-RPC error indicating
                    // internal server error.
                }
            }
        }
        let user_agent_suffix = format!("{name}; {version}");
        if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
            *suffix = Some(user_agent_suffix);
@@ -161,16 +118,6 @@ impl MessageProcessor {
        self.outgoing.send_response(request_id, response).await;

        self.initialized = true;
        if !self.config_warnings.is_empty() {
            for notification in self.config_warnings.drain(..) {
                self.outgoing
                    .send_server_notification(ServerNotification::ConfigWarning(
                        notification,
                    ))
                    .await;
            }
        }

            return;
        }
    }
@@ -187,26 +134,9 @@ impl MessageProcessor {
        }
    }

        match codex_request {
            ClientRequest::ConfigRead { request_id, params } => {
                self.handle_config_read(request_id, params).await;
            }
            ClientRequest::ConfigValueWrite { request_id, params } => {
                self.handle_config_value_write(request_id, params).await;
            }
            ClientRequest::ConfigBatchWrite { request_id, params } => {
                self.handle_config_batch_write(request_id, params).await;
            }
            ClientRequest::ConfigRequirementsRead {
                request_id,
                params: _,
            } => {
                self.handle_config_requirements_read(request_id).await;
            }
            other => {
                self.codex_message_processor.process_request(other).await;
            }
        }
        self.codex_message_processor
            .process_request(codex_request)
            .await;
    }

    pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) {
@@ -215,19 +145,6 @@ impl MessageProcessor {
        tracing::info!("<- notification: {:?}", notification);
    }

    pub(crate) fn thread_created_receiver(&self) -> broadcast::Receiver<ThreadId> {
        self.codex_message_processor.thread_created_receiver()
    }

    pub(crate) async fn try_attach_thread_listener(&mut self, thread_id: ThreadId) {
        if !self.initialized {
            return;
        }
        self.codex_message_processor
            .try_attach_thread_listener(thread_id)
            .await;
    }

    /// Handle a standalone JSON-RPC response originating from the peer.
    pub(crate) async fn process_response(&mut self, response: JSONRPCResponse) {
        tracing::info!("<- response: {:?}", response);
@@ -239,40 +156,4 @@ impl MessageProcessor {
    pub(crate) fn process_error(&mut self, err: JSONRPCError) {
        tracing::error!("<- error: {:?}", err);
    }

    async fn handle_config_read(&self, request_id: RequestId, params: ConfigReadParams) {
        match self.config_api.read(params).await {
            Ok(response) => self.outgoing.send_response(request_id, response).await,
            Err(error) => self.outgoing.send_error(request_id, error).await,
        }
    }

    async fn handle_config_value_write(
        &self,
        request_id: RequestId,
        params: ConfigValueWriteParams,
    ) {
        match self.config_api.write_value(params).await {
            Ok(response) => self.outgoing.send_response(request_id, response).await,
            Err(error) => self.outgoing.send_error(request_id, error).await,
        }
    }

    async fn handle_config_batch_write(
        &self,
        request_id: RequestId,
        params: ConfigBatchWriteParams,
    ) {
        match self.config_api.batch_write(params).await {
            Ok(response) => self.outgoing.send_response(request_id, response).await,
            Err(error) => self.outgoing.send_error(request_id, error).await,
        }
    }

    async fn handle_config_requirements_read(&self, request_id: RequestId) {
        match self.config_api.config_requirements_read().await {
            Ok(response) => self.outgoing.send_response(request_id, response).await,
            Err(error) => self.outgoing.send_error(request_id, error).await,
        }
    }
}

@@ -1,19 +1,13 @@
use std::sync::Arc;

use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::Model;
use codex_app_server_protocol::ReasoningEffortOption;
use codex_core::ThreadManager;
use codex_core::config::Config;
use codex_core::models_manager::manager::RefreshStrategy;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ReasoningEffortPreset;
use codex_common::model_presets::ModelPreset;
use codex_common::model_presets::ReasoningEffortPreset;
use codex_common::model_presets::builtin_model_presets;

pub async fn supported_models(thread_manager: Arc<ThreadManager>, config: &Config) -> Vec<Model> {
    thread_manager
        .list_models(config, RefreshStrategy::OnlineIfUncached)
        .await
pub fn supported_models(auth_mode: Option<AuthMode>) -> Vec<Model> {
    builtin_model_presets(auth_mode)
        .into_iter()
        .filter(|preset| preset.show_in_picker)
        .map(model_from_preset)
        .collect()
}
@@ -28,13 +22,12 @@ fn model_from_preset(preset: ModelPreset) -> Model {
            preset.supported_reasoning_efforts,
        ),
        default_reasoning_effort: preset.default_reasoning_effort,
        supports_personality: preset.supports_personality,
        is_default: preset.is_default,
    }
}

fn reasoning_efforts_from_preset(
    efforts: Vec<ReasoningEffortPreset>,
    efforts: &'static [ReasoningEffortPreset],
) -> Vec<ReasoningEffortOption> {
    efforts
        .iter()

@@ -16,18 +16,15 @@ use tracing::warn;

use crate::error_code::INTERNAL_ERROR_CODE;

#[cfg(test)]
use codex_protocol::account::PlanType;

/// Sends messages to the client and manages request callbacks.
pub(crate) struct OutgoingMessageSender {
    next_request_id: AtomicI64,
    sender: mpsc::Sender<OutgoingMessage>,
    sender: mpsc::UnboundedSender<OutgoingMessage>,
    request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<Result>>>,
}

impl OutgoingMessageSender {
    pub(crate) fn new(sender: mpsc::Sender<OutgoingMessage>) -> Self {
    pub(crate) fn new(sender: mpsc::UnboundedSender<OutgoingMessage>) -> Self {
        Self {
            next_request_id: AtomicI64::new(0),
            sender,
@@ -48,12 +45,8 @@ impl OutgoingMessageSender {
        }

        let outgoing_message =
            OutgoingMessage::Request(request.request_with_id(outgoing_message_id.clone()));
        if let Err(err) = self.sender.send(outgoing_message).await {
            warn!("failed to send request {outgoing_message_id:?} to client: {err:?}");
            let mut request_id_to_callback = self.request_id_to_callback.lock().await;
            request_id_to_callback.remove(&outgoing_message_id);
        }
            OutgoingMessage::Request(request.request_with_id(outgoing_message_id));
        let _ = self.sender.send(outgoing_message);
        rx_approve
    }

@@ -79,9 +72,7 @@ impl OutgoingMessageSender {
        match serde_json::to_value(response) {
            Ok(result) => {
                let outgoing_message = OutgoingMessage::Response(OutgoingResponse { id, result });
                if let Err(err) = self.sender.send(outgoing_message).await {
                    warn!("failed to send response to client: {err:?}");
                }
                let _ = self.sender.send(outgoing_message);
            }
            Err(err) => {
                self.send_error(
@@ -98,29 +89,21 @@ impl OutgoingMessageSender {
    }

    pub(crate) async fn send_server_notification(&self, notification: ServerNotification) {
        if let Err(err) = self
        let _ = self
            .sender
            .send(OutgoingMessage::AppServerNotification(notification))
            .await
        {
            warn!("failed to send server notification to client: {err:?}");
        }
            .send(OutgoingMessage::AppServerNotification(notification));
    }

    /// All notifications should be migrated to [`ServerNotification`] and
    /// [`OutgoingMessage::Notification`] should be removed.
    pub(crate) async fn send_notification(&self, notification: OutgoingNotification) {
        let outgoing_message = OutgoingMessage::Notification(notification);
        if let Err(err) = self.sender.send(outgoing_message).await {
            warn!("failed to send notification to client: {err:?}");
        }
        let _ = self.sender.send(outgoing_message);
    }

    pub(crate) async fn send_error(&self, id: RequestId, error: JSONRPCErrorError) {
        let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error });
        if let Err(err) = self.sender.send(outgoing_message).await {
            warn!("failed to send error to client: {err:?}");
        }
        let _ = self.sender.send(outgoing_message);
    }
}
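
The two `sender` variants above trade backpressure for non-blocking sends: a bounded `mpsc::Sender` must be awaited and pushes back on the producer when the channel is full, while an `UnboundedSender` always succeeds until the receiver drops, at the cost of unbounded buffering. A minimal sketch of the contrast, using plain tokio and independent of this crate:

```rust
use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    // Bounded: `send` is async and applies backpressure at capacity 2.
    let (btx, mut brx) = mpsc::channel::<u32>(2);
    btx.send(1).await.expect("receiver alive");

    // Unbounded: `send` is synchronous and only fails once the receiver drops.
    let (utx, mut urx) = mpsc::unbounded_channel::<u32>();
    utx.send(2).expect("receiver alive");

    assert_eq!(brx.recv().await, Some(1));
    assert_eq!(urx.recv().await, Some(2));
}
```

The synchronous unbounded send is what allows the fire-and-forget `let _ = self.sender.send(...)` style above.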

@@ -162,7 +145,6 @@ mod tests {
    use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
    use codex_app_server_protocol::AccountUpdatedNotification;
    use codex_app_server_protocol::AuthMode;
    use codex_app_server_protocol::ConfigWarningNotification;
    use codex_app_server_protocol::LoginChatGptCompleteNotification;
    use codex_app_server_protocol::RateLimitSnapshot;
    use codex_app_server_protocol::RateLimitWindow;
@@ -233,8 +215,6 @@ mod tests {
                resets_at: Some(123),
            }),
            secondary: None,
            credits: None,
            plan_type: Some(PlanType::Plus),
        },
    });

@@ -249,9 +229,7 @@ mod tests {
                    "windowDurationMins": 15,
                    "resetsAt": 123
                },
                "secondary": null,
                "credits": null,
                "planType": "plus"
                "secondary": null
            }
        },
    }),
@@ -280,28 +258,4 @@ mod tests {
            "ensure the notification serializes correctly"
        );
    }

    #[test]
    fn verify_config_warning_notification_serialization() {
        let notification = ServerNotification::ConfigWarning(ConfigWarningNotification {
            summary: "Config error: using defaults".to_string(),
            details: Some("error loading config: bad config".to_string()),
            path: None,
            range: None,
        });

        let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
        assert_eq!(
            json!({
                "method": "configWarning",
                "params": {
                    "summary": "Config error: using defaults",
                    "details": "error loading config: bad config",
                },
            }),
            serde_json::to_value(jsonrpc_notification)
                .expect("ensure the notification serializes correctly"),
            "ensure the notification serializes correctly"
        );
    }
}

@@ -1,7 +0,0 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "common",
    crate_name = "app_test_support",
    crate_srcs = glob(["*.rs"]),
)

@@ -1,20 +1,19 @@
[package]
edition = "2024"
name = "app_test_support"
version.workspace = true
edition.workspace = true
license.workspace = true
version = { workspace = true }

[lib]
path = "lib.rs"

[dependencies]
anyhow = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
chrono = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-core = { workspace = true, features = ["test-support"] }
codex-core = { workspace = true }
codex-protocol = { workspace = true }
codex-utils-cargo-bin = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = [
@@ -25,5 +24,3 @@ tokio = { workspace = true, features = [
] }
uuid = { workspace = true }
wiremock = { workspace = true }
core_test_support = { path = "../../../core/tests/common" }
shlex = { workspace = true }

@@ -49,16 +49,6 @@ impl ChatGptAuthFixture {
        self
    }

    pub fn chatgpt_user_id(mut self, chatgpt_user_id: impl Into<String>) -> Self {
        self.claims.chatgpt_user_id = Some(chatgpt_user_id.into());
        self
    }

    pub fn chatgpt_account_id(mut self, chatgpt_account_id: impl Into<String>) -> Self {
        self.claims.chatgpt_account_id = Some(chatgpt_account_id.into());
        self
    }

    pub fn email(mut self, email: impl Into<String>) -> Self {
        self.claims.email = Some(email.into());
        self
@@ -79,8 +69,6 @@ impl ChatGptAuthFixture {
pub struct ChatGptIdTokenClaims {
    pub email: Option<String>,
    pub plan_type: Option<String>,
    pub chatgpt_user_id: Option<String>,
    pub chatgpt_account_id: Option<String>,
}

impl ChatGptIdTokenClaims {
@@ -97,16 +85,6 @@ impl ChatGptIdTokenClaims {
        self.plan_type = Some(plan_type.into());
        self
    }

    pub fn chatgpt_user_id(mut self, chatgpt_user_id: impl Into<String>) -> Self {
        self.chatgpt_user_id = Some(chatgpt_user_id.into());
        self
    }

    pub fn chatgpt_account_id(mut self, chatgpt_account_id: impl Into<String>) -> Self {
        self.chatgpt_account_id = Some(chatgpt_account_id.into());
        self
    }
}

pub fn encode_id_token(claims: &ChatGptIdTokenClaims) -> Result<String> {
@@ -115,20 +93,10 @@ pub fn encode_id_token(claims: &ChatGptIdTokenClaims) -> Result<String> {
    if let Some(email) = &claims.email {
        payload.insert("email".to_string(), json!(email));
    }
    let mut auth_payload = serde_json::Map::new();
    if let Some(plan_type) = &claims.plan_type {
        auth_payload.insert("chatgpt_plan_type".to_string(), json!(plan_type));
    }
    if let Some(chatgpt_user_id) = &claims.chatgpt_user_id {
        auth_payload.insert("chatgpt_user_id".to_string(), json!(chatgpt_user_id));
    }
    if let Some(chatgpt_account_id) = &claims.chatgpt_account_id {
        auth_payload.insert("chatgpt_account_id".to_string(), json!(chatgpt_account_id));
    }
    if !auth_payload.is_empty() {
        payload.insert(
            "https://api.openai.com/auth".to_string(),
            serde_json::Value::Object(auth_payload),
            json!({ "chatgpt_plan_type": plan_type }),
        );
    }
    let payload = serde_json::Value::Object(payload);

@@ -1,7 +1,6 @@
mod auth_fixtures;
mod mcp_process;
mod mock_model_server;
mod models_cache;
mod responses;
mod rollout;

@@ -10,29 +9,13 @@ pub use auth_fixtures::ChatGptIdTokenClaims;
pub use auth_fixtures::encode_id_token;
pub use auth_fixtures::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
pub use core_test_support::format_with_current_shell;
pub use core_test_support::format_with_current_shell_display;
pub use core_test_support::format_with_current_shell_display_non_login;
pub use core_test_support::format_with_current_shell_non_login;
pub use core_test_support::test_path_buf_with_windows;
pub use core_test_support::test_tmp_path;
pub use core_test_support::test_tmp_path_buf;
pub use mcp_process::DEFAULT_CLIENT_NAME;
pub use mcp_process::McpProcess;
pub use mock_model_server::create_mock_responses_server_repeating_assistant;
pub use mock_model_server::create_mock_responses_server_sequence;
pub use mock_model_server::create_mock_responses_server_sequence_unchecked;
pub use models_cache::write_models_cache;
pub use models_cache::write_models_cache_with_models;
pub use mock_model_server::create_mock_chat_completions_server;
pub use mock_model_server::create_mock_chat_completions_server_unchecked;
pub use responses::create_apply_patch_sse_response;
pub use responses::create_exec_command_sse_response;
pub use responses::create_final_assistant_message_sse_response;
pub use responses::create_request_user_input_sse_response;
pub use responses::create_shell_command_sse_response;
pub use responses::create_shell_sse_response;
pub use rollout::create_fake_rollout;
pub use rollout::create_fake_rollout_with_source;
pub use rollout::create_fake_rollout_with_text_elements;
pub use rollout::rollout_path;
use serde::de::DeserializeOwned;

pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> {

@@ -11,19 +11,14 @@ use tokio::process::ChildStdin;
|
||||
use tokio::process::ChildStdout;
|
||||
|
||||
use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use codex_app_server_protocol::AddConversationListenerParams;
|
||||
use codex_app_server_protocol::AppsListParams;
|
||||
use codex_app_server_protocol::ArchiveConversationParams;
|
||||
use codex_app_server_protocol::CancelLoginAccountParams;
|
||||
use codex_app_server_protocol::CancelLoginChatGptParams;
|
||||
use codex_app_server_protocol::ClientInfo;
|
||||
use codex_app_server_protocol::ClientNotification;
|
||||
use codex_app_server_protocol::CollaborationModeListParams;
|
||||
use codex_app_server_protocol::ConfigBatchWriteParams;
|
||||
use codex_app_server_protocol::ConfigReadParams;
|
||||
use codex_app_server_protocol::ConfigValueWriteParams;
|
||||
use codex_app_server_protocol::FeedbackUploadParams;
|
||||
use codex_app_server_protocol::ForkConversationParams;
|
||||
use codex_app_server_protocol::GetAccountParams;
|
||||
use codex_app_server_protocol::GetAuthStatusParams;
|
||||
use codex_app_server_protocol::InitializeParams;
|
||||
@@ -40,23 +35,17 @@ use codex_app_server_protocol::NewConversationParams;
|
||||
use codex_app_server_protocol::RemoveConversationListenerParams;
|
||||
use codex_app_server_protocol::RequestId;
|
||||
use codex_app_server_protocol::ResumeConversationParams;
|
||||
use codex_app_server_protocol::ReviewStartParams;
|
||||
use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserTurnParams;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::SetDefaultModelParams;
|
||||
use codex_app_server_protocol::ThreadArchiveParams;
|
||||
use codex_app_server_protocol::ThreadForkParams;
|
||||
use codex_app_server_protocol::ThreadListParams;
|
||||
use codex_app_server_protocol::ThreadLoadedListParams;
|
||||
use codex_app_server_protocol::ThreadReadParams;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
use codex_app_server_protocol::ThreadRollbackParams;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
use codex_app_server_protocol::ThreadUnarchiveParams;
|
||||
use codex_app_server_protocol::TurnInterruptParams;
|
||||
use codex_app_server_protocol::TurnStartParams;
|
||||
use codex_core::default_client::CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR;
|
||||
use std::process::Command as StdCommand;
|
||||
use tokio::process::Command;
|
||||
|
||||
pub struct McpProcess {
|
||||
@@ -68,11 +57,9 @@ pub struct McpProcess {
|
||||
process: Child,
|
||||
stdin: ChildStdin,
|
||||
stdout: BufReader<ChildStdout>,
|
||||
pending_messages: VecDeque<JSONRPCMessage>,
|
||||
pending_user_messages: VecDeque<JSONRPCNotification>,
|
||||
}
|
||||
|
||||
pub const DEFAULT_CLIENT_NAME: &str = "codex-app-server-tests";
|
||||
|
||||
impl McpProcess {
|
||||
pub async fn new(codex_home: &Path) -> anyhow::Result<Self> {
|
||||
Self::new_with_env(codex_home, &[]).await
|
||||
@@ -87,8 +74,12 @@ impl McpProcess {
|
||||
codex_home: &Path,
|
||||
env_overrides: &[(&str, Option<&str>)],
|
||||
) -> anyhow::Result<Self> {
|
||||
let program = codex_utils_cargo_bin::cargo_bin("codex-app-server")
|
||||
.context("should find binary for codex-app-server")?;
|
||||
// Use assert_cmd to locate the binary path and then switch to tokio::process::Command
|
||||
let std_cmd = StdCommand::cargo_bin("codex-app-server")
|
||||
.context("should find binary for codex-mcp-server")?;
|
||||
|
||||
let program = std_cmd.get_program().to_owned();
|
||||
|
||||
let mut cmd = Command::new(program);
|
||||
|
||||
cmd.stdin(Stdio::piped());
|
||||
@@ -96,7 +87,6 @@ impl McpProcess {
|
||||
cmd.stderr(Stdio::piped());
|
||||
cmd.env("CODEX_HOME", codex_home);
|
||||
cmd.env("RUST_LOG", "debug");
|
||||
cmd.env_remove(CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR);
|
||||
|
||||
for (k, v) in env_overrides {
|
||||
match v {
|
||||
@@ -138,66 +128,37 @@ impl McpProcess {
|
||||
process,
|
||||
stdin,
|
||||
stdout,
|
||||
pending_messages: VecDeque::new(),
|
||||
pending_user_messages: VecDeque::new(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Performs the initialization handshake with the MCP server.
|
||||
pub async fn initialize(&mut self) -> anyhow::Result<()> {
|
||||
let initialized = self
|
||||
.initialize_with_client_info(ClientInfo {
|
||||
name: DEFAULT_CLIENT_NAME.to_string(),
|
||||
let params = Some(serde_json::to_value(InitializeParams {
|
||||
client_info: ClientInfo {
|
||||
name: "codex-app-server-tests".to_string(),
|
||||
title: None,
|
||||
version: "0.1.0".to_string(),
|
||||
})
|
||||
.await?;
|
||||
let JSONRPCMessage::Response(_) = initialized else {
|
||||
},
|
||||
})?);
|
||||
let req_id = self.send_request("initialize", params).await?;
|
||||
let initialized = self.read_jsonrpc_message().await?;
|
||||
let JSONRPCMessage::Response(response) = initialized else {
|
||||
unreachable!("expected JSONRPCMessage::Response for initialize, got {initialized:?}");
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sends initialize with the provided client info and returns the response/error message.
|
||||
pub async fn initialize_with_client_info(
|
||||
&mut self,
|
||||
client_info: ClientInfo,
|
||||
) -> anyhow::Result<JSONRPCMessage> {
|
||||
let params = Some(serde_json::to_value(InitializeParams { client_info })?);
|
||||
let request_id = self.send_request("initialize", params).await?;
|
||||
let message = self.read_jsonrpc_message().await?;
|
||||
match message {
|
||||
JSONRPCMessage::Response(response) => {
|
||||
if response.id != RequestId::Integer(request_id) {
|
||||
anyhow::bail!(
|
||||
"initialize response id mismatch: expected {}, got {:?}",
|
||||
request_id,
|
||||
response.id
|
||||
);
|
||||
}
|
||||
|
||||
// Send notifications/initialized to ack the response.
|
||||
self.send_notification(ClientNotification::Initialized)
|
||||
.await?;
|
||||
|
||||
Ok(JSONRPCMessage::Response(response))
|
||||
}
|
||||
JSONRPCMessage::Error(error) => {
|
||||
if error.id != RequestId::Integer(request_id) {
|
||||
anyhow::bail!(
|
||||
"initialize error id mismatch: expected {}, got {:?}",
|
||||
request_id,
|
||||
error.id
|
||||
);
|
||||
}
|
||||
Ok(JSONRPCMessage::Error(error))
|
||||
}
|
||||
JSONRPCMessage::Notification(notification) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Notification: {notification:?}");
|
||||
}
|
||||
JSONRPCMessage::Request(request) => {
|
||||
anyhow::bail!("unexpected JSONRPCMessage::Request: {request:?}");
|
||||
}
|
||||
if response.id != RequestId::Integer(req_id) {
|
||||
anyhow::bail!(
|
||||
"initialize response id mismatch: expected {}, got {:?}",
|
||||
req_id,
|
||||
response.id
|
||||
);
|
||||
}
|
||||
|
||||
// Send notifications/initialized to ack the response.
|
||||
self.send_notification(ClientNotification::Initialized)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Send a `newConversation` JSON-RPC request.
@@ -238,7 +199,7 @@ impl McpProcess {
}

/// Send a `removeConversationListener` JSON-RPC request.
pub async fn send_remove_thread_listener_request(
pub async fn send_remove_conversation_listener_request(
&mut self,
params: RemoveConversationListenerParams,
) -> anyhow::Result<i64> {
@@ -348,15 +309,6 @@ impl McpProcess {
self.send_request("thread/resume", params).await
}

/// Send a `thread/fork` JSON-RPC request.
pub async fn send_thread_fork_request(
&mut self,
params: ThreadForkParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("thread/fork", params).await
}

/// Send a `thread/archive` JSON-RPC request.
pub async fn send_thread_archive_request(
&mut self,
@@ -366,24 +318,6 @@ impl McpProcess {
self.send_request("thread/archive", params).await
}

/// Send a `thread/unarchive` JSON-RPC request.
pub async fn send_thread_unarchive_request(
&mut self,
params: ThreadUnarchiveParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("thread/unarchive", params).await
}

/// Send a `thread/rollback` JSON-RPC request.
pub async fn send_thread_rollback_request(
&mut self,
params: ThreadRollbackParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("thread/rollback", params).await
}

/// Send a `thread/list` JSON-RPC request.
pub async fn send_thread_list_request(
&mut self,
@@ -393,24 +327,6 @@ impl McpProcess {
self.send_request("thread/list", params).await
}

/// Send a `thread/loaded/list` JSON-RPC request.
pub async fn send_thread_loaded_list_request(
&mut self,
params: ThreadLoadedListParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("thread/loaded/list", params).await
}

/// Send a `thread/read` JSON-RPC request.
pub async fn send_thread_read_request(
&mut self,
params: ThreadReadParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("thread/read", params).await
}

/// Send a `model/list` JSON-RPC request.
pub async fn send_list_models_request(
&mut self,
@@ -420,21 +336,6 @@ impl McpProcess {
self.send_request("model/list", params).await
}

/// Send an `app/list` JSON-RPC request.
pub async fn send_apps_list_request(&mut self, params: AppsListParams) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("app/list", params).await
}

/// Send a `collaborationMode/list` JSON-RPC request.
pub async fn send_list_collaboration_modes_request(
&mut self,
params: CollaborationModeListParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("collaborationMode/list", params).await
}

/// Send a `resumeConversation` JSON-RPC request.
pub async fn send_resume_conversation_request(
&mut self,
@@ -444,15 +345,6 @@ impl McpProcess {
self.send_request("resumeConversation", params).await
}

/// Send a `forkConversation` JSON-RPC request.
pub async fn send_fork_conversation_request(
&mut self,
params: ForkConversationParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("forkConversation", params).await
}

/// Send a `loginApiKey` JSON-RPC request.
pub async fn send_login_api_key_request(
&mut self,
@@ -485,15 +377,6 @@ impl McpProcess {
self.send_request("turn/interrupt", params).await
}

/// Send a `review/start` JSON-RPC request (v2).
pub async fn send_review_start_request(
&mut self,
params: ReviewStartParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("review/start", params).await
}

/// Send a `cancelLoginChatGpt` JSON-RPC request.
pub async fn send_cancel_login_chat_gpt_request(
&mut self,
@@ -508,30 +391,6 @@ impl McpProcess {
self.send_request("logoutChatGpt", None).await
}

pub async fn send_config_read_request(
&mut self,
params: ConfigReadParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("config/read", params).await
}

pub async fn send_config_value_write_request(
&mut self,
params: ConfigValueWriteParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("config/value/write", params).await
}

pub async fn send_config_batch_write_request(
&mut self,
params: ConfigBatchWriteParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("config/batchWrite", params).await
}

/// Send an `account/logout` JSON-RPC request.
pub async fn send_logout_account_request(&mut self) -> anyhow::Result<i64> {
self.send_request("account/logout", None).await
@@ -644,16 +503,27 @@ impl McpProcess {
pub async fn read_stream_until_request_message(&mut self) -> anyhow::Result<ServerRequest> {
eprintln!("in read_stream_until_request_message()");

let message = self
.read_stream_until_message(|message| matches!(message, JSONRPCMessage::Request(_)))
.await?;
loop {
let message = self.read_jsonrpc_message().await?;

let JSONRPCMessage::Request(jsonrpc_request) = message else {
unreachable!("expected JSONRPCMessage::Request, got {message:?}");
};
jsonrpc_request
.try_into()
.with_context(|| "failed to deserialize ServerRequest from JSONRPCRequest")
match message {
JSONRPCMessage::Notification(notification) => {
eprintln!("notification: {notification:?}");
self.enqueue_user_message(notification);
}
JSONRPCMessage::Request(jsonrpc_request) => {
return jsonrpc_request.try_into().with_context(
|| "failed to deserialize ServerRequest from JSONRPCRequest",
);
}
JSONRPCMessage::Error(_) => {
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
}
JSONRPCMessage::Response(_) => {
anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
}
}
}
}

pub async fn read_stream_until_response_message(
@@ -662,32 +532,52 @@
) -> anyhow::Result<JSONRPCResponse> {
eprintln!("in read_stream_until_response_message({request_id:?})");

let message = self
.read_stream_until_message(|message| {
Self::message_request_id(message) == Some(&request_id)
})
.await?;

let JSONRPCMessage::Response(response) = message else {
unreachable!("expected JSONRPCMessage::Response, got {message:?}");
};
Ok(response)
loop {
let message = self.read_jsonrpc_message().await?;
match message {
JSONRPCMessage::Notification(notification) => {
eprintln!("notification: {notification:?}");
self.enqueue_user_message(notification);
}
JSONRPCMessage::Request(_) => {
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
}
JSONRPCMessage::Error(_) => {
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
}
JSONRPCMessage::Response(jsonrpc_response) => {
if jsonrpc_response.id == request_id {
return Ok(jsonrpc_response);
}
}
}
}
}

pub async fn read_stream_until_error_message(
&mut self,
request_id: RequestId,
) -> anyhow::Result<JSONRPCError> {
let message = self
.read_stream_until_message(|message| {
Self::message_request_id(message) == Some(&request_id)
})
.await?;

let JSONRPCMessage::Error(err) = message else {
unreachable!("expected JSONRPCMessage::Error, got {message:?}");
};
Ok(err)
loop {
let message = self.read_jsonrpc_message().await?;
match message {
JSONRPCMessage::Notification(notification) => {
eprintln!("notification: {notification:?}");
self.enqueue_user_message(notification);
}
JSONRPCMessage::Request(_) => {
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
}
JSONRPCMessage::Response(_) => {
// Keep scanning; we're waiting for an error with matching id.
}
JSONRPCMessage::Error(err) => {
if err.id == request_id {
return Ok(err);
}
}
}
}
}

pub async fn read_stream_until_notification_message(
@@ -696,64 +586,46 @@
) -> anyhow::Result<JSONRPCNotification> {
eprintln!("in read_stream_until_notification_message({method})");

let message = self
.read_stream_until_message(|message| {
matches!(
message,
JSONRPCMessage::Notification(notification) if notification.method == method
)
})
.await?;

let JSONRPCMessage::Notification(notification) = message else {
unreachable!("expected JSONRPCMessage::Notification, got {message:?}");
};
Ok(notification)
}

/// Clears any buffered messages so future reads only consider new stream items.
///
/// We call this when e.g. we want to validate against the next turn and no longer care about
/// messages buffered from the prior turn.
pub fn clear_message_buffer(&mut self) {
self.pending_messages.clear();
}

/// Reads the stream until a message matches `predicate`, buffering any non-matching messages
/// for later reads.
async fn read_stream_until_message<F>(&mut self, predicate: F) -> anyhow::Result<JSONRPCMessage>
where
F: Fn(&JSONRPCMessage) -> bool,
{
if let Some(message) = self.take_pending_message(&predicate) {
return Ok(message);
if let Some(notification) = self.take_pending_notification_by_method(method) {
return Ok(notification);
}

loop {
let message = self.read_jsonrpc_message().await?;
if predicate(&message) {
return Ok(message);
match message {
JSONRPCMessage::Notification(notification) => {
if notification.method == method {
return Ok(notification);
}
self.enqueue_user_message(notification);
}
JSONRPCMessage::Request(_) => {
anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
}
JSONRPCMessage::Error(_) => {
anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
}
JSONRPCMessage::Response(_) => {
anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
}
}
self.pending_messages.push_back(message);
}
}

fn take_pending_message<F>(&mut self, predicate: &F) -> Option<JSONRPCMessage>
where
F: Fn(&JSONRPCMessage) -> bool,
{
if let Some(pos) = self.pending_messages.iter().position(predicate) {
return self.pending_messages.remove(pos);
fn take_pending_notification_by_method(&mut self, method: &str) -> Option<JSONRPCNotification> {
if let Some(pos) = self
.pending_user_messages
.iter()
.position(|notification| notification.method == method)
{
return self.pending_user_messages.remove(pos);
}
None
}

fn message_request_id(message: &JSONRPCMessage) -> Option<&RequestId> {
match message {
JSONRPCMessage::Request(request) => Some(&request.id),
JSONRPCMessage::Response(response) => Some(&response.id),
JSONRPCMessage::Error(err) => Some(&err.id),
JSONRPCMessage::Notification(_) => None,
fn enqueue_user_message(&mut self, notification: JSONRPCNotification) {
if notification.method == "codex/event/user_message" {
self.pending_user_messages.push_back(notification);
}
}
}

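// Illustrative usage of the reader helpers above (not part of this diff; assumes
// an initialized `McpProcess` named `mcp`):
//
//     // Buffered `codex/event/user_message` notifications are replayed first, so a
//     // test can interleave request/response traffic and still observe the event:
//     let note = mcp
//         .read_stream_until_notification_message("codex/event/user_message")
//         .await?;
//     assert_eq!(note.method, "codex/event/user_message");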
@@ -1,18 +1,17 @@
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;

use core_test_support::responses;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::Respond;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path_regex;
use wiremock::matchers::path;

/// Create a mock server that will provide the responses, in order, for
/// requests to the `/v1/responses` endpoint.
pub async fn create_mock_responses_server_sequence(responses: Vec<String>) -> MockServer {
let server = responses::start_mock_server().await;
/// requests to the `/v1/chat/completions` endpoint.
pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> MockServer {
let server = MockServer::start().await;

let num_calls = responses.len();
let seq_responder = SeqResponder {
@@ -21,7 +20,7 @@ pub async fn create_mock_responses_server_sequence(responses: Vec<String>) -> Mo
};

Mock::given(method("POST"))
.and(path_regex(".*/responses$"))
.and(path("/v1/chat/completions"))
.respond_with(seq_responder)
.expect(num_calls as u64)
.mount(&server)
@@ -30,10 +29,10 @@ pub async fn create_mock_responses_server_sequence(responses: Vec<String>) -> Mo
server
}

/// Same as `create_mock_responses_server_sequence` but does not enforce an
/// Same as `create_mock_chat_completions_server` but does not enforce an
/// expectation on the number of calls.
pub async fn create_mock_responses_server_sequence_unchecked(responses: Vec<String>) -> MockServer {
let server = responses::start_mock_server().await;
pub async fn create_mock_chat_completions_server_unchecked(responses: Vec<String>) -> MockServer {
let server = MockServer::start().await;

let seq_responder = SeqResponder {
num_calls: AtomicUsize::new(0),
@@ -41,7 +40,7 @@ pub async fn create_mock_responses_server_sequence_unchecked(responses: Vec<Stri
};

Mock::given(method("POST"))
.and(path_regex(".*/responses$"))
.and(path("/v1/chat/completions"))
.respond_with(seq_responder)
.mount(&server)
.await;
@@ -58,24 +57,10 @@ impl Respond for SeqResponder {
fn respond(&self, _: &wiremock::Request) -> ResponseTemplate {
let call_num = self.num_calls.fetch_add(1, Ordering::SeqCst);
match self.responses.get(call_num) {
Some(response) => responses::sse_response(response.clone()),
Some(response) => ResponseTemplate::new(200)
.insert_header("content-type", "text/event-stream")
.set_body_raw(response.clone(), "text/event-stream"),
None => panic!("no response for {call_num}"),
}
}
}

/// Create a mock responses API server that returns the same assistant message for every request.
pub async fn create_mock_responses_server_repeating_assistant(message: &str) -> MockServer {
let server = responses::start_mock_server().await;
let body = responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_assistant_message("msg-1", message),
responses::ev_completed("resp-1"),
]);
Mock::given(method("POST"))
.and(path_regex(".*/responses$"))
.respond_with(responses::sse_response(body))
.mount(&server)
.await;
server
}

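// Illustrative usage of the sequenced mock above (not part of this diff): each
// queued string is served verbatim as one SSE body, in order, and the checked
// variant's `.expect(num_calls)` fails the test if the call count mismatches:
//
//     let server = create_mock_chat_completions_server(vec![first_sse, second_sse]).await;
//     // point the mock provider's `base_url` at `server.uri()` in config.toml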
@@ -1,84 +0,0 @@
use chrono::DateTime;
use chrono::Utc;
use codex_core::models_manager::model_presets::all_model_presets;
use codex_protocol::openai_models::ConfigShellToolType;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelVisibility;
use codex_protocol::openai_models::TruncationPolicyConfig;
use serde_json::json;
use std::path::Path;

/// Convert a ModelPreset to ModelInfo for cache storage.
fn preset_to_info(preset: &ModelPreset, priority: i32) -> ModelInfo {
ModelInfo {
slug: preset.id.clone(),
display_name: preset.display_name.clone(),
description: Some(preset.description.clone()),
default_reasoning_level: Some(preset.default_reasoning_effort),
supported_reasoning_levels: preset.supported_reasoning_efforts.clone(),
shell_type: ConfigShellToolType::ShellCommand,
visibility: if preset.show_in_picker {
ModelVisibility::List
} else {
ModelVisibility::Hide
},
supported_in_api: true,
priority,
upgrade: preset.upgrade.as_ref().map(|u| u.into()),
base_instructions: "base instructions".to_string(),
model_instructions_template: None,
supports_reasoning_summaries: false,
support_verbosity: false,
default_verbosity: None,
apply_patch_tool_type: None,
truncation_policy: TruncationPolicyConfig::bytes(10_000),
supports_parallel_tool_calls: false,
context_window: Some(272_000),
auto_compact_token_limit: None,
effective_context_window_percent: 95,
experimental_supported_tools: Vec::new(),
}
}

/// Write a models_cache.json file to the codex home directory.
/// This prevents ModelsManager from making network requests to refresh models.
/// The cache will be treated as fresh (within TTL) and used instead of fetching from the network.
/// Uses the built-in model presets from ModelsManager, converted to ModelInfo format.
pub fn write_models_cache(codex_home: &Path) -> std::io::Result<()> {
// Get all presets and filter for show_in_picker (same as builtin_model_presets does)
let presets: Vec<&ModelPreset> = all_model_presets()
.iter()
.filter(|preset| preset.show_in_picker)
.collect();
// Convert presets to ModelInfo, assigning priorities (lower = earlier in list).
// Priority is used for sorting, so the first model gets the lowest priority.
let models: Vec<ModelInfo> = presets
.iter()
.enumerate()
.map(|(idx, preset)| {
// Lower priority = earlier in list.
let priority = idx as i32;
preset_to_info(preset, priority)
})
.collect();

write_models_cache_with_models(codex_home, models)
}

/// Write a models_cache.json file with specific models.
/// Useful when tests need specific models to be available.
pub fn write_models_cache_with_models(
codex_home: &Path,
models: Vec<ModelInfo>,
) -> std::io::Result<()> {
let cache_path = codex_home.join("models_cache.json");
// DateTime<Utc> serializes to RFC3339 format by default with serde
let fetched_at: DateTime<Utc> = Utc::now();
let cache = json!({
"fetched_at": fetched_at,
"etag": null,
"models": models
});
std::fs::write(cache_path, serde_json::to_string_pretty(&cache)?)
}
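// Illustrative on-disk shape written by the helper above (not part of this diff;
// field values are examples only, derived from the json! literal):
//
//     {
//       "fetched_at": "2025-01-02T12:00:00Z",
//       "etag": null,
//       "models": [ { "slug": "...", "display_name": "...", "priority": 0, ... } ]
//     }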
@@ -1,86 +1,95 @@
use core_test_support::responses;
use serde_json::json;
use std::path::Path;

pub fn create_shell_command_sse_response(
pub fn create_shell_sse_response(
command: Vec<String>,
workdir: Option<&Path>,
timeout_ms: Option<u64>,
call_id: &str,
) -> anyhow::Result<String> {
// The `arguments` for the `shell_command` tool is a serialized JSON object.
let command_str = shlex::try_join(command.iter().map(String::as_str))?;
// The `arguments` for the `shell` tool is a serialized JSON object.
let tool_call_arguments = serde_json::to_string(&json!({
"command": command_str,
"command": command,
"workdir": workdir.map(|w| w.to_string_lossy()),
"timeout_ms": timeout_ms
"timeout": timeout_ms
}))?;
Ok(responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_function_call(call_id, "shell_command", &tool_call_arguments),
responses::ev_completed("resp-1"),
]))
let tool_call = json!({
"choices": [
{
"delta": {
"tool_calls": [
{
"id": call_id,
"function": {
"name": "shell",
"arguments": tool_call_arguments
}
}
]
},
"finish_reason": "tool_calls"
}
]
});

let sse = format!(
"data: {}\n\ndata: DONE\n\n",
serde_json::to_string(&tool_call)?
);
Ok(sse)
}

pub fn create_final_assistant_message_sse_response(message: &str) -> anyhow::Result<String> {
Ok(responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_assistant_message("msg-1", message),
responses::ev_completed("resp-1"),
]))
let assistant_message = json!({
"choices": [
{
"delta": {
"content": message
},
"finish_reason": "stop"
}
]
});

let sse = format!(
"data: {}\n\ndata: DONE\n\n",
serde_json::to_string(&assistant_message)?
);
Ok(sse)
}

pub fn create_apply_patch_sse_response(
patch_content: &str,
call_id: &str,
) -> anyhow::Result<String> {
Ok(responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_apply_patch_shell_command_call_via_heredoc(call_id, patch_content),
responses::ev_completed("resp-1"),
]))
}

pub fn create_exec_command_sse_response(call_id: &str) -> anyhow::Result<String> {
let (cmd, args) = if cfg!(windows) {
("cmd.exe", vec!["/d", "/c", "echo hi"])
} else {
("/bin/sh", vec!["-c", "echo hi"])
};
let command = std::iter::once(cmd.to_string())
.chain(args.into_iter().map(str::to_string))
.collect::<Vec<_>>();
// Use shell command to call apply_patch with heredoc format
let shell_command = format!("apply_patch <<'EOF'\n{patch_content}\nEOF");
let tool_call_arguments = serde_json::to_string(&json!({
"cmd": command.join(" "),
"yield_time_ms": 500
}))?;
Ok(responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_function_call(call_id, "exec_command", &tool_call_arguments),
responses::ev_completed("resp-1"),
]))
}

pub fn create_request_user_input_sse_response(call_id: &str) -> anyhow::Result<String> {
let tool_call_arguments = serde_json::to_string(&json!({
"questions": [{
"id": "confirm_path",
"header": "Confirm",
"question": "Proceed with the plan?",
"isOther": false,
"options": [{
"label": "Yes (Recommended)",
"description": "Continue the current plan."
}, {
"label": "No",
"description": "Stop and revisit the approach."
}]
}]
"command": ["bash", "-lc", shell_command]
}))?;

Ok(responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_function_call(call_id, "request_user_input", &tool_call_arguments),
responses::ev_completed("resp-1"),
]))
let tool_call = json!({
"choices": [
{
"delta": {
"tool_calls": [
{
"id": call_id,
"function": {
"name": "shell",
"arguments": tool_call_arguments
}
}
]
},
"finish_reason": "tool_calls"
}
]
});

let sse = format!(
"data: {}\n\ndata: DONE\n\n",
serde_json::to_string(&tool_call)?
);
Ok(sse)
}

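// Illustrative SSE body emitted by `create_shell_sse_response` (not part of this
// diff; whitespace added for readability, and note `arguments` is itself a
// serialized JSON string):
//
//     data: {"choices":[{"delta":{"tool_calls":[{"id":"call1",
//            "function":{"name":"shell","arguments":"{\"command\":[\"ls\"],...}"}}]},
//            "finish_reason":"tool_calls"}]}
//
//     data: DONE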
@@ -1,28 +1,13 @@
use anyhow::Result;
use codex_protocol::ThreadId;
use codex_protocol::protocol::GitInfo;
use codex_protocol::ConversationId;
use codex_protocol::protocol::SessionMeta;
use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::SessionSource;
use serde_json::json;
use std::fs;
use std::fs::FileTimes;
use std::path::Path;
use std::path::PathBuf;
use uuid::Uuid;

pub fn rollout_path(codex_home: &Path, filename_ts: &str, thread_id: &str) -> PathBuf {
let year = &filename_ts[0..4];
let month = &filename_ts[5..7];
let day = &filename_ts[8..10];
codex_home
.join("sessions")
.join(year)
.join(month)
.join(day)
.join(format!("rollout-{filename_ts}-{thread_id}.jsonl"))
}

/// Create a minimal rollout file under `CODEX_HOME/sessions/YYYY/MM/DD/`.
///
/// - `filename_ts` is the filename timestamp component in `YYYY-MM-DDThh-mm-ss` format.
@@ -37,54 +22,30 @@ pub fn create_fake_rollout(
meta_rfc3339: &str,
preview: &str,
model_provider: Option<&str>,
git_info: Option<GitInfo>,
) -> Result<String> {
create_fake_rollout_with_source(
codex_home,
filename_ts,
meta_rfc3339,
preview,
model_provider,
git_info,
SessionSource::Cli,
)
}

/// Create a minimal rollout file with an explicit session source.
pub fn create_fake_rollout_with_source(
codex_home: &Path,
filename_ts: &str,
meta_rfc3339: &str,
preview: &str,
model_provider: Option<&str>,
git_info: Option<GitInfo>,
source: SessionSource,
) -> Result<String> {
let uuid = Uuid::new_v4();
let uuid_str = uuid.to_string();
let conversation_id = ThreadId::from_string(&uuid_str)?;
let conversation_id = ConversationId::from_string(&uuid_str)?;

let file_path = rollout_path(codex_home, filename_ts, &uuid_str);
let dir = file_path
.parent()
.ok_or_else(|| anyhow::anyhow!("missing rollout parent directory"))?;
fs::create_dir_all(dir)?;
// sessions/YYYY/MM/DD derived from filename_ts (YYYY-MM-DDThh-mm-ss)
let year = &filename_ts[0..4];
let month = &filename_ts[5..7];
let day = &filename_ts[8..10];
let dir = codex_home.join("sessions").join(year).join(month).join(day);
fs::create_dir_all(&dir)?;

let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));

// Build JSONL lines
let meta = SessionMeta {
let payload = serde_json::to_value(SessionMeta {
id: conversation_id,
forked_from_id: None,
timestamp: meta_rfc3339.to_string(),
cwd: PathBuf::from("/"),
originator: "codex".to_string(),
cli_version: "0.0.0".to_string(),
source,
instructions: None,
source: SessionSource::Cli,
model_provider: model_provider.map(str::to_string),
base_instructions: None,
};
let payload = serde_json::to_value(SessionMetaLine {
meta,
git: git_info,
})?;

let lines = [
@@ -116,85 +77,6 @@ pub fn create_fake_rollout_with_source(
.to_string(),
];

fs::write(&file_path, lines.join("\n") + "\n")?;
let parsed = chrono::DateTime::parse_from_rfc3339(meta_rfc3339)?.with_timezone(&chrono::Utc);
let times = FileTimes::new().set_modified(parsed.into());
std::fs::OpenOptions::new()
.append(true)
.open(&file_path)?
.set_times(times)?;
Ok(uuid_str)
}

pub fn create_fake_rollout_with_text_elements(
codex_home: &Path,
filename_ts: &str,
meta_rfc3339: &str,
preview: &str,
text_elements: Vec<serde_json::Value>,
model_provider: Option<&str>,
git_info: Option<GitInfo>,
) -> Result<String> {
let uuid = Uuid::new_v4();
let uuid_str = uuid.to_string();
let conversation_id = ThreadId::from_string(&uuid_str)?;

// sessions/YYYY/MM/DD derived from filename_ts (YYYY-MM-DDThh-mm-ss)
let year = &filename_ts[0..4];
let month = &filename_ts[5..7];
let day = &filename_ts[8..10];
let dir = codex_home.join("sessions").join(year).join(month).join(day);
fs::create_dir_all(&dir)?;

let file_path = dir.join(format!("rollout-{filename_ts}-{uuid}.jsonl"));

// Build JSONL lines
let meta = SessionMeta {
id: conversation_id,
forked_from_id: None,
timestamp: meta_rfc3339.to_string(),
cwd: PathBuf::from("/"),
originator: "codex".to_string(),
cli_version: "0.0.0".to_string(),
source: SessionSource::Cli,
model_provider: model_provider.map(str::to_string),
base_instructions: None,
};
let payload = serde_json::to_value(SessionMetaLine {
meta,
git: git_info,
})?;

let lines = [
json!( {
"timestamp": meta_rfc3339,
"type": "session_meta",
"payload": payload
})
.to_string(),
json!( {
"timestamp": meta_rfc3339,
"type":"response_item",
"payload": {
"type":"message",
"role":"user",
"content":[{"type":"input_text","text": preview}]
}
})
.to_string(),
json!( {
"timestamp": meta_rfc3339,
"type":"event_msg",
"payload": {
"type":"user_message",
"message": preview,
"text_elements": text_elements,
"local_images": []
}
})
.to_string(),
];

fs::write(file_path, lines.join("\n") + "\n")?;
Ok(uuid_str)
}

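// Illustrative usage of the rollout fixtures above (not part of this diff): a test
// seeds one on-disk session and can then list or resume it by id:
//
//     let id = create_fake_rollout(
//         codex_home.path(),
//         "2025-01-02T12-00-00",
//         "2025-01-02T12:00:00Z",
//         "Hello A",
//         Some("openai"),
//         None,
//     )?;
//     // file lands at sessions/2025/01/02/rollout-2025-01-02T12-00-00-<id>.jsonl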
@@ -37,7 +37,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "http://127.0.0.1:0/v1"
wire_api = "responses"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
{requires_line}

@@ -1,9 +1,8 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_responses_server_sequence;
use app_test_support::create_shell_command_sse_response;
use app_test_support::format_with_current_shell;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_shell_sse_response;
use app_test_support::to_response;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AddConversationSubscriptionResponse;
@@ -23,10 +22,10 @@ use codex_app_server_protocol::SendUserTurnResponse;
use codex_app_server_protocol::ServerRequest;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::SandboxPolicy;
use codex_core::protocol_config_types::ReasoningEffort;
use codex_core::protocol_config_types::ReasoningSummary;
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::Event;
use codex_protocol::protocol::EventMsg;
@@ -57,7 +56,7 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
// Create a mock model server that immediately ends each turn.
// Two turns are expected: initial session configure + one user message.
let responses = vec![
create_shell_command_sse_response(
create_shell_sse_response(
vec!["ls".to_string()],
Some(&working_directory),
Some(5000),
@@ -65,7 +64,7 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
)?,
create_final_assistant_message_sse_response("Enjoy your new git repo!")?,
];
let server = create_mock_responses_server_sequence(responses).await;
let server = create_mock_chat_completions_server(responses).await;
create_config_toml(&codex_home, &server.uri())?;

// Start MCP server and initialize.
@@ -108,17 +107,12 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
let AddConversationSubscriptionResponse { subscription_id } =
to_response::<AddConversationSubscriptionResponse>(add_listener_resp)?;

// Drop any buffered events from conversation setup to avoid
// matching an earlier task_complete.
mcp.clear_message_buffer();

// 3) sendUserMessage (should trigger notifications; we only validate an OK response)
let send_user_id = mcp
.send_send_user_message_request(SendUserMessageParams {
conversation_id,
items: vec![codex_app_server_protocol::InputItem::Text {
text: "text".to_string(),
text_elements: Vec::new(),
}],
})
.await?;
@@ -129,38 +123,13 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {
.await??;
let SendUserMessageResponse {} = to_response::<SendUserMessageResponse>(send_user_resp)?;

let task_started_notification: JSONRPCNotification = timeout(
// Verify the task_finished notification is received.
// Note this also ensures that the final request to the server was made.
let task_finished_notification: JSONRPCNotification = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("codex/event/task_started"),
mcp.read_stream_until_notification_message("codex/event/task_complete"),
)
.await??;
let task_started_event: Event = serde_json::from_value(
task_started_notification
.params
.clone()
.expect("task_started should have params"),
)
.expect("task_started should deserialize to Event");

// Verify the task_finished notification for this turn is received.
// Note this also ensures that the final request to the server was made.
let task_finished_notification: JSONRPCNotification = loop {
let notification: JSONRPCNotification = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("codex/event/task_complete"),
)
.await??;
let event: Event = serde_json::from_value(
notification
.params
.clone()
.expect("task_complete should have params"),
)
.expect("task_complete should deserialize to Event");
if event.id == task_started_event.id {
break notification;
}
};
let serde_json::Value::Object(map) = task_finished_notification
.params
.expect("notification should have params")
@@ -175,7 +144,9 @@ async fn test_codex_jsonrpc_conversation_flow() -> Result<()> {

// 4) removeConversationListener
let remove_listener_id = mcp
.send_remove_thread_listener_request(RemoveConversationListenerParams { subscription_id })
.send_remove_conversation_listener_request(RemoveConversationListenerParams {
subscription_id,
})
.await?;
let remove_listener_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
@@ -204,7 +175,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {

// Mock server will request a python shell call for the first and second turn, then finish.
let responses = vec![
create_shell_command_sse_response(
create_shell_sse_response(
vec![
"python3".to_string(),
"-c".to_string(),
@@ -215,7 +186,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
"call1",
)?,
create_final_assistant_message_sse_response("done 1")?,
create_shell_command_sse_response(
create_shell_sse_response(
vec![
"python3".to_string(),
"-c".to_string(),
@@ -227,7 +198,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
)?,
create_final_assistant_message_sse_response("done 2")?,
];
let server = create_mock_responses_server_sequence(responses).await;
let server = create_mock_chat_completions_server(responses).await;
create_config_toml(&codex_home, &server.uri())?;

// Start MCP server and initialize.
@@ -271,7 +242,6 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
conversation_id,
items: vec![codex_app_server_protocol::InputItem::Text {
text: "run python".to_string(),
text_elements: Vec::new(),
}],
})
.await?;
@@ -297,9 +267,14 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
ExecCommandApprovalParams {
conversation_id,
call_id: "call1".to_string(),
command: format_with_current_shell("python3 -c 'print(42)'"),
command: vec![
"python3".to_string(),
"-c".to_string(),
"print(42)".to_string(),
],
cwd: working_directory.clone(),
reason: None,
risk: None,
parsed_cmd: vec![ParsedCommand::Unknown {
cmd: "python3 -c 'print(42)'".to_string()
}],
@@ -314,7 +289,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
)
.await?;

// Wait for first TurnComplete
// Wait for first TaskComplete
let _ = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("codex/event/task_complete"),
@@ -327,7 +302,6 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
conversation_id,
items: vec![codex_app_server_protocol::InputItem::Text {
text: "run python again".to_string(),
text_elements: Vec::new(),
}],
cwd: working_directory.clone(),
approval_policy: AskForApproval::Never,
@@ -335,7 +309,6 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
model: "mock-model".to_string(),
effort: Some(ReasoningEffort::Medium),
summary: ReasoningSummary::Auto,
output_schema: None,
})
.await?;
// Acknowledge sendUserTurn
@@ -380,22 +353,30 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
std::fs::create_dir(&second_cwd)?;

let responses = vec![
create_shell_command_sse_response(
vec!["echo".to_string(), "first".to_string(), "turn".to_string()],
create_shell_sse_response(
vec![
"bash".to_string(),
"-lc".to_string(),
"echo first turn".to_string(),
],
None,
Some(5000),
"call-first",
)?,
create_final_assistant_message_sse_response("done first")?,
create_shell_command_sse_response(
vec!["echo".to_string(), "second".to_string(), "turn".to_string()],
create_shell_sse_response(
vec![
"bash".to_string(),
"-lc".to_string(),
"echo second turn".to_string(),
],
None,
Some(5000),
"call-second",
)?,
create_final_assistant_message_sse_response("done second")?,
];
let server = create_mock_responses_server_sequence(responses).await;
let server = create_mock_chat_completions_server(responses).await;
create_config_toml(&codex_home, &server.uri())?;

let mut mcp = McpProcess::new(&codex_home).await?;
@@ -437,12 +418,11 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
conversation_id,
items: vec![InputItem::Text {
text: "first turn".to_string(),
text_elements: Vec::new(),
}],
cwd: first_cwd.clone(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::WorkspaceWrite {
writable_roots: vec![first_cwd.try_into()?],
writable_roots: vec![first_cwd.clone()],
network_access: false,
exclude_tmpdir_env_var: false,
exclude_slash_tmp: false,
@@ -450,7 +430,6 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
model: model.clone(),
effort: Some(ReasoningEffort::Medium),
summary: ReasoningSummary::Auto,
output_schema: None,
})
.await?;
timeout(
@@ -463,14 +442,12 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
mcp.read_stream_until_notification_message("codex/event/task_complete"),
)
.await??;
mcp.clear_message_buffer();

let second_turn_id = mcp
.send_send_user_turn_request(SendUserTurnParams {
conversation_id,
items: vec![InputItem::Text {
text: "second turn".to_string(),
text_elements: Vec::new(),
}],
cwd: second_cwd.clone(),
approval_policy: AskForApproval::Never,
@@ -478,7 +455,6 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
model: model.clone(),
effort: Some(ReasoningEffort::Medium),
summary: ReasoningSummary::Auto,
output_schema: None,
})
.await?;
timeout(
@@ -505,9 +481,13 @@ async fn test_send_user_turn_updates_sandbox_and_cwd_between_turns() -> Result<(
exec_begin.cwd, second_cwd,
"exec turn should run from updated cwd"
);
let expected_command = format_with_current_shell("echo second turn");
assert_eq!(
exec_begin.command, expected_command,
exec_begin.command,
vec![
"bash".to_string(),
"-lc".to_string(),
"echo second turn".to_string()
],
"exec turn should run expected command"
);

@@ -534,7 +514,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#

@@ -1,6 +1,5 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::test_tmp_path;
use app_test_support::to_response;
use codex_app_server_protocol::GetUserSavedConfigResponse;
use codex_app_server_protocol::JSONRPCResponse;
@@ -11,10 +10,10 @@ use codex_app_server_protocol::Tools;
use codex_app_server_protocol::UserSavedConfig;
use codex_core::protocol::AskForApproval;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::config_types::Verbosity;
use codex_protocol::openai_models::ReasoningEffort;
use pretty_assertions::assert_eq;
use std::collections::HashMap;
use std::path::Path;
@@ -24,13 +23,11 @@ use tokio::time::timeout;
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

fn create_config_toml(codex_home: &Path) -> std::io::Result<()> {
let writable_root = test_tmp_path();
let config_toml = codex_home.join("config.toml");
std::fs::write(
config_toml,
format!(
r#"
model = "gpt-5.1-codex-max"
r#"
model = "gpt-5-codex"
approval_policy = "on-request"
sandbox_mode = "workspace-write"
model_reasoning_summary = "detailed"
@@ -41,7 +38,7 @@ forced_chatgpt_workspace_id = "12345678-0000-0000-0000-000000000000"
forced_login_method = "chatgpt"

[sandbox_workspace_write]
writable_roots = [{}]
writable_roots = ["/tmp"]
network_access = true
exclude_tmpdir_env_var = true
exclude_slash_tmp = true
@@ -59,8 +56,6 @@ model_verbosity = "medium"
model_provider = "openai"
chatgpt_base_url = "https://api.chatgpt.com"
"#,
serde_json::json!(writable_root)
),
)
}

@@ -80,20 +75,19 @@ async fn get_config_toml_parses_all_fields() -> Result<()> {
.await??;

let config: GetUserSavedConfigResponse = to_response(resp)?;
let writable_root = test_tmp_path();
let expected = GetUserSavedConfigResponse {
config: UserSavedConfig {
approval_policy: Some(AskForApproval::OnRequest),
sandbox_mode: Some(SandboxMode::WorkspaceWrite),
sandbox_settings: Some(SandboxSettings {
writable_roots: vec![writable_root],
writable_roots: vec!["/tmp".into()],
network_access: Some(true),
exclude_tmpdir_env_var: Some(true),
exclude_slash_tmp: Some(true),
}),
forced_chatgpt_workspace_id: Some("12345678-0000-0000-0000-000000000000".into()),
forced_login_method: Some(ForcedLoginMethod::Chatgpt),
model: Some("gpt-5.1-codex-max".into()),
model: Some("gpt-5-codex".into()),
model_reasoning_effort: Some(ReasoningEffort::High),
model_reasoning_summary: Some(ReasoningSummary::Detailed),
model_verbosity: Some(Verbosity::Medium),

@@ -1,6 +1,7 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::to_response;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AddConversationSubscriptionResponse;
@@ -11,7 +12,6 @@ use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserMessageResponse;
use core_test_support::responses;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::path::Path;
@@ -23,9 +23,8 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_conversation_create_and_send_message_ok() -> Result<()> {
// Mock server – we won't strictly rely on it, but provide one to satisfy any model wiring.
let response_body = create_final_assistant_message_sse_response("Done")?;
let server = responses::start_mock_server().await;
let response_mock = responses::mount_sse_sequence(&server, vec![response_body]).await;
let responses = vec![create_final_assistant_message_sse_response("Done")?];
let server = create_mock_chat_completions_server(responses).await;

// Temporary Codex home with config pointing at the mock server.
let codex_home = TempDir::new()?;
@@ -77,7 +76,6 @@ async fn test_conversation_create_and_send_message_ok() -> Result<()> {
conversation_id,
items: vec![InputItem::Text {
text: "Hello".to_string(),
text_elements: Vec::new(),
}],
})
.await?;
@@ -88,30 +86,32 @@ async fn test_conversation_create_and_send_message_ok() -> Result<()> {
.await??;
let _ok: SendUserMessageResponse = to_response::<SendUserMessageResponse>(send_resp)?;

// Avoid race condition by waiting for the mock server to receive the responses request.
// avoid race condition by waiting for the mock server to receive the chat.completions request
let deadline = std::time::Instant::now() + DEFAULT_READ_TIMEOUT;
let requests = loop {
let requests = response_mock.requests();
let requests = server.received_requests().await.unwrap_or_default();
if !requests.is_empty() {
break requests;
}
if std::time::Instant::now() >= deadline {
panic!("mock server did not receive the responses request in time");
panic!("mock server did not receive the chat.completions request in time");
}
tokio::time::sleep(std::time::Duration::from_millis(10)).await;
};

// Verify the outbound request body matches expectations for Responses.
// Verify the outbound request body matches expectations for Chat Completions.
let request = requests
.first()
.expect("mock server should have received at least one request");
let body = request.body_json();
let body = request.body_json::<serde_json::Value>()?;
assert_eq!(body["model"], json!("o3"));
let user_texts = request.message_input_texts("user");
assert!(
user_texts.iter().any(|text| text == "Hello"),
"expected user input to include Hello, got {user_texts:?}"
);
assert!(body["stream"].as_bool().unwrap_or(false));
let messages = body["messages"]
.as_array()
.expect("messages should be array");
let last = messages.last().expect("at least one message");
assert_eq!(last["role"], json!("user"));
assert_eq!(last["content"], json!("Hello"));

drop(server);
Ok(())
@@ -133,7 +133,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#
@@ -1,140 +0,0 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_fake_rollout;
use app_test_support::to_response;
use codex_app_server_protocol::ForkConversationParams;
use codex_app_server_protocol::ForkConversationResponse;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::NewConversationParams; // reused for overrides shape
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::SessionConfiguredNotification;
use codex_core::protocol::EventMsg;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn fork_conversation_creates_new_rollout() -> Result<()> {
let codex_home = TempDir::new()?;

let preview = "Hello A";
let conversation_id = create_fake_rollout(
codex_home.path(),
"2025-01-02T12-00-00",
"2025-01-02T12:00:00Z",
preview,
Some("openai"),
None,
)?;

let original_path = codex_home
.path()
.join("sessions")
.join("2025")
.join("01")
.join("02")
.join(format!(
"rollout-2025-01-02T12-00-00-{conversation_id}.jsonl"
));
assert!(
original_path.exists(),
"expected original rollout to exist at {}",
original_path.display()
);
let original_contents = std::fs::read_to_string(&original_path)?;

let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

let fork_req_id = mcp
.send_fork_conversation_request(ForkConversationParams {
path: Some(original_path.clone()),
conversation_id: None,
overrides: Some(NewConversationParams {
model: Some("o3".to_string()),
..Default::default()
}),
})
.await?;

// Expect a sessionConfigured notification for the forked session.
let notification: JSONRPCNotification = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("sessionConfigured"),
)
.await??;
let session_configured: ServerNotification = notification.try_into()?;
let ServerNotification::SessionConfigured(SessionConfiguredNotification {
model,
session_id,
rollout_path,
initial_messages: session_initial_messages,
..
}) = session_configured
else {
unreachable!("expected sessionConfigured notification");
};

assert_eq!(model, "o3");
assert_ne!(
session_id.to_string(),
conversation_id,
"expected a new conversation id when forking"
);
assert_ne!(
rollout_path, original_path,
"expected a new rollout path when forking"
);
assert!(
rollout_path.exists(),
"expected forked rollout to exist at {}",
rollout_path.display()
);

let session_initial_messages =
session_initial_messages.expect("expected initial messages when forking from rollout");
match session_initial_messages.as_slice() {
[EventMsg::UserMessage(message)] => {
assert_eq!(message.message, preview);
}
other => panic!("unexpected initial messages from rollout fork: {other:#?}"),
}

// Then the response for forkConversation.
let fork_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(fork_req_id)),
)
.await??;
let ForkConversationResponse {
conversation_id: forked_id,
model: forked_model,
initial_messages: response_initial_messages,
rollout_path: response_rollout_path,
} = to_response::<ForkConversationResponse>(fork_resp)?;

assert_eq!(forked_model, "o3");
assert_eq!(response_rollout_path, rollout_path);
assert_ne!(forked_id.to_string(), conversation_id);

let response_initial_messages =
response_initial_messages.expect("expected initial messages in fork response");
match response_initial_messages.as_slice() {
[EventMsg::UserMessage(message)] => {
assert_eq!(message.message, preview);
}
other => panic!("unexpected initial messages in fork response: {other:#?}"),
}

let after_contents = std::fs::read_to_string(&original_path)?;
assert_eq!(
after_contents, original_contents,
"fork should not mutate the original rollout file"
);

Ok(())
}
@@ -18,8 +18,8 @@ use tempfile::TempDir;
use tokio::time::timeout;

use app_test_support::McpProcess;
use app_test_support::create_mock_responses_server_sequence;
use app_test_support::create_shell_command_sse_response;
use app_test_support::create_mock_chat_completions_server;
use app_test_support::create_shell_sse_response;
use app_test_support::to_response;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
@@ -56,7 +56,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
std::fs::create_dir(&working_directory)?;

// Create mock server with a single SSE response: the long sleep command
let server = create_mock_responses_server_sequence(vec![create_shell_command_sse_response(
let server = create_mock_chat_completions_server(vec![create_shell_sse_response(
shell_command.clone(),
Some(&working_directory),
Some(10_000), // 10 seconds timeout in ms
@@ -105,7 +105,6 @@ async fn shell_command_interruption() -> anyhow::Result<()> {
conversation_id,
items: vec![codex_app_server_protocol::InputItem::Text {
text: "run first sleep command".to_string(),
text_elements: Vec::new(),
}],
})
.await?;
@@ -154,7 +153,7 @@ model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
wire_api = "chat"
request_max_retries = 0
stream_max_retries = 0
"#

@@ -6,7 +6,7 @@ use codex_app_server_protocol::JSONRPCNotification;
 use codex_app_server_protocol::JSONRPCResponse;
 use codex_app_server_protocol::ListConversationsParams;
 use codex_app_server_protocol::ListConversationsResponse;
-use codex_app_server_protocol::NewConversationParams;
+use codex_app_server_protocol::NewConversationParams; // reused for overrides shape
 use codex_app_server_protocol::RequestId;
 use codex_app_server_protocol::ResumeConversationParams;
 use codex_app_server_protocol::ResumeConversationResponse;
@@ -31,7 +31,6 @@ async fn test_list_and_resume_conversations() -> Result<()> {
         "2025-01-02T12:00:00Z",
         "Hello A",
         Some("openai"),
-        None,
     )?;
     create_fake_rollout(
         codex_home.path(),
@@ -39,7 +38,6 @@ async fn test_list_and_resume_conversations() -> Result<()> {
         "2025-01-01T13:00:00Z",
         "Hello B",
         Some("openai"),
-        None,
     )?;
     create_fake_rollout(
         codex_home.path(),
@@ -47,7 +45,6 @@ async fn test_list_and_resume_conversations() -> Result<()> {
         "2025-01-01T12:00:00Z",
         "Hello C",
         None,
-        None,
     )?;

     let mut mcp = McpProcess::new(codex_home.path()).await?;
@@ -108,7 +105,6 @@ async fn test_list_and_resume_conversations() -> Result<()> {
         "2025-01-01T11:30:00Z",
         "Hello TP",
         Some("test-provider"),
-        None,
     )?;

     // Filtering by model provider should return only matching sessions.
@@ -307,7 +303,6 @@ async fn test_list_and_resume_conversations() -> Result<()> {
         content: vec![ContentItem::InputText {
             text: fork_history_text.to_string(),
         }],
-        end_turn: None,
     }];
     let resume_with_history_req_id = mcp
         .send_resume_conversation_request(ResumeConversationParams {
@@ -359,81 +354,3 @@ async fn test_list_and_resume_conversations() -> Result<()> {

     Ok(())
 }
-
-#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
-async fn list_conversations_fetches_through_filtered_pages() -> Result<()> {
-    let codex_home = TempDir::new()?;
-
-    // Only the last 3 conversations match the provider filter; request 3 and
-    // ensure pagination keeps fetching past non-matching pages.
-    let cases = [
-        (
-            "2025-03-04T12-00-00",
-            "2025-03-04T12:00:00Z",
-            "skip_provider",
-        ),
-        (
-            "2025-03-03T12-00-00",
-            "2025-03-03T12:00:00Z",
-            "skip_provider",
-        ),
-        (
-            "2025-03-02T12-00-00",
-            "2025-03-02T12:00:00Z",
-            "target_provider",
-        ),
-        (
-            "2025-03-01T12-00-00",
-            "2025-03-01T12:00:00Z",
-            "target_provider",
-        ),
-        (
-            "2025-02-28T12-00-00",
-            "2025-02-28T12:00:00Z",
-            "target_provider",
-        ),
-    ];
-
-    for (ts_file, ts_rfc, provider) in cases {
-        create_fake_rollout(
-            codex_home.path(),
-            ts_file,
-            ts_rfc,
-            "Hello",
-            Some(provider),
-            None,
-        )?;
-    }
-
-    let mut mcp = McpProcess::new(codex_home.path()).await?;
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
-
-    let req_id = mcp
-        .send_list_conversations_request(ListConversationsParams {
-            page_size: Some(3),
-            cursor: None,
-            model_providers: Some(vec!["target_provider".to_string()]),
-        })
-        .await?;
-    let resp: JSONRPCResponse = timeout(
-        DEFAULT_READ_TIMEOUT,
-        mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
-    )
-    .await??;
-    let ListConversationsResponse { items, next_cursor } =
-        to_response::<ListConversationsResponse>(resp)?;
-
-    assert_eq!(
-        items.len(),
-        3,
-        "should fetch across pages to satisfy the limit"
-    );
-    assert!(
-        items
-            .iter()
-            .all(|item| item.model_provider == "target_provider")
-    );
-    assert_eq!(next_cursor, None);
-
-    Ok(())
-}
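
The deleted test above pinned down one subtle behavior: with a provider filter active, listing must keep fetching further pages until it either fills the requested page size or exhausts the cursor, rather than stopping at the first page of non-matching rows. A hypothetical sketch of that accumulation loop (all types and names here are stand-ins, not the codex-rs API):

// Stand-in types for illustration only.
struct Summary {
    model_provider: String,
}

struct Page {
    items: Vec<Summary>,
    next_cursor: Option<String>,
}

// Keep pulling pages until `limit` matching items are collected or the
// backing store is exhausted, so sparse matches hidden behind pages of
// non-matching rows are still found.
fn list_filtered(
    mut fetch: impl FnMut(Option<&str>) -> Page,
    provider: &str,
    limit: usize,
) -> Vec<Summary> {
    let mut out = Vec::new();
    let mut cursor: Option<String> = None;
    loop {
        let page = fetch(cursor.as_deref());
        out.extend(
            page.items
                .into_iter()
                .filter(|s| s.model_provider == provider),
        );
        if out.len() >= limit || page.next_cursor.is_none() {
            out.truncate(limit);
            return out;
        }
        cursor = page.next_cursor;
    }
}

Truncating after the loop keeps the contract simple: the caller gets at most `limit` matches, and an exhausted cursor maps naturally to the `next_cursor == None` the test asserted.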