Mirror of https://github.com/openai/codex.git, synced 2026-02-02 23:13:37 +00:00

Compare commits: latest-alp ... tag

15 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 124c7fc2be |  |
|  | 0a1323747b |  |
|  | 348d379509 |  |
|  | 6912ba9fda |  |
|  | 27cec53ddc |  |
|  | 42273d94e8 |  |
|  | 1a5289a4ef |  |
|  | 359142f22f |  |
|  | ecff4d4f72 |  |
|  | 985333feff |  |
|  | e01610f762 |  |
|  | 09693d259b |  |
|  | f8ba48d995 |  |
|  | 677532f97b |  |
|  | beb83225e5 |  |
@@ -1,4 +0,0 @@
# Without this, Bazel will consider BUILD.bazel files in
# .git/sl/origbackups (which can be populated by Sapling SCM).
.git
codex-rs/target
51  .bazelrc
@@ -1,51 +0,0 @@
common --repo_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1
# Dummy xcode config so we don't need to build xcode_locator in repo rule.
common --xcode_version_config=//:disable_xcode

common --disk_cache=~/.cache/bazel-disk-cache
common --repo_contents_cache=~/.cache/bazel-repo-contents-cache
common --repository_cache=~/.cache/bazel-repo-cache
common --remote_cache_compression
startup --experimental_remote_repo_contents_cache

common --experimental_platform_in_output_dir

# Runfiles strategy rationale: codex-rs/utils/cargo-bin/README.md
common --noenable_runfiles

common --enable_platform_specific_config
# TODO(zbarsky): We need to untangle these libc constraints to get linux remote builds working.
common:linux --host_platform=//:local
common --@rules_cc//cc/toolchains/args/archiver_flags:use_libtool_on_macos=False
common --@toolchains_llvm_bootstrapped//config:experimental_stub_libgcc_s

# We need to use the sh toolchain on windows so we don't send host bash paths to the linux executor.
common:windows --@rules_rust//rust/settings:experimental_use_sh_toolchain_for_bootstrap_process_wrapper

# TODO(zbarsky): rules_rust doesn't implement this flag properly with remote exec...
# common --@rules_rust//rust/settings:pipelined_compilation

common --incompatible_strict_action_env
# Not ideal, but We need to allow dotslash to be found
common --test_env=PATH=/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin

common --test_output=errors
common --bes_results_url=https://app.buildbuddy.io/invocation/
common --bes_backend=grpcs://remote.buildbuddy.io
common --remote_cache=grpcs://remote.buildbuddy.io
common --remote_download_toplevel
common --nobuild_runfile_links
common --remote_timeout=3600
common --noexperimental_throttle_remote_action_building
common --experimental_remote_execution_keepalive
common --grpc_keepalive_time=30s

# This limits both in-flight executions and concurrent downloads. Even with high number
# of jobs execution will still be limited by CPU cores, so this just pays a bit of
# memory in exchange for higher download concurrency.
common --jobs=30

common:remote --extra_execution_platforms=//:rbe
common:remote --remote_executor=grpcs://remote.buildbuddy.io
common:remote --jobs=800
@@ -1 +0,0 @@
9.0.0
@@ -1,6 +1,6 @@
[codespell]
# Ref: https://github.com/codespell-project/codespell#using-a-config-file
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt,*.snap,*.snap.new
skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt
check-hidden = true
ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
ignore-words-list = ratatui,ser,iTerm,iterm2,iterm
9  .github/ISSUE_TEMPLATE/2-bug-report.yml  (vendored)
@@ -40,18 +40,11 @@ body:
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: input
id: terminal
attributes:
label: What terminal emulator and version are you using (if applicable)?
description: Also note any multiplexer in use (screen / tmux / zellij)
description: |
E.g, VSCode, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
- type: textarea
id: actual
attributes:
label: What issue are you seeing?
description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
description: Please include the full error messages and prompts with PII redacted. If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
BIN  .github/codex-cli-login.png  (vendored, new file)
Binary file not shown. After: 2.9 MiB
BIN  .github/codex-cli-permissions.png  (vendored, new file)
Binary file not shown. After: 408 KiB
BIN  .github/codex-cli-splash.png  (vendored)
Binary file not shown. Before: 818 KiB, After: 3.1 MiB
4  .github/codex/labels/codex-rust-review.md  (vendored)
@@ -15,10 +15,10 @@ Things to look out for when doing the review:

## Code Organization

- Each crate in the Cargo workspace in `codex-rs` has a specific purpose: make a note if you believe new code is not introduced in the correct crate.
- Each create in the Cargo workspace in `codex-rs` has a specific purpose: make a note if you believe new code is not introduced in the correct crate.
- When possible, try to keep the `core` crate as small as possible. Non-core but shared logic is often a good candidate for `codex-rs/common`.
- Be wary of large files and offer suggestions for how to break things into more reasonably-sized files.
- Rust files should generally be organized such that the public parts of the API appear near the top of the file and helper functions go below. This is analogous to the "inverted pyramid" structure that is favored in journalism.
- Rust files should generally be organized such that the public parts of the API appear near the top of the file and helper functions go below. This is analagous to the "inverted pyramid" structure that is favored in journalism.

## Assertions in Tests
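The "inverted pyramid" organization described in the review guidance above is easiest to see in a small sketch. This is illustrative only; the function names below are invented and do not come from the repository:

```rust
// Public entry point first: callers only need to read this far.
pub fn summarize_report(report: &str) -> String {
    let normalized = normalize_whitespace(report);
    first_sentence(&normalized).to_string()
}

// --- Helpers (implementation details) live below the public API ---

fn normalize_whitespace(input: &str) -> String {
    // Collapse all runs of whitespace to single spaces.
    input.split_whitespace().collect::<Vec<_>>().join(" ")
}

fn first_sentence(input: &str) -> &str {
    // Take everything up to the first period, or the whole string.
    input.split('.').next().unwrap_or(input)
}
```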
BIN  .github/demo.gif  (vendored, new file)
Binary file not shown. After: 19 MiB
163  .github/scripts/install-musl-build-tools.sh  (vendored)
@@ -1,163 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

: "${TARGET:?TARGET environment variable is required}"
: "${GITHUB_ENV:?GITHUB_ENV environment variable is required}"

apt_update_args=()
if [[ -n "${APT_UPDATE_ARGS:-}" ]]; then
# shellcheck disable=SC2206
apt_update_args=(${APT_UPDATE_ARGS})
fi

apt_install_args=()
if [[ -n "${APT_INSTALL_ARGS:-}" ]]; then
# shellcheck disable=SC2206
apt_install_args=(${APT_INSTALL_ARGS})
fi

sudo apt-get update "${apt_update_args[@]}"
sudo apt-get install -y "${apt_install_args[@]}" musl-tools pkg-config g++ clang libc++-dev libc++abi-dev lld

case "${TARGET}" in
x86_64-unknown-linux-musl)
arch="x86_64"
;;
aarch64-unknown-linux-musl)
arch="aarch64"
;;
*)
echo "Unexpected musl target: ${TARGET}" >&2
exit 1
;;
esac

# Use the musl toolchain as the Rust linker to avoid Zig injecting its own CRT.
if command -v "${arch}-linux-musl-gcc" >/dev/null; then
musl_linker="$(command -v "${arch}-linux-musl-gcc")"
elif command -v musl-gcc >/dev/null; then
musl_linker="$(command -v musl-gcc)"
else
echo "musl gcc not found after install; arch=${arch}" >&2
exit 1
fi

zig_target="${TARGET/-unknown-linux-musl/-linux-musl}"
runner_temp="${RUNNER_TEMP:-/tmp}"
tool_root="${runner_temp}/codex-musl-tools-${TARGET}"
mkdir -p "${tool_root}"

sysroot=""
if command -v zig >/dev/null; then
zig_bin="$(command -v zig)"
cc="${tool_root}/zigcc"
cxx="${tool_root}/zigcxx"

cat >"${cc}" <<EOF
#!/usr/bin/env bash
set -euo pipefail

args=()
skip_next=0
for arg in "\$@"; do
if [[ "\${skip_next}" -eq 1 ]]; then
skip_next=0
continue
fi
case "\${arg}" in
--target)
skip_next=1
continue
;;
--target=*|-target=*|-target)
# Drop any explicit --target/-target flags. Zig expects -target and
# rejects Rust triples like *-unknown-linux-musl.
if [[ "\${arg}" == "-target" ]]; then
skip_next=1
fi
continue
;;
esac
args+=("\${arg}")
done

exec "${zig_bin}" cc -target "${zig_target}" "\${args[@]}"
EOF
cat >"${cxx}" <<EOF
#!/usr/bin/env bash
set -euo pipefail

args=()
skip_next=0
for arg in "\$@"; do
if [[ "\${skip_next}" -eq 1 ]]; then
skip_next=0
continue
fi
case "\${arg}" in
--target)
skip_next=1
continue
;;
--target=*|-target=*|-target)
if [[ "\${arg}" == "-target" ]]; then
skip_next=1
fi
continue
;;
esac
args+=("\${arg}")
done

exec "${zig_bin}" c++ -target "${zig_target}" "\${args[@]}"
EOF
chmod +x "${cc}" "${cxx}"

sysroot="$("${zig_bin}" cc -target "${zig_target}" -print-sysroot 2>/dev/null || true)"
else
cc="${musl_linker}"

if command -v "${arch}-linux-musl-g++" >/dev/null; then
cxx="$(command -v "${arch}-linux-musl-g++")"
elif command -v musl-g++ >/dev/null; then
cxx="$(command -v musl-g++)"
else
cxx="${cc}"
fi
fi

if [[ -n "${sysroot}" && "${sysroot}" != "/" ]]; then
echo "BORING_BSSL_SYSROOT=${sysroot}" >> "$GITHUB_ENV"
boring_sysroot_var="BORING_BSSL_SYSROOT_${TARGET}"
boring_sysroot_var="${boring_sysroot_var//-/_}"
echo "${boring_sysroot_var}=${sysroot}" >> "$GITHUB_ENV"
fi

cflags="-pthread"
cxxflags="-pthread"
if [[ "${TARGET}" == "aarch64-unknown-linux-musl" ]]; then
# BoringSSL enables -Wframe-larger-than=25344 under clang and treats warnings as errors.
cflags="${cflags} -Wno-error=frame-larger-than"
cxxflags="${cxxflags} -Wno-error=frame-larger-than"
fi

echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
echo "CC=${cc}" >> "$GITHUB_ENV"
echo "TARGET_CC=${cc}" >> "$GITHUB_ENV"
target_cc_var="CC_${TARGET}"
target_cc_var="${target_cc_var//-/_}"
echo "${target_cc_var}=${cc}" >> "$GITHUB_ENV"
echo "CXX=${cxx}" >> "$GITHUB_ENV"
echo "TARGET_CXX=${cxx}" >> "$GITHUB_ENV"
target_cxx_var="CXX_${TARGET}"
target_cxx_var="${target_cxx_var//-/_}"
echo "${target_cxx_var}=${cxx}" >> "$GITHUB_ENV"

cargo_linker_var="CARGO_TARGET_${TARGET^^}_LINKER"
cargo_linker_var="${cargo_linker_var//-/_}"
echo "${cargo_linker_var}=${musl_linker}" >> "$GITHUB_ENV"

echo "CMAKE_C_COMPILER=${cc}" >> "$GITHUB_ENV"
echo "CMAKE_CXX_COMPILER=${cxx}" >> "$GITHUB_ENV"
echo "CMAKE_ARGS=-DCMAKE_HAVE_THREADS_LIBRARY=1 -DCMAKE_USE_PTHREADS_INIT=1 -DCMAKE_THREAD_LIBS_INIT=-pthread -DTHREADS_PREFER_PTHREAD_FLAG=ON" >> "$GITHUB_ENV"
20  .github/workflows/Dockerfile.bazel  (vendored)
@@ -1,20 +0,0 @@
FROM ubuntu:24.04

# TODO(mbolin): Published to docker.io/mbolin491/codex-bazel:latest for
# initial debugging, but we should publish to a more proper location.
#
# docker buildx create --use
# docker buildx build --platform linux/amd64,linux/arm64 -f .github/workflows/Dockerfile.bazel -t mbolin491/codex-bazel:latest --push .

RUN apt-get update && \
apt-get install -y --no-install-recommends \
curl git python3 ca-certificates && \
rm -rf /var/lib/apt/lists/*

# Install dotslash.
RUN curl -LSfs "https://github.com/facebook/dotslash/releases/download/v0.5.8/dotslash-ubuntu-22.04.$(uname -m).tar.gz" | tar fxz - -C /usr/local/bin

# Ubuntu 24.04 ships with user 'ubuntu' already created with UID 1000.
USER ubuntu

WORKDIR /workspace
110  .github/workflows/bazel.yml  (vendored)
@@ -1,110 +0,0 @@
name: Bazel (experimental)

# Note this workflow was originally derived from:
# https://github.com/cerisier/toolchains_llvm_bootstrapped/blob/main/.github/workflows/ci.yaml

on:
pull_request: {}
push:
branches:
- main
workflow_dispatch:

concurrency:
# Cancel previous actions from the same PR or branch except 'main' branch.
# See https://docs.github.com/en/actions/using-jobs/using-concurrency and https://docs.github.com/en/actions/learn-github-actions/contexts for more info.
group: concurrency-group::${{ github.workflow }}::${{ github.event.pull_request.number > 0 && format('pr-{0}', github.event.pull_request.number) || github.ref_name }}${{ github.ref_name == 'main' && format('::{0}', github.run_id) || ''}}
cancel-in-progress: ${{ github.ref_name != 'main' }}
jobs:
test:
strategy:
fail-fast: false
matrix:
include:
# macOS
- os: macos-15-xlarge
target: aarch64-apple-darwin
- os: macos-15-xlarge
target: x86_64-apple-darwin

# Linux
- os: ubuntu-24.04-arm
target: aarch64-unknown-linux-gnu
- os: ubuntu-24.04
target: x86_64-unknown-linux-gnu
- os: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
- os: ubuntu-24.04
target: x86_64-unknown-linux-musl
# TODO: Enable Windows once we fix the toolchain issues there.
#- os: windows-latest
# target: x86_64-pc-windows-gnullvm
runs-on: ${{ matrix.os }}

# Configure a human readable name for each job
name: Local Bazel build on ${{ matrix.os }} for ${{ matrix.target }}

steps:
- uses: actions/checkout@v6

# Some integration tests rely on DotSlash being installed.
# See https://github.com/openai/codex/pull/7617.
- name: Install DotSlash
uses: facebook/install-dotslash@v2

- name: Make DotSlash available in PATH (Unix)
if: runner.os != 'Windows'
run: cp "$(which dotslash)" /usr/local/bin

- name: Make DotSlash available in PATH (Windows)
if: runner.os == 'Windows'
shell: pwsh
run: Copy-Item (Get-Command dotslash).Source -Destination "$env:LOCALAPPDATA\Microsoft\WindowsApps\dotslash.exe"

# Install Bazel via Bazelisk
- name: Set up Bazel
uses: bazelbuild/setup-bazelisk@v3

# TODO(mbolin): Bring this back once we have caching working. Currently,
# we never seem to get a cache hit but we still end up paying the cost of
# uploading at the end of the build, which takes over a minute!
#
# Cache build and external artifacts so that the next ci build is incremental.
# Because github action caches cannot be updated after a build, we need to
# store the contents of each build in a unique cache key, then fall back to loading
# it on the next ci run. We use hashFiles(...) in the key and restore-keys- with
# the prefix to load the most recent cache for the branch on a cache miss. You
# should customize the contents of hashFiles to capture any bazel input sources,
# although this doesn't need to be perfect. If none of the input sources change
# then a cache hit will load an existing cache and bazel won't have to do any work.
# In the case of a cache miss, you want the fallback cache to contain most of the
# previously built artifacts to minimize build time. The more precise you are with
# hashFiles sources the less work bazel will have to do.
# - name: Mount bazel caches
# uses: actions/cache@v5
# with:
# path: |
# ~/.cache/bazel-repo-cache
# ~/.cache/bazel-repo-contents-cache
# key: bazel-cache-${{ matrix.os }}-${{ hashFiles('**/BUILD.bazel', '**/*.bzl', 'MODULE.bazel') }}
# restore-keys: |
# bazel-cache-${{ matrix.os }}

- name: Configure Bazel startup args (Windows)
if: runner.os == 'Windows'
shell: pwsh
run: |
# Use a very short path to reduce argv/path length issues.
"BAZEL_STARTUP_ARGS=--output_user_root=C:\" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append

- name: bazel test //...
env:
BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }}
shell: bash
run: |
bazel $BAZEL_STARTUP_ARGS --bazelrc=.github/workflows/ci.bazelrc test //... \
--build_metadata=REPO_URL=https://github.com/openai/codex.git \
--build_metadata=COMMIT_SHA=$(git rev-parse HEAD) \
--build_metadata=ROLE=CI \
--build_metadata=VISIBILITY=PUBLIC \
"--remote_header=x-buildbuddy-api-key=$BUILDBUDDY_API_KEY"
2  .github/workflows/cargo-deny.yml  (vendored)
@@ -20,7 +20,7 @@ jobs:
uses: dtolnay/rust-toolchain@stable

- name: Run cargo-deny
uses: EmbarkStudios/cargo-deny-action@v2
uses: EmbarkStudios/cargo-deny-action@v1
with:
rust-version: stable
manifest-path: ./codex-rs/Cargo.toml
20  .github/workflows/ci.bazelrc  (vendored)
@@ -1,20 +0,0 @@
common --remote_download_minimal
common --nobuild_runfile_links
common --keep_going

# We prefer to run the build actions entirely remotely so we can dial up the concurrency.
# We have platform-specific tests, so we want to execute the tests on all platforms using the strongest sandboxing available on each platform.

# On linux, we can do a full remote build/test, by targeting the right (x86/arm) runners, so we have coverage of both.
# Linux crossbuilds don't work until we untangle the libc constraint mess.
common:linux --config=remote
common:linux --strategy=remote
common:linux --platforms=//:rbe

# On mac, we can run all the build actions remotely but test actions locally.
common:macos --config=remote
common:macos --strategy=remote
common:macos --strategy=TestRunner=darwin-sandbox,local

common:windows --strategy=TestRunner=local
@@ -12,8 +12,6 @@ permissions:
jobs:
close-stale-contributor-prs:
# Prevent scheduled runs on forks
if: github.repository == 'openai/codex'
runs-on: ubuntu-latest
steps:
- name: Close inactive PRs from contributors
3  .github/workflows/issue-deduplicator.yml  (vendored)
@@ -9,8 +9,7 @@ on:
jobs:
gather-duplicates:
name: Identify potential duplicates
# Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate'))
if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate') }}
runs-on: ubuntu-latest
permissions:
contents: read
3  .github/workflows/issue-labeler.yml  (vendored)
@@ -9,8 +9,7 @@ on:
jobs:
gather-labels:
name: Generate label suggestions
# Prevent runs on forks (requires OpenAI API key, wastes Actions minutes)
if: github.repository == 'openai/codex' && (github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label'))
if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label') }}
runs-on: ubuntu-latest
permissions:
contents: read
184  .github/workflows/rust-ci.yml  (vendored)
@@ -59,7 +59,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.93
- uses: dtolnay/rust-toolchain@1.90
with:
components: rustfmt
- name: cargo fmt
@@ -77,7 +77,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.93
- uses: dtolnay/rust-toolchain@1.90
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-shear
@@ -88,7 +88,7 @@ jobs:
# --- CI to validate on different os/targets --------------------------------
lint_build:
name: Lint/Build — ${{ matrix.runner }} - ${{ matrix.target }}${{ matrix.profile == 'release' && ' (release)' || '' }}
runs-on: ${{ matrix.runs_on || matrix.runner }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 30
needs: changed
# Keep job-level if to avoid spinning up runners when not needed
@@ -106,102 +106,55 @@ jobs:
fail-fast: false
matrix:
include:
- runner: macos-15-xlarge
- runner: macos-14
target: aarch64-apple-darwin
profile: dev
- runner: macos-15-xlarge
- runner: macos-14
target: x86_64-apple-darwin
profile: dev
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-x64
- runner: ubuntu-24.04
target: x86_64-unknown-linux-gnu
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-x64
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-arm64
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-gnu
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-arm64
- runner: windows-x64
- runner: windows-latest
target: x86_64-pc-windows-msvc
profile: dev
runs_on:
group: codex-runners
labels: codex-windows-x64
- runner: windows-arm64
- runner: windows-11-arm
target: aarch64-pc-windows-msvc
profile: dev
runs_on:
group: codex-runners
labels: codex-windows-arm64

# Also run representative release builds on Mac and Linux because
# there could be release-only build errors we want to catch.
# Hopefully this also pre-populates the build cache to speed up
# releases.
- runner: macos-15-xlarge
- runner: macos-14
target: aarch64-apple-darwin
profile: release
- runner: ubuntu-24.04
target: x86_64-unknown-linux-musl
profile: release
runs_on:
group: codex-runners
labels: codex-linux-x64
- runner: windows-x64
- runner: windows-latest
target: x86_64-pc-windows-msvc
profile: release
runs_on:
group: codex-runners
labels: codex-windows-x64
- runner: windows-arm64
- runner: windows-11-arm
target: aarch64-pc-windows-msvc
profile: release
runs_on:
group: codex-runners
labels: codex-windows-arm64

steps:
- uses: actions/checkout@v6
- name: Install UBSan runtime (musl)
if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
shell: bash
run: |
set -euo pipefail
if command -v apt-get >/dev/null 2>&1; then
sudo apt-get update -y
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
fi
- uses: dtolnay/rust-toolchain@1.93
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}
components: clippy

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Use hermetic Cargo home (musl)
shell: bash
run: |
set -euo pipefail
cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
mkdir -p "${cargo_home}/bin"
echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
echo "${cargo_home}/bin" >> "$GITHUB_PATH"
: > "${cargo_home}/config.toml"

- name: Compute lockfile hash
id: lockhash
working-directory: codex-rs
@@ -222,10 +175,6 @@ jobs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
${{ github.workspace }}/.cargo-home/bin/
${{ github.workspace }}/.cargo-home/registry/index/
${{ github.workspace }}/.cargo-home/registry/cache/
${{ github.workspace }}/.cargo-home/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}
restore-keys: |
cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
@@ -268,14 +217,6 @@ jobs:
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Disable sccache wrapper (musl)
shell: bash
run: |
set -euo pipefail
echo "RUSTC_WRAPPER=" >> "$GITHUB_ENV"
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Prepare APT cache directories (musl)
shell: bash
@@ -293,73 +234,15 @@ jobs:
/var/cache/apt
key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install musl build tools
env:
DEBIAN_FRONTEND: noninteractive
TARGET: ${{ matrix.target }}
APT_UPDATE_ARGS: -o Acquire::Retries=3
APT_INSTALL_ARGS: --no-install-recommends
shell: bash
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Configure rustc UBSan wrapper (musl host)
shell: bash
run: |
set -euo pipefail
ubsan=""
if command -v ldconfig >/dev/null 2>&1; then
ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
fi
wrapper_root="${RUNNER_TEMP:-/tmp}"
wrapper="${wrapper_root}/rustc-ubsan-wrapper"
cat > "${wrapper}" <<EOF
#!/usr/bin/env bash
set -euo pipefail
if [[ -n "${ubsan}" ]]; then
export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
fi
exec "\$1" "\${@:2}"
EOF
chmod +x "${wrapper}"
echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Clear sanitizer flags (musl)
shell: bash
run: |
set -euo pipefail
# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
echo "RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
# Override any runner-level Cargo config rustflags as well.
echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"

sanitize_flags() {
local input="$1"
input="${input//-fsanitize=undefined/}"
input="${input//-fno-sanitize-recover=undefined/}"
input="${input//-fno-sanitize-trap=undefined/}"
echo "$input"
}

cflags="$(sanitize_flags "${CFLAGS-}")"
cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
sudo apt-get -y update -o Acquire::Retries=3
sudo apt-get -y install --no-install-recommends musl-tools pkg-config

- name: Install cargo-chef
if: ${{ matrix.profile == 'release' }}
@@ -406,10 +289,6 @@ jobs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
${{ github.workspace }}/.cargo-home/bin/
${{ github.workspace }}/.cargo-home/registry/index/
${{ github.workspace }}/.cargo-home/registry/cache/
${{ github.workspace }}/.cargo-home/git/db/
key: cargo-home-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ steps.lockhash.outputs.toolchain_hash }}

- name: Save sccache cache (fallback)
@@ -457,7 +336,7 @@ jobs:

tests:
name: Tests — ${{ matrix.runner }} - ${{ matrix.target }}
runs-on: ${{ matrix.runs_on || matrix.runner }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 30
needs: changed
if: ${{ needs.changed.outputs.codex == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
@@ -474,43 +353,46 @@ jobs:
fail-fast: false
matrix:
include:
- runner: macos-15-xlarge
- runner: macos-14
target: aarch64-apple-darwin
profile: dev
- runner: ubuntu-24.04
target: x86_64-unknown-linux-gnu
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-x64
- runner: ubuntu-24.04-arm
target: aarch64-unknown-linux-gnu
profile: dev
runs_on:
group: codex-runners
labels: codex-linux-arm64
- runner: windows-x64
- runner: windows-latest
target: x86_64-pc-windows-msvc
profile: dev
runs_on:
group: codex-runners
labels: codex-windows-x64
- runner: windows-arm64
- runner: windows-11-arm
target: aarch64-pc-windows-msvc
profile: dev
runs_on:
group: codex-runners
labels: codex-windows-arm64

steps:
- uses: actions/checkout@v6

# We have been running out of space when running this job on Linux for
# x86_64-unknown-linux-gnu, so remove some unnecessary dependencies.
- name: Remove unnecessary dependencies to save space
if: ${{ startsWith(matrix.runner, 'ubuntu') }}
shell: bash
run: |
set -euo pipefail
sudo rm -rf \
/usr/local/lib/android \
/usr/share/dotnet \
/usr/local/share/boost \
/usr/local/lib/node_modules \
/opt/ghc
sudo apt-get remove -y docker.io docker-compose podman buildah

# Some integration tests rely on DotSlash being installed.
# See https://github.com/openai/codex/pull/7617.
- name: Install DotSlash
uses: facebook/install-dotslash@v2

- uses: dtolnay/rust-toolchain@1.93
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}
4  .github/workflows/rust-release-prepare.yml  (vendored)
@@ -14,8 +14,6 @@ permissions:

jobs:
prepare:
# Prevent scheduled runs on forks (no secrets, wastes Actions minutes)
if: github.repository == 'openai/codex'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
@@ -43,7 +41,7 @@ jobs:
curl --http1.1 --fail --show-error --location "${headers[@]}" "${url}" | jq '.' > codex-rs/core/models.json

- name: Open pull request (if changed)
uses: peter-evans/create-pull-request@v8
uses: peter-evans/create-pull-request@v7
with:
commit-message: "Update models.json"
title: "Update models.json"
165  .github/workflows/rust-release.yml  (vendored)
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@1.92

- name: Validate tag matches Cargo.toml version
shell: bash
run: |
@@ -45,20 +45,11 @@ jobs:
echo "✅ Tag and Cargo.toml agree (${tag_ver})"
echo "::endgroup::"

- name: Verify config schema fixture
shell: bash
working-directory: codex-rs
run: |
set -euo pipefail
echo "If this fails, run: just write-config-schema to overwrite fixture with intentional changes."
cargo run -p codex-core --bin codex-write-config-schema
git diff --exit-code core/config.schema.json

build:
needs: tag-check
name: Build - ${{ matrix.runner }} - ${{ matrix.target }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 60
timeout-minutes: 30
permissions:
contents: read
id-token: write
@@ -89,30 +80,10 @@ jobs:

steps:
- uses: actions/checkout@v6
- name: Install UBSan runtime (musl)
if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl' }}
shell: bash
run: |
set -euo pipefail
if command -v apt-get >/dev/null 2>&1; then
sudo apt-get update -y
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
fi
- uses: dtolnay/rust-toolchain@1.93
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Use hermetic Cargo home (musl)
shell: bash
run: |
set -euo pipefail
cargo_home="${GITHUB_WORKSPACE}/.cargo-home"
mkdir -p "${cargo_home}/bin"
echo "CARGO_HOME=${cargo_home}" >> "$GITHUB_ENV"
echo "${cargo_home}/bin" >> "$GITHUB_PATH"
: > "${cargo_home}/config.toml"

- uses: actions/cache@v5
with:
path: |
@@ -120,76 +91,14 @@ jobs:
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
${{ github.workspace }}/.cargo-home/bin/
${{ github.workspace }}/.cargo-home/registry/index/
${{ github.workspace }}/.cargo-home/registry/cache/
${{ github.workspace }}/.cargo-home/git/db/
${{ github.workspace }}/codex-rs/target/
key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install musl build tools
env:
TARGET: ${{ matrix.target }}
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Configure rustc UBSan wrapper (musl host)
shell: bash
run: |
set -euo pipefail
ubsan=""
if command -v ldconfig >/dev/null 2>&1; then
ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
fi
wrapper_root="${RUNNER_TEMP:-/tmp}"
wrapper="${wrapper_root}/rustc-ubsan-wrapper"
cat > "${wrapper}" <<EOF
#!/usr/bin/env bash
set -euo pipefail
if [[ -n "${ubsan}" ]]; then
export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
fi
exec "\$1" "\${@:2}"
EOF
chmod +x "${wrapper}"
echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Clear sanitizer flags (musl)
shell: bash
run: |
set -euo pipefail
# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
echo "RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
# Override any runner-level Cargo config rustflags as well.
echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"

sanitize_flags() {
local input="$1"
input="${input//-fsanitize=undefined/}"
input="${input//-fno-sanitize-recover=undefined/}"
input="${input//-fno-sanitize-trap=undefined/}"
echo "$input"
}

cflags="$(sanitize_flags "${CFLAGS-}")"
cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
sudo apt-get update
sudo apt-get install -y musl-tools pkg-config

- name: Cargo build
shell: bash
@@ -327,7 +236,6 @@ jobs:
# Path that contains the uncompressed binaries for the current
# ${{ matrix.target }}
dest="dist/${{ matrix.target }}"
repo_root=$PWD

# We want to ship the raw Windows executables in the GitHub Release
# in addition to the compressed archives. Keep the originals for
@@ -367,30 +275,7 @@ jobs:
# Must run from inside the dest dir so 7z won't
# embed the directory path inside the zip.
if [[ "${{ matrix.runner }}" == windows* ]]; then
if [[ "$base" == "codex-${{ matrix.target }}.exe" ]]; then
# Bundle the sandbox helper binaries into the main codex zip so
# WinGet installs include the required helpers next to codex.exe.
# Fall back to the single-binary zip if the helpers are missing
# to avoid breaking releases.
bundle_dir="$(mktemp -d)"
runner_src="$dest/codex-command-runner-${{ matrix.target }}.exe"
setup_src="$dest/codex-windows-sandbox-setup-${{ matrix.target }}.exe"
if [[ -f "$runner_src" && -f "$setup_src" ]]; then
cp "$dest/$base" "$bundle_dir/$base"
cp "$runner_src" "$bundle_dir/codex-command-runner.exe"
cp "$setup_src" "$bundle_dir/codex-windows-sandbox-setup.exe"
# Use an absolute path so bundle zips land in the real dist
# dir even when 7z runs from a temp directory.
(cd "$bundle_dir" && 7z a "$repo_root/$dest/${base}.zip" .)
else
echo "warning: missing sandbox binaries; falling back to single-binary zip"
echo "warning: expected $runner_src and $setup_src"
(cd "$dest" && 7z a "${base}.zip" "$base")
fi
rm -rf "$bundle_dir"
else
(cd "$dest" && 7z a "${base}.zip" "$base")
fi
(cd "$dest" && 7z a "${base}.zip" "$base")
fi

# Also create .zst (existing behaviour) *and* remove the original
@@ -438,26 +323,6 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v6

- name: Generate release notes from tag commit message
id: release_notes
shell: bash
run: |
set -euo pipefail

# On tag pushes, GITHUB_SHA may be a tag object for annotated tags;
# peel it to the underlying commit.
commit="$(git rev-parse "${GITHUB_SHA}^{commit}")"
notes_path="${RUNNER_TEMP}/release-notes.md"

# Use the commit message for the commit the tag points at (not the
# annotated tag message).
git log -1 --format=%B "${commit}" > "${notes_path}"
# Ensure trailing newline so GitHub's markdown renderer doesn't
# occasionally run the last line into subsequent content.
echo >> "${notes_path}"

echo "path=${notes_path}" >> "${GITHUB_OUTPUT}"

- uses: actions/download-artifact@v7
with:
path: dist
@@ -473,10 +338,6 @@ jobs:

ls -R dist/

- name: Add config schema release asset
run: |
cp codex-rs/core/config.schema.json dist/config-schema.json

- name: Define release name
id: release_name
run: |
@@ -534,7 +395,6 @@ jobs:
with:
name: ${{ steps.release_name.outputs.name }}
tag_name: ${{ github.ref_name }}
body_path: ${{ steps.release_notes.outputs.path }}
files: dist/**
# Mark as prerelease only when the version has a suffix after x.y.z
# (e.g. -alpha, -beta). Otherwise publish a normal release.
@@ -547,19 +407,6 @@ jobs:
tag: ${{ github.ref_name }}
config: .github/dotslash-config.json

- name: Trigger developers.openai.com deploy
# Only trigger the deploy if the release is not a pre-release.
# The deploy is used to update the developers.openai.com website with the new config schema json file.
if: ${{ !contains(steps.release_name.outputs.name, '-') }}
continue-on-error: true
env:
DEV_WEBSITE_VERCEL_DEPLOY_HOOK_URL: ${{ secrets.DEV_WEBSITE_VERCEL_DEPLOY_HOOK_URL }}
run: |
if ! curl -sS -f -o /dev/null -X POST "$DEV_WEBSITE_VERCEL_DEPLOY_HOOK_URL"; then
echo "::warning title=developers.openai.com deploy hook failed::Vercel deploy hook POST failed for ${GITHUB_REF_NAME}"
exit 1
fi

# Publish to npm using OIDC authentication.
# July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/
# npm docs: https://docs.npmjs.com/trusted-publishers
2  .github/workflows/sdk.yml  (vendored)
@@ -24,7 +24,7 @@ jobs:
node-version: 22
cache: pnpm

- uses: dtolnay/rust-toolchain@1.93
- uses: dtolnay/rust-toolchain@1.90

- name: build codex
run: cargo build --bin codex
86  .github/workflows/shell-tool-mcp.yml  (vendored)
@@ -93,83 +93,15 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v6

- name: Install UBSan runtime (musl)
if: ${{ matrix.install_musl }}
shell: bash
run: |
set -euo pipefail
if command -v apt-get >/dev/null 2>&1; then
sudo apt-get update -y
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
fi

- uses: dtolnay/rust-toolchain@1.93
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}

- if: ${{ matrix.install_musl }}
name: Install Zig
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

- if: ${{ matrix.install_musl }}
name: Install musl build dependencies
env:
TARGET: ${{ matrix.target }}
run: bash "${GITHUB_WORKSPACE}/.github/scripts/install-musl-build-tools.sh"

- if: ${{ matrix.install_musl }}
name: Configure rustc UBSan wrapper (musl host)
shell: bash
run: |
set -euo pipefail
ubsan=""
if command -v ldconfig >/dev/null 2>&1; then
ubsan="$(ldconfig -p | grep -m1 'libubsan\.so\.1' | sed -E 's/.*=> (.*)$/\1/')"
fi
wrapper_root="${RUNNER_TEMP:-/tmp}"
wrapper="${wrapper_root}/rustc-ubsan-wrapper"
cat > "${wrapper}" <<EOF
#!/usr/bin/env bash
set -euo pipefail
if [[ -n "${ubsan}" ]]; then
export LD_PRELOAD="${ubsan}\${LD_PRELOAD:+:\${LD_PRELOAD}}"
fi
exec "\$1" "\${@:2}"
EOF
chmod +x "${wrapper}"
echo "RUSTC_WRAPPER=${wrapper}" >> "$GITHUB_ENV"
echo "RUSTC_WORKSPACE_WRAPPER=" >> "$GITHUB_ENV"

- if: ${{ matrix.install_musl }}
name: Clear sanitizer flags (musl)
shell: bash
run: |
set -euo pipefail
# Clear global Rust flags so host/proc-macro builds don't pull in UBSan.
echo "RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_ENCODED_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "RUSTDOCFLAGS=" >> "$GITHUB_ENV"
# Override any runner-level Cargo config rustflags as well.
echo "CARGO_BUILD_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS=" >> "$GITHUB_ENV"

sanitize_flags() {
local input="$1"
input="${input//-fsanitize=undefined/}"
input="${input//-fno-sanitize-recover=undefined/}"
input="${input//-fno-sanitize-trap=undefined/}"
echo "$input"
}

cflags="$(sanitize_flags "${CFLAGS-}")"
cxxflags="$(sanitize_flags "${CXXFLAGS-}")"
echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
sudo apt-get update
sudo apt-get install -y musl-tools pkg-config

- name: Build exec server binaries
run: cargo build --release --target ${{ matrix.target }} --bin codex-exec-mcp-server --bin codex-execve-wrapper
@@ -266,7 +198,7 @@ jobs:
shell: bash
run: |
set -euo pipefail
git clone --depth 1 https://github.com/bolinfest/bash /tmp/bash
git clone --depth 1 https://github.com/bminor/bash /tmp/bash
cd /tmp/bash
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
@@ -308,7 +240,7 @@ jobs:
shell: bash
run: |
set -euo pipefail
git clone --depth 1 https://github.com/bolinfest/bash /tmp/bash
git clone --depth 1 https://github.com/bminor/bash /tmp/bash
cd /tmp/bash
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
@@ -344,6 +276,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 10.8.1
run_install: false

- name: Setup Node.js
@@ -436,6 +369,12 @@ jobs:
id-token: write
contents: read
steps:
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 10.8.1
run_install: false

- name: Setup Node.js
uses: actions/setup-node@v6
with:
@@ -443,7 +382,6 @@ jobs:
registry-url: https://registry.npmjs.org
scope: "@openai"

# Trusted publishing requires npm CLI version 11.5.1 or later.
- name: Update npm
run: npm install -g npm@latest
1  .gitignore  (vendored)
@@ -9,7 +9,6 @@ node_modules

# build
dist/
bazel-*
build/
out/
storybook-static/

@@ -1,6 +0,0 @@
config:
MD013:
line_length: 100

globs:
- "docs/tui-chat-composer.md"
15  AGENTS.md
@@ -11,17 +11,14 @@ In the codex-rs folder where the rust code lives:
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
- When possible, make `match` statements exhaustive and avoid wildcard arms.
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.

Run `just fmt` (in `codex-rs` directory) automatically after you have finished making Rust code changes; do not ask for approval to run it. Additionally, run the tests:
Run `just fmt` (in `codex-rs` directory) automatically after making Rust code changes; do not ask for approval to run it. Before finalizing a change to `codex-rs`, run `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates. Additionally, run the tests:

1. Run the test for the specific project that was changed. For example, if changes were made in `codex-rs/tui`, run `cargo test -p codex-tui`.
2. Once those pass, if any changes were made in common, core, or protocol, run the complete test suite with `cargo test --all-features`. project-specific or individual tests can be run without asking the user, but do ask the user before running the complete test suite.

Before finalizing a large change to `codex-rs`, run `just fix -p <project>` (in `codex-rs` directory) to fix any linter issues in the code. Prefer scoping with `-p` to avoid slow workspace‑wide Clippy builds; only run `just fix` without `-p` if you changed shared crates.
2. Once those pass, if any changes were made in common, core, or protocol, run the complete test suite with `cargo test --all-features`.
When running interactively, ask the user before running `just fix` to finalize. `just fmt` does not require approval. project-specific or individual tests can be run without asking the user, but do ask the user before running the complete test suite.

## TUI style conventions

@@ -80,12 +77,6 @@ If you don’t have the tool:
- Prefer deep equals comparisons whenever possible. Perform `assert_eq!()` on entire objects, rather than individual fields.
- Avoid mutating process environment in tests; prefer passing environment-derived flags or dependencies from above.

### Spawning workspace binaries in tests (Cargo vs Bazel)

- Prefer `codex_utils_cargo_bin::cargo_bin("...")` over `assert_cmd::Command::cargo_bin(...)` or `escargot` when tests need to spawn first-party binaries.
- Under Bazel, binaries and resources may live under runfiles; use `codex_utils_cargo_bin::cargo_bin` to resolve absolute paths that remain stable after `chdir`.
- When locating fixture files or test resources under Bazel, avoid `env!("CARGO_MANIFEST_DIR")`. Prefer `codex_utils_cargo_bin::find_resource!` so paths resolve correctly under both Cargo and Bazel runfiles.

### Integration tests (core)

- Prefer the utilities in `core_test_support::responses` when writing end-to-end Codex tests.
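A minimal sketch of the test pattern the AGENTS.md bullets above describe: spawn a first-party binary via `codex_utils_cargo_bin::cargo_bin` and assert on one whole value rather than field by field. The `cargo_bin` return type and the "codex" binary name are assumptions for illustration, not taken from the diff:

```rust
use std::process::Command;

#[test]
fn binary_prints_version() {
    // Assumed: cargo_bin resolves an absolute path that works under both
    // Cargo and Bazel runfiles, as the guideline states.
    let bin = codex_utils_cargo_bin::cargo_bin("codex");
    let output = Command::new(bin)
        .arg("--version")
        .output()
        .expect("failed to spawn workspace binary");
    // Compare a single composite value instead of asserting each field separately.
    assert_eq!(
        (output.status.success(), output.stderr.is_empty()),
        (true, true)
    );
}
```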
23  BUILD.bazel
@@ -1,23 +0,0 @@
load("@apple_support//xcode:xcode_config.bzl", "xcode_config")

xcode_config(name = "disable_xcode")

# We mark the local platform as glibc-compatible so that rust can grab a toolchain for us.
# TODO(zbarsky): Upstream a better libc constraint into rules_rust.
# We only enable this on linux though for sanity, and because it breaks remote execution.
platform(
name = "local",
constraint_values = [
"@toolchains_llvm_bootstrapped//constraints/libc:gnu.2.28",
],
parents = [
"@platforms//host",
],
)

alias(
name = "rbe",
actual = "@rbe_platform",
)

exports_files(["AGENTS.md"])
136  MODULE.bazel
@@ -1,136 +0,0 @@
bazel_dep(name = "platforms", version = "1.0.0")
bazel_dep(name = "toolchains_llvm_bootstrapped", version = "0.3.1")
archive_override(
module_name = "toolchains_llvm_bootstrapped",
integrity = "sha256-4/2h4tYSUSptxFVI9G50yJxWGOwHSeTeOGBlaLQBV8g=",
strip_prefix = "toolchains_llvm_bootstrapped-d20baf67e04d8e2887e3779022890d1dc5e6b948",
urls = ["https://github.com/cerisier/toolchains_llvm_bootstrapped/archive/d20baf67e04d8e2887e3779022890d1dc5e6b948.tar.gz"],
)

osx = use_extension("@toolchains_llvm_bootstrapped//toolchain/extension:osx.bzl", "osx")
osx.framework(name = "ApplicationServices")
osx.framework(name = "AppKit")
osx.framework(name = "ColorSync")
osx.framework(name = "CoreFoundation")
osx.framework(name = "CoreGraphics")
osx.framework(name = "CoreServices")
osx.framework(name = "CoreText")
osx.framework(name = "CFNetwork")
osx.framework(name = "Foundation")
osx.framework(name = "ImageIO")
osx.framework(name = "Kernel")
osx.framework(name = "OSLog")
osx.framework(name = "Security")
osx.framework(name = "SystemConfiguration")

register_toolchains(
"@toolchains_llvm_bootstrapped//toolchain:all",
)

# Needed to disable xcode...
bazel_dep(name = "apple_support", version = "2.1.0")
bazel_dep(name = "rules_cc", version = "0.2.16")
bazel_dep(name = "rules_platform", version = "0.1.0")
bazel_dep(name = "rules_rust", version = "0.68.1")
single_version_override(
module_name = "rules_rust",
patch_strip = 1,
patches = [
"//patches:rules_rust.patch",
"//patches:rules_rust_windows_gnu.patch",
"//patches:rules_rust_musl.patch",
],
)

RUST_TRIPLES = [
"aarch64-unknown-linux-musl",
"aarch64-apple-darwin",
"aarch64-pc-windows-gnullvm",
"x86_64-unknown-linux-musl",
"x86_64-apple-darwin",
"x86_64-pc-windows-gnullvm",
]

rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(
edition = "2024",
extra_target_triples = RUST_TRIPLES,
versions = ["1.93.0"],
)
use_repo(rust, "rust_toolchains")

register_toolchains("@rust_toolchains//:all")

bazel_dep(name = "rules_rs", version = "0.0.23")

crate = use_extension("@rules_rs//rs:extensions.bzl", "crate")
crate.from_cargo(
cargo_lock = "//codex-rs:Cargo.lock",
cargo_toml = "//codex-rs:Cargo.toml",
platform_triples = RUST_TRIPLES,
)
crate.annotation(
crate = "nucleo-matcher",
strip_prefix = "matcher",
version = "0.3.1",
)

bazel_dep(name = "openssl", version = "3.5.4.bcr.0")

crate.annotation(
build_script_data = [
"@openssl//:gen_dir",
],
build_script_env = {
"OPENSSL_DIR": "$(execpath @openssl//:gen_dir)",
"OPENSSL_NO_VENDOR": "1",
"OPENSSL_STATIC": "1",
},
crate = "openssl-sys",
data = ["@openssl//:gen_dir"],
)

inject_repo(crate, "openssl")

crate.annotation(
crate = "runfiles",
workspace_cargo_toml = "rust/runfiles/Cargo.toml",
)

# Fix readme inclusions
crate.annotation(
crate = "windows-link",
patch_args = ["-p1"],
patches = [
"//patches:windows-link.patch",
],
)

WINDOWS_IMPORT_LIB = """
load("@rules_cc//cc:defs.bzl", "cc_import")

cc_import(
name = "windows_import_lib",
static_library = glob(["lib/*.a"])[0],
)
"""

crate.annotation(
additive_build_file_content = WINDOWS_IMPORT_LIB,
crate = "windows_x86_64_gnullvm",
gen_build_script = "off",
deps = [":windows_import_lib"],
)
crate.annotation(
additive_build_file_content = WINDOWS_IMPORT_LIB,
crate = "windows_aarch64_gnullvm",
gen_build_script = "off",
deps = [":windows_import_lib"],
)
use_repo(crate, "crates")

rbe_platform_repository = use_repo_rule("//:rbe.bzl", "rbe_platform_repository")

rbe_platform_repository(
name = "rbe_platform",
)
1416 MODULE.bazel.lock (generated)
File diff suppressed because one or more lines are too long
70 PNPM.md (new file)
@@ -0,0 +1,70 @@
# Migration to pnpm

This project has been migrated from npm to pnpm to improve dependency management and developer experience.

## Why pnpm?

- **Faster installation**: pnpm is significantly faster than npm and yarn
- **Disk space savings**: pnpm uses a content-addressable store to avoid duplication
- **Phantom dependency prevention**: pnpm creates a strict node_modules structure
- **Native workspaces support**: simplified monorepo management

## How to use pnpm

### Installation

```bash
# Global installation of pnpm
npm install -g pnpm@10.8.1

# Or with corepack (available with Node.js 22+)
corepack enable
corepack prepare pnpm@10.8.1 --activate
```

### Common commands

| npm command     | pnpm equivalent  |
| --------------- | ---------------- |
| `npm install`   | `pnpm install`   |
| `npm run build` | `pnpm run build` |
| `npm test`      | `pnpm test`      |
| `npm run lint`  | `pnpm run lint`  |

### Workspace-specific commands

| Action                                     | Command                                  |
| ------------------------------------------ | ---------------------------------------- |
| Run a command in a specific package        | `pnpm --filter @openai/codex run build`  |
| Install a dependency in a specific package | `pnpm --filter @openai/codex add lodash` |
| Run a command in all packages              | `pnpm -r run test`                       |

## Monorepo structure

```
codex/
├── pnpm-workspace.yaml # Workspace configuration
├── .npmrc # pnpm configuration
├── package.json # Root dependencies and scripts
├── codex-cli/ # Main package
│   └── package.json # codex-cli specific dependencies
└── docs/ # Documentation (future package)
```

## Configuration files

- **pnpm-workspace.yaml**: Defines the packages included in the monorepo
- **.npmrc**: Configures pnpm behavior
- **Root package.json**: Contains shared scripts and dependencies

## CI/CD

CI/CD workflows have been updated to use pnpm instead of npm. Make sure your CI environments use pnpm 10.8.1 or higher.

## Known issues

If you encounter issues with pnpm, try the following solutions:

1. Remove the `node_modules` folder and `pnpm-lock.yaml` file, then run `pnpm install`
2. Make sure you're using pnpm 10.8.1 or higher
3. Verify that Node.js 22 or higher is installed
79 README.md
@@ -1,11 +1,13 @@
<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install --cask codex</code></p>

<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.
<p align="center">
  <img src="https://github.com/openai/codex/blob/main/.github/codex-cli-splash.png" alt="Codex CLI splash" width="80%" />
</p>
</br>
If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE.</a>
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a>.</p>
</br>If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE</a>
</br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a></p>

<p align="center">
  <img src="./.github/codex-cli-splash.png" alt="Codex CLI splash" width="80%" />
</p>

---

@@ -13,19 +15,25 @@ If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="http

### Installing and running Codex CLI

Install globally with your preferred package manager:
Install globally with your preferred package manager. If you use npm:

```shell
# Install using npm
npm install -g @openai/codex
```

Alternatively, if you use Homebrew:

```shell
# Install using Homebrew
brew install --cask codex
```

Then simply run `codex` to get started.
Then simply run `codex` to get started:

```shell
codex
```

If you're running into upgrade issues with Homebrew, see the [FAQ entry on brew upgrade codex](./docs/faq.md#brew-upgrade-codex-isnt-upgrading-me).

<details>
<summary>You can also go to the <a href="https://github.com/openai/codex/releases/latest">latest GitHub Release</a> and download the appropriate binary for your platform.</summary>

@@ -45,15 +53,60 @@ Each archive contains a single entry with the platform baked into the name (e.g.

### Using Codex with your ChatGPT plan

<p align="center">
  <img src="./.github/codex-cli-login.png" alt="Codex CLI login" width="80%" />
</p>

Run `codex` and select **Sign in with ChatGPT**. We recommend signing into your ChatGPT account to use Codex as part of your Plus, Pro, Team, Edu, or Enterprise plan. [Learn more about what's included in your ChatGPT plan](https://help.openai.com/en/articles/11369540-codex-in-chatgpt).

You can also use Codex with an API key, but this requires [additional setup](https://developers.openai.com/codex/auth#sign-in-with-an-api-key).
You can also use Codex with an API key, but this requires [additional setup](./docs/authentication.md#usage-based-billing-alternative-use-an-openai-api-key). If you previously used an API key for usage-based billing, see the [migration steps](./docs/authentication.md#migrating-from-usage-based-billing-api-key). If you're having trouble with login, please comment on [this issue](https://github.com/openai/codex/issues/1243).

## Docs
### Model Context Protocol (MCP)

- [**Codex Documentation**](https://developers.openai.com/codex)
Codex can access MCP servers. To configure them, refer to the [config docs](./docs/config.md#mcp_servers).

### Configuration

Codex CLI supports a rich set of configuration options, with preferences stored in `~/.codex/config.toml`. For full configuration options, see [Configuration](./docs/config.md).

### Execpolicy

See the [Execpolicy quickstart](./docs/execpolicy.md) to set up rules that govern what commands Codex can execute.

### Docs & FAQ

- [**Getting started**](./docs/getting-started.md)
  - [CLI usage](./docs/getting-started.md#cli-usage)
  - [Slash Commands](./docs/slash_commands.md)
  - [Running with a prompt as input](./docs/getting-started.md#running-with-a-prompt-as-input)
  - [Example prompts](./docs/getting-started.md#example-prompts)
  - [Custom prompts](./docs/prompts.md)
  - [Memory with AGENTS.md](./docs/getting-started.md#memory-with-agentsmd)
- [**Configuration**](./docs/config.md)
  - [Example config](./docs/example-config.md)
- [**Sandbox & approvals**](./docs/sandbox.md)
- [**Execpolicy quickstart**](./docs/execpolicy.md)
- [**Authentication**](./docs/authentication.md)
  - [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced)
  - [Login on a "Headless" machine](./docs/authentication.md#connecting-on-a-headless-machine)
- **Automating Codex**
  - [GitHub Action](https://github.com/openai/codex-action)
  - [TypeScript SDK](./sdk/typescript/README.md)
  - [Non-interactive mode (`codex exec`)](./docs/exec.md)
- [**Advanced**](./docs/advanced.md)
  - [Tracing / verbose logging](./docs/advanced.md#tracing--verbose-logging)
  - [Model Context Protocol (MCP)](./docs/advanced.md#model-context-protocol-mcp)
- [**Zero data retention (ZDR)**](./docs/zdr.md)
- [**Contributing**](./docs/contributing.md)
- [**Installing & building**](./docs/install.md)
- [**Install & build**](./docs/install.md)
  - [System Requirements](./docs/install.md#system-requirements)
  - [DotSlash](./docs/install.md#dotslash)
  - [Build from source](./docs/install.md#build-from-source)
- [**FAQ**](./docs/faq.md)
- [**Open source fund**](./docs/open-source-fund.md)

---

## License

This repository is licensed under the [Apache-2.0 License](LICENSE).
@@ -1,17 +0,0 @@
# Example announcement tips for Codex TUI.
# Each [[announcements]] entry is evaluated in order; the last matching one is shown.
# Dates are UTC, formatted as YYYY-MM-DD. The from_date is inclusive and the to_date is exclusive.
# version_regex matches against the CLI version (env!("CARGO_PKG_VERSION")); omit to apply to all versions.
# target_app specifies which app should display the announcement (cli, vsce, ...).

[[announcements]]
content = "Welcome to Codex! Check out the new onboarding flow."
from_date = "2024-10-01"
to_date = "2024-10-15"
target_app = "cli"

# Test announcement only for the local build version until 2026-01-10 excluded (past)
[[announcements]]
content = "This is a test announcement"
version_regex = "^0\\.0\\.0$"
to_date = "2026-05-10"
1 codex-cli/bin/codex.js (Executable file → Normal file)
@@ -95,6 +95,7 @@ function detectPackageManager() {
    return "bun";
  }

  if (
    __dirname.includes(".bun/install/global") ||
    __dirname.includes(".bun\\install\\global")
18 codex-cli/package-lock.json (generated, new file)
@@ -0,0 +1,18 @@
{
  "name": "@openai/codex",
  "version": "0.0.0-dev",
  "lockfileVersion": 3,
  "packages": {
    "": {
      "name": "@openai/codex",
      "version": "0.0.0-dev",
      "license": "Apache-2.0",
      "bin": {
        "codex": "bin/codex.js"
      },
      "engines": {
        "node": ">=16"
      }
    }
  }
}
@@ -17,6 +17,5 @@
    "type": "git",
    "url": "git+https://github.com/openai/codex.git",
    "directory": "codex-cli"
  },
  "packageManager": "pnpm@10.28.2+sha512.41872f037ad22f7348e3b1debbaf7e867cfd448f2726d9cf74c08f19507c31d2c8e7a11525b983febc2df640b5438dee6023ebb1f84ed43cc2d654d2bc326264"
}
}
@@ -2,7 +2,6 @@
|
||||
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""
|
||||
|
||||
import argparse
|
||||
from contextlib import contextmanager
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
@@ -13,7 +12,6 @@ import zipfile
|
||||
from dataclasses import dataclass
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from pathlib import Path
|
||||
import sys
|
||||
from typing import Iterable, Sequence
|
||||
from urllib.parse import urlparse
|
||||
from urllib.request import urlopen
|
||||
@@ -79,45 +77,6 @@ RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
|
||||
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
|
||||
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]
|
||||
|
||||
# urllib.request.urlopen() defaults to no timeout (can hang indefinitely), which is painful in CI.
|
||||
DOWNLOAD_TIMEOUT_SECS = 60
|
||||
|
||||
|
||||
def _gha_enabled() -> bool:
|
||||
# GitHub Actions supports "workflow commands" (e.g. ::group:: / ::error::) that make logs
|
||||
# much easier to scan: groups collapse noisy sections and error annotations surface the
|
||||
# failure in the UI without changing the actual exception/traceback output.
|
||||
return os.environ.get("GITHUB_ACTIONS") == "true"
|
||||
|
||||
|
||||
def _gha_escape(value: str) -> str:
|
||||
# Workflow commands require percent/newline escaping.
|
||||
return value.replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A")
|
||||
|
||||
|
||||
def _gha_error(*, title: str, message: str) -> None:
|
||||
# Emit a GitHub Actions error annotation. This does not replace stdout/stderr logs; it just
|
||||
# adds a prominent summary line to the job UI so the root cause is easier to spot.
|
||||
if not _gha_enabled():
|
||||
return
|
||||
print(
|
||||
f"::error title={_gha_escape(title)}::{_gha_escape(message)}",
|
||||
flush=True,
|
||||
)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _gha_group(title: str):
|
||||
# Wrap a block in a collapsible log group on GitHub Actions. Outside of GHA this is a no-op
|
||||
# so local output remains unchanged.
|
||||
if _gha_enabled():
|
||||
print(f"::group::{_gha_escape(title)}", flush=True)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if _gha_enabled():
|
||||
print("::endgroup::", flush=True)
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description="Install native Codex binaries.")
|
||||
@@ -172,20 +131,18 @@ def main() -> int:
|
||||
workflow_id = workflow_url.rstrip("/").split("/")[-1]
|
||||
print(f"Downloading native artifacts from workflow {workflow_id}...")
|
||||
|
||||
with _gha_group(f"Download native artifacts from workflow {workflow_id}"):
|
||||
with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
|
||||
artifacts_dir = Path(artifacts_dir_str)
|
||||
_download_artifacts(workflow_id, artifacts_dir)
|
||||
install_binary_components(
|
||||
artifacts_dir,
|
||||
vendor_dir,
|
||||
[BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS],
|
||||
)
|
||||
with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
|
||||
artifacts_dir = Path(artifacts_dir_str)
|
||||
_download_artifacts(workflow_id, artifacts_dir)
|
||||
install_binary_components(
|
||||
artifacts_dir,
|
||||
vendor_dir,
|
||||
[BINARY_COMPONENTS[name] for name in components if name in BINARY_COMPONENTS],
|
||||
)
|
||||
|
||||
if "rg" in components:
|
||||
with _gha_group("Fetch ripgrep binaries"):
|
||||
print("Fetching ripgrep binaries...")
|
||||
fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
|
||||
print("Fetching ripgrep binaries...")
|
||||
fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
|
||||
|
||||
print(f"Installed native dependencies into {vendor_dir}")
|
||||
return 0
|
||||
@@ -246,14 +203,7 @@ def fetch_rg(
|
||||
|
||||
for future in as_completed(future_map):
|
||||
target = future_map[future]
|
||||
try:
|
||||
results[target] = future.result()
|
||||
except Exception as exc:
|
||||
_gha_error(
|
||||
title="ripgrep install failed",
|
||||
message=f"target={target} error={exc!r}",
|
||||
)
|
||||
raise RuntimeError(f"Failed to install ripgrep for target {target}.") from exc
|
||||
results[target] = future.result()
|
||||
print(f" installed ripgrep for {target}")
|
||||
|
||||
return [results[target] for target in targets]
|
||||
@@ -351,8 +301,6 @@ def _fetch_single_rg(
|
||||
url = providers[0]["url"]
|
||||
archive_format = platform_info.get("format", "zst")
|
||||
archive_member = platform_info.get("path")
|
||||
digest = platform_info.get("digest")
|
||||
expected_size = platform_info.get("size")
|
||||
|
||||
dest_dir = vendor_dir / target / "path"
|
||||
dest_dir.mkdir(parents=True, exist_ok=True)
|
||||
@@ -365,32 +313,10 @@ def _fetch_single_rg(
|
||||
tmp_dir = Path(tmp_dir_str)
|
||||
archive_filename = os.path.basename(urlparse(url).path)
|
||||
download_path = tmp_dir / archive_filename
|
||||
print(
|
||||
f" downloading ripgrep for {target} ({platform_key}) from {url}",
|
||||
flush=True,
|
||||
)
|
||||
try:
|
||||
_download_file(url, download_path)
|
||||
except Exception as exc:
|
||||
_gha_error(
|
||||
title="ripgrep download failed",
|
||||
message=f"target={target} platform={platform_key} url={url} error={exc!r}",
|
||||
)
|
||||
raise RuntimeError(
|
||||
"Failed to download ripgrep "
|
||||
f"(target={target}, platform={platform_key}, format={archive_format}, "
|
||||
f"expected_size={expected_size!r}, digest={digest!r}, url={url}, dest={download_path})."
|
||||
) from exc
|
||||
_download_file(url, download_path)
|
||||
|
||||
dest.unlink(missing_ok=True)
|
||||
try:
|
||||
extract_archive(download_path, archive_format, archive_member, dest)
|
||||
except Exception as exc:
|
||||
raise RuntimeError(
|
||||
"Failed to extract ripgrep "
|
||||
f"(target={target}, platform={platform_key}, format={archive_format}, "
|
||||
f"member={archive_member!r}, url={url}, archive={download_path})."
|
||||
) from exc
|
||||
extract_archive(download_path, archive_format, archive_member, dest)
|
||||
|
||||
if not is_windows:
|
||||
dest.chmod(0o755)
|
||||
@@ -400,9 +326,7 @@ def _fetch_single_rg(
|
||||
|
||||
def _download_file(url: str, dest: Path) -> None:
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
dest.unlink(missing_ok=True)
|
||||
|
||||
with urlopen(url, timeout=DOWNLOAD_TIMEOUT_SECS) as response, open(dest, "wb") as out:
|
||||
with urlopen(url) as response, open(dest, "wb") as out:
|
||||
shutil.copyfileobj(response, out)
|
||||
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
2008
codex-rs/Cargo.lock
generated
2008
codex-rs/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -6,12 +6,10 @@ members = [
|
||||
"app-server",
|
||||
"app-server-protocol",
|
||||
"app-server-test-client",
|
||||
"debug-client",
|
||||
"apply-patch",
|
||||
"arg0",
|
||||
"feedback",
|
||||
"codex-backend-openapi-models",
|
||||
"cloud-requirements",
|
||||
"cloud-tasks",
|
||||
"cloud-tasks-client",
|
||||
"cli",
|
||||
@@ -28,7 +26,6 @@ members = [
|
||||
"login",
|
||||
"mcp-server",
|
||||
"mcp-types",
|
||||
"network-proxy",
|
||||
"ollama",
|
||||
"process-hardening",
|
||||
"protocol",
|
||||
@@ -37,24 +34,22 @@ members = [
|
||||
"stdio-to-uds",
|
||||
"otel",
|
||||
"tui",
|
||||
"tui2",
|
||||
"utils/absolute-path",
|
||||
"utils/cargo-bin",
|
||||
"utils/git",
|
||||
"utils/cache",
|
||||
"utils/image",
|
||||
"utils/json-to-toml",
|
||||
"utils/home-dir",
|
||||
"utils/pty",
|
||||
"utils/readiness",
|
||||
"utils/string",
|
||||
"codex-client",
|
||||
"codex-api",
|
||||
"state",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
version = "0.94.0"
|
||||
version = "0.0.0"
|
||||
# Track the edition for all workspace crates in one place. Individual
|
||||
# crates can still override this value, but keeping it here means new
|
||||
# crates created with `cargo new -w ...` automatically inherit the 2024
|
||||
@@ -73,9 +68,7 @@ codex-apply-patch = { path = "apply-patch" }
|
||||
codex-arg0 = { path = "arg0" }
|
||||
codex-async-utils = { path = "async-utils" }
|
||||
codex-backend-client = { path = "backend-client" }
|
||||
codex-cloud-requirements = { path = "cloud-requirements" }
|
||||
codex-chatgpt = { path = "chatgpt" }
|
||||
codex-cli = { path = "cli"}
|
||||
codex-client = { path = "codex-client" }
|
||||
codex-common = { path = "common" }
|
||||
codex-core = { path = "core" }
|
||||
@@ -95,15 +88,13 @@ codex-process-hardening = { path = "process-hardening" }
|
||||
codex-protocol = { path = "protocol" }
|
||||
codex-responses-api-proxy = { path = "responses-api-proxy" }
|
||||
codex-rmcp-client = { path = "rmcp-client" }
|
||||
codex-state = { path = "state" }
|
||||
codex-stdio-to-uds = { path = "stdio-to-uds" }
|
||||
codex-tui = { path = "tui" }
|
||||
codex-tui2 = { path = "tui2" }
|
||||
codex-utils-absolute-path = { path = "utils/absolute-path" }
|
||||
codex-utils-cache = { path = "utils/cache" }
|
||||
codex-utils-cargo-bin = { path = "utils/cargo-bin" }
|
||||
codex-utils-image = { path = "utils/image" }
|
||||
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
|
||||
codex-utils-home-dir = { path = "utils/home-dir" }
|
||||
codex-utils-pty = { path = "utils/pty" }
|
||||
codex-utils-readiness = { path = "utils/readiness" }
|
||||
codex-utils-string = { path = "utils/string" }
|
||||
@@ -127,13 +118,12 @@ axum = { version = "0.8", default-features = false }
|
||||
base64 = "0.22.1"
|
||||
bytes = "1.10.1"
|
||||
chardetng = "0.1.17"
|
||||
chrono = "0.4.43"
|
||||
chrono = "0.4.42"
|
||||
clap = "4"
|
||||
clap_complete = "4"
|
||||
color-eyre = "0.6.3"
|
||||
crossterm = "0.28.1"
|
||||
crossbeam-channel = "0.5.15"
|
||||
ctor = "0.6.3"
|
||||
ctor = "0.5.0"
|
||||
derive_more = "2"
|
||||
diffy = "0.4.2"
|
||||
dirs = "6"
|
||||
@@ -142,39 +132,38 @@ dunce = "1.0.4"
|
||||
encoding_rs = "0.8.35"
|
||||
env-flags = "0.1.1"
|
||||
env_logger = "0.11.5"
|
||||
escargot = "0.5"
|
||||
eventsource-stream = "0.2.3"
|
||||
futures = { version = "0.3", default-features = false }
|
||||
globset = "0.4"
|
||||
http = "1.3.1"
|
||||
icu_decimal = "2.1"
|
||||
icu_locale_core = "2.1"
|
||||
icu_provider = { version = "2.1", features = ["sync"] }
|
||||
ignore = "0.4.23"
|
||||
indoc = "2.0"
|
||||
image = { version = "^0.25.9", default-features = false }
|
||||
include_dir = "0.7.4"
|
||||
indexmap = "2.12.0"
|
||||
insta = "1.46.0"
|
||||
insta = "1.44.3"
|
||||
itertools = "0.14.0"
|
||||
keyring = { version = "3.6", default-features = false }
|
||||
landlock = "0.4.4"
|
||||
landlock = "0.4.1"
|
||||
lazy_static = "1"
|
||||
libc = "0.2.177"
|
||||
log = "0.4"
|
||||
lru = "0.16.3"
|
||||
lru = "0.16.2"
|
||||
maplit = "1.0.2"
|
||||
mime_guess = "2.0.5"
|
||||
multimap = "0.10.0"
|
||||
notify = "8.2.0"
|
||||
nucleo = { git = "https://github.com/helix-editor/nucleo.git", rev = "4253de9faabb4e5c6d81d946a5e35a90f87347ee" }
|
||||
nucleo-matcher = "0.3.1"
|
||||
once_cell = "1.20.2"
|
||||
openssl-sys = "*"
|
||||
opentelemetry = "0.31.0"
|
||||
opentelemetry-appender-tracing = "0.31.0"
|
||||
opentelemetry-otlp = "0.31.0"
|
||||
opentelemetry-semantic-conventions = "0.31.0"
|
||||
opentelemetry_sdk = "0.31.0"
|
||||
tracing-opentelemetry = "0.32.0"
|
||||
opentelemetry = "0.30.0"
|
||||
opentelemetry-appender-tracing = "0.30.0"
|
||||
opentelemetry-otlp = "0.30.0"
|
||||
opentelemetry-semantic-conventions = "0.30.0"
|
||||
opentelemetry_sdk = "0.30.0"
|
||||
tracing-opentelemetry = "0.31.0"
|
||||
os_info = "3.12.0"
|
||||
owo-colors = "4.2.0"
|
||||
path-absolutize = "3.1.1"
|
||||
@@ -187,51 +176,46 @@ rand = "0.9"
|
||||
ratatui = "0.29.0"
|
||||
ratatui-macros = "0.6.0"
|
||||
regex = "1.12.2"
|
||||
regex-lite = "0.1.8"
|
||||
regex-lite = "0.1.7"
|
||||
reqwest = "0.12"
|
||||
rmcp = { version = "0.12.0", default-features = false }
|
||||
runfiles = { git = "https://github.com/dzbarsky/rules_rust", rev = "b56cbaa8465e74127f1ea216f813cd377295ad81" }
|
||||
schemars = "0.8.22"
|
||||
seccompiler = "0.5.0"
|
||||
sentry = "0.46.0"
|
||||
serde = "1"
|
||||
serde_json = "1"
|
||||
serde_path_to_error = "0.1.20"
|
||||
serde_with = "3.16"
|
||||
serde_yaml = "0.9"
|
||||
serial_test = "3.2.0"
|
||||
sha1 = "0.10.6"
|
||||
sha2 = "0.10"
|
||||
semver = "1.0"
|
||||
shlex = "1.3.0"
|
||||
similar = "2.7.0"
|
||||
socket2 = "0.6.1"
|
||||
sqlx = { version = "0.8.6", default-features = false, features = ["chrono", "json", "macros", "migrate", "runtime-tokio-rustls", "sqlite", "time", "uuid"] }
|
||||
starlark = "0.13.0"
|
||||
strum = "0.27.2"
|
||||
strum_macros = "0.27.2"
|
||||
supports-color = "3.0.2"
|
||||
sys-locale = "0.3.2"
|
||||
tempfile = "3.23.0"
|
||||
test-log = "0.2.19"
|
||||
test-log = "0.2.18"
|
||||
textwrap = "0.16.2"
|
||||
thiserror = "2.0.17"
|
||||
time = "0.3"
|
||||
tiny_http = "0.12"
|
||||
tokio = "1"
|
||||
tokio-stream = "0.1.18"
|
||||
tokio-stream = "0.1.17"
|
||||
tokio-test = "0.4"
|
||||
tokio-tungstenite = { version = "0.28.0", features = ["proxy", "rustls-tls-native-roots"] }
|
||||
tokio-util = "0.7.18"
|
||||
tokio-util = "0.7.16"
|
||||
toml = "0.9.5"
|
||||
toml_edit = "0.24.0"
|
||||
tracing = "0.1.44"
|
||||
toml_edit = "0.23.5"
|
||||
tonic = "0.13.1"
|
||||
tracing = "0.1.43"
|
||||
tracing-appender = "0.2.3"
|
||||
tracing-subscriber = "0.3.22"
|
||||
tracing-subscriber = "0.3.20"
|
||||
tracing-test = "0.2.5"
|
||||
tree-sitter = "0.25.10"
|
||||
tree-sitter-bash = "0.25"
|
||||
zstd = "0.13"
|
||||
tree-sitter-highlight = "0.25.10"
|
||||
ts-rs = "11"
|
||||
uds_windows = "1.1.0"
|
||||
@@ -243,7 +227,7 @@ uuid = "1"
|
||||
vt100 = "0.16.2"
|
||||
walkdir = "2.5.0"
|
||||
webbrowser = "1.0"
|
||||
which = "8"
|
||||
which = "6"
|
||||
wildmatch = "2.6.1"
|
||||
|
||||
wiremock = "0.6"
|
||||
@@ -311,10 +295,6 @@ opt-level = 0
|
||||
# ratatui = { path = "../../ratatui" }
|
||||
crossterm = { git = "https://github.com/nornagon/crossterm", branch = "nornagon/color-query" }
|
||||
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }
|
||||
tokio-tungstenite = { git = "https://github.com/JakkuSakura/tokio-tungstenite", rev = "2ae536b0de793f3ddf31fc2f22d445bf1ef2023d" }
|
||||
|
||||
# Uncomment to debug local changes.
|
||||
# rmcp = { path = "../../rust-sdk/crates/rmcp" }
|
||||
|
||||
[patch."ssh://git@github.com/JakkuSakura/tungstenite-rs.git"]
|
||||
tungstenite = { git = "https://github.com/JakkuSakura/tungstenite-rs", rev = "f514de8644821113e5d18a027d6d28a5c8cc0a6e" }
|
||||
|
||||
@@ -15,8 +15,8 @@ You can also install via Homebrew (`brew install --cask codex`) or download a pl

## Documentation quickstart

- First run with Codex? Start with [`docs/getting-started.md`](../docs/getting-started.md) (links to the walkthrough for prompts, keyboard shortcuts, and session management).
- Want deeper control? See [`docs/config.md`](../docs/config.md) and [`docs/install.md`](../docs/install.md).
- First run with Codex? Follow the walkthrough in [`docs/getting-started.md`](../docs/getting-started.md) for prompts, keyboard shortcuts, and session management.
- Already shipping with Codex and want deeper control? Jump to [`docs/advanced.md`](../docs/advanced.md) and the configuration reference at [`docs/config.md`](../docs/config.md).

## What's new in the Rust CLI

@@ -30,7 +30,7 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses

#### MCP client

Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#connecting-to-mcp-servers) for details.
Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#mcp_servers) for details.

#### MCP server (experimental)
@@ -1,6 +0,0 @@
|
||||
load("//:defs.bzl", "codex_rust_crate")
|
||||
|
||||
codex_rust_crate(
|
||||
name = "ansi-escape",
|
||||
crate_name = "codex_ansi_escape",
|
||||
)
|
||||
@@ -1,7 +0,0 @@
|
||||
load("//:defs.bzl", "codex_rust_crate")
|
||||
|
||||
codex_rust_crate(
|
||||
name = "app-server-protocol",
|
||||
crate_name = "codex_app_server_protocol",
|
||||
test_data_extra = glob(["schema/**"], allow_empty = True),
|
||||
)
|
||||
@@ -27,7 +27,4 @@ uuid = { workspace = true, features = ["serde", "v7"] }
|
||||
|
||||
[dev-dependencies]
|
||||
anyhow = { workspace = true }
|
||||
codex-utils-cargo-bin = { workspace = true }
|
||||
pretty_assertions = { workspace = true }
|
||||
similar = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
@@ -1,114 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"FileChange": {
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"content": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"add"
|
||||
],
|
||||
"title": "AddFileChangeType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"content",
|
||||
"type"
|
||||
],
|
||||
"title": "AddFileChange",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"content": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"delete"
|
||||
],
|
||||
"title": "DeleteFileChangeType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"content",
|
||||
"type"
|
||||
],
|
||||
"title": "DeleteFileChange",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"move_path": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"update"
|
||||
],
|
||||
"title": "UpdateFileChangeType",
|
||||
"type": "string"
|
||||
},
|
||||
"unified_diff": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type",
|
||||
"unified_diff"
|
||||
],
|
||||
"title": "UpdateFileChange",
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"ThreadId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"callId": {
|
||||
"description": "Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent] and [codex_core::protocol::PatchApplyEndEvent].",
|
||||
"type": "string"
|
||||
},
|
||||
"conversationId": {
|
||||
"$ref": "#/definitions/ThreadId"
|
||||
},
|
||||
"fileChanges": {
|
||||
"additionalProperties": {
|
||||
"$ref": "#/definitions/FileChange"
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"grantRoot": {
|
||||
"description": "When set, the agent is asking the user to allow writes under this root for the remainder of the session (unclear if this is honored today).",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"reason": {
|
||||
"description": "Optional explanatory reason (e.g. request for extra write access).",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"callId",
|
||||
"conversationId",
|
||||
"fileChanges"
|
||||
],
|
||||
"title": "ApplyPatchApprovalParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"ReviewDecision": {
|
||||
"description": "User's decision in response to an ExecApprovalRequest.",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "User has approved this command and the agent should execute it.",
|
||||
"enum": [
|
||||
"approved"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"description": "User has approved this command and wants to apply the proposed execpolicy amendment so future matching commands are permitted.",
|
||||
"properties": {
|
||||
"approved_execpolicy_amendment": {
|
||||
"properties": {
|
||||
"proposed_execpolicy_amendment": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"proposed_execpolicy_amendment"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"approved_execpolicy_amendment"
|
||||
],
|
||||
"title": "ApprovedExecpolicyAmendmentReviewDecision",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "User has approved this command and wants to automatically approve any future identical instances (`command` and `cwd` match exactly) for the remainder of the session.",
|
||||
"enum": [
|
||||
"approved_for_session"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "User has denied this command and the agent should not execute it, but it should continue the session and try something else.",
|
||||
"enum": [
|
||||
"denied"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "User has denied this command and the agent should not do anything until the user's next command.",
|
||||
"enum": [
|
||||
"abort"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"decision": {
|
||||
"$ref": "#/definitions/ReviewDecision"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"decision"
|
||||
],
|
||||
"title": "ApplyPatchApprovalResponse",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"ChatgptAuthTokensRefreshReason": {
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Codex attempted a backend request and received `401 Unauthorized`.",
|
||||
"enum": [
|
||||
"unauthorized"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"previousAccountId": {
|
||||
"description": "Workspace/account identifier that Codex was previously using.\n\nClients that manage multiple accounts/workspaces can use this as a hint to refresh the token for the correct workspace.\n\nThis may be `null` when the prior ID token did not include a workspace identifier (`chatgpt_account_id`) or when the token could not be parsed.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"reason": {
|
||||
"$ref": "#/definitions/ChatgptAuthTokensRefreshReason"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"reason"
|
||||
],
|
||||
"title": "ChatgptAuthTokensRefreshParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"accessToken": {
|
||||
"type": "string"
|
||||
},
|
||||
"idToken": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"accessToken",
|
||||
"idToken"
|
||||
],
|
||||
"title": "ChatgptAuthTokensRefreshResponse",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"method": {
|
||||
"enum": [
|
||||
"initialized"
|
||||
],
|
||||
"title": "InitializedNotificationMethod",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"method"
|
||||
],
|
||||
"title": "InitializedNotification",
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"title": "ClientNotification"
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,174 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"CommandAction": {
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"command": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"read"
|
||||
],
|
||||
"title": "ReadCommandActionType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"command",
|
||||
"name",
|
||||
"path",
|
||||
"type"
|
||||
],
|
||||
"title": "ReadCommandAction",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"command": {
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"listFiles"
|
||||
],
|
||||
"title": "ListFilesCommandActionType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"command",
|
||||
"type"
|
||||
],
|
||||
"title": "ListFilesCommandAction",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"command": {
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"query": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"search"
|
||||
],
|
||||
"title": "SearchCommandActionType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"command",
|
||||
"type"
|
||||
],
|
||||
"title": "SearchCommandAction",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"command": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"unknown"
|
||||
],
|
||||
"title": "UnknownCommandActionType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"command",
|
||||
"type"
|
||||
],
|
||||
"title": "UnknownCommandAction",
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"command": {
|
||||
"description": "The command to be executed.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"commandActions": {
|
||||
"description": "Best-effort parsed command actions for friendly display.",
|
||||
"items": {
|
||||
"$ref": "#/definitions/CommandAction"
|
||||
},
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"cwd": {
|
||||
"description": "The command's working directory.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"itemId": {
|
||||
"type": "string"
|
||||
},
|
||||
"proposedExecpolicyAmendment": {
|
||||
"description": "Optional proposed execpolicy amendment to allow similar commands without prompting.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"reason": {
|
||||
"description": "Optional explanatory reason (e.g. request for network access).",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
},
|
||||
"turnId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"itemId",
|
||||
"threadId",
|
||||
"turnId"
|
||||
],
|
||||
"title": "CommandExecutionRequestApprovalParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,72 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"CommandExecutionApprovalDecision": {
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "User approved the command.",
|
||||
"enum": [
|
||||
"accept"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "User approved the command and future identical commands should run without prompting.",
|
||||
"enum": [
|
||||
"acceptForSession"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"description": "User approved the command, and wants to apply the proposed execpolicy amendment so future matching commands can run without prompting.",
|
||||
"properties": {
|
||||
"acceptWithExecpolicyAmendment": {
|
||||
"properties": {
|
||||
"execpolicy_amendment": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"execpolicy_amendment"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"acceptWithExecpolicyAmendment"
|
||||
],
|
||||
"title": "AcceptWithExecpolicyAmendmentCommandExecutionApprovalDecision",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "User denied the command. The agent will continue the turn.",
|
||||
"enum": [
|
||||
"decline"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "User denied the command. The turn will also be immediately interrupted.",
|
||||
"enum": [
|
||||
"cancel"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"decision": {
|
||||
"$ref": "#/definitions/CommandExecutionApprovalDecision"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"decision"
|
||||
],
|
||||
"title": "CommandExecutionRequestApprovalResponse",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"arguments": true,
|
||||
"callId": {
|
||||
"type": "string"
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
},
|
||||
"tool": {
|
||||
"type": "string"
|
||||
},
|
||||
"turnId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"arguments",
|
||||
"callId",
|
||||
"threadId",
|
||||
"tool",
|
||||
"turnId"
|
||||
],
|
||||
"title": "DynamicToolCallParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"output": {
|
||||
"type": "string"
|
||||
},
|
||||
"success": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"output",
|
||||
"success"
|
||||
],
|
||||
"title": "DynamicToolCallResponse",
|
||||
"type": "object"
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,158 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"ParsedCommand": {
|
||||
"oneOf": [
|
||||
{
|
||||
"properties": {
|
||||
"cmd": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"description": "(Best effort) Path to the file being read by the command. When possible, this is an absolute path, though when relative, it should be resolved against the `cwd`` that will be used to run the command to derive the absolute path.",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"read"
|
||||
],
|
||||
"title": "ReadParsedCommandType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"cmd",
|
||||
"name",
|
||||
"path",
|
||||
"type"
|
||||
],
|
||||
"title": "ReadParsedCommand",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"cmd": {
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"list_files"
|
||||
],
|
||||
"title": "ListFilesParsedCommandType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"cmd",
|
||||
"type"
|
||||
],
|
||||
"title": "ListFilesParsedCommand",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"cmd": {
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"query": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"search"
|
||||
],
|
||||
"title": "SearchParsedCommandType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"cmd",
|
||||
"type"
|
||||
],
|
||||
"title": "SearchParsedCommand",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"cmd": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"unknown"
|
||||
],
|
||||
"title": "UnknownParsedCommandType",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"cmd",
|
||||
"type"
|
||||
],
|
||||
"title": "UnknownParsedCommand",
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"ThreadId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"callId": {
|
||||
"description": "Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent] and [codex_core::protocol::ExecCommandEndEvent].",
|
||||
"type": "string"
|
||||
},
|
||||
"command": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"conversationId": {
|
||||
"$ref": "#/definitions/ThreadId"
|
||||
},
|
||||
"cwd": {
|
||||
"type": "string"
|
||||
},
|
||||
"parsedCmd": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/ParsedCommand"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"reason": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"callId",
|
||||
"command",
|
||||
"conversationId",
|
||||
"cwd",
|
||||
"parsedCmd"
|
||||
],
|
||||
"title": "ExecCommandApprovalParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"ReviewDecision": {
|
||||
"description": "User's decision in response to an ExecApprovalRequest.",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "User has approved this command and the agent should execute it.",
|
||||
"enum": [
|
||||
"approved"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"description": "User has approved this command and wants to apply the proposed execpolicy amendment so future matching commands are permitted.",
|
||||
"properties": {
|
||||
"approved_execpolicy_amendment": {
|
||||
"properties": {
|
||||
"proposed_execpolicy_amendment": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"proposed_execpolicy_amendment"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"approved_execpolicy_amendment"
|
||||
],
|
||||
"title": "ApprovedExecpolicyAmendmentReviewDecision",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "User has approved this command and wants to automatically approve any future identical instances (`command` and `cwd` match exactly) for the remainder of the session.",
|
||||
"enum": [
|
||||
"approved_for_session"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "User has denied this command and the agent should not execute it, but it should continue the session and try something else.",
|
||||
"enum": [
|
||||
"denied"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "User has denied this command and the agent should not do anything until the user's next command.",
|
||||
"enum": [
|
||||
"abort"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"decision": {
|
||||
"$ref": "#/definitions/ReviewDecision"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"decision"
|
||||
],
|
||||
"title": "ExecCommandApprovalResponse",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,35 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"grantRoot": {
|
||||
"description": "[UNSTABLE] When set, the agent is asking the user to allow writes under this root for the remainder of the session (unclear if this is honored today).",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"itemId": {
|
||||
"type": "string"
|
||||
},
|
||||
"reason": {
|
||||
"description": "Optional explanatory reason (e.g. request for extra write access).",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"threadId": {
|
||||
"type": "string"
|
||||
},
|
||||
"turnId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"itemId",
|
||||
"threadId",
|
||||
"turnId"
|
||||
],
|
||||
"title": "FileChangeRequestApprovalParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,47 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"FileChangeApprovalDecision": {
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "User approved the file changes.",
|
||||
"enum": [
|
||||
"accept"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "User approved the file changes and future changes to the same files should run without prompting.",
|
||||
"enum": [
|
||||
"acceptForSession"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "User denied the file changes. The agent will continue the turn.",
|
||||
"enum": [
|
||||
"decline"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "User denied the file changes. The turn will also be immediately interrupted.",
|
||||
"enum": [
|
||||
"cancel"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"decision": {
|
||||
"$ref": "#/definitions/FileChangeApprovalDecision"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"decision"
|
||||
],
|
||||
"title": "FileChangeRequestApprovalResponse",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"cancellationToken": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"query": {
|
||||
"type": "string"
|
||||
},
|
||||
"roots": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query",
|
||||
"roots"
|
||||
],
|
||||
"title": "FuzzyFileSearchParams",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,55 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"FuzzyFileSearchResult": {
|
||||
"description": "Superset of [`codex_file_search::FileMatch`]",
|
||||
"properties": {
|
||||
"file_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"indices": {
|
||||
"items": {
|
||||
"format": "uint32",
|
||||
"minimum": 0.0,
|
||||
"type": "integer"
|
||||
},
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"path": {
|
||||
"type": "string"
|
||||
},
|
||||
"root": {
|
||||
"type": "string"
|
||||
},
|
||||
"score": {
|
||||
"format": "uint32",
|
||||
"minimum": 0.0,
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"file_name",
|
||||
"path",
|
||||
"root",
|
||||
"score"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"files": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/FuzzyFileSearchResult"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"files"
|
||||
],
|
||||
"title": "FuzzyFileSearchResponse",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"definitions": {
|
||||
"JSONRPCErrorError": {
|
||||
"properties": {
|
||||
"code": {
|
||||
"format": "int64",
|
||||
"type": "integer"
|
||||
},
|
||||
"data": true,
|
||||
"message": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"code",
|
||||
"message"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RequestId": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"format": "int64",
|
||||
"type": "integer"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"description": "A response to a request that indicates an error occurred.",
|
||||
"properties": {
|
||||
"error": {
|
||||
"$ref": "#/definitions/JSONRPCErrorError"
|
||||
},
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"error",
|
||||
"id"
|
||||
],
|
||||
"title": "JSONRPCError",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"code": {
|
||||
"format": "int64",
|
||||
"type": "integer"
|
||||
},
|
||||
"data": true,
|
||||
"message": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"code",
|
||||
"message"
|
||||
],
|
||||
"title": "JSONRPCErrorError",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,109 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/JSONRPCRequest"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/JSONRPCNotification"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/JSONRPCResponse"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/JSONRPCError"
|
||||
}
|
||||
],
|
||||
"definitions": {
|
||||
"JSONRPCError": {
|
||||
"description": "A response to a request that indicates an error occurred.",
|
||||
"properties": {
|
||||
"error": {
|
||||
"$ref": "#/definitions/JSONRPCErrorError"
|
||||
},
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"error",
|
||||
"id"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"JSONRPCErrorError": {
|
||||
"properties": {
|
||||
"code": {
|
||||
"format": "int64",
|
||||
"type": "integer"
|
||||
},
|
||||
"data": true,
|
||||
"message": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"code",
|
||||
"message"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"JSONRPCNotification": {
|
||||
"description": "A notification which does not expect a response.",
|
||||
"properties": {
|
||||
"method": {
|
||||
"type": "string"
|
||||
},
|
||||
"params": true
|
||||
},
|
||||
"required": [
|
||||
"method"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"JSONRPCRequest": {
|
||||
"description": "A request that expects a response.",
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
},
|
||||
"method": {
|
||||
"type": "string"
|
||||
},
|
||||
"params": true
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"method"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"JSONRPCResponse": {
|
||||
"description": "A successful (non-error) response to a request.",
|
||||
"properties": {
|
||||
"id": {
|
||||
"$ref": "#/definitions/RequestId"
|
||||
},
|
||||
"result": true
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"result"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"RequestId": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"format": "int64",
|
||||
"type": "integer"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"description": "Refers to any valid JSON-RPC object that can be decoded off the wire, or encoded to be sent.",
|
||||
"title": "JSONRPCMessage"
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"description": "A notification which does not expect a response.",
|
||||
"properties": {
|
||||
"method": {
|
||||
"type": "string"
|
||||
},
|
||||
"params": true
|
||||
},
|
||||
"required": [
|
||||
"method"
|
||||
],
|
||||
"title": "JSONRPCNotification",
|
||||
"type": "object"
|
||||
}
|
||||
@@ -1,32 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "RequestId": {
      "anyOf": [
        { "type": "string" },
        { "format": "int64", "type": "integer" }
      ]
    }
  },
  "description": "A request that expects a response.",
  "properties": {
    "id": { "$ref": "#/definitions/RequestId" },
    "method": { "type": "string" },
    "params": true
  },
  "required": ["id", "method"],
  "title": "JSONRPCRequest",
  "type": "object"
}
@@ -1,29 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "RequestId": {
      "anyOf": [
        { "type": "string" },
        { "format": "int64", "type": "integer" }
      ]
    }
  },
  "description": "A successful (non-error) response to a request.",
  "properties": {
    "id": { "$ref": "#/definitions/RequestId" },
    "result": true
  },
  "required": ["id", "result"],
  "title": "JSONRPCResponse",
  "type": "object"
}
@@ -1,13 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "anyOf": [
    { "type": "string" },
    { "format": "int64", "type": "integer" }
  ],
  "title": "RequestId"
}
File diff suppressed because it is too large
@@ -1,792 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "ApplyPatchApprovalParams": {
      "properties": {
        "callId": { "description": "Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent] and [codex_core::protocol::PatchApplyEndEvent].", "type": "string" },
        "conversationId": { "$ref": "#/definitions/ThreadId" },
        "fileChanges": { "additionalProperties": { "$ref": "#/definitions/FileChange" }, "type": "object" },
        "grantRoot": { "description": "When set, the agent is asking the user to allow writes under this root for the remainder of the session (unclear if this is honored today).", "type": ["string", "null"] },
        "reason": { "description": "Optional explanatory reason (e.g. request for extra write access).", "type": ["string", "null"] }
      },
      "required": ["callId", "conversationId", "fileChanges"],
      "type": "object"
    },
    "ChatgptAuthTokensRefreshParams": {
      "properties": {
        "previousAccountId": { "description": "Workspace/account identifier that Codex was previously using.\n\nClients that manage multiple accounts/workspaces can use this as a hint to refresh the token for the correct workspace.\n\nThis may be `null` when the prior ID token did not include a workspace identifier (`chatgpt_account_id`) or when the token could not be parsed.", "type": ["string", "null"] },
        "reason": { "$ref": "#/definitions/ChatgptAuthTokensRefreshReason" }
      },
      "required": ["reason"],
      "type": "object"
    },
    "ChatgptAuthTokensRefreshReason": {
      "oneOf": [
        { "description": "Codex attempted a backend request and received `401 Unauthorized`.", "enum": ["unauthorized"], "type": "string" }
      ]
    },
    "CommandAction": {
      "oneOf": [
        {
          "properties": {
            "command": { "type": "string" },
            "name": { "type": "string" },
            "path": { "type": "string" },
            "type": { "enum": ["read"], "title": "ReadCommandActionType", "type": "string" }
          },
          "required": ["command", "name", "path", "type"],
          "title": "ReadCommandAction",
          "type": "object"
        },
        {
          "properties": {
            "command": { "type": "string" },
            "path": { "type": ["string", "null"] },
            "type": { "enum": ["listFiles"], "title": "ListFilesCommandActionType", "type": "string" }
          },
          "required": ["command", "type"],
          "title": "ListFilesCommandAction",
          "type": "object"
        },
        {
          "properties": {
            "command": { "type": "string" },
            "path": { "type": ["string", "null"] },
            "query": { "type": ["string", "null"] },
            "type": { "enum": ["search"], "title": "SearchCommandActionType", "type": "string" }
          },
          "required": ["command", "type"],
          "title": "SearchCommandAction",
          "type": "object"
        },
        {
          "properties": {
            "command": { "type": "string" },
            "type": { "enum": ["unknown"], "title": "UnknownCommandActionType", "type": "string" }
          },
          "required": ["command", "type"],
          "title": "UnknownCommandAction",
          "type": "object"
        }
      ]
    },
    "CommandExecutionRequestApprovalParams": {
      "properties": {
        "command": { "description": "The command to be executed.", "type": ["string", "null"] },
        "commandActions": { "description": "Best-effort parsed command actions for friendly display.", "items": { "$ref": "#/definitions/CommandAction" }, "type": ["array", "null"] },
        "cwd": { "description": "The command's working directory.", "type": ["string", "null"] },
        "itemId": { "type": "string" },
        "proposedExecpolicyAmendment": { "description": "Optional proposed execpolicy amendment to allow similar commands without prompting.", "items": { "type": "string" }, "type": ["array", "null"] },
        "reason": { "description": "Optional explanatory reason (e.g. request for network access).", "type": ["string", "null"] },
        "threadId": { "type": "string" },
        "turnId": { "type": "string" }
      },
      "required": ["itemId", "threadId", "turnId"],
      "type": "object"
    },
    "DynamicToolCallParams": {
      "properties": {
        "arguments": true,
        "callId": { "type": "string" },
        "threadId": { "type": "string" },
        "tool": { "type": "string" },
        "turnId": { "type": "string" }
      },
      "required": ["arguments", "callId", "threadId", "tool", "turnId"],
      "type": "object"
    },
    "ExecCommandApprovalParams": {
      "properties": {
        "callId": { "description": "Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent] and [codex_core::protocol::ExecCommandEndEvent].", "type": "string" },
        "command": { "items": { "type": "string" }, "type": "array" },
        "conversationId": { "$ref": "#/definitions/ThreadId" },
        "cwd": { "type": "string" },
        "parsedCmd": { "items": { "$ref": "#/definitions/ParsedCommand" }, "type": "array" },
        "reason": { "type": ["string", "null"] }
      },
      "required": ["callId", "command", "conversationId", "cwd", "parsedCmd"],
      "type": "object"
    },
    "FileChange": {
      "oneOf": [
        {
          "properties": {
            "content": { "type": "string" },
            "type": { "enum": ["add"], "title": "AddFileChangeType", "type": "string" }
          },
          "required": ["content", "type"],
          "title": "AddFileChange",
          "type": "object"
        },
        {
          "properties": {
            "content": { "type": "string" },
            "type": { "enum": ["delete"], "title": "DeleteFileChangeType", "type": "string" }
          },
          "required": ["content", "type"],
          "title": "DeleteFileChange",
          "type": "object"
        },
        {
          "properties": {
            "move_path": { "type": ["string", "null"] },
            "type": { "enum": ["update"], "title": "UpdateFileChangeType", "type": "string" },
            "unified_diff": { "type": "string" }
          },
          "required": ["type", "unified_diff"],
          "title": "UpdateFileChange",
          "type": "object"
        }
      ]
    },
    "FileChangeRequestApprovalParams": {
      "properties": {
        "grantRoot": { "description": "[UNSTABLE] When set, the agent is asking the user to allow writes under this root for the remainder of the session (unclear if this is honored today).", "type": ["string", "null"] },
        "itemId": { "type": "string" },
        "reason": { "description": "Optional explanatory reason (e.g. request for extra write access).", "type": ["string", "null"] },
        "threadId": { "type": "string" },
        "turnId": { "type": "string" }
      },
      "required": ["itemId", "threadId", "turnId"],
      "type": "object"
    },
    "ParsedCommand": {
      "oneOf": [
        {
          "properties": {
            "cmd": { "type": "string" },
            "name": { "type": "string" },
            "path": { "description": "(Best effort) Path to the file being read by the command. When possible, this is an absolute path, though when relative, it should be resolved against the `cwd`` that will be used to run the command to derive the absolute path.", "type": "string" },
            "type": { "enum": ["read"], "title": "ReadParsedCommandType", "type": "string" }
          },
          "required": ["cmd", "name", "path", "type"],
          "title": "ReadParsedCommand",
          "type": "object"
        },
        {
          "properties": {
            "cmd": { "type": "string" },
            "path": { "type": ["string", "null"] },
            "type": { "enum": ["list_files"], "title": "ListFilesParsedCommandType", "type": "string" }
          },
          "required": ["cmd", "type"],
          "title": "ListFilesParsedCommand",
          "type": "object"
        },
        {
          "properties": {
            "cmd": { "type": "string" },
            "path": { "type": ["string", "null"] },
            "query": { "type": ["string", "null"] },
            "type": { "enum": ["search"], "title": "SearchParsedCommandType", "type": "string" }
          },
          "required": ["cmd", "type"],
          "title": "SearchParsedCommand",
          "type": "object"
        },
        {
          "properties": {
            "cmd": { "type": "string" },
            "type": { "enum": ["unknown"], "title": "UnknownParsedCommandType", "type": "string" }
          },
          "required": ["cmd", "type"],
          "title": "UnknownParsedCommand",
          "type": "object"
        }
      ]
    },
    "RequestId": {
      "anyOf": [
        { "type": "string" },
        { "format": "int64", "type": "integer" }
      ]
    },
    "ThreadId": { "type": "string" },
    "ToolRequestUserInputOption": {
      "description": "EXPERIMENTAL. Defines a single selectable option for request_user_input.",
      "properties": {
        "description": { "type": "string" },
        "label": { "type": "string" }
      },
      "required": ["description", "label"],
      "type": "object"
    },
    "ToolRequestUserInputParams": {
      "description": "EXPERIMENTAL. Params sent with a request_user_input event.",
      "properties": {
        "itemId": { "type": "string" },
        "questions": { "items": { "$ref": "#/definitions/ToolRequestUserInputQuestion" }, "type": "array" },
        "threadId": { "type": "string" },
        "turnId": { "type": "string" }
      },
      "required": ["itemId", "questions", "threadId", "turnId"],
      "type": "object"
    },
    "ToolRequestUserInputQuestion": {
      "description": "EXPERIMENTAL. Represents one request_user_input question and its required options.",
      "properties": {
        "header": { "type": "string" },
        "id": { "type": "string" },
        "isOther": { "default": false, "type": "boolean" },
        "isSecret": { "default": false, "type": "boolean" },
        "options": { "items": { "$ref": "#/definitions/ToolRequestUserInputOption" }, "type": ["array", "null"] },
        "question": { "type": "string" }
      },
      "required": ["header", "id", "question"],
      "type": "object"
    }
  },
  "description": "Request initiated from the server and sent to the client.",
  "oneOf": [
    {
      "description": "NEW APIs Sent when approval is requested for a specific command execution. This request is used for Turns started via turn/start.",
      "properties": {
        "id": { "$ref": "#/definitions/RequestId" },
        "method": { "enum": ["item/commandExecution/requestApproval"], "title": "Item/commandExecution/requestApprovalRequestMethod", "type": "string" },
        "params": { "$ref": "#/definitions/CommandExecutionRequestApprovalParams" }
      },
      "required": ["id", "method", "params"],
      "title": "Item/commandExecution/requestApprovalRequest",
      "type": "object"
    },
    {
      "description": "Sent when approval is requested for a specific file change. This request is used for Turns started via turn/start.",
      "properties": {
        "id": { "$ref": "#/definitions/RequestId" },
        "method": { "enum": ["item/fileChange/requestApproval"], "title": "Item/fileChange/requestApprovalRequestMethod", "type": "string" },
        "params": { "$ref": "#/definitions/FileChangeRequestApprovalParams" }
      },
      "required": ["id", "method", "params"],
      "title": "Item/fileChange/requestApprovalRequest",
      "type": "object"
    },
    {
      "description": "EXPERIMENTAL - Request input from the user for a tool call.",
      "properties": {
        "id": { "$ref": "#/definitions/RequestId" },
        "method": { "enum": ["item/tool/requestUserInput"], "title": "Item/tool/requestUserInputRequestMethod", "type": "string" },
        "params": { "$ref": "#/definitions/ToolRequestUserInputParams" }
      },
      "required": ["id", "method", "params"],
      "title": "Item/tool/requestUserInputRequest",
      "type": "object"
    },
    {
      "description": "Execute a dynamic tool call on the client.",
      "properties": {
        "id": { "$ref": "#/definitions/RequestId" },
        "method": { "enum": ["item/tool/call"], "title": "Item/tool/callRequestMethod", "type": "string" },
        "params": { "$ref": "#/definitions/DynamicToolCallParams" }
      },
      "required": ["id", "method", "params"],
      "title": "Item/tool/callRequest",
      "type": "object"
    },
    {
      "properties": {
        "id": { "$ref": "#/definitions/RequestId" },
        "method": { "enum": ["account/chatgptAuthTokens/refresh"], "title": "Account/chatgptAuthTokens/refreshRequestMethod", "type": "string" },
        "params": { "$ref": "#/definitions/ChatgptAuthTokensRefreshParams" }
      },
      "required": ["id", "method", "params"],
      "title": "Account/chatgptAuthTokens/refreshRequest",
      "type": "object"
    },
    {
      "description": "DEPRECATED APIs below Request to approve a patch. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
      "properties": {
        "id": { "$ref": "#/definitions/RequestId" },
        "method": { "enum": ["applyPatchApproval"], "title": "ApplyPatchApprovalRequestMethod", "type": "string" },
        "params": { "$ref": "#/definitions/ApplyPatchApprovalParams" }
      },
      "required": ["id", "method", "params"],
      "title": "ApplyPatchApprovalRequest",
      "type": "object"
    },
    {
      "description": "Request to exec a command. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
      "properties": {
        "id": { "$ref": "#/definitions/RequestId" },
        "method": { "enum": ["execCommandApproval"], "title": "ExecCommandApprovalRequestMethod", "type": "string" },
        "params": { "$ref": "#/definitions/ExecCommandApprovalParams" }
      },
      "required": ["id", "method", "params"],
      "title": "ExecCommandApprovalRequest",
      "type": "object"
    }
  ],
  "title": "ServerRequest"
}
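As a sketch only (all identifiers and the command below are invented), a server-initiated request matching the item/commandExecution/requestApproval variant of the ServerRequest schema above:

{
  "id": 7,
  "method": "item/commandExecution/requestApproval",
  "params": {
    "itemId": "item_1",
    "threadId": "thread_1",
    "turnId": "turn_1",
    "command": "cargo test",
    "cwd": "/workspace/project",
    "reason": "command requires approval under the current policy"
  }
}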
@@ -1,84 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "ToolRequestUserInputOption": {
      "description": "EXPERIMENTAL. Defines a single selectable option for request_user_input.",
      "properties": {
        "description": { "type": "string" },
        "label": { "type": "string" }
      },
      "required": ["description", "label"],
      "type": "object"
    },
    "ToolRequestUserInputQuestion": {
      "description": "EXPERIMENTAL. Represents one request_user_input question and its required options.",
      "properties": {
        "header": { "type": "string" },
        "id": { "type": "string" },
        "isOther": { "default": false, "type": "boolean" },
        "isSecret": { "default": false, "type": "boolean" },
        "options": { "items": { "$ref": "#/definitions/ToolRequestUserInputOption" }, "type": ["array", "null"] },
        "question": { "type": "string" }
      },
      "required": ["header", "id", "question"],
      "type": "object"
    }
  },
  "description": "EXPERIMENTAL. Params sent with a request_user_input event.",
  "properties": {
    "itemId": { "type": "string" },
    "questions": { "items": { "$ref": "#/definitions/ToolRequestUserInputQuestion" }, "type": "array" },
    "threadId": { "type": "string" },
    "turnId": { "type": "string" }
  },
  "required": ["itemId", "questions", "threadId", "turnId"],
  "title": "ToolRequestUserInputParams",
  "type": "object"
}
@@ -1,34 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "ToolRequestUserInputAnswer": {
      "description": "EXPERIMENTAL. Captures a user's answer to a request_user_input question.",
      "properties": {
        "answers": { "items": { "type": "string" }, "type": "array" }
      },
      "required": ["answers"],
      "type": "object"
    }
  },
  "description": "EXPERIMENTAL. Response payload mapping question ids to answers.",
  "properties": {
    "answers": {
      "additionalProperties": { "$ref": "#/definitions/ToolRequestUserInputAnswer" },
      "type": "object"
    }
  },
  "required": ["answers"],
  "title": "ToolRequestUserInputResponse",
  "type": "object"
}
File diff suppressed because it is too large
@@ -1,22 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "ThreadId": { "type": "string" }
  },
  "properties": {
    "conversationId": { "$ref": "#/definitions/ThreadId" },
    "experimentalRawEvents": { "default": false, "type": "boolean" }
  },
  "required": ["conversationId"],
  "title": "AddConversationListenerParams",
  "type": "object"
}
@@ -1,13 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "properties": {
    "subscriptionId": { "type": "string" }
  },
  "required": ["subscriptionId"],
  "title": "AddConversationSubscriptionResponse",
  "type": "object"
}
@@ -1,22 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "ThreadId": { "type": "string" }
  },
  "properties": {
    "conversationId": { "$ref": "#/definitions/ThreadId" },
    "rolloutPath": { "type": "string" }
  },
  "required": ["conversationId", "rolloutPath"],
  "title": "ArchiveConversationParams",
  "type": "object"
}
@@ -1,5 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "ArchiveConversationResponse",
  "type": "object"
}
@@ -1,46 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "AuthMode": {
      "description": "Authentication mode for OpenAI-backed providers.",
      "oneOf": [
        { "description": "OpenAI API key provided by the caller and stored by Codex.", "enum": ["apikey"], "type": "string" },
        { "description": "ChatGPT OAuth managed by Codex (tokens persisted and refreshed by Codex).", "enum": ["chatgpt"], "type": "string" },
        { "description": "[UNSTABLE] FOR OPENAI INTERNAL USE ONLY - DO NOT USE.\n\nChatGPT auth tokens are supplied by an external host app and are only stored in memory. Token refresh must be handled by the external host app.", "enum": ["chatgptAuthTokens"], "type": "string" }
      ]
    }
  },
  "description": "Deprecated notification. Use AccountUpdatedNotification instead.",
  "properties": {
    "authMethod": {
      "anyOf": [
        { "$ref": "#/definitions/AuthMode" },
        { "type": "null" }
      ]
    }
  },
  "title": "AuthStatusChangeNotification",
  "type": "object"
}
@@ -1,13 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "properties": {
    "loginId": { "type": "string" }
  },
  "required": ["loginId"],
  "title": "CancelLoginChatGptParams",
  "type": "object"
}
@@ -1,5 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "CancelLoginChatGptResponse",
  "type": "object"
}
@@ -1,158 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "AbsolutePathBuf": {
      "description": "A path that is guaranteed to be absolute and normalized (though it is not guaranteed to be canonicalized or exist on the filesystem).\n\nIMPORTANT: When deserializing an `AbsolutePathBuf`, a base path must be set using [AbsolutePathBufGuard::new]. If no base path is set, the deserialization will fail unless the path being deserialized is already absolute.",
      "type": "string"
    },
    "NetworkAccess": {
      "description": "Represents whether outbound network access is available to the agent.",
      "enum": ["restricted", "enabled"],
      "type": "string"
    },
    "SandboxPolicy": {
      "description": "Determines execution restrictions for model shell commands.",
      "oneOf": [
        {
          "description": "No restrictions whatsoever. Use with caution.",
          "properties": {
            "type": { "enum": ["danger-full-access"], "title": "DangerFullAccessSandboxPolicyType", "type": "string" }
          },
          "required": ["type"],
          "title": "DangerFullAccessSandboxPolicy",
          "type": "object"
        },
        {
          "description": "Read-only access to the entire file-system.",
          "properties": {
            "type": { "enum": ["read-only"], "title": "ReadOnlySandboxPolicyType", "type": "string" }
          },
          "required": ["type"],
          "title": "ReadOnlySandboxPolicy",
          "type": "object"
        },
        {
          "description": "Indicates the process is already in an external sandbox. Allows full disk access while honoring the provided network setting.",
          "properties": {
            "network_access": { "allOf": [{ "$ref": "#/definitions/NetworkAccess" }], "default": "restricted", "description": "Whether the external sandbox permits outbound network traffic." },
            "type": { "enum": ["external-sandbox"], "title": "ExternalSandboxSandboxPolicyType", "type": "string" }
          },
          "required": ["type"],
          "title": "ExternalSandboxSandboxPolicy",
          "type": "object"
        },
        {
          "description": "Same as `ReadOnly` but additionally grants write access to the current working directory (\"workspace\").",
          "properties": {
            "exclude_slash_tmp": { "default": false, "description": "When set to `true`, will NOT include the `/tmp` among the default writable roots on UNIX. Defaults to `false`.", "type": "boolean" },
            "exclude_tmpdir_env_var": { "default": false, "description": "When set to `true`, will NOT include the per-user `TMPDIR` environment variable among the default writable roots. Defaults to `false`.", "type": "boolean" },
            "network_access": { "default": false, "description": "When set to `true`, outbound network access is allowed. `false` by default.", "type": "boolean" },
            "type": { "enum": ["workspace-write"], "title": "WorkspaceWriteSandboxPolicyType", "type": "string" },
            "writable_roots": { "description": "Additional folders (beyond cwd and possibly TMPDIR) that should be writable from within the sandbox.", "items": { "$ref": "#/definitions/AbsolutePathBuf" }, "type": "array" }
          },
          "required": ["type"],
          "title": "WorkspaceWriteSandboxPolicy",
          "type": "object"
        }
      ]
    }
  },
  "properties": {
    "command": { "items": { "type": "string" }, "type": "array" },
    "cwd": { "type": ["string", "null"] },
    "sandboxPolicy": {
      "anyOf": [
        { "$ref": "#/definitions/SandboxPolicy" },
        { "type": "null" }
      ]
    },
    "timeoutMs": { "format": "uint64", "minimum": 0.0, "type": ["integer", "null"] }
  },
  "required": ["command"],
  "title": "ExecOneOffCommandParams",
  "type": "object"
}
@@ -1,22 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "properties": {
    "exitCode": { "format": "int32", "type": "integer" },
    "stderr": { "type": "string" },
    "stdout": { "type": "string" }
  },
  "required": ["exitCode", "stderr", "stdout"],
  "title": "ExecOneOffCommandResponse",
  "type": "object"
}
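For illustration (paths and output invented), an ExecOneOffCommandParams payload and a matching ExecOneOffCommandResponse, per the two schemas above:

{ "command": ["ls", "-la"], "cwd": "/workspace/project", "timeoutMs": 5000, "sandboxPolicy": { "type": "read-only" } }
{ "exitCode": 0, "stdout": "total 0\n", "stderr": "" }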
@@ -1,159 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "AskForApproval": {
      "description": "Determines the conditions under which the user is consulted to approve running the command proposed by Codex.",
      "oneOf": [
        { "description": "Under this policy, only \"known safe\" commands—as determined by `is_safe_command()`—that **only read files** are auto‑approved. Everything else will ask the user to approve.", "enum": ["untrusted"], "type": "string" },
        { "description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.", "enum": ["on-failure"], "type": "string" },
        { "description": "The model decides when to ask the user for approval.", "enum": ["on-request"], "type": "string" },
        { "description": "Never ask the user to approve commands. Failures are immediately returned to the model, and never escalated to the user for approval.", "enum": ["never"], "type": "string" }
      ]
    },
    "NewConversationParams": {
      "properties": {
        "approvalPolicy": { "anyOf": [{ "$ref": "#/definitions/AskForApproval" }, { "type": "null" }] },
        "baseInstructions": { "type": ["string", "null"] },
        "compactPrompt": { "type": ["string", "null"] },
        "config": { "additionalProperties": true, "type": ["object", "null"] },
        "cwd": { "type": ["string", "null"] },
        "developerInstructions": { "type": ["string", "null"] },
        "includeApplyPatchTool": { "type": ["boolean", "null"] },
        "model": { "type": ["string", "null"] },
        "modelProvider": { "type": ["string", "null"] },
        "profile": { "type": ["string", "null"] },
        "sandbox": { "anyOf": [{ "$ref": "#/definitions/SandboxMode" }, { "type": "null" }] }
      },
      "type": "object"
    },
    "SandboxMode": {
      "enum": ["read-only", "workspace-write", "danger-full-access"],
      "type": "string"
    },
    "ThreadId": { "type": "string" }
  },
  "properties": {
    "conversationId": { "anyOf": [{ "$ref": "#/definitions/ThreadId" }, { "type": "null" }] },
    "overrides": { "anyOf": [{ "$ref": "#/definitions/NewConversationParams" }, { "type": "null" }] },
    "path": { "type": ["string", "null"] }
  },
  "title": "ForkConversationParams",
  "type": "object"
}
File diff suppressed because it is too large
@@ -1,19 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "properties": {
    "includeToken": { "type": ["boolean", "null"] },
    "refreshToken": { "type": ["boolean", "null"] }
  },
  "title": "GetAuthStatusParams",
  "type": "object"
}
@@ -1,57 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "AuthMode": {
      "description": "Authentication mode for OpenAI-backed providers.",
      "oneOf": [
        { "description": "OpenAI API key provided by the caller and stored by Codex.", "enum": ["apikey"], "type": "string" },
        { "description": "ChatGPT OAuth managed by Codex (tokens persisted and refreshed by Codex).", "enum": ["chatgpt"], "type": "string" },
        { "description": "[UNSTABLE] FOR OPENAI INTERNAL USE ONLY - DO NOT USE.\n\nChatGPT auth tokens are supplied by an external host app and are only stored in memory. Token refresh must be handled by the external host app.", "enum": ["chatgptAuthTokens"], "type": "string" }
      ]
    }
  },
  "properties": {
    "authMethod": { "anyOf": [{ "$ref": "#/definitions/AuthMode" }, { "type": "null" }] },
    "authToken": { "type": ["string", "null"] },
    "requiresOpenaiAuth": { "type": ["boolean", "null"] }
  },
  "title": "GetAuthStatusResponse",
  "type": "object"
}
@@ -1,35 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "anyOf": [
    {
      "properties": {
        "rolloutPath": { "type": "string" }
      },
      "required": ["rolloutPath"],
      "title": "RolloutPathv1::GetConversationSummaryParams",
      "type": "object"
    },
    {
      "properties": {
        "conversationId": { "$ref": "#/definitions/ThreadId" }
      },
      "required": ["conversationId"],
      "title": "ConversationIdv1::GetConversationSummaryParams",
      "type": "object"
    }
  ],
  "definitions": {
    "ThreadId": { "type": "string" }
  },
  "title": "GetConversationSummaryParams"
}
@@ -1,175 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "ConversationGitInfo": {
      "properties": {
        "branch": { "type": ["string", "null"] },
        "origin_url": { "type": ["string", "null"] },
        "sha": { "type": ["string", "null"] }
      },
      "type": "object"
    },
    "ConversationSummary": {
      "properties": {
        "cliVersion": { "type": "string" },
        "conversationId": { "$ref": "#/definitions/ThreadId" },
        "cwd": { "type": "string" },
        "gitInfo": { "anyOf": [{ "$ref": "#/definitions/ConversationGitInfo" }, { "type": "null" }] },
        "modelProvider": { "type": "string" },
        "path": { "type": "string" },
        "preview": { "type": "string" },
        "source": { "$ref": "#/definitions/SessionSource" },
        "timestamp": { "type": ["string", "null"] },
        "updatedAt": { "type": ["string", "null"] }
      },
      "required": ["cliVersion", "conversationId", "cwd", "modelProvider", "path", "preview", "source"],
      "type": "object"
    },
    "SessionSource": {
      "oneOf": [
        { "enum": ["cli", "vscode", "exec", "mcp", "unknown"], "type": "string" },
        {
          "additionalProperties": false,
          "properties": {
            "subagent": { "$ref": "#/definitions/SubAgentSource" }
          },
          "required": ["subagent"],
          "title": "SubagentSessionSource",
          "type": "object"
        }
      ]
    },
    "SubAgentSource": {
      "oneOf": [
        { "enum": ["review", "compact"], "type": "string" },
        {
          "additionalProperties": false,
          "properties": {
            "thread_spawn": {
              "properties": {
                "depth": { "format": "int32", "type": "integer" },
                "parent_thread_id": { "$ref": "#/definitions/ThreadId" }
              },
              "required": ["depth", "parent_thread_id"],
              "type": "object"
            }
          },
          "required": ["thread_spawn"],
          "title": "ThreadSpawnSubAgentSource",
          "type": "object"
        },
        {
          "additionalProperties": false,
          "properties": {
            "other": { "type": "string" }
          },
          "required": ["other"],
          "title": "OtherSubAgentSource",
          "type": "object"
        }
      ]
    },
    "ThreadId": { "type": "string" }
  },
  "properties": {
    "summary": { "$ref": "#/definitions/ConversationSummary" }
  },
  "required": ["summary"],
  "title": "GetConversationSummaryResponse",
  "type": "object"
}
@@ -1,13 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "properties": {
    "userAgent": { "type": "string" }
  },
  "required": ["userAgent"],
  "title": "GetUserAgentResponse",
  "type": "object"
}
@@ -1,330 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "AbsolutePathBuf": {
      "description": "A path that is guaranteed to be absolute and normalized (though it is not guaranteed to be canonicalized or exist on the filesystem).\n\nIMPORTANT: When deserializing an `AbsolutePathBuf`, a base path must be set using [AbsolutePathBufGuard::new]. If no base path is set, the deserialization will fail unless the path being deserialized is already absolute.",
      "type": "string"
    },
    "AskForApproval": {
      "description": "Determines the conditions under which the user is consulted to approve running the command proposed by Codex.",
      "oneOf": [
        { "description": "Under this policy, only \"known safe\" commands—as determined by `is_safe_command()`—that **only read files** are auto‑approved. Everything else will ask the user to approve.", "enum": ["untrusted"], "type": "string" },
        { "description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.", "enum": ["on-failure"], "type": "string" },
        { "description": "The model decides when to ask the user for approval.", "enum": ["on-request"], "type": "string" },
        { "description": "Never ask the user to approve commands. Failures are immediately returned to the model, and never escalated to the user for approval.", "enum": ["never"], "type": "string" }
      ]
    },
    "ForcedLoginMethod": {
      "enum": ["chatgpt", "api"],
      "type": "string"
    },
    "Profile": {
      "properties": {
        "approvalPolicy": { "anyOf": [{ "$ref": "#/definitions/AskForApproval" }, { "type": "null" }] },
        "chatgptBaseUrl": { "type": ["string", "null"] },
        "model": { "type": ["string", "null"] },
        "modelProvider": { "type": ["string", "null"] },
        "modelReasoningEffort": { "anyOf": [{ "$ref": "#/definitions/ReasoningEffort" }, { "type": "null" }] },
        "modelReasoningSummary": { "anyOf": [{ "$ref": "#/definitions/ReasoningSummary" }, { "type": "null" }] },
        "modelVerbosity": { "anyOf": [{ "$ref": "#/definitions/Verbosity" }, { "type": "null" }] }
      },
      "type": "object"
    },
    "ReasoningEffort": {
      "description": "See https://platform.openai.com/docs/guides/reasoning?api-mode=responses#get-started-with-reasoning",
      "enum": ["none", "minimal", "low", "medium", "high", "xhigh"],
      "type": "string"
    },
    "ReasoningSummary": {
      "description": "A summary of the reasoning performed by the model. This can be useful for debugging and understanding the model's reasoning process. See https://platform.openai.com/docs/guides/reasoning?api-mode=responses#reasoning-summaries",
      "oneOf": [
        { "enum": ["auto", "concise", "detailed"], "type": "string" },
        { "description": "Option to disable reasoning summaries.", "enum": ["none"], "type": "string" }
      ]
    },
    "SandboxMode": {
      "enum": ["read-only", "workspace-write", "danger-full-access"],
      "type": "string"
    },
    "SandboxSettings": {
      "properties": {
        "excludeSlashTmp": { "type": ["boolean", "null"] },
        "excludeTmpdirEnvVar": { "type": ["boolean", "null"] },
        "networkAccess": { "type": ["boolean", "null"] },
        "writableRoots": { "default": [], "items": { "$ref": "#/definitions/AbsolutePathBuf" }, "type": "array" }
      },
      "type": "object"
    },
    "Tools": {
      "properties": {
        "viewImage": { "type": ["boolean", "null"] },
        "webSearch": { "type": ["boolean", "null"] }
      },
      "type": "object"
    },
    "UserSavedConfig": {
      "properties": {
        "approvalPolicy": { "anyOf": [{ "$ref": "#/definitions/AskForApproval" }, { "type": "null" }] },
        "forcedChatgptWorkspaceId": { "type": ["string", "null"] },
        "forcedLoginMethod": { "anyOf": [{ "$ref": "#/definitions/ForcedLoginMethod" }, { "type": "null" }] },
        "model": { "type": ["string", "null"] },
        "modelReasoningEffort": { "anyOf": [{ "$ref": "#/definitions/ReasoningEffort" }, { "type": "null" }] },
        "modelReasoningSummary": { "anyOf": [{ "$ref": "#/definitions/ReasoningSummary" }, { "type": "null" }] },
        "modelVerbosity": { "anyOf": [{ "$ref": "#/definitions/Verbosity" }, { "type": "null" }] },
        "profile": { "type": ["string", "null"] },
        "profiles": { "additionalProperties": { "$ref": "#/definitions/Profile" }, "type": "object" },
        "sandboxMode": { "anyOf": [{ "$ref": "#/definitions/SandboxMode" }, { "type": "null" }] },
        "sandboxSettings": { "anyOf": [{ "$ref": "#/definitions/SandboxSettings" }, { "type": "null" }] },
        "tools": { "anyOf": [{ "$ref": "#/definitions/Tools" }, { "type": "null" }] }
      },
      "required": ["profiles"],
      "type": "object"
    },
    "Verbosity": {
      "description": "Controls output length/detail on GPT-5 models via the Responses API. Serialized with lowercase values to match the OpenAI API.",
      "enum": ["low", "medium", "high"],
      "type": "string"
    }
  },
  "properties": {
    "config": { "$ref": "#/definitions/UserSavedConfig" }
  },
  "required": ["config"],
  "title": "GetUserSavedConfigResponse",
  "type": "object"
}
@@ -1,13 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "properties": {
    "cwd": { "type": "string" }
  },
  "required": ["cwd"],
  "title": "GitDiffToRemoteParams",
  "type": "object"
}
@@ -1,22 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "GitSha": { "type": "string" }
  },
  "properties": {
    "diff": { "type": "string" },
    "sha": { "$ref": "#/definitions/GitSha" }
  },
  "required": ["diff", "sha"],
  "title": "GitDiffToRemoteResponse",
  "type": "object"
}
@@ -1,36 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "ClientInfo": {
      "properties": {
        "name": { "type": "string" },
        "title": { "type": ["string", "null"] },
        "version": { "type": "string" }
      },
      "required": ["name", "version"],
      "type": "object"
    }
  },
  "properties": {
    "clientInfo": { "$ref": "#/definitions/ClientInfo" }
  },
  "required": ["clientInfo"],
  "title": "InitializeParams",
  "type": "object"
}
@@ -1,13 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "properties": {
    "userAgent": { "type": "string" }
  },
  "required": ["userAgent"],
  "title": "InitializeResponse",
  "type": "object"
}
@@ -1,18 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "ThreadId": { "type": "string" }
  },
  "properties": {
    "conversationId": { "$ref": "#/definitions/ThreadId" }
  },
  "required": ["conversationId"],
  "title": "InterruptConversationParams",
  "type": "object"
}
@@ -1,23 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "TurnAbortReason": {
      "enum": ["interrupted", "replaced", "review_ended"],
      "type": "string"
    }
  },
  "properties": {
    "abortReason": { "$ref": "#/definitions/TurnAbortReason" }
  },
  "required": ["abortReason"],
  "title": "InterruptConversationResponse",
  "type": "object"
}
@@ -1,30 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "properties": {
    "cursor": { "type": ["string", "null"] },
    "modelProviders": { "items": { "type": "string" }, "type": ["array", "null"] },
    "pageSize": { "format": "uint", "minimum": 0.0, "type": ["integer", "null"] }
  },
  "title": "ListConversationsParams",
  "type": "object"
}
@@ -1,184 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "ConversationGitInfo": {
      "properties": {
        "branch": { "type": ["string", "null"] },
        "origin_url": { "type": ["string", "null"] },
        "sha": { "type": ["string", "null"] }
      },
      "type": "object"
    },
    "ConversationSummary": {
      "properties": {
        "cliVersion": { "type": "string" },
        "conversationId": { "$ref": "#/definitions/ThreadId" },
        "cwd": { "type": "string" },
        "gitInfo": { "anyOf": [{ "$ref": "#/definitions/ConversationGitInfo" }, { "type": "null" }] },
        "modelProvider": { "type": "string" },
        "path": { "type": "string" },
        "preview": { "type": "string" },
        "source": { "$ref": "#/definitions/SessionSource" },
        "timestamp": { "type": ["string", "null"] },
        "updatedAt": { "type": ["string", "null"] }
      },
      "required": ["cliVersion", "conversationId", "cwd", "modelProvider", "path", "preview", "source"],
      "type": "object"
    },
    "SessionSource": {
      "oneOf": [
        { "enum": ["cli", "vscode", "exec", "mcp", "unknown"], "type": "string" },
        {
          "additionalProperties": false,
          "properties": {
            "subagent": { "$ref": "#/definitions/SubAgentSource" }
          },
          "required": ["subagent"],
          "title": "SubagentSessionSource",
          "type": "object"
        }
      ]
    },
    "SubAgentSource": {
      "oneOf": [
        { "enum": ["review", "compact"], "type": "string" },
        {
          "additionalProperties": false,
          "properties": {
            "thread_spawn": {
              "properties": {
                "depth": { "format": "int32", "type": "integer" },
                "parent_thread_id": { "$ref": "#/definitions/ThreadId" }
              },
              "required": ["depth", "parent_thread_id"],
              "type": "object"
            }
          },
          "required": ["thread_spawn"],
          "title": "ThreadSpawnSubAgentSource",
          "type": "object"
        },
        {
          "additionalProperties": false,
          "properties": {
            "other": { "type": "string" }
          },
          "required": ["other"],
          "title": "OtherSubAgentSource",
          "type": "object"
        }
      ]
    },
    "ThreadId": { "type": "string" }
  },
  "properties": {
    "items": { "items": { "$ref": "#/definitions/ConversationSummary" }, "type": "array" },
    "nextCursor": { "type": ["string", "null"] }
  },
  "required": ["items"],
  "title": "ListConversationsResponse",
  "type": "object"
}
@@ -1,13 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "properties": {
    "apiKey": { "type": "string" }
  },
  "required": ["apiKey"],
  "title": "LoginApiKeyParams",
  "type": "object"
}
@@ -1,5 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "LoginApiKeyResponse",
  "type": "object"
}
@@ -1,24 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "description": "Deprecated in favor of AccountLoginCompletedNotification.",
  "properties": {
    "error": { "type": ["string", "null"] },
    "loginId": { "type": "string" },
    "success": { "type": "boolean" }
  },
  "required": ["loginId", "success"],
  "title": "LoginChatGptCompleteNotification",
  "type": "object"
}
Some files were not shown because too many files have changed in this diff.