Mirror of https://github.com/openai/codex.git (synced 2026-02-03 07:23:39 +00:00)

Compare commits: 35 commits, pr8959 ... queue/stee
Commits in this range:

2dfd05b6c2, 3c20ed8900, 8ce2488dc2, 1b26719958, 6a57d7980b, 198289934f, 6709ad8975,
cf515142b0, 74b2238931, cc0b5e8504, 8e49a2c0d1, af1ed2685e, 1a0e2e612b, acfd94f625,
cabf85aa18, bc284669c2, fbe883318d, 2a06d64bc9, 7daaabc795, 1aed01e99f, ed64804cb5,
5c380d5b1e, 46b0c4acbb, 5b5a5b92b5, ea56186c2b, cacdae8c05, bc92dc5cf0, 7e5b3e069e,
e2e3f4490e, 225614d7fb, 16c66c37eb, e9c548c65e, fceae86581, 568b938c80, 24d6e0114f
3  .bazelignore  Normal file
@@ -0,0 +1,3 @@
# Without this, Bazel will consider BUILD.bazel files in
# .git/sl/origbackups (which can be populated by Sapling SCM).
.git

45  .bazelrc  Normal file
@@ -0,0 +1,45 @@
common --repo_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1

common --disk_cache=~/.cache/bazel-disk-cache
common --repo_contents_cache=~/.cache/bazel-repo-contents-cache
common --repository_cache=~/.cache/bazel-repo-cache

common --experimental_platform_in_output_dir

common --enable_platform_specific_config
# TODO(zbarsky): We need to untangle these libc constraints to get linux remote builds working.
common:linux --host_platform=//:local
common --@rules_cc//cc/toolchains/args/archiver_flags:use_libtool_on_macos=False
common --@toolchains_llvm_bootstrapped//config:experimental_stub_libgcc_s

# We need to use the sh toolchain on windows so we don't send host bash paths to the linux executor.
common:windows --@rules_rust//rust/settings:experimental_use_sh_toolchain_for_bootstrap_process_wrapper

# TODO(zbarsky): rules_rust doesn't implement this flag properly with remote exec...
# common --@rules_rust//rust/settings:pipelined_compilation

common --incompatible_strict_action_env
# Not ideal, but We need to allow dotslash to be found
common --test_env=PATH=/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin

common --test_output=errors
common --bes_results_url=https://app.buildbuddy.io/invocation/
common --bes_backend=grpcs://remote.buildbuddy.io
common --remote_cache=grpcs://remote.buildbuddy.io
common --remote_download_toplevel
common --nobuild_runfile_links
common --remote_timeout=3600
common --noexperimental_throttle_remote_action_building
common --experimental_remote_execution_keepalive
common --grpc_keepalive_time=30s

# This limits both in-flight executions and concurrent downloads. Even with high number
# of jobs execution will still be limited by CPU cores, so this just pays a bit of
# memory in exchange for higher download concurrency.
common --jobs=30

common:remote --extra_execution_platforms=//:rbe
common:remote --remote_executor=grpcs://remote.buildbuddy.io
common:remote --jobs=800

20  .github/workflows/Dockerfile.bazel  vendored  Normal file
@@ -0,0 +1,20 @@
FROM ubuntu:24.04

# TODO(mbolin): Published to docker.io/mbolin491/codex-bazel:latest for
# initial debugging, but we should publish to a more proper location.
#
# docker buildx create --use
# docker buildx build --platform linux/amd64,linux/arm64 -f .github/workflows/Dockerfile.bazel -t mbolin491/codex-bazel:latest --push .

RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    curl git python3 ca-certificates && \
    rm -rf /var/lib/apt/lists/*

# Install dotslash.
RUN curl -LSfs "https://github.com/facebook/dotslash/releases/download/v0.5.8/dotslash-ubuntu-22.04.$(uname -m).tar.gz" | tar fxz - -C /usr/local/bin

# Ubuntu 24.04 ships with user 'ubuntu' already created with UID 1000.
USER ubuntu

WORKDIR /workspace

110  .github/workflows/bazel.yml  vendored  Normal file
@@ -0,0 +1,110 @@
name: Bazel (experimental)

# Note this workflow was originally derived from:
# https://github.com/cerisier/toolchains_llvm_bootstrapped/blob/main/.github/workflows/ci.yaml

on:
  pull_request: {}
  push:
    branches:
      - main
  workflow_dispatch:

concurrency:
  # Cancel previous actions from the same PR or branch except 'main' branch.
  # See https://docs.github.com/en/actions/using-jobs/using-concurrency and https://docs.github.com/en/actions/learn-github-actions/contexts for more info.
  group: concurrency-group::${{ github.workflow }}::${{ github.event.pull_request.number > 0 && format('pr-{0}', github.event.pull_request.number) || github.ref_name }}${{ github.ref_name == 'main' && format('::{0}', github.run_id) || ''}}
  cancel-in-progress: ${{ github.ref_name != 'main' }}
jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        include:
          # macOS
          - os: macos-15-xlarge
            target: aarch64-apple-darwin
          - os: macos-15-xlarge
            target: x86_64-apple-darwin

          # Linux
          - os: ubuntu-24.04-arm
            target: aarch64-unknown-linux-gnu
          - os: ubuntu-24.04
            target: x86_64-unknown-linux-gnu
          - os: ubuntu-24.04-arm
            target: aarch64-unknown-linux-musl
          - os: ubuntu-24.04
            target: x86_64-unknown-linux-musl
          # TODO: Enable Windows once we fix the toolchain issues there.
          #- os: windows-latest
          #  target: x86_64-pc-windows-gnullvm
    runs-on: ${{ matrix.os }}

    # Configure a human readable name for each job
    name: Local Bazel build on ${{ matrix.os }} for ${{ matrix.target }}

    steps:
      - uses: actions/checkout@v6

      # Some integration tests rely on DotSlash being installed.
      # See https://github.com/openai/codex/pull/7617.
      - name: Install DotSlash
        uses: facebook/install-dotslash@v2

      - name: Make DotSlash available in PATH (Unix)
        if: runner.os != 'Windows'
        run: cp "$(which dotslash)" /usr/local/bin

      - name: Make DotSlash available in PATH (Windows)
        if: runner.os == 'Windows'
        shell: pwsh
        run: Copy-Item (Get-Command dotslash).Source -Destination "$env:LOCALAPPDATA\Microsoft\WindowsApps\dotslash.exe"

      # Install Bazel via Bazelisk
      - name: Set up Bazel
        uses: bazelbuild/setup-bazelisk@v3

      # TODO(mbolin): Bring this back once we have caching working. Currently,
      # we never seem to get a cache hit but we still end up paying the cost of
      # uploading at the end of the build, which takes over a minute!
      #
      # Cache build and external artifacts so that the next ci build is incremental.
      # Because github action caches cannot be updated after a build, we need to
      # store the contents of each build in a unique cache key, then fall back to loading
      # it on the next ci run. We use hashFiles(...) in the key and restore-keys- with
      # the prefix to load the most recent cache for the branch on a cache miss. You
      # should customize the contents of hashFiles to capture any bazel input sources,
      # although this doesn't need to be perfect. If none of the input sources change
      # then a cache hit will load an existing cache and bazel won't have to do any work.
      # In the case of a cache miss, you want the fallback cache to contain most of the
      # previously built artifacts to minimize build time. The more precise you are with
      # hashFiles sources the less work bazel will have to do.
      # - name: Mount bazel caches
      #   uses: actions/cache@v4
      #   with:
      #     path: |
      #       ~/.cache/bazel-repo-cache
      #       ~/.cache/bazel-repo-contents-cache
      #     key: bazel-cache-${{ matrix.os }}-${{ hashFiles('**/BUILD.bazel', '**/*.bzl', 'MODULE.bazel') }}
      #     restore-keys: |
      #       bazel-cache-${{ matrix.os }}

      - name: Configure Bazel startup args (Windows)
        if: runner.os == 'Windows'
        shell: pwsh
        run: |
          # Use a very short path to reduce argv/path length issues.
          "BAZEL_STARTUP_ARGS=--output_user_root=C:\" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append

      - name: bazel test //...
        env:
          BUILDBUDDY_API_KEY: ${{ secrets.BUILDBUDDY_API_KEY }}
        shell: bash
        run: |
          bazel $BAZEL_STARTUP_ARGS --bazelrc=.github/workflows/ci.bazelrc test //... \
            --build_metadata=REPO_URL=https://github.com/openai/codex.git \
            --build_metadata=COMMIT_SHA=$(git rev-parse HEAD) \
            --build_metadata=ROLE=CI \
            --build_metadata=VISIBILITY=PUBLIC \
            "--remote_header=x-buildbuddy-api-key=$BUILDBUDDY_API_KEY"

20  .github/workflows/ci.bazelrc  vendored  Normal file
@@ -0,0 +1,20 @@
common --remote_download_minimal
common --nobuild_runfile_links
common --keep_going

# We prefer to run the build actions entirely remotely so we can dial up the concurrency.
# We have platform-specific tests, so we want to execute the tests on all platforms using the strongest sandboxing available on each platform.

# On linux, we can do a full remote build/test, by targeting the right (x86/arm) runners, so we have coverage of both.
# Linux crossbuilds don't work until we untangle the libc constraint mess.
common:linux --config=remote
common:linux --strategy=remote
common:linux --platforms=//:rbe

# On mac, we can run all the build actions remotely but test actions locally.
common:macos --config=remote
common:macos --strategy=remote
common:macos --strategy=TestRunner=darwin-sandbox,local

common:windows --strategy=TestRunner=local

19  BUILD.bazel  Normal file
@@ -0,0 +1,19 @@
# We mark the local platform as glibc-compatible so that rust can grab a toolchain for us.
# TODO(zbarsky): Upstream a better libc constraint into rules_rust.
# We only enable this on linux though for sanity, and because it breaks remote execution.
platform(
    name = "local",
    constraint_values = [
        "@toolchains_llvm_bootstrapped//constraints/libc:gnu.2.28",
    ],
    parents = [
        "@platforms//host",
    ],
)

alias(
    name = "rbe",
    actual = "@rbe_platform",
)

exports_files(["AGENTS.md"])

128  MODULE.bazel  Normal file
@@ -0,0 +1,128 @@
bazel_dep(name = "platforms", version = "1.0.0")
bazel_dep(name = "toolchains_llvm_bootstrapped", version = "0.3.1")
archive_override(
    module_name = "toolchains_llvm_bootstrapped",
    integrity = "sha256-9ks21bgEqbQWmwUIvqeLA64+Jk6o4ZVjC8KxjVa2Vw8=",
    strip_prefix = "toolchains_llvm_bootstrapped-e3775e66a7b6d287c705ca0cd24497ef4a77c503",
    urls = ["https://github.com/cerisier/toolchains_llvm_bootstrapped/archive/e3775e66a7b6d287c705ca0cd24497ef4a77c503/master.tar.gz"],
    patch_strip = 1,
    patches = [
        "//patches:llvm_toolchain_archive_params.patch",
    ],
)

osx = use_extension("@toolchains_llvm_bootstrapped//toolchain/extension:osx.bzl", "osx")
osx.framework(name = "ApplicationServices")
osx.framework(name = "AppKit")
osx.framework(name = "ColorSync")
osx.framework(name = "CoreFoundation")
osx.framework(name = "CoreGraphics")
osx.framework(name = "CoreServices")
osx.framework(name = "CoreText")
osx.framework(name = "CFNetwork")
osx.framework(name = "Foundation")
osx.framework(name = "ImageIO")
osx.framework(name = "Kernel")
osx.framework(name = "OSLog")
osx.framework(name = "Security")
osx.framework(name = "SystemConfiguration")

register_toolchains(
    "@toolchains_llvm_bootstrapped//toolchain:all",
)

bazel_dep(name = "rules_cc", version = "0.2.16")
bazel_dep(name = "rules_platform", version = "0.1.0")
bazel_dep(name = "rules_rust", version = "0.68.1")
single_version_override(
    module_name = "rules_rust",
    patch_strip = 1,
    patches = [
        "//patches:rules_rust.patch",
        "//patches:rules_rust_windows_gnu.patch",
        "//patches:rules_rust_musl.patch",
    ],
)

RUST_TRIPLES = [
    "aarch64-unknown-linux-musl",
    "aarch64-apple-darwin",
    "aarch64-pc-windows-gnullvm",
    "x86_64-unknown-linux-musl",
    "x86_64-apple-darwin",
    "x86_64-pc-windows-gnullvm",
]

rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(
    edition = "2024",
    extra_target_triples = RUST_TRIPLES,
    versions = ["1.90.0"],
)
use_repo(rust, "rust_toolchains")

register_toolchains("@rust_toolchains//:all")

bazel_dep(name = "rules_rs", version = "0.0.23")

crate = use_extension("@rules_rs//rs:extensions.bzl", "crate")
crate.from_cargo(
    cargo_lock = "//codex-rs:Cargo.lock",
    cargo_toml = "//codex-rs:Cargo.toml",
    platform_triples = RUST_TRIPLES,
)

bazel_dep(name = "openssl", version = "3.5.4.bcr.0")

crate.annotation(
    build_script_data = [
        "@openssl//:gen_dir",
    ],
    build_script_env = {
        "OPENSSL_DIR": "$(execpath @openssl//:gen_dir)",
        "OPENSSL_NO_VENDOR": "1",
        "OPENSSL_STATIC": "1",
    },
    crate = "openssl-sys",
    data = ["@openssl//:gen_dir"],
)

inject_repo(crate, "openssl")

# Fix readme inclusions
crate.annotation(
    crate = "windows-link",
    patch_args = ["-p1"],
    patches = [
        "//patches:windows-link.patch"
    ],
)

WINDOWS_IMPORT_LIB = """
load("@rules_cc//cc:defs.bzl", "cc_import")

cc_import(
    name = "windows_import_lib",
    static_library = glob(["lib/*.a"])[0],
)
"""

crate.annotation(
    additive_build_file_content = WINDOWS_IMPORT_LIB,
    crate = "windows_x86_64_gnullvm",
    gen_build_script = "off",
    deps = [":windows_import_lib"],
)
crate.annotation(
    additive_build_file_content = WINDOWS_IMPORT_LIB,
    crate = "windows_aarch64_gnullvm",
    gen_build_script = "off",
    deps = [":windows_import_lib"],
)
use_repo(crate, "crates")

rbe_platform_repository = use_repo_rule("//:rbe.bzl", "rbe_platform_repository")

rbe_platform_repository(
    name = "rbe_platform",
)

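MODULE.bazel instantiates `@rbe_platform` through a `use_repo_rule` pointing at `//:rbe.bzl`, which is not part of this diff. Below is a minimal, hypothetical sketch of what such a repository rule could look like, assuming a BuildBuddy-style executor; the constraint values and container image are placeholders (the image name is borrowed from the Dockerfile comment above), not values taken from the PR.

# Hypothetical sketch of //:rbe.bzl; the real file is not included in this diff.
def _rbe_platform_repository_impl(rctx):
    # Write a BUILD file whose default target is the remote-execution platform,
    # so that the `@rbe_platform` label (see the //:rbe alias) resolves to it.
    rctx.file("BUILD.bazel", """
platform(
    name = "rbe_platform",
    constraint_values = [
        "@platforms//os:linux",
        "@platforms//cpu:x86_64",
    ],
    exec_properties = {
        # Placeholder image; the actual exec_properties live in the real rbe.bzl.
        "container-image": "docker://docker.io/mbolin491/codex-bazel:latest",
        "OSFamily": "Linux",
    },
)
""")

rbe_platform_repository = repository_rule(
    implementation = _rbe_platform_repository_impl,
)
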
1097  MODULE.bazel.lock  generated  Normal file
File diff suppressed because one or more lines are too long

@@ -10,6 +10,7 @@ from_date = "2024-10-01"
to_date = "2024-10-15"
target_app = "cli"

# Test announcement only for local build version until 2026-01-10 excluded (past)
[[announcements]]
content = "This is a test announcement"
version_regex = "^0\\.0\\.0$"

@@ -95,7 +95,6 @@ function detectPackageManager() {
    return "bun";
  }


  if (
    __dirname.includes(".bun/install/global") ||
    __dirname.includes(".bun\\install\\global")

1  codex-rs/BUILD.bazel  Normal file
@@ -0,0 +1 @@

6  codex-rs/ansi-escape/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "ansi-escape",
    crate_name = "codex_ansi_escape",
)

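Each crate's BUILD.bazel delegates to a `codex_rust_crate` macro loaded from `//:defs.bzl`, which this diff does not include. The following is a rough, hypothetical sketch of the shape such a macro might take, using only standard `rules_rust` rules; the default glob, the test-target naming, and the omission of the integration-test/`@crates` dependency wiring are assumptions, not the repository's actual implementation.

# Hypothetical sketch of the codex_rust_crate macro from //:defs.bzl (not shown in this diff).
load("@rules_rust//rust:defs.bzl", "rust_library", "rust_test")

def codex_rust_crate(
        name,
        crate_name,
        crate_srcs = None,
        crate_features = [],
        compile_data = [],
        test_tags = [],
        **kwargs):
    """Defines a rust_library plus a unit-test target for one Cargo crate directory."""
    srcs = crate_srcs or native.glob(["src/**/*.rs"])

    rust_library(
        name = name,
        crate_name = crate_name,
        srcs = srcs,
        crate_features = crate_features,
        compile_data = compile_data,
        edition = "2024",
        # Third-party dependencies would come from the @crates hub that
        # MODULE.bazel generates via rules_rs; that wiring is omitted here.
        **kwargs
    )

    rust_test(
        name = name + "_unit_test",
        crate = ":" + name,
        tags = test_tags,
    )
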
6  codex-rs/app-server-protocol/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "app-server-protocol",
    crate_name = "codex_app_server_protocol",
)

6  codex-rs/app-server-test-client/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "codex-app-server-test-client",
    crate_name = "codex_app_server_test_client",
)

@@ -545,7 +545,7 @@ impl CodexClient {
                    print!("{}", event.delta);
                    std::io::stdout().flush().ok();
                }
                EventMsg::TaskComplete(event) => {
                EventMsg::TurnComplete(event) => {
                    println!("\n[task complete: {event:?}]");
                    break;
                }

8  codex-rs/app-server/BUILD.bazel  Normal file
@@ -0,0 +1,8 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "app-server",
    crate_name = "codex_app_server",
    integration_deps_extra = ["//codex-rs/app-server/tests/common:common"],
    test_tags = ["no-sandbox"],
)

@@ -52,6 +52,10 @@ Clients must send a single `initialize` request before invoking any other method

Applications building on top of `codex app-server` should identify themselves via the `clientInfo` parameter.

**Important**: `clientInfo.name` is used to identify the client for the OpenAI Compliance Logs Platform. If
you are developing a new Codex integration that is intended for enterprise use, please contact us to get it
added to a known clients list. For more context: https://chatgpt.com/admin/api-reference#tag/Logs:-Codex

Example (from OpenAI's official VSCode extension):

```json

@@ -60,7 +64,7 @@ Example (from OpenAI's official VSCode extension):
  "id": 0,
  "params": {
    "clientInfo": {
      "name": "codex-vscode",
      "name": "codex_vscode",
      "title": "Codex VS Code Extension",
      "version": "0.1.0"
    }

@@ -106,7 +106,7 @@ pub(crate) async fn apply_bespoke_event_handling(
        msg,
    } = event;
    match msg {
        EventMsg::TaskComplete(_ev) => {
        EventMsg::TurnComplete(_ev) => {
            handle_turn_complete(
                conversation_id,
                event_turn_id,

@@ -3573,7 +3573,11 @@ impl CodexMessageProcessor {
            // JSON-serializing the `Event` as-is, but these should
            // be migrated to be variants of `ServerNotification`
            // instead.
            let method = format!("codex/event/{}", event.msg);
            let event_formatted = match &event.msg {
                EventMsg::TurnStarted(_) => "task_started",
                EventMsg::TurnComplete(_) => "task_complete",
                _ => &event.msg.to_string(),
            };
            let mut params = match serde_json::to_value(event.clone()) {
                Ok(serde_json::Value::Object(map)) => map,
                Ok(_) => {

@@ -3592,7 +3596,7 @@ impl CodexMessageProcessor {

            outgoing_for_task
                .send_notification(OutgoingNotification {
                    method,
                    method: format!("codex/event/{event_formatted}"),
                    params: Some(params.into()),
                })
                .await;

@@ -3678,6 +3682,16 @@ impl CodexMessageProcessor {
    }

    async fn upload_feedback(&self, request_id: RequestId, params: FeedbackUploadParams) {
        if !self.config.feedback_enabled {
            let error = JSONRPCErrorError {
                code: INVALID_REQUEST_ERROR_CODE,
                message: "sending feedback is disabled by configuration".to_string(),
                data: None,
            };
            self.outgoing.send_error(request_id, error).await;
            return;
        }

        let FeedbackUploadParams {
            classification,
            reason,

@@ -92,13 +92,18 @@ pub async fn run_main(

    let feedback = CodexFeedback::new();

    let otel =
        codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION")).map_err(|e| {
            std::io::Error::new(
                ErrorKind::InvalidData,
                format!("error loading otel config: {e}"),
            )
        })?;
    let otel = codex_core::otel_init::build_provider(
        &config,
        env!("CARGO_PKG_VERSION"),
        Some("codex_app_server"),
        false,
    )
    .map_err(|e| {
        std::io::Error::new(
            ErrorKind::InvalidData,
            format!("error loading otel config: {e}"),
        )
    })?;

    // Install a simple subscriber so `tracing` output is visible. Users can
    // control the log level with `RUST_LOG`.

@@ -21,8 +21,10 @@ use codex_core::AuthManager;
use codex_core::ThreadManager;
use codex_core::config::Config;
use codex_core::config_loader::LoaderOverrides;
use codex_core::default_client::SetOriginatorError;
use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
use codex_core::default_client::set_default_originator;
use codex_feedback::CodexFeedback;
use codex_protocol::protocol::SessionSource;
use toml::Value as TomlValue;

@@ -121,6 +123,27 @@ impl MessageProcessor {
            title: _title,
            version,
        } = params.client_info;
        if let Err(error) = set_default_originator(name.clone()) {
            match error {
                SetOriginatorError::InvalidHeaderValue => {
                    let error = JSONRPCErrorError {
                        code: INVALID_REQUEST_ERROR_CODE,
                        message: format!(
                            "Invalid clientInfo.name: '{name}'. Must be a valid HTTP header value."
                        ),
                        data: None,
                    };
                    self.outgoing.send_error(request_id, error).await;
                    return;
                }
                SetOriginatorError::AlreadyInitialized => {
                    // No-op. This is expected to happen if the originator is already set via env var.
                    // TODO(owen): Once we remove support for CODEX_INTERNAL_ORIGINATOR_OVERRIDE,
                    // this will be an unexpected state and we can return a JSON-RPC error indicating
                    // internal server error.
                }
            }
        }
        let user_agent_suffix = format!("{name}; {version}");
        if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
            *suffix = Some(user_agent_suffix);

7  codex-rs/app-server/tests/common/BUILD.bazel  Normal file
@@ -0,0 +1,7 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "common",
    crate_name = "app_test_support",
    crate_srcs = glob(["*.rs"]),
)

@@ -17,6 +17,7 @@ pub use core_test_support::format_with_current_shell_non_login;
pub use core_test_support::test_path_buf_with_windows;
pub use core_test_support::test_tmp_path;
pub use core_test_support::test_tmp_path_buf;
pub use mcp_process::DEFAULT_CLIENT_NAME;
pub use mcp_process::McpProcess;
pub use mock_model_server::create_mock_responses_server_repeating_assistant;
pub use mock_model_server::create_mock_responses_server_sequence;

@@ -66,6 +66,8 @@ pub struct McpProcess {
    pending_messages: VecDeque<JSONRPCMessage>,
}

pub const DEFAULT_CLIENT_NAME: &str = "codex-app-server-tests";

impl McpProcess {
    pub async fn new(codex_home: &Path) -> anyhow::Result<Self> {
        Self::new_with_env(codex_home, &[]).await

@@ -136,33 +138,62 @@ impl McpProcess {

    /// Performs the initialization handshake with the MCP server.
    pub async fn initialize(&mut self) -> anyhow::Result<()> {
        let params = Some(serde_json::to_value(InitializeParams {
            client_info: ClientInfo {
                name: "codex-app-server-tests".to_string(),
        let initialized = self
            .initialize_with_client_info(ClientInfo {
                name: DEFAULT_CLIENT_NAME.to_string(),
                title: None,
                version: "0.1.0".to_string(),
            },
        })?);
        let req_id = self.send_request("initialize", params).await?;
        let initialized = self.read_jsonrpc_message().await?;
        let JSONRPCMessage::Response(response) = initialized else {
            })
            .await?;
        let JSONRPCMessage::Response(_) = initialized else {
            unreachable!("expected JSONRPCMessage::Response for initialize, got {initialized:?}");
        };
        if response.id != RequestId::Integer(req_id) {
            anyhow::bail!(
                "initialize response id mismatch: expected {}, got {:?}",
                req_id,
                response.id
            );
        }

        // Send notifications/initialized to ack the response.
        self.send_notification(ClientNotification::Initialized)
            .await?;

        Ok(())
    }

    /// Sends initialize with the provided client info and returns the response/error message.
    pub async fn initialize_with_client_info(
        &mut self,
        client_info: ClientInfo,
    ) -> anyhow::Result<JSONRPCMessage> {
        let params = Some(serde_json::to_value(InitializeParams { client_info })?);
        let request_id = self.send_request("initialize", params).await?;
        let message = self.read_jsonrpc_message().await?;
        match message {
            JSONRPCMessage::Response(response) => {
                if response.id != RequestId::Integer(request_id) {
                    anyhow::bail!(
                        "initialize response id mismatch: expected {}, got {:?}",
                        request_id,
                        response.id
                    );
                }

                // Send notifications/initialized to ack the response.
                self.send_notification(ClientNotification::Initialized)
                    .await?;

                Ok(JSONRPCMessage::Response(response))
            }
            JSONRPCMessage::Error(error) => {
                if error.id != RequestId::Integer(request_id) {
                    anyhow::bail!(
                        "initialize error id mismatch: expected {}, got {:?}",
                        request_id,
                        error.id
                    );
                }
                Ok(JSONRPCMessage::Error(error))
            }
            JSONRPCMessage::Notification(notification) => {
                anyhow::bail!("unexpected JSONRPCMessage::Notification: {notification:?}");
            }
            JSONRPCMessage::Request(request) => {
                anyhow::bail!("unexpected JSONRPCMessage::Request: {request:?}");
            }
        }
    }

    /// Send a `newConversation` JSON-RPC request.
    pub async fn send_new_conversation_request(
        &mut self,

@@ -283,7 +283,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() -> Result<()> {
    )
    .await?;

    // Wait for first TaskComplete
    // Wait for first TurnComplete
    let _ = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("codex/event/task_complete"),

@@ -1,4 +1,5 @@
use anyhow::Result;
use app_test_support::DEFAULT_CLIENT_NAME;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::GetUserAgentResponse;

@@ -25,13 +26,13 @@ async fn get_user_agent_returns_current_codex_user_agent() -> Result<()> {
    .await??;

    let os_info = os_info::get();
    let originator = codex_core::default_client::originator().value.as_str();
    let originator = DEFAULT_CLIENT_NAME;
    let os_type = os_info.os_type();
    let os_version = os_info.version();
    let architecture = os_info.architecture().unwrap_or("unknown");
    let terminal_ua = codex_core::terminal::user_agent();
    let user_agent = format!(
        "{originator}/0.0.0 ({os_type} {os_version}; {architecture}) {terminal_ua} (codex-app-server-tests; 0.1.0)"
        "{originator}/0.0.0 ({os_type} {os_version}; {architecture}) {terminal_ua} ({DEFAULT_CLIENT_NAME}; 0.1.0)"
    );

    let received: GetUserAgentResponse = to_response(response)?;

137  codex-rs/app-server/tests/suite/v2/initialize.rs  Normal file
@@ -0,0 +1,137 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_mock_responses_server_sequence_unchecked;
use app_test_support::to_response;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::InitializeResponse;
use codex_app_server_protocol::JSONRPCMessage;
use pretty_assertions::assert_eq;
use std::path::Path;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);

#[tokio::test]
async fn initialize_uses_client_info_name_as_originator() -> Result<()> {
    let responses = Vec::new();
    let server = create_mock_responses_server_sequence_unchecked(responses).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    let mut mcp = McpProcess::new(codex_home.path()).await?;

    let message = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.initialize_with_client_info(ClientInfo {
            name: "codex_vscode".to_string(),
            title: Some("Codex VS Code Extension".to_string()),
            version: "0.1.0".to_string(),
        }),
    )
    .await??;

    let JSONRPCMessage::Response(response) = message else {
        anyhow::bail!("expected initialize response, got {message:?}");
    };
    let InitializeResponse { user_agent } = to_response::<InitializeResponse>(response)?;

    assert!(user_agent.starts_with("codex_vscode/"));
    Ok(())
}

#[tokio::test]
async fn initialize_respects_originator_override_env_var() -> Result<()> {
    let responses = Vec::new();
    let server = create_mock_responses_server_sequence_unchecked(responses).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    let mut mcp = McpProcess::new_with_env(
        codex_home.path(),
        &[(
            "CODEX_INTERNAL_ORIGINATOR_OVERRIDE",
            Some("codex_originator_via_env_var"),
        )],
    )
    .await?;

    let message = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.initialize_with_client_info(ClientInfo {
            name: "codex_vscode".to_string(),
            title: Some("Codex VS Code Extension".to_string()),
            version: "0.1.0".to_string(),
        }),
    )
    .await??;

    let JSONRPCMessage::Response(response) = message else {
        anyhow::bail!("expected initialize response, got {message:?}");
    };
    let InitializeResponse { user_agent } = to_response::<InitializeResponse>(response)?;

    assert!(user_agent.starts_with("codex_originator_via_env_var/"));
    Ok(())
}

#[tokio::test]
async fn initialize_rejects_invalid_client_name() -> Result<()> {
    let responses = Vec::new();
    let server = create_mock_responses_server_sequence_unchecked(responses).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;
    let mut mcp = McpProcess::new_with_env(
        codex_home.path(),
        &[("CODEX_INTERNAL_ORIGINATOR_OVERRIDE", None)],
    )
    .await?;

    let message = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.initialize_with_client_info(ClientInfo {
            name: "bad\rname".to_string(),
            title: Some("Bad Client".to_string()),
            version: "0.1.0".to_string(),
        }),
    )
    .await??;

    let JSONRPCMessage::Error(error) = message else {
        anyhow::bail!("expected initialize error, got {message:?}");
    };

    assert_eq!(error.error.code, -32600);
    assert_eq!(
        error.error.message,
        "Invalid clientInfo.name: 'bad\rname'. Must be a valid HTTP header value."
    );
    assert_eq!(error.error.data, None);
    Ok(())
}

// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(
    codex_home: &Path,
    server_uri: &str,
    approval_policy: &str,
) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "mock-model"
approval_policy = "{approval_policy}"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}

@@ -1,5 +1,6 @@
mod account;
mod config_rpc;
mod initialize;
mod model_list;
mod output_schema;
mod rate_limits;

@@ -8,6 +8,7 @@ use app_test_support::create_mock_responses_server_sequence_unchecked;
use app_test_support::create_shell_command_sse_response;
use app_test_support::format_with_current_shell_display;
use app_test_support::to_response;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::CommandExecutionApprovalDecision;
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
use codex_app_server_protocol::CommandExecutionStatus;

@@ -40,6 +41,76 @@ use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const TEST_ORIGINATOR: &str = "codex_vscode";

#[tokio::test]
async fn turn_start_sends_originator_header() -> Result<()> {
    let responses = vec![create_final_assistant_message_sse_response("Done")?];
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri(), "never")?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.initialize_with_client_info(ClientInfo {
            name: TEST_ORIGINATOR.to_string(),
            title: Some("Codex VS Code Extension".to_string()),
            version: "0.1.0".to_string(),
        }),
    )
    .await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Hello".to_string(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let requests = server
        .received_requests()
        .await
        .expect("failed to fetch received requests");
    assert!(!requests.is_empty());
    for request in requests {
        let originator = request
            .headers
            .get("originator")
            .expect("originator header missing");
        assert_eq!(originator.to_str()?, TEST_ORIGINATOR);
    }

    Ok(())
}

#[tokio::test]
async fn turn_start_emits_notifications_and_accepts_model_override() -> Result<()> {

11  codex-rs/apply-patch/BUILD.bazel  Normal file
@@ -0,0 +1,11 @@
load("//:defs.bzl", "codex_rust_crate")

exports_files(["apply_patch_tool_instructions.md"])

codex_rust_crate(
    name = "apply-patch",
    crate_name = "codex_apply_patch",
    compile_data = [
        "apply_patch_tool_instructions.md",
    ],
)

6  codex-rs/arg0/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "arg0",
    crate_name = "codex_arg0",
)

@@ -145,11 +145,41 @@ where
/// that `apply_patch` can be on the PATH without requiring the user to
/// install a separate `apply_patch` executable, simplifying the deployment of
/// Codex CLI.
/// Note: In debug builds the temp-dir guard is disabled to ease local testing.
///
/// IMPORTANT: This function modifies the PATH environment variable, so it MUST
/// be called before multiple threads are spawned.
pub fn prepend_path_entry_for_codex_aliases() -> std::io::Result<TempDir> {
    let temp_dir = TempDir::new()?;
    let codex_home = codex_core::config::find_codex_home()?;
    #[cfg(not(debug_assertions))]
    {
        // Guard against placing helpers in system temp directories outside debug builds.
        let temp_root = std::env::temp_dir();
        if codex_home.starts_with(&temp_root) {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidInput,
                format!(
                    "Refusing to create helper binaries under temporary dir {temp_root:?} (codex_home: {codex_home:?})"
                ),
            ));
        }
    }

    std::fs::create_dir_all(&codex_home)?;
    // Use a CODEX_HOME-scoped temp root to avoid cluttering the top-level directory.
    let temp_root = codex_home.join("tmp").join("path");
    std::fs::create_dir_all(&temp_root)?;
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;

        // Ensure only the current user can access the temp directory.
        std::fs::set_permissions(&temp_root, std::fs::Permissions::from_mode(0o700))?;
    }

    let temp_dir = tempfile::Builder::new()
        .prefix("codex-arg0")
        .tempdir_in(&temp_root)?;
    let path = temp_dir.path();

    for filename in &[

6  codex-rs/async-utils/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "async-utils",
    crate_name = "codex_async_utils",
)

7  codex-rs/backend-client/BUILD.bazel  Normal file
@@ -0,0 +1,7 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "backend-client",
    crate_name = "codex_backend_client",
    compile_data = glob(["tests/fixtures/**"]),
)

6  codex-rs/chatgpt/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "chatgpt",
    crate_name = "codex_chatgpt",
)

10  codex-rs/cli/BUILD.bazel  Normal file
@@ -0,0 +1,10 @@
load("//:defs.bzl", "codex_rust_crate", "multiplatform_binaries")

codex_rust_crate(
    name = "cli",
    crate_name = "codex_cli",
)

multiplatform_binaries(
    name = "codex",
)

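The `multiplatform_binaries` macro is also loaded from `//:defs.bzl` and is not shown in this diff. One plausible shape, sketched under the assumption that it re-targets the release binary for each supported Rust triple using `platform_data` from `rules_platform` (which MODULE.bazel depends on); the load path, platform labels, mapping, and target names below are all assumptions rather than the repository's actual definitions.

# Hypothetical sketch of multiplatform_binaries from //:defs.bzl (not shown in this diff).
# Assumes rules_platform's platform_data rule; the load path is an assumption.
load("@rules_platform//platform_data:defs.bzl", "platform_data")

# Assumed mapping from Rust triples to platform labels; the real definitions
# would live elsewhere in the repository.
_PLATFORM_BY_TRIPLE = {
    "aarch64-apple-darwin": "//platforms:aarch64-apple-darwin",
    "x86_64-unknown-linux-musl": "//platforms:x86_64-unknown-linux-musl",
    # ... one entry per triple in RUST_TRIPLES ...
}

def multiplatform_binaries(name):
    """Produces one re-targeted copy of the release binary per supported triple."""
    for triple, platform in _PLATFORM_BY_TRIPLE.items():
        platform_data(
            name = "{}-{}".format(name, triple),
            target = ":cli",  # assumed: the binary produced by codex_rust_crate above
            platform = platform,
        )
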
@@ -14,6 +14,18 @@ use std::io::IsTerminal;
use std::io::Read;
use std::path::PathBuf;

const CHATGPT_LOGIN_DISABLED_MESSAGE: &str =
    "ChatGPT login is disabled. Use API key login instead.";
const API_KEY_LOGIN_DISABLED_MESSAGE: &str =
    "API key login is disabled. Use ChatGPT login instead.";
const LOGIN_SUCCESS_MESSAGE: &str = "Successfully logged in";

fn print_login_server_start(actual_port: u16, auth_url: &str) {
    eprintln!(
        "Starting local login server on http://localhost:{actual_port}.\nIf your browser did not open, navigate to this URL to authenticate:\n\n{auth_url}"
    );
}

pub async fn login_with_chatgpt(
    codex_home: PathBuf,
    forced_chatgpt_workspace_id: Option<String>,

@@ -27,10 +39,7 @@ pub async fn login_with_chatgpt(
    );
    let server = run_login_server(opts)?;

    eprintln!(
        "Starting local login server on http://localhost:{}.\nIf your browser did not open, navigate to this URL to authenticate:\n\n{}",
        server.actual_port, server.auth_url,
    );
    print_login_server_start(server.actual_port, &server.auth_url);

    server.block_until_done().await
}

@@ -39,7 +48,7 @@ pub async fn run_login_with_chatgpt(cli_config_overrides: CliConfigOverrides) ->
    let config = load_config_or_exit(cli_config_overrides).await;

    if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
        eprintln!("ChatGPT login is disabled. Use API key login instead.");
        eprintln!("{CHATGPT_LOGIN_DISABLED_MESSAGE}");
        std::process::exit(1);
    }

@@ -53,7 +62,7 @@ pub async fn run_login_with_chatgpt(cli_config_overrides: CliConfigOverrides) ->
    .await
    {
        Ok(_) => {
            eprintln!("Successfully logged in");
            eprintln!("{LOGIN_SUCCESS_MESSAGE}");
            std::process::exit(0);
        }
        Err(e) => {

@@ -70,7 +79,7 @@ pub async fn run_login_with_api_key(
    let config = load_config_or_exit(cli_config_overrides).await;

    if matches!(config.forced_login_method, Some(ForcedLoginMethod::Chatgpt)) {
        eprintln!("API key login is disabled. Use ChatGPT login instead.");
        eprintln!("{API_KEY_LOGIN_DISABLED_MESSAGE}");
        std::process::exit(1);
    }

@@ -80,7 +89,7 @@ pub async fn run_login_with_api_key(
        config.cli_auth_credentials_store_mode,
    ) {
        Ok(_) => {
            eprintln!("Successfully logged in");
            eprintln!("{LOGIN_SUCCESS_MESSAGE}");
            std::process::exit(0);
        }
        Err(e) => {

@@ -125,7 +134,7 @@ pub async fn run_login_with_device_code(
) -> ! {
    let config = load_config_or_exit(cli_config_overrides).await;
    if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
        eprintln!("ChatGPT login is disabled. Use API key login instead.");
        eprintln!("{CHATGPT_LOGIN_DISABLED_MESSAGE}");
        std::process::exit(1);
    }
    let forced_chatgpt_workspace_id = config.forced_chatgpt_workspace_id.clone();

@@ -140,7 +149,7 @@ pub async fn run_login_with_device_code(
    }
    match run_device_code_login(opts).await {
        Ok(()) => {
            eprintln!("Successfully logged in");
            eprintln!("{LOGIN_SUCCESS_MESSAGE}");
            std::process::exit(0);
        }
        Err(e) => {

@@ -150,6 +159,68 @@ pub async fn run_login_with_device_code(
    }
}

/// Prefers device-code login (with `open_browser = false`) when headless environment is detected, but keeps
/// `codex login` working in environments where device-code may be disabled/feature-gated.
/// If `run_device_code_login` returns `ErrorKind::NotFound` ("device-code unsupported"), this
/// falls back to starting the local browser login server.
pub async fn run_login_with_device_code_fallback_to_browser(
    cli_config_overrides: CliConfigOverrides,
    issuer_base_url: Option<String>,
    client_id: Option<String>,
) -> ! {
    let config = load_config_or_exit(cli_config_overrides).await;
    if matches!(config.forced_login_method, Some(ForcedLoginMethod::Api)) {
        eprintln!("{CHATGPT_LOGIN_DISABLED_MESSAGE}");
        std::process::exit(1);
    }

    let forced_chatgpt_workspace_id = config.forced_chatgpt_workspace_id.clone();
    let mut opts = ServerOptions::new(
        config.codex_home,
        client_id.unwrap_or(CLIENT_ID.to_string()),
        forced_chatgpt_workspace_id,
        config.cli_auth_credentials_store_mode,
    );
    if let Some(iss) = issuer_base_url {
        opts.issuer = iss;
    }
    opts.open_browser = false;

    match run_device_code_login(opts.clone()).await {
        Ok(()) => {
            eprintln!("{LOGIN_SUCCESS_MESSAGE}");
            std::process::exit(0);
        }
        Err(e) => {
            if e.kind() == std::io::ErrorKind::NotFound {
                eprintln!("Device code login is not enabled; falling back to browser login.");
                match run_login_server(opts) {
                    Ok(server) => {
                        print_login_server_start(server.actual_port, &server.auth_url);
                        match server.block_until_done().await {
                            Ok(()) => {
                                eprintln!("{LOGIN_SUCCESS_MESSAGE}");
                                std::process::exit(0);
                            }
                            Err(e) => {
                                eprintln!("Error logging in: {e}");
                                std::process::exit(1);
                            }
                        }
                    }
                    Err(e) => {
                        eprintln!("Error logging in: {e}");
                        std::process::exit(1);
                    }
                }
            } else {
                eprintln!("Error logging in with device code: {e}");
                std::process::exit(1);
            }
        }
    }
}

pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! {
    let config = load_config_or_exit(cli_config_overrides).await;


@@ -14,9 +14,11 @@ use codex_cli::login::run_login_status;
use codex_cli::login::run_login_with_api_key;
use codex_cli::login::run_login_with_chatgpt;
use codex_cli::login::run_login_with_device_code;
use codex_cli::login::run_login_with_device_code_fallback_to_browser;
use codex_cli::login::run_logout;
use codex_cloud_tasks::Cli as CloudTasksCli;
use codex_common::CliConfigOverrides;
use codex_core::env::is_headless_environment;
use codex_exec::Cli as ExecCli;
use codex_exec::Command as ExecCommand;
use codex_exec::ReviewArgs;

@@ -531,6 +533,13 @@ async fn cli_main(codex_linux_sandbox_exe: Option<PathBuf>) -> anyhow::Result<()
            } else if login_cli.with_api_key {
                let api_key = read_api_key_from_stdin();
                run_login_with_api_key(login_cli.config_overrides, api_key).await;
            } else if is_headless_environment() {
                run_login_with_device_code_fallback_to_browser(
                    login_cli.config_overrides,
                    login_cli.issuer_base_url,
                    login_cli.client_id,
                )
                .await;
            } else {
                run_login_with_chatgpt(login_cli.config_overrides).await;
            }

10  codex-rs/cloud-tasks-client/BUILD.bazel  Normal file
@@ -0,0 +1,10 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "cloud-tasks-client",
    crate_name = "codex_cloud_tasks_client",
    crate_features = [
        "mock",
        "online",
    ],
)

6  codex-rs/cloud-tasks/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "cloud-tasks",
    crate_name = "codex_cloud_tasks",
)

6  codex-rs/codex-api/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "codex-api",
    crate_name = "codex_api",
)

@@ -301,7 +301,9 @@ pub async fn process_sse(
                    }
                }
            }
            _ => {}
            _ => {
                trace!("unhandled SSE event: {:#?}", event.kind);
            }
        }
    }
}

6  codex-rs/codex-backend-openapi-models/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "codex-backend-openapi-models",
    crate_name = "codex_backend_openapi_models",
)

6  codex-rs/codex-client/BUILD.bazel  Normal file
@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "codex-client",
    crate_name = "codex_client",
)

@@ -7,6 +7,7 @@ pub enum TransportError {
    #[error("http {status}: {body:?}")]
    Http {
        status: StatusCode,
        url: Option<String>,
        headers: Option<HeaderMap>,
        body: Option<String>,
    },

@@ -131,6 +131,7 @@ impl HttpTransport for ReqwestTransport {
            );
        }

        let url = req.url.clone();
        let builder = self.build(req)?;
        let resp = builder.send().await.map_err(Self::map_error)?;
        let status = resp.status();

@@ -140,6 +141,7 @@ impl HttpTransport for ReqwestTransport {
            let body = String::from_utf8(bytes.to_vec()).ok();
            return Err(TransportError::Http {
                status,
                url: Some(url),
                headers: Some(headers),
                body,
            });

@@ -161,6 +163,7 @@ impl HttpTransport for ReqwestTransport {
            );
        }

        let url = req.url.clone();
        let builder = self.build(req)?;
        let resp = builder.send().await.map_err(Self::map_error)?;
        let status = resp.status();

@@ -169,6 +172,7 @@ impl HttpTransport for ReqwestTransport {
            let body = resp.text().await.ok();
            return Err(TransportError::Http {
                status,
                url: Some(url),
                headers: Some(headers),
                body,
            });

11  codex-rs/common/BUILD.bazel  Normal file
@@ -0,0 +1,11 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "common",
    crate_name = "codex_common",
    crate_features = [
        "cli",
        "elapsed",
        "sandbox_summary",
    ],
)

40  codex-rs/core/BUILD.bazel  Normal file
@@ -0,0 +1,40 @@
load("//:defs.bzl", "codex_rust_crate")

codex_rust_crate(
    name = "core",
    crate_name = "codex_core",
    # TODO(mbolin): Eliminate the use of features in the version of the
    # rust_library() that is used by rust_binary() rules for release artifacts
    # such as the Codex CLI.
    crate_features = ["deterministic_process_ids", "test-support"],
    compile_data = glob(
        include = ["**"],
        exclude = [
            "**/* *",
            "BUILD.bazel",
            "Cargo.toml",
        ],
        allow_empty = True,
    ),
    integration_compile_data_extra = [
        "//codex-rs/apply-patch:apply_patch_tool_instructions.md",
        "prompt.md",
    ],
    # This is a bit of a hack, but empirically, some of our integration tests
    # are relying on the presence of this file as a repo root marker. When
    # running tests locally, this "just works," but in remote execution,
    # the working directory is different and so the file is not found unless it
    # is explicitly added as test data.
    #
    # TODO(aibrahim): Update the tests so that `just bazel-remote-test` succeeds
    # without this workaround.
    test_data_extra = ["//:AGENTS.md"],
    integration_deps_extra = ["//codex-rs/core/tests/common:common"],
    test_tags = ["no-sandbox"],
    extra_binaries = [
        "//codex-rs/linux-sandbox:codex-linux-sandbox",
        "//codex-rs/rmcp-client:test_stdio_server",
        "//codex-rs/rmcp-client:test_streamable_http_server",
        "//codex-rs/cli:codex",
    ],
)

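The core crate is the heaviest user of the macro's extra attributes. Below is a hedged sketch of the kind of rust_test target those attributes could plausibly expand to inside `codex_rust_crate`; the target name, `crate_root`, and overall shape are assumptions, and only the labels are taken from the BUILD file above.

# Hypothetical expansion of the integration-test half of codex_rust_crate for
# //codex-rs/core; the real macro is not shown in this diff.
load("@rules_rust//rust:defs.bzl", "rust_test")

rust_test(
    name = "core_integration_test",    # assumed name
    crate_root = "tests/all.rs",       # assumed entry point
    srcs = glob(["tests/**/*.rs"]),
    compile_data = [
        "//codex-rs/apply-patch:apply_patch_tool_instructions.md",
        "prompt.md",
    ],
    data = [
        # Repo-root marker plus helper binaries, per the comments above.
        "//:AGENTS.md",
        "//codex-rs/linux-sandbox:codex-linux-sandbox",
        "//codex-rs/rmcp-client:test_stdio_server",
        "//codex-rs/rmcp-client:test_streamable_http_server",
        "//codex-rs/cli:codex",
    ],
    deps = [
        ":core",
        "//codex-rs/core/tests/common:common",
    ],
    tags = ["no-sandbox"],
)
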
7  codex-rs/core/hierarchical_agents_message.md  Normal file
@@ -0,0 +1,7 @@
Files called AGENTS.md commonly appear in many places inside a container - at "/", in "~", deep within git repositories, or in any other directory; their location is not limited to version-controlled folders.

Their purpose is to pass along human guidance to you, the agent. Such guidance can include coding standards, explanations of the project layout, steps for building or testing, and even wording that must accompany a GitHub pull-request description produced by the agent; all of it is to be followed.

Each AGENTS.md governs the entire directory that contains it and every child directory beneath that point. Whenever you change a file, you have to comply with every AGENTS.md whose scope covers that file. Naming conventions, stylistic rules and similar directives are restricted to the code that falls inside that scope unless the document explicitly states otherwise.

When two AGENTS.md files disagree, the one located deeper in the directory structure overrides the higher-level file, while instructions given directly in the prompt by the system, developer, or user outrank any AGENTS.md content.

@@ -116,10 +116,10 @@ mod tests {
    use super::*;
    use crate::agent::agent_status_from_event;
    use codex_protocol::protocol::ErrorEvent;
    use codex_protocol::protocol::TaskCompleteEvent;
    use codex_protocol::protocol::TaskStartedEvent;
    use codex_protocol::protocol::TurnAbortReason;
    use codex_protocol::protocol::TurnAbortedEvent;
    use codex_protocol::protocol::TurnCompleteEvent;
    use codex_protocol::protocol::TurnStartedEvent;
    use pretty_assertions::assert_eq;

    #[tokio::test]

@@ -144,7 +144,7 @@ mod tests {

    #[tokio::test]
    async fn on_event_updates_status_from_task_started() {
        let status = agent_status_from_event(&EventMsg::TaskStarted(TaskStartedEvent {
        let status = agent_status_from_event(&EventMsg::TurnStarted(TurnStartedEvent {
            model_context_window: None,
        }));
        assert_eq!(status, Some(AgentStatus::Running));

@@ -152,7 +152,7 @@ mod tests {

    #[tokio::test]
    async fn on_event_updates_status_from_task_complete() {
        let status = agent_status_from_event(&EventMsg::TaskComplete(TaskCompleteEvent {
        let status = agent_status_from_event(&EventMsg::TurnComplete(TurnCompleteEvent {
            last_agent_message: Some("done".to_string()),
        }));
        let expected = AgentStatus::Completed(Some("done".to_string()));

@@ -5,8 +5,8 @@ use codex_protocol::protocol::EventMsg;
/// Returns `None` when the event does not affect status tracking.
pub(crate) fn agent_status_from_event(msg: &EventMsg) -> Option<AgentStatus> {
    match msg {
        EventMsg::TaskStarted(_) => Some(AgentStatus::Running),
        EventMsg::TaskComplete(ev) => Some(AgentStatus::Completed(ev.last_agent_message.clone())),
        EventMsg::TurnStarted(_) => Some(AgentStatus::Running),
        EventMsg::TurnComplete(ev) => Some(AgentStatus::Completed(ev.last_agent_message.clone())),
        EventMsg::TurnAborted(ev) => Some(AgentStatus::Errored(format!("{:?}", ev.reason))),
        EventMsg::Error(ev) => Some(AgentStatus::Errored(ev.message.clone())),
        EventMsg::ShutdownComplete => Some(AgentStatus::Shutdown),

@@ -25,11 +25,13 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
        ApiError::Api { status, message } => CodexErr::UnexpectedStatus(UnexpectedResponseError {
            status,
            body: message,
            url: None,
            request_id: None,
        }),
        ApiError::Transport(transport) => match transport {
            TransportError::Http {
                status,
                url,
                headers,
                body,
            } => {

@@ -71,6 +73,7 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
                CodexErr::UnexpectedStatus(UnexpectedResponseError {
                    status,
                    body: body_text,
                    url,
                    request_id: extract_request_id(headers.as_ref()),
                })
            }

@@ -533,6 +533,7 @@ async fn handle_unauthorized(
|
||||
fn map_unauthorized_status(status: StatusCode) -> CodexErr {
|
||||
map_api_error(ApiError::Transport(TransportError::Http {
|
||||
status,
|
||||
url: None,
|
||||
headers: None,
|
||||
body: None,
|
||||
}))
|
||||
|
||||
@@ -44,9 +44,9 @@ use codex_protocol::protocol::RawResponseItemEvent;
|
||||
use codex_protocol::protocol::ReviewRequest;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::TaskStartedEvent;
|
||||
use codex_protocol::protocol::TurnAbortReason;
|
||||
use codex_protocol::protocol::TurnContextItem;
|
||||
use codex_protocol::protocol::TurnStartedEvent;
|
||||
use codex_rmcp_client::ElicitationResponse;
|
||||
use futures::future::BoxFuture;
|
||||
use futures::prelude::*;
|
||||
@@ -485,7 +485,7 @@ pub(crate) struct SessionSettingsUpdate {
|
||||
|
||||
impl Session {
|
||||
/// Don't expand the number of mutated arguments on config. We are in the process of getting rid of it.
|
||||
fn build_per_turn_config(session_configuration: &SessionConfiguration) -> Config {
|
||||
pub(crate) fn build_per_turn_config(session_configuration: &SessionConfiguration) -> Config {
|
||||
// todo(aibrahim): store this state somewhere else so we don't need to mut config
|
||||
let config = session_configuration.original_config_do_not_use.clone();
|
||||
let mut per_turn_config = (*config).clone();
|
||||
@@ -649,7 +649,7 @@ impl Session {
|
||||
);
|
||||
config.features.emit_metrics(&otel_manager);
|
||||
otel_manager.counter(
|
||||
"codex.session.started",
|
||||
"codex.thread.started",
|
||||
1,
|
||||
&[(
|
||||
"is_git",
|
||||
@@ -1540,6 +1540,24 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the input unchanged if there was no running task to inject it into
|
||||
pub async fn inject_response_items(
|
||||
&self,
|
||||
input: Vec<ResponseInputItem>,
|
||||
) -> Result<(), Vec<ResponseInputItem>> {
|
||||
let mut active = self.active_turn.lock().await;
|
||||
match active.as_mut() {
|
||||
Some(at) => {
|
||||
let mut ts = at.turn_state.lock().await;
|
||||
for item in input {
|
||||
ts.push_pending_input(item);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
None => Err(input),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_pending_input(&self) -> Vec<ResponseInputItem> {
|
||||
let mut active = self.active_turn.lock().await;
|
||||
match active.as_mut() {
|
||||
@@ -2324,9 +2342,9 @@ fn errors_to_info(errors: &[SkillError]) -> Vec<SkillErrorInfo> {
|
||||
/// - If the model requests a function call, we execute it and send the output
|
||||
/// back to the model in the next turn.
|
||||
/// - If the model sends only an assistant message, we record it in the
|
||||
/// conversation history and consider the task complete.
|
||||
/// conversation history and consider the turn complete.
|
||||
///
|
||||
pub(crate) async fn run_task(
|
||||
pub(crate) async fn run_turn(
|
||||
sess: Arc<Session>,
|
||||
turn_context: Arc<TurnContext>,
|
||||
input: Vec<UserInput>,
|
||||
@@ -2342,7 +2360,7 @@ pub(crate) async fn run_task(
|
||||
if total_usage_tokens >= auto_compact_limit {
|
||||
run_auto_compact(&sess, &turn_context).await;
|
||||
}
|
||||
let event = EventMsg::TaskStarted(TaskStartedEvent {
|
||||
let event = EventMsg::TurnStarted(TurnStartedEvent {
|
||||
model_context_window: turn_context.client.get_model_context_window(),
|
||||
});
|
||||
sess.send_event(&turn_context, event).await;
|
||||
@@ -2407,7 +2425,7 @@ pub(crate) async fn run_task(
|
||||
})
|
||||
.map(|user_message| user_message.message())
|
||||
.collect::<Vec<String>>();
|
||||
match run_turn(
|
||||
match run_model_turn(
|
||||
Arc::clone(&sess),
|
||||
Arc::clone(&turn_context),
|
||||
Arc::clone(&turn_diff_tracker),
|
||||
@@ -2484,7 +2502,7 @@ async fn run_auto_compact(sess: &Arc<Session>, turn_context: &Arc<TurnContext>)
|
||||
cwd = %turn_context.cwd.display()
|
||||
)
|
||||
)]
|
||||
async fn run_turn(
|
||||
async fn run_model_turn(
|
||||
sess: Arc<Session>,
|
||||
turn_context: Arc<TurnContext>,
|
||||
turn_diff_tracker: SharedTurnDiffTracker,
|
||||
|
||||
@@ -135,7 +135,7 @@ pub(crate) async fn run_codex_thread_one_shot(
|
||||
while let Ok(event) = io_for_bridge.next_event().await {
|
||||
let should_shutdown = matches!(
|
||||
event.msg,
|
||||
EventMsg::TaskComplete(_) | EventMsg::TurnAborted(_)
|
||||
EventMsg::TurnComplete(_) | EventMsg::TurnAborted(_)
|
||||
);
|
||||
let _ = tx_bridge.send(event).await;
|
||||
if should_shutdown {
|
||||
@@ -253,7 +253,7 @@ async fn shutdown_delegate(codex: &Codex) {
|
||||
while let Ok(event) = codex.next_event().await {
|
||||
if matches!(
|
||||
event.msg,
|
||||
EventMsg::TurnAborted(_) | EventMsg::TaskComplete(_)
|
||||
EventMsg::TurnAborted(_) | EventMsg::TurnComplete(_)
|
||||
) {
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -12,8 +12,8 @@ use crate::features::Feature;
|
||||
use crate::protocol::CompactedItem;
|
||||
use crate::protocol::ContextCompactedEvent;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::TaskStartedEvent;
|
||||
use crate::protocol::TurnContextItem;
|
||||
use crate::protocol::TurnStartedEvent;
|
||||
use crate::protocol::WarningEvent;
|
||||
use crate::truncate::TruncationPolicy;
|
||||
use crate::truncate::approx_token_count;
|
||||
@@ -54,7 +54,7 @@ pub(crate) async fn run_compact_task(
|
||||
turn_context: Arc<TurnContext>,
|
||||
input: Vec<UserInput>,
|
||||
) {
|
||||
let start_event = EventMsg::TaskStarted(TaskStartedEvent {
|
||||
let start_event = EventMsg::TurnStarted(TurnStartedEvent {
|
||||
model_context_window: turn_context.client.get_model_context_window(),
|
||||
});
|
||||
sess.send_event(&turn_context, start_event).await;
|
||||
|
||||
@@ -8,7 +8,7 @@ use crate::protocol::CompactedItem;
|
||||
use crate::protocol::ContextCompactedEvent;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::RolloutItem;
|
||||
use crate::protocol::TaskStartedEvent;
|
||||
use crate::protocol::TurnStartedEvent;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
|
||||
pub(crate) async fn run_inline_remote_auto_compact_task(
|
||||
@@ -19,7 +19,7 @@ pub(crate) async fn run_inline_remote_auto_compact_task(
|
||||
}
|
||||
|
||||
pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Arc<TurnContext>) {
|
||||
let start_event = EventMsg::TaskStarted(TaskStartedEvent {
|
||||
let start_event = EventMsg::TurnStarted(TurnStartedEvent {
|
||||
model_context_window: turn_context.client.get_model_context_window(),
|
||||
});
|
||||
sess.send_event(&turn_context, start_event).await;
|
||||
|
||||
@@ -32,6 +32,7 @@ use crate::protocol::AskForApproval;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
use codex_app_server_protocol::Tools;
|
||||
use codex_app_server_protocol::UserSavedConfig;
|
||||
use codex_protocol::config_types::AltScreenMode;
|
||||
use codex_protocol::config_types::ForcedLoginMethod;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::config_types::SandboxMode;
|
||||
@@ -236,6 +237,14 @@ pub struct Config {
|
||||
/// consistently to both mouse wheels and trackpads.
|
||||
pub tui_scroll_invert: bool,
|
||||
|
||||
/// Controls whether the TUI uses the terminal's alternate screen buffer.
|
||||
///
|
||||
/// This is the same `tui.alternate_screen` value from `config.toml` (see [`Tui`]).
|
||||
/// - `auto` (default): Disable alternate screen in Zellij, enable elsewhere.
|
||||
/// - `always`: Always use alternate screen (original behavior).
|
||||
/// - `never`: Never use alternate screen (inline mode, preserves scrollback).
|
||||
pub tui_alternate_screen: AltScreenMode,
|
||||
|
||||
/// The directory that should be treated as the current working directory
|
||||
/// for the session. All relative paths inside the business-logic layer are
|
||||
/// resolved against this path.
|
||||
@@ -353,8 +362,12 @@ pub struct Config {
|
||||
pub disable_paste_burst: bool,
|
||||
|
||||
/// When `false`, disables analytics across Codex product surfaces on this machine.
|
||||
/// Deliberately left as an `Option` because the default value might depend on the client.
|
||||
pub analytics_enabled: Option<bool>,
|
||||
|
||||
/// When `false`, disables feedback collection across Codex product surfaces.
|
||||
/// Defaults to `true`.
|
||||
pub analytics: bool,
|
||||
pub feedback_enabled: bool,
|
||||
|
||||
/// OTEL configuration (exporter type, endpoint, headers, etc.).
|
||||
pub otel: crate::config::types::OtelConfig,
|
||||
@@ -820,6 +833,10 @@ pub struct ConfigToml {
|
||||
/// Defaults to `true`.
|
||||
pub analytics: Option<crate::config::types::AnalyticsConfigToml>,
|
||||
|
||||
/// When `false`, disables feedback collection across Codex product surfaces.
|
||||
/// Defaults to `true`.
|
||||
pub feedback: Option<crate::config::types::FeedbackConfigToml>,
|
||||
|
||||
/// OTEL configuration.
|
||||
pub otel: Option<crate::config::types::OtelConfigToml>,
|
||||
|
||||
@@ -1397,11 +1414,15 @@ impl Config {
|
||||
notices: cfg.notice.unwrap_or_default(),
|
||||
check_for_update_on_startup,
|
||||
disable_paste_burst: cfg.disable_paste_burst.unwrap_or(false),
|
||||
analytics: config_profile
|
||||
analytics_enabled: config_profile
|
||||
.analytics
|
||||
.as_ref()
|
||||
.and_then(|a| a.enabled)
|
||||
.or(cfg.analytics.as_ref().and_then(|a| a.enabled))
|
||||
.or(cfg.analytics.as_ref().and_then(|a| a.enabled)),
|
||||
feedback_enabled: cfg
|
||||
.feedback
|
||||
.as_ref()
|
||||
.and_then(|feedback| feedback.enabled)
|
||||
.unwrap_or(true),
|
||||
tui_notifications: cfg
|
||||
.tui
|
||||
@@ -1431,6 +1452,11 @@ impl Config {
|
||||
.as_ref()
|
||||
.and_then(|t| t.scroll_wheel_like_max_duration_ms),
|
||||
tui_scroll_invert: cfg.tui.as_ref().map(|t| t.scroll_invert).unwrap_or(false),
|
||||
tui_alternate_screen: cfg
|
||||
.tui
|
||||
.as_ref()
|
||||
.map(|t| t.alternate_screen)
|
||||
.unwrap_or_default(),
|
||||
otel: {
|
||||
let t: OtelConfigToml = cfg.otel.unwrap_or_default();
|
||||
let log_user_prompt = t.log_user_prompt.unwrap_or(false);
|
||||
@@ -1559,6 +1585,7 @@ mod tests {
|
||||
use crate::config::edit::ConfigEdit;
|
||||
use crate::config::edit::ConfigEditsBuilder;
|
||||
use crate::config::edit::apply_blocking;
|
||||
use crate::config::types::FeedbackConfigToml;
|
||||
use crate::config::types::HistoryPersistence;
|
||||
use crate::config::types::McpServerTransportConfig;
|
||||
use crate::config::types::Notifications;
|
||||
@@ -1628,6 +1655,7 @@ persistence = "none"
|
||||
scroll_wheel_tick_detect_max_ms: None,
|
||||
scroll_wheel_like_max_duration_ms: None,
|
||||
scroll_invert: false,
|
||||
alternate_screen: AltScreenMode::Auto,
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -1885,6 +1913,25 @@ trust_level = "trusted"
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn feedback_enabled_defaults_to_true() -> std::io::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
let cfg = ConfigToml {
|
||||
feedback: Some(FeedbackConfigToml::default()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let config = Config::load_from_base_config_with_overrides(
|
||||
cfg,
|
||||
ConfigOverrides::default(),
|
||||
codex_home.path().to_path_buf(),
|
||||
)?;
|
||||
|
||||
assert_eq!(config.feedback_enabled, true);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn profile_legacy_toggles_override_base() -> std::io::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
@@ -3233,7 +3280,8 @@ model_verbosity = "high"
|
||||
tui_notifications: Default::default(),
|
||||
animations: true,
|
||||
show_tooltips: true,
|
||||
analytics: true,
|
||||
analytics_enabled: Some(true),
|
||||
feedback_enabled: true,
|
||||
tui_scroll_events_per_tick: None,
|
||||
tui_scroll_wheel_lines: None,
|
||||
tui_scroll_trackpad_lines: None,
|
||||
@@ -3243,6 +3291,7 @@ model_verbosity = "high"
|
||||
tui_scroll_wheel_tick_detect_max_ms: None,
|
||||
tui_scroll_wheel_like_max_duration_ms: None,
|
||||
tui_scroll_invert: false,
|
||||
tui_alternate_screen: AltScreenMode::Auto,
|
||||
otel: OtelConfig::default(),
|
||||
},
|
||||
o3_profile_config
|
||||
@@ -3317,7 +3366,8 @@ model_verbosity = "high"
|
||||
tui_notifications: Default::default(),
|
||||
animations: true,
|
||||
show_tooltips: true,
|
||||
analytics: true,
|
||||
analytics_enabled: Some(true),
|
||||
feedback_enabled: true,
|
||||
tui_scroll_events_per_tick: None,
|
||||
tui_scroll_wheel_lines: None,
|
||||
tui_scroll_trackpad_lines: None,
|
||||
@@ -3327,6 +3377,7 @@ model_verbosity = "high"
|
||||
tui_scroll_wheel_tick_detect_max_ms: None,
|
||||
tui_scroll_wheel_like_max_duration_ms: None,
|
||||
tui_scroll_invert: false,
|
||||
tui_alternate_screen: AltScreenMode::Auto,
|
||||
otel: OtelConfig::default(),
|
||||
};
|
||||
|
||||
@@ -3416,7 +3467,8 @@ model_verbosity = "high"
|
||||
tui_notifications: Default::default(),
|
||||
animations: true,
|
||||
show_tooltips: true,
|
||||
analytics: false,
|
||||
analytics_enabled: Some(false),
|
||||
feedback_enabled: true,
|
||||
tui_scroll_events_per_tick: None,
|
||||
tui_scroll_wheel_lines: None,
|
||||
tui_scroll_trackpad_lines: None,
|
||||
@@ -3426,6 +3478,7 @@ model_verbosity = "high"
|
||||
tui_scroll_wheel_tick_detect_max_ms: None,
|
||||
tui_scroll_wheel_like_max_duration_ms: None,
|
||||
tui_scroll_invert: false,
|
||||
tui_alternate_screen: AltScreenMode::Auto,
|
||||
otel: OtelConfig::default(),
|
||||
};
|
||||
|
||||
@@ -3501,7 +3554,8 @@ model_verbosity = "high"
|
||||
tui_notifications: Default::default(),
|
||||
animations: true,
|
||||
show_tooltips: true,
|
||||
analytics: true,
|
||||
analytics_enabled: Some(true),
|
||||
feedback_enabled: true,
|
||||
tui_scroll_events_per_tick: None,
|
||||
tui_scroll_wheel_lines: None,
|
||||
tui_scroll_trackpad_lines: None,
|
||||
@@ -3511,6 +3565,7 @@ model_verbosity = "high"
|
||||
tui_scroll_wheel_tick_detect_max_ms: None,
|
||||
tui_scroll_wheel_like_max_duration_ms: None,
|
||||
tui_scroll_invert: false,
|
||||
tui_alternate_screen: AltScreenMode::Auto,
|
||||
otel: OtelConfig::default(),
|
||||
};
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
// Note this file should generally be restricted to simple struct/enum
|
||||
// definitions that do not contain business logic.
|
||||
|
||||
pub use codex_protocol::config_types::AltScreenMode;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
@@ -282,6 +283,12 @@ pub struct AnalyticsConfigToml {
|
||||
pub enabled: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default)]
|
||||
pub struct FeedbackConfigToml {
|
||||
/// When `false`, disables the feedback flow across Codex product surfaces.
|
||||
pub enabled: Option<bool>,
|
||||
}
|
||||
|
||||
// ===== OTEL configuration =====
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
|
||||
@@ -517,6 +524,17 @@ pub struct Tui {
|
||||
/// wheel and trackpad input.
|
||||
#[serde(default)]
|
||||
pub scroll_invert: bool,
|
||||
|
||||
/// Controls whether the TUI uses the terminal's alternate screen buffer.
|
||||
///
|
||||
/// - `auto` (default): Disable alternate screen in Zellij, enable elsewhere.
|
||||
/// - `always`: Always use alternate screen (original behavior).
|
||||
/// - `never`: Never use alternate screen (inline mode only, preserves scrollback).
|
||||
///
|
||||
/// Using alternate screen provides a cleaner fullscreen experience but prevents
|
||||
/// scrollback in terminal multiplexers like Zellij that follow the xterm spec.
|
||||
#[serde(default)]
|
||||
pub alternate_screen: AltScreenMode,
|
||||
}
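The `auto` behaviour documented above can be read as a small decision function. The following Rust sketch is illustrative only: the local `AltScreenMode` copy mirrors the three documented values, and the `running_in_zellij` flag stands in for whatever terminal detection the real TUI performs.

/// Illustrative only: mirrors the documented values of `tui.alternate_screen`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum AltScreenMode {
    Auto,
    Always,
    Never,
}

/// Decide whether to enter the alternate screen for a configured mode.
fn use_alternate_screen(mode: AltScreenMode, running_in_zellij: bool) -> bool {
    match mode {
        // `auto`: keep scrollback in Zellij, use the alternate screen elsewhere.
        AltScreenMode::Auto => !running_in_zellij,
        AltScreenMode::Always => true,
        AltScreenMode::Never => false,
    }
}

fn main() {
    assert!(use_alternate_screen(AltScreenMode::Auto, false));
    assert!(!use_alternate_screen(AltScreenMode::Auto, true));
    assert!(!use_alternate_screen(AltScreenMode::Never, false));
}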
|
||||
|
||||
const fn default_true() -> bool {
|
||||
|
||||
@@ -4,7 +4,7 @@ pub use codex_client::CodexRequestBuilder;
|
||||
use reqwest::header::HeaderValue;
|
||||
use std::sync::LazyLock;
|
||||
use std::sync::Mutex;
|
||||
use std::sync::OnceLock;
|
||||
use std::sync::RwLock;
|
||||
|
||||
/// Set this to add a suffix to the User-Agent string.
|
||||
///
|
||||
@@ -30,7 +30,7 @@ pub struct Originator {
|
||||
pub value: String,
|
||||
pub header_value: HeaderValue,
|
||||
}
|
||||
static ORIGINATOR: OnceLock<Originator> = OnceLock::new();
|
||||
static ORIGINATOR: LazyLock<RwLock<Option<Originator>>> = LazyLock::new(|| RwLock::new(None));
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SetOriginatorError {
|
||||
@@ -60,22 +60,48 @@ fn get_originator_value(provided: Option<String>) -> Originator {
|
||||
}
|
||||
|
||||
pub fn set_default_originator(value: String) -> Result<(), SetOriginatorError> {
|
||||
if HeaderValue::from_str(&value).is_err() {
|
||||
return Err(SetOriginatorError::InvalidHeaderValue);
|
||||
}
|
||||
let originator = get_originator_value(Some(value));
|
||||
ORIGINATOR
|
||||
.set(originator)
|
||||
.map_err(|_| SetOriginatorError::AlreadyInitialized)
|
||||
let Ok(mut guard) = ORIGINATOR.write() else {
|
||||
return Err(SetOriginatorError::AlreadyInitialized);
|
||||
};
|
||||
if guard.is_some() {
|
||||
return Err(SetOriginatorError::AlreadyInitialized);
|
||||
}
|
||||
*guard = Some(originator);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn originator() -> &'static Originator {
|
||||
ORIGINATOR.get_or_init(|| get_originator_value(None))
|
||||
pub fn originator() -> Originator {
|
||||
if let Ok(guard) = ORIGINATOR.read()
|
||||
&& let Some(originator) = guard.as_ref()
|
||||
{
|
||||
return originator.clone();
|
||||
}
|
||||
|
||||
if std::env::var(CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR).is_ok() {
|
||||
let originator = get_originator_value(None);
|
||||
if let Ok(mut guard) = ORIGINATOR.write() {
|
||||
match guard.as_ref() {
|
||||
Some(originator) => return originator.clone(),
|
||||
None => *guard = Some(originator.clone()),
|
||||
}
|
||||
}
|
||||
return originator;
|
||||
}
|
||||
|
||||
get_originator_value(None)
|
||||
}
|
||||
|
||||
pub fn get_codex_user_agent() -> String {
|
||||
let build_version = env!("CARGO_PKG_VERSION");
|
||||
let os_info = os_info::get();
|
||||
let originator = originator();
|
||||
let prefix = format!(
|
||||
"{}/{build_version} ({} {}; {}) {}",
|
||||
originator().value.as_str(),
|
||||
originator.value.as_str(),
|
||||
os_info.os_type(),
|
||||
os_info.version(),
|
||||
os_info.architecture().unwrap_or("unknown"),
|
||||
@@ -123,7 +149,7 @@ fn sanitize_user_agent(candidate: String, fallback: &str) -> String {
|
||||
tracing::warn!(
|
||||
"Falling back to default Codex originator because base user agent string is invalid"
|
||||
);
|
||||
originator().value.clone()
|
||||
originator().value
|
||||
}
|
||||
}
|
||||
|
||||
@@ -137,7 +163,7 @@ pub fn build_reqwest_client() -> reqwest::Client {
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert("originator", originator().header_value.clone());
|
||||
headers.insert("originator", originator().header_value);
|
||||
let ua = get_codex_user_agent();
|
||||
|
||||
let mut builder = reqwest::Client::builder()
|
||||
@@ -163,7 +189,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_get_codex_user_agent() {
|
||||
let user_agent = get_codex_user_agent();
|
||||
let originator = originator().value.as_str();
|
||||
let originator = originator().value;
|
||||
let prefix = format!("{originator}/");
|
||||
assert!(user_agent.starts_with(&prefix));
|
||||
}
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
//! Functions for environment detection that need to be shared across crates.
|
||||
|
||||
fn env_var_set(key: &str) -> bool {
|
||||
std::env::var(key).is_ok_and(|v| !v.trim().is_empty())
|
||||
}
|
||||
|
||||
/// Returns true if the current process is running under Windows Subsystem for Linux.
|
||||
pub fn is_wsl() -> bool {
|
||||
#[cfg(target_os = "linux")]
|
||||
@@ -17,3 +21,26 @@ pub fn is_wsl() -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true when Codex is likely running in an environment without a usable GUI.
|
||||
///
|
||||
/// This is intentionally conservative and is used by frontends to avoid flows that would try to
|
||||
/// open a browser (e.g. device-code auth fallback).
|
||||
pub fn is_headless_environment() -> bool {
|
||||
if env_var_set("CI")
|
||||
|| env_var_set("SSH_CONNECTION")
|
||||
|| env_var_set("SSH_CLIENT")
|
||||
|| env_var_set("SSH_TTY")
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
if !env_var_set("DISPLAY") && !env_var_set("WAYLAND_DISPLAY") {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
@@ -277,6 +277,7 @@ pub enum RefreshTokenFailedReason {
|
||||
pub struct UnexpectedResponseError {
|
||||
pub status: StatusCode,
|
||||
pub body: String,
|
||||
pub url: Option<String>,
|
||||
pub request_id: Option<String>,
|
||||
}
|
||||
|
||||
@@ -293,7 +294,11 @@ impl UnexpectedResponseError {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut message = format!("{CLOUDFLARE_BLOCKED_MESSAGE} (status {})", self.status);
|
||||
let status = self.status;
|
||||
let mut message = format!("{CLOUDFLARE_BLOCKED_MESSAGE} (status {status})");
|
||||
if let Some(url) = &self.url {
|
||||
message.push_str(&format!(", url: {url}"));
|
||||
}
|
||||
if let Some(id) = &self.request_id {
|
||||
message.push_str(&format!(", request id: {id}"));
|
||||
}
|
||||
@@ -307,16 +312,16 @@ impl std::fmt::Display for UnexpectedResponseError {
|
||||
if let Some(friendly) = self.friendly_message() {
|
||||
write!(f, "{friendly}")
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"unexpected status {}: {}{}",
|
||||
self.status,
|
||||
self.body,
|
||||
self.request_id
|
||||
.as_ref()
|
||||
.map(|id| format!(", request id: {id}"))
|
||||
.unwrap_or_default()
|
||||
)
|
||||
let status = self.status;
|
||||
let body = &self.body;
|
||||
let mut message = format!("unexpected status {status}: {body}");
|
||||
if let Some(url) = &self.url {
|
||||
message.push_str(&format!(", url: {url}"));
|
||||
}
|
||||
if let Some(id) = &self.request_id {
|
||||
message.push_str(&format!(", request id: {id}"));
|
||||
}
|
||||
write!(f, "{message}")
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -826,12 +831,16 @@ mod tests {
|
||||
status: StatusCode::FORBIDDEN,
|
||||
body: "<html><body>Cloudflare error: Sorry, you have been blocked</body></html>"
|
||||
.to_string(),
|
||||
url: Some("http://example.com/blocked".to_string()),
|
||||
request_id: Some("ray-id".to_string()),
|
||||
};
|
||||
let status = StatusCode::FORBIDDEN.to_string();
|
||||
let url = "http://example.com/blocked";
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
format!("{CLOUDFLARE_BLOCKED_MESSAGE} (status {status}), request id: ray-id")
|
||||
format!(
|
||||
"{CLOUDFLARE_BLOCKED_MESSAGE} (status {status}), url: {url}, request id: ray-id"
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -840,12 +849,14 @@ mod tests {
|
||||
let err = UnexpectedResponseError {
|
||||
status: StatusCode::FORBIDDEN,
|
||||
body: "plain text error".to_string(),
|
||||
url: Some("http://example.com/plain".to_string()),
|
||||
request_id: None,
|
||||
};
|
||||
let status = StatusCode::FORBIDDEN.to_string();
|
||||
let url = "http://example.com/plain";
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
format!("unexpected status {status}: plain text error")
|
||||
format!("unexpected status {status}: plain text error, url: {url}")
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -9,6 +9,10 @@ use codex_protocol::models::ReasoningItemContent;
|
||||
use codex_protocol::models::ReasoningItemReasoningSummary;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::models::WebSearchAction;
|
||||
use codex_protocol::models::is_image_close_tag_text;
|
||||
use codex_protocol::models::is_image_open_tag_text;
|
||||
use codex_protocol::models::is_local_image_close_tag_text;
|
||||
use codex_protocol::models::is_local_image_open_tag_text;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use tracing::warn;
|
||||
use uuid::Uuid;
|
||||
@@ -32,9 +36,17 @@ fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
|
||||
|
||||
let mut content: Vec<UserInput> = Vec::new();
|
||||
|
||||
for content_item in message.iter() {
|
||||
for (idx, content_item) in message.iter().enumerate() {
|
||||
match content_item {
|
||||
ContentItem::InputText { text } => {
|
||||
if (is_local_image_open_tag_text(text) || is_image_open_tag_text(text))
|
||||
&& (matches!(message.get(idx + 1), Some(ContentItem::InputImage { .. })))
|
||||
|| (idx > 0
|
||||
&& (is_local_image_close_tag_text(text) || is_image_close_tag_text(text))
|
||||
&& matches!(message.get(idx - 1), Some(ContentItem::InputImage { .. })))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if is_session_prefix(text) || is_user_shell_command_text(text) {
|
||||
return None;
|
||||
}
|
||||
@@ -177,6 +189,80 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn skips_local_image_label_text() {
|
||||
let image_url = "data:image/png;base64,abc".to_string();
|
||||
let label = codex_protocol::models::local_image_open_tag_text(1);
|
||||
let user_text = "Please review this image.".to_string();
|
||||
|
||||
let item = ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![
|
||||
ContentItem::InputText { text: label },
|
||||
ContentItem::InputImage {
|
||||
image_url: image_url.clone(),
|
||||
},
|
||||
ContentItem::InputText {
|
||||
text: "</image>".to_string(),
|
||||
},
|
||||
ContentItem::InputText {
|
||||
text: user_text.clone(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected user message turn item");
|
||||
|
||||
match turn_item {
|
||||
TurnItem::UserMessage(user) => {
|
||||
let expected_content = vec![
|
||||
UserInput::Image { image_url },
|
||||
UserInput::Text { text: user_text },
|
||||
];
|
||||
assert_eq!(user.content, expected_content);
|
||||
}
|
||||
other => panic!("expected TurnItem::UserMessage, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn skips_unnamed_image_label_text() {
|
||||
let image_url = "data:image/png;base64,abc".to_string();
|
||||
let label = codex_protocol::models::image_open_tag_text();
|
||||
let user_text = "Please review this image.".to_string();
|
||||
|
||||
let item = ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![
|
||||
ContentItem::InputText { text: label },
|
||||
ContentItem::InputImage {
|
||||
image_url: image_url.clone(),
|
||||
},
|
||||
ContentItem::InputText {
|
||||
text: codex_protocol::models::image_close_tag_text(),
|
||||
},
|
||||
ContentItem::InputText {
|
||||
text: user_text.clone(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected user message turn item");
|
||||
|
||||
match turn_item {
|
||||
TurnItem::UserMessage(user) => {
|
||||
let expected_content = vec![
|
||||
UserInput::Image { image_url },
|
||||
UserInput::Text { text: user_text },
|
||||
];
|
||||
assert_eq!(user.content, expected_content);
|
||||
}
|
||||
other => panic!("expected TurnItem::UserMessage, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn skips_user_instructions_and_env() {
|
||||
let items = vec![
|
||||
|
||||
@@ -86,12 +86,16 @@ pub enum Feature {
|
||||
RemoteModels,
|
||||
/// Experimental shell snapshotting.
|
||||
ShellSnapshot,
|
||||
/// Append additional AGENTS.md guidance to user instructions.
|
||||
HierarchicalAgents,
|
||||
/// Experimental TUI v2 (viewport) implementation.
|
||||
Tui2,
|
||||
/// Enforce UTF8 output in Powershell.
|
||||
PowershellUtf8,
|
||||
/// Compress request bodies (zstd) when sending streaming requests to codex-backend.
|
||||
EnableRequestCompression,
|
||||
/// Enable collab tools.
|
||||
Collab,
|
||||
}
|
||||
|
||||
impl Feature {
|
||||
@@ -350,6 +354,12 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
},
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::HierarchicalAgents,
|
||||
key: "hierarchical_agents",
|
||||
stage: Stage::Experimental,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::ApplyPatchFreeform,
|
||||
key: "apply_patch_freeform",
|
||||
@@ -398,6 +408,12 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
stage: Stage::Experimental,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Collab,
|
||||
key: "collab",
|
||||
stage: Stage::Experimental,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Tui2,
|
||||
key: "tui2",
|
||||
|
||||
@@ -72,6 +72,12 @@ pub(crate) async fn handle_mcp_tool_call(
|
||||
|
||||
notify_mcp_tool_call_event(sess, turn_context, tool_call_end_event.clone()).await;
|
||||
|
||||
let status = if result.is_ok() { "ok" } else { "error" };
|
||||
turn_context
|
||||
.client
|
||||
.get_otel_manager()
|
||||
.counter("codex.mcp.call", 1, &[("status", status)]);
|
||||
|
||||
ResponseInputItem::McpToolCallOutput { call_id, result }
|
||||
}
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ use codex_otel::config::OtelExporter;
|
||||
use codex_otel::config::OtelHttpProtocol;
|
||||
use codex_otel::config::OtelSettings;
|
||||
use codex_otel::config::OtelTlsConfig as OtelTlsSettings;
|
||||
use codex_otel::traces::otel_provider::OtelProvider;
|
||||
use codex_otel::otel_provider::OtelProvider;
|
||||
use std::error::Error;
|
||||
|
||||
/// Build an OpenTelemetry provider from the app Config.
|
||||
@@ -15,6 +15,8 @@ use std::error::Error;
|
||||
pub fn build_provider(
|
||||
config: &Config,
|
||||
service_version: &str,
|
||||
service_name_override: Option<&str>,
|
||||
default_analytics_enabled: bool,
|
||||
) -> Result<Option<OtelProvider>, Box<dyn Error>> {
|
||||
let to_otel_exporter = |kind: &Kind| match kind {
|
||||
Kind::None => OtelExporter::None,
|
||||
@@ -64,14 +66,20 @@ pub fn build_provider(
|
||||
|
||||
let exporter = to_otel_exporter(&config.otel.exporter);
|
||||
let trace_exporter = to_otel_exporter(&config.otel.trace_exporter);
|
||||
let metrics_exporter = if config.analytics {
|
||||
let metrics_exporter = if config
|
||||
.analytics_enabled
|
||||
.unwrap_or(default_analytics_enabled)
|
||||
{
|
||||
to_otel_exporter(&config.otel.metrics_exporter)
|
||||
} else {
|
||||
OtelExporter::None
|
||||
};
|
||||
|
||||
let originator = originator();
|
||||
let service_name = service_name_override.unwrap_or(originator.value.as_str());
|
||||
|
||||
OtelProvider::from(&OtelSettings {
|
||||
service_name: originator().value.to_owned(),
|
||||
service_name: service_name.to_string(),
|
||||
service_version: service_version.to_string(),
|
||||
codex_home: config.codex_home.clone(),
|
||||
environment: config.otel.environment.to_string(),
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
//! 3. We do **not** walk past the Git root.
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::features::Feature;
|
||||
use crate::skills::SkillMetadata;
|
||||
use crate::skills::render_skills_section;
|
||||
use dunce::canonicalize as normalize_path;
|
||||
@@ -21,6 +22,9 @@ use std::path::PathBuf;
|
||||
use tokio::io::AsyncReadExt;
|
||||
use tracing::error;
|
||||
|
||||
pub(crate) const HIERARCHICAL_AGENTS_MESSAGE: &str =
|
||||
include_str!("../hierarchical_agents_message.md");
|
||||
|
||||
/// Default filename scanned for project-level docs.
|
||||
pub const DEFAULT_PROJECT_DOC_FILENAME: &str = "AGENTS.md";
|
||||
/// Preferred local override for project-level docs.
|
||||
@@ -36,35 +40,46 @@ pub(crate) async fn get_user_instructions(
|
||||
config: &Config,
|
||||
skills: Option<&[SkillMetadata]>,
|
||||
) -> Option<String> {
|
||||
let skills_section = skills.and_then(render_skills_section);
|
||||
let project_docs = read_project_docs(config).await;
|
||||
|
||||
let project_docs = match read_project_docs(config).await {
|
||||
Ok(docs) => docs,
|
||||
let mut output = String::new();
|
||||
|
||||
if let Some(instructions) = config.user_instructions.clone() {
|
||||
output.push_str(&instructions);
|
||||
}
|
||||
|
||||
match project_docs {
|
||||
Ok(Some(docs)) => {
|
||||
if !output.is_empty() {
|
||||
output.push_str(PROJECT_DOC_SEPARATOR);
|
||||
}
|
||||
output.push_str(&docs);
|
||||
}
|
||||
Ok(None) => {}
|
||||
Err(e) => {
|
||||
error!("error trying to find project doc: {e:#}");
|
||||
return config.user_instructions.clone();
|
||||
}
|
||||
};
|
||||
|
||||
let combined_project_docs = merge_project_docs_with_skills(project_docs, skills_section);
|
||||
|
||||
let mut parts: Vec<String> = Vec::new();
|
||||
|
||||
if let Some(instructions) = config.user_instructions.clone() {
|
||||
parts.push(instructions);
|
||||
}
|
||||
|
||||
if let Some(project_doc) = combined_project_docs {
|
||||
if !parts.is_empty() {
|
||||
parts.push(PROJECT_DOC_SEPARATOR.to_string());
|
||||
let skills_section = skills.and_then(render_skills_section);
|
||||
if let Some(skills_section) = skills_section {
|
||||
if !output.is_empty() {
|
||||
output.push_str("\n\n");
|
||||
}
|
||||
parts.push(project_doc);
|
||||
output.push_str(&skills_section);
|
||||
}
|
||||
|
||||
if parts.is_empty() {
|
||||
None
|
||||
if config.features.enabled(Feature::HierarchicalAgents) {
|
||||
if !output.is_empty() {
|
||||
output.push_str("\n\n");
|
||||
}
|
||||
output.push_str(HIERARCHICAL_AGENTS_MESSAGE);
|
||||
}
|
||||
|
||||
if !output.is_empty() {
|
||||
Some(output)
|
||||
} else {
|
||||
Some(parts.concat())
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
@@ -217,18 +232,6 @@ fn candidate_filenames<'a>(config: &'a Config) -> Vec<&'a str> {
|
||||
names
|
||||
}
|
||||
|
||||
fn merge_project_docs_with_skills(
|
||||
project_doc: Option<String>,
|
||||
skills_section: Option<String>,
|
||||
) -> Option<String> {
|
||||
match (project_doc, skills_section) {
|
||||
(Some(doc), Some(skills)) => Some(format!("{doc}\n\n{skills}")),
|
||||
(Some(doc), None) => Some(doc),
|
||||
(None, Some(skills)) => Some(skills),
|
||||
(None, None) => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -50,8 +50,8 @@ pub(crate) fn should_persist_event_msg(ev: &EventMsg) -> bool {
|
||||
| EventMsg::TurnAborted(_) => true,
|
||||
EventMsg::Error(_)
|
||||
| EventMsg::Warning(_)
|
||||
| EventMsg::TaskStarted(_)
|
||||
| EventMsg::TaskComplete(_)
|
||||
| EventMsg::TurnStarted(_)
|
||||
| EventMsg::TurnComplete(_)
|
||||
| EventMsg::AgentMessageDelta(_)
|
||||
| EventMsg::AgentReasoningDelta(_)
|
||||
| EventMsg::AgentReasoningRawContentDelta(_)
|
||||
|
||||
@@ -143,7 +143,7 @@ impl RolloutRecorder {
|
||||
id: session_id,
|
||||
timestamp,
|
||||
cwd: config.cwd.clone(),
|
||||
originator: originator().value.clone(),
|
||||
originator: originator().value,
|
||||
cli_version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
instructions,
|
||||
source,
|
||||
|
||||
@@ -21,9 +21,9 @@ use crate::codex::Session;
|
||||
use crate::codex::TurnContext;
|
||||
use crate::models_manager::manager::ModelsManager;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::TaskCompleteEvent;
|
||||
use crate::protocol::TurnAbortReason;
|
||||
use crate::protocol::TurnAbortedEvent;
|
||||
use crate::protocol::TurnCompleteEvent;
|
||||
use crate::state::ActiveTurn;
|
||||
use crate::state::RunningTask;
|
||||
use crate::state::TaskKind;
|
||||
@@ -180,7 +180,7 @@ impl Session {
|
||||
if should_close_processes {
|
||||
self.close_unified_exec_processes().await;
|
||||
}
|
||||
let event = EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message });
|
||||
let event = EventMsg::TurnComplete(TurnCompleteEvent { last_agent_message });
|
||||
self.send_event(turn_context.as_ref(), event).await;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::codex::TurnContext;
|
||||
use crate::codex::run_task;
|
||||
use crate::codex::run_turn;
|
||||
use crate::state::TaskKind;
|
||||
use async_trait::async_trait;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
@@ -29,10 +29,10 @@ impl SessionTask for RegularTask {
|
||||
cancellation_token: CancellationToken,
|
||||
) -> Option<String> {
|
||||
let sess = session.clone_session();
|
||||
let run_task_span =
|
||||
trace_span!(parent: sess.services.otel_manager.current_span(), "run_task");
|
||||
run_task(sess, ctx, input, cancellation_token)
|
||||
.instrument(run_task_span)
|
||||
let run_turn_span =
|
||||
trace_span!(parent: sess.services.otel_manager.current_span(), "run_turn");
|
||||
run_turn(sess, ctx, input, cancellation_token)
|
||||
.instrument(run_turn_span)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -134,7 +134,7 @@ async fn process_review_events(
|
||||
})
|
||||
| EventMsg::AgentMessageDelta(AgentMessageDeltaEvent { .. })
|
||||
| EventMsg::AgentMessageContentDelta(AgentMessageContentDeltaEvent { .. }) => {}
|
||||
EventMsg::TaskComplete(task_complete) => {
|
||||
EventMsg::TurnComplete(task_complete) => {
|
||||
// Parse review output from the last agent message (if present).
|
||||
let out = task_complete
|
||||
.last_agent_message
|
||||
@@ -154,7 +154,7 @@ async fn process_review_events(
|
||||
}
|
||||
}
|
||||
}
|
||||
// Channel closed without TaskComplete: treat as interrupted.
|
||||
// Channel closed without TurnComplete: treat as interrupted.
|
||||
None
|
||||
}
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ use crate::protocol::ExecCommandBeginEvent;
|
||||
use crate::protocol::ExecCommandEndEvent;
|
||||
use crate::protocol::ExecCommandSource;
|
||||
use crate::protocol::SandboxPolicy;
|
||||
use crate::protocol::TaskStartedEvent;
|
||||
use crate::protocol::TurnStartedEvent;
|
||||
use crate::sandboxing::ExecEnv;
|
||||
use crate::sandboxing::SandboxPermissions;
|
||||
use crate::state::TaskKind;
|
||||
@@ -64,7 +64,7 @@ impl SessionTask for UserShellCommandTask {
|
||||
.otel_manager
|
||||
.counter("codex.task.user_shell", 1, &[]);
|
||||
|
||||
let event = EventMsg::TaskStarted(TaskStartedEvent {
|
||||
let event = EventMsg::TurnStarted(TurnStartedEvent {
|
||||
model_context_window: turn_context.client.get_model_context_window(),
|
||||
});
|
||||
let session = session.clone_session();
|
||||
|
||||
194
codex-rs/core/src/tools/handlers/collab.rs
Normal file
@@ -0,0 +1,194 @@
|
||||
use crate::codex::TurnContext;
|
||||
use crate::config::Config;
|
||||
use crate::error::CodexErr;
|
||||
use crate::function_tool::FunctionCallError;
|
||||
use crate::tools::context::ToolInvocation;
|
||||
use crate::tools::context::ToolOutput;
|
||||
use crate::tools::context::ToolPayload;
|
||||
use crate::tools::handlers::parse_arguments;
|
||||
use crate::tools::registry::ToolHandler;
|
||||
use crate::tools::registry::ToolKind;
|
||||
use async_trait::async_trait;
|
||||
use codex_protocol::ThreadId;
|
||||
use serde::Deserialize;
|
||||
|
||||
pub struct CollabHandler;
|
||||
|
||||
pub(crate) const DEFAULT_WAIT_TIMEOUT_MS: i64 = 30_000;
|
||||
pub(crate) const MAX_WAIT_TIMEOUT_MS: i64 = 300_000;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct SpawnAgentArgs {
|
||||
message: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct SendInputArgs {
|
||||
id: String,
|
||||
message: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct WaitArgs {
|
||||
id: String,
|
||||
timeout_ms: Option<i64>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct CloseAgentArgs {
|
||||
id: String,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ToolHandler for CollabHandler {
|
||||
fn kind(&self) -> ToolKind {
|
||||
ToolKind::Function
|
||||
}
|
||||
|
||||
fn matches_kind(&self, payload: &ToolPayload) -> bool {
|
||||
matches!(payload, ToolPayload::Function { .. })
|
||||
}
|
||||
|
||||
async fn handle(&self, invocation: ToolInvocation) -> Result<ToolOutput, FunctionCallError> {
|
||||
let ToolInvocation {
|
||||
session,
|
||||
turn,
|
||||
tool_name,
|
||||
payload,
|
||||
..
|
||||
} = invocation;
|
||||
|
||||
let arguments = match payload {
|
||||
ToolPayload::Function { arguments } => arguments,
|
||||
_ => {
|
||||
return Err(FunctionCallError::RespondToModel(
|
||||
"collab handler received unsupported payload".to_string(),
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
match tool_name.as_str() {
|
||||
"spawn_agent" => handle_spawn_agent(session, turn, arguments).await,
|
||||
"send_input" => handle_send_input(session, arguments).await,
|
||||
"wait" => handle_wait(arguments).await,
|
||||
"close_agent" => handle_close_agent(arguments).await,
|
||||
other => Err(FunctionCallError::RespondToModel(format!(
|
||||
"unsupported collab tool {other}"
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_spawn_agent(
|
||||
session: std::sync::Arc<crate::codex::Session>,
|
||||
turn: std::sync::Arc<TurnContext>,
|
||||
arguments: String,
|
||||
) -> Result<ToolOutput, FunctionCallError> {
|
||||
let args: SpawnAgentArgs = parse_arguments(&arguments)?;
|
||||
if args.message.trim().is_empty() {
|
||||
return Err(FunctionCallError::RespondToModel(
|
||||
"Empty message can't be send to an agent".to_string(),
|
||||
));
|
||||
}
|
||||
let config = build_agent_spawn_config(turn.as_ref())?;
|
||||
let result = session
|
||||
.services
|
||||
.agent_control
|
||||
.spawn_agent(config, args.message, true)
|
||||
.await
|
||||
.map_err(|err| FunctionCallError::Fatal(err.to_string()))?;
|
||||
|
||||
Ok(ToolOutput::Function {
|
||||
content: format!("agent_id: {result}"),
|
||||
success: Some(true),
|
||||
content_items: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn handle_send_input(
|
||||
session: std::sync::Arc<crate::codex::Session>,
|
||||
arguments: String,
|
||||
) -> Result<ToolOutput, FunctionCallError> {
|
||||
let args: SendInputArgs = parse_arguments(&arguments)?;
|
||||
let agent_id = agent_id(&args.id)?;
|
||||
if args.message.trim().is_empty() {
|
||||
return Err(FunctionCallError::RespondToModel(
|
||||
"Empty message can't be send to an agent".to_string(),
|
||||
));
|
||||
}
|
||||
let content = session
|
||||
.services
|
||||
.agent_control
|
||||
.send_prompt(agent_id, args.message)
|
||||
.await
|
||||
.map_err(|err| match err {
|
||||
CodexErr::ThreadNotFound(id) => {
|
||||
FunctionCallError::RespondToModel(format!("agent with id {id} not found"))
|
||||
}
|
||||
err => FunctionCallError::Fatal(err.to_string()),
|
||||
})?;
|
||||
|
||||
Ok(ToolOutput::Function {
|
||||
content,
|
||||
success: Some(true),
|
||||
content_items: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn handle_wait(arguments: String) -> Result<ToolOutput, FunctionCallError> {
|
||||
let args: WaitArgs = parse_arguments(&arguments)?;
|
||||
let _agent_id = agent_id(&args.id)?;
|
||||
|
||||
let timeout_ms = args.timeout_ms.unwrap_or(DEFAULT_WAIT_TIMEOUT_MS);
|
||||
if timeout_ms <= 0 {
|
||||
return Err(FunctionCallError::RespondToModel(
|
||||
"timeout_ms must be greater than zero".to_string(),
|
||||
));
|
||||
}
|
||||
let _timeout_ms = timeout_ms.min(MAX_WAIT_TIMEOUT_MS);
|
||||
// TODO(jif): implement agent wait once lifecycle tracking is wired up.
|
||||
Err(FunctionCallError::Fatal("wait not implemented".to_string()))
|
||||
}
|
||||
|
||||
async fn handle_close_agent(arguments: String) -> Result<ToolOutput, FunctionCallError> {
|
||||
let args: CloseAgentArgs = parse_arguments(&arguments)?;
|
||||
let _agent_id = agent_id(&args.id)?;
|
||||
// TODO(jif): implement agent shutdown and return the final status.
|
||||
Err(FunctionCallError::Fatal(
|
||||
"close_agent not implemented".to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
fn agent_id(id: &str) -> Result<ThreadId, FunctionCallError> {
|
||||
ThreadId::from_string(id)
|
||||
.map_err(|e| FunctionCallError::RespondToModel(format!("invalid agent id {id}: {e:?}")))
|
||||
}
|
||||
|
||||
fn build_agent_spawn_config(turn: &TurnContext) -> Result<Config, FunctionCallError> {
|
||||
let base_config = turn.client.config();
|
||||
let mut config = (*base_config).clone();
|
||||
config.model = Some(turn.client.get_model());
|
||||
config.model_provider = turn.client.get_provider();
|
||||
config.model_reasoning_effort = turn.client.get_reasoning_effort();
|
||||
config.model_reasoning_summary = turn.client.get_reasoning_summary();
|
||||
config.developer_instructions = turn.developer_instructions.clone();
|
||||
config.base_instructions = turn.base_instructions.clone();
|
||||
config.compact_prompt = turn.compact_prompt.clone();
|
||||
config.user_instructions = turn.user_instructions.clone();
|
||||
config.shell_environment_policy = turn.shell_environment_policy.clone();
|
||||
config.codex_linux_sandbox_exe = turn.codex_linux_sandbox_exe.clone();
|
||||
config.cwd = turn.cwd.clone();
|
||||
config
|
||||
.approval_policy
|
||||
.set(turn.approval_policy)
|
||||
.map_err(|err| {
|
||||
FunctionCallError::RespondToModel(format!("approval_policy is invalid: {err}"))
|
||||
})?;
|
||||
config
|
||||
.sandbox_policy
|
||||
.set(turn.sandbox_policy.clone())
|
||||
.map_err(|err| {
|
||||
FunctionCallError::RespondToModel(format!("sandbox_policy is invalid: {err}"))
|
||||
})?;
|
||||
Ok(config)
|
||||
}
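For a sense of the argument shapes these handlers expect, here is a standalone sketch that parses example tool-call payloads with serde_json. The JSON literals and the agent id are invented for illustration, and the structs are redeclared locally so the snippet compiles on its own (it assumes `serde` with the derive feature and `serde_json` as dependencies).

use serde::Deserialize;

// Local copies of the argument structs from collab.rs above, so this sketch
// stands alone.
#[derive(Debug, Deserialize)]
struct SpawnAgentArgs {
    message: String,
}

#[derive(Debug, Deserialize)]
struct WaitArgs {
    id: String,
    timeout_ms: Option<i64>,
}

fn main() -> Result<(), serde_json::Error> {
    // The model sends tool arguments as JSON strings; these are made-up examples.
    let spawn: SpawnAgentArgs =
        serde_json::from_str(r#"{"message": "Investigate the flaky test"}"#)?;
    let wait: WaitArgs =
        serde_json::from_str(r#"{"id": "agent-123", "timeout_ms": 60000}"#)?;
    println!("{spawn:?}");
    println!("{wait:?}");
    Ok(())
}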
|
||||
@@ -1,4 +1,5 @@
|
||||
pub mod apply_patch;
|
||||
pub(crate) mod collab;
|
||||
mod grep_files;
|
||||
mod list_dir;
|
||||
mod mcp;
|
||||
@@ -15,6 +16,7 @@ use serde::Deserialize;
|
||||
|
||||
use crate::function_tool::FunctionCallError;
|
||||
pub use apply_patch::ApplyPatchHandler;
|
||||
pub use collab::CollabHandler;
|
||||
pub use grep_files::GrepFilesHandler;
|
||||
pub use list_dir::ListDirHandler;
|
||||
pub use mcp::McpHandler;
|
||||
|
||||
@@ -11,7 +11,9 @@ use crate::tools::context::ToolPayload;
|
||||
use crate::tools::handlers::parse_arguments;
|
||||
use crate::tools::registry::ToolHandler;
|
||||
use crate::tools::registry::ToolKind;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
use codex_protocol::models::local_image_content_items_with_label_number;
|
||||
|
||||
pub struct ViewImageHandler;
|
||||
|
||||
@@ -63,8 +65,15 @@ impl ToolHandler for ViewImageHandler {
|
||||
}
|
||||
let event_path = abs_path.clone();
|
||||
|
||||
let content: Vec<ContentItem> =
|
||||
local_image_content_items_with_label_number(&abs_path, None);
|
||||
let input = ResponseInputItem::Message {
|
||||
role: "user".to_string(),
|
||||
content,
|
||||
};
|
||||
|
||||
session
|
||||
.inject_input(vec![UserInput::LocalImage { path: abs_path }])
|
||||
.inject_response_items(vec![input])
|
||||
.await
|
||||
.map_err(|_| {
|
||||
FunctionCallError::RespondToModel(
|
||||
|
||||
@@ -111,12 +111,17 @@ impl Approvable<ApplyPatchRequest> for ApplyPatchRuntime {
|
||||
return rx_approve.await.unwrap_or_default();
|
||||
}
|
||||
|
||||
with_cached_approval(&session.services, approval_keys, || async move {
|
||||
let rx_approve = session
|
||||
.request_patch_approval(turn, call_id, changes, None, None)
|
||||
.await;
|
||||
rx_approve.await.unwrap_or_default()
|
||||
})
|
||||
with_cached_approval(
|
||||
&session.services,
|
||||
"apply_patch",
|
||||
approval_keys,
|
||||
|| async move {
|
||||
let rx_approve = session
|
||||
.request_patch_approval(turn, call_id, changes, None, None)
|
||||
.await;
|
||||
rx_approve.await.unwrap_or_default()
|
||||
},
|
||||
)
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
@@ -98,7 +98,7 @@ impl Approvable<ShellRequest> for ShellRuntime {
|
||||
let turn = ctx.turn;
|
||||
let call_id = ctx.call_id.to_string();
|
||||
Box::pin(async move {
|
||||
with_cached_approval(&session.services, keys, move || async move {
|
||||
with_cached_approval(&session.services, "shell", keys, move || async move {
|
||||
session
|
||||
.request_command_approval(
|
||||
turn,
|
||||
|
||||
@@ -116,7 +116,7 @@ impl Approvable<UnifiedExecRequest> for UnifiedExecRuntime<'_> {
|
||||
.clone()
|
||||
.or_else(|| req.justification.clone());
|
||||
Box::pin(async move {
|
||||
with_cached_approval(&session.services, keys, || async move {
|
||||
with_cached_approval(&session.services, "unified_exec", keys, || async move {
|
||||
session
|
||||
.request_command_approval(
|
||||
turn,
|
||||
|
||||
@@ -57,6 +57,8 @@ impl ApprovalStore {
|
||||
/// so future requests touching any subset can also skip prompting.
|
||||
pub(crate) async fn with_cached_approval<K, F, Fut>(
|
||||
services: &SessionServices,
|
||||
// Name of the tool, used for metrics collection.
|
||||
tool_name: &str,
|
||||
keys: Vec<K>,
|
||||
fetch: F,
|
||||
) -> ReviewDecision
|
||||
@@ -82,6 +84,15 @@ where
|
||||
|
||||
let decision = fetch().await;
|
||||
|
||||
services.otel_manager.counter(
|
||||
"codex.approval.requested",
|
||||
1,
|
||||
&[
|
||||
("tool", tool_name),
|
||||
("approved", decision.to_opaque_string()),
|
||||
],
|
||||
);
|
||||
|
||||
if matches!(decision, ReviewDecision::ApprovedForSession) {
|
||||
let mut store = services.tool_approvals.lock().await;
|
||||
for key in keys {
|
||||
|
||||
File diff suppressed because it is too large
7
codex-rs/core/tests/common/BUILD.bazel
Normal file
@@ -0,0 +1,7 @@
|
||||
load("//:defs.bzl", "codex_rust_crate")
|
||||
|
||||
codex_rust_crate(
|
||||
name = "common",
|
||||
crate_name = "core_test_support",
|
||||
crate_srcs = glob(["*.rs"]),
|
||||
)
|
||||
@@ -265,7 +265,7 @@ impl TestCodex {
|
||||
.await?;
|
||||
|
||||
wait_for_event(&self.codex, |event| {
|
||||
matches!(event, EventMsg::TaskComplete(_))
|
||||
matches!(event, EventMsg::TurnComplete(_))
|
||||
})
|
||||
.await;
|
||||
Ok(())
|
||||
|
||||
@@ -124,7 +124,7 @@ async fn interrupt_tool_records_history_entries() {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
|
||||
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
|
||||
|
||||
let requests = response_mock.requests();
|
||||
assert!(
|
||||
|
||||
@@ -319,7 +319,7 @@ async fn apply_patch_cli_move_without_content_change_has_no_turn_diff(
|
||||
saw_turn_diff = true;
|
||||
false
|
||||
}
|
||||
EventMsg::TaskComplete(_) => true,
|
||||
EventMsg::TurnComplete(_) => true,
|
||||
_ => false,
|
||||
})
|
||||
.await;
|
||||
@@ -917,7 +917,7 @@ async fn apply_patch_shell_command_heredoc_with_cd_emits_turn_diff() -> Result<(
|
||||
saw_turn_diff = Some(ev.unified_diff.clone());
|
||||
false
|
||||
}
|
||||
EventMsg::TaskComplete(_) => true,
|
||||
EventMsg::TurnComplete(_) => true,
|
||||
_ => false,
|
||||
})
|
||||
.await;
|
||||
@@ -982,7 +982,7 @@ async fn apply_patch_shell_command_failure_propagates_error_and_skips_diff() ->
|
||||
saw_turn_diff = true;
|
||||
false
|
||||
}
|
||||
EventMsg::TaskComplete(_) => true,
|
||||
EventMsg::TurnComplete(_) => true,
|
||||
_ => false,
|
||||
})
|
||||
.await;
|
||||
@@ -1129,7 +1129,7 @@ async fn apply_patch_emits_turn_diff_event_with_unified_diff(
|
||||
saw_turn_diff = Some(ev.unified_diff.clone());
|
||||
false
|
||||
}
|
||||
EventMsg::TaskComplete(_) => true,
|
||||
EventMsg::TurnComplete(_) => true,
|
||||
_ => false,
|
||||
})
|
||||
.await;
|
||||
@@ -1189,7 +1189,7 @@ async fn apply_patch_turn_diff_for_rename_with_content_change(
|
||||
last_diff = Some(ev.unified_diff.clone());
|
||||
false
|
||||
}
|
||||
EventMsg::TaskComplete(_) => true,
|
||||
EventMsg::TurnComplete(_) => true,
|
||||
_ => false,
|
||||
})
|
||||
.await;
|
||||
@@ -1257,7 +1257,7 @@ async fn apply_patch_aggregates_diff_across_multiple_tool_calls() -> Result<()>
|
||||
last_diff = Some(ev.unified_diff.clone());
|
||||
false
|
||||
}
|
||||
EventMsg::TaskComplete(_) => true,
|
||||
EventMsg::TurnComplete(_) => true,
|
||||
_ => false,
|
||||
})
|
||||
.await;
|
||||
@@ -1325,7 +1325,7 @@ async fn apply_patch_aggregates_diff_preserves_success_after_failure() -> Result
|
||||
last_diff = Some(ev.unified_diff.clone());
|
||||
false
|
||||
}
|
||||
EventMsg::TaskComplete(_) => true,
|
||||
EventMsg::TurnComplete(_) => true,
|
||||
_ => false,
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -553,7 +553,7 @@ async fn expect_exec_approval(
|
||||
let event = wait_for_event(&test.codex, |event| {
|
||||
matches!(
|
||||
event,
|
||||
EventMsg::ExecApprovalRequest(_) | EventMsg::TaskComplete(_)
|
||||
EventMsg::ExecApprovalRequest(_) | EventMsg::TurnComplete(_)
|
||||
)
|
||||
})
|
||||
.await;
|
||||
@@ -568,7 +568,7 @@ async fn expect_exec_approval(
|
||||
assert_eq!(last_arg, expected_command);
|
||||
approval
|
||||
}
|
||||
EventMsg::TaskComplete(_) => panic!("expected approval request before completion"),
|
||||
EventMsg::TurnComplete(_) => panic!("expected approval request before completion"),
|
||||
other => panic!("unexpected event: {other:?}"),
|
||||
}
|
||||
}
|
||||
@@ -580,7 +580,7 @@ async fn expect_patch_approval(
|
||||
let event = wait_for_event(&test.codex, |event| {
|
||||
matches!(
|
||||
event,
|
||||
EventMsg::ApplyPatchApprovalRequest(_) | EventMsg::TaskComplete(_)
|
||||
EventMsg::ApplyPatchApprovalRequest(_) | EventMsg::TurnComplete(_)
|
||||
)
|
||||
})
|
||||
.await;
|
||||
@@ -590,7 +590,7 @@ async fn expect_patch_approval(
|
||||
assert_eq!(approval.call_id, expected_call_id);
|
||||
approval
|
||||
}
|
||||
EventMsg::TaskComplete(_) => panic!("expected patch approval request before completion"),
|
||||
EventMsg::TurnComplete(_) => panic!("expected patch approval request before completion"),
|
||||
other => panic!("unexpected event: {other:?}"),
|
||||
}
|
||||
}
|
||||
@@ -599,13 +599,13 @@ async fn wait_for_completion_without_approval(test: &TestCodex) {
|
||||
let event = wait_for_event(&test.codex, |event| {
|
||||
matches!(
|
||||
event,
|
||||
EventMsg::ExecApprovalRequest(_) | EventMsg::TaskComplete(_)
|
||||
EventMsg::ExecApprovalRequest(_) | EventMsg::TurnComplete(_)
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
match event {
|
||||
EventMsg::TaskComplete(_) => {}
|
||||
EventMsg::TurnComplete(_) => {}
|
||||
EventMsg::ExecApprovalRequest(event) => {
|
||||
panic!("unexpected approval request: {:?}", event.command)
|
||||
}
|
||||
@@ -615,7 +615,7 @@ async fn wait_for_completion_without_approval(test: &TestCodex) {
|
||||
|
||||
async fn wait_for_completion(test: &TestCodex) {
|
||||
wait_for_event(&test.codex, |event| {
|
||||
matches!(event, EventMsg::TaskComplete(_))
|
||||
matches!(event, EventMsg::TurnComplete(_))
|
||||
})
|
||||
.await;
|
||||
}
|
||||
@@ -1660,12 +1660,12 @@ async fn approving_apply_patch_for_session_skips_future_prompts_for_same_file()
|
||||
let event = wait_for_event(&test.codex, |event| {
|
||||
matches!(
|
||||
event,
|
||||
EventMsg::ApplyPatchApprovalRequest(_) | EventMsg::TaskComplete(_)
|
||||
EventMsg::ApplyPatchApprovalRequest(_) | EventMsg::TurnComplete(_)
|
||||
)
|
||||
})
|
||||
.await;
|
||||
match event {
|
||||
EventMsg::TaskComplete(_) => {}
|
||||
EventMsg::TurnComplete(_) => {}
|
||||
EventMsg::ApplyPatchApprovalRequest(event) => {
|
||||
panic!("unexpected patch approval request: {:?}", event.call_id)
|
||||
}
|
||||
|
||||
@@ -294,7 +294,7 @@ async fn resume_includes_initial_messages_and_sends_prior_items() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -362,7 +362,7 @@ async fn includes_conversation_id_and_model_headers_in_request() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 assert_eq!(request.path(), "/v1/responses");
@@ -417,7 +417,7 @@ async fn includes_base_instructions_override_in_request() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -473,7 +473,7 @@ async fn chatgpt_auth_sends_correct_request() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 assert_eq!(request.path(), "/api/codex/responses");
@@ -566,7 +566,7 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -607,7 +607,7 @@ async fn includes_user_instructions_message_in_request() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -677,7 +677,7 @@ async fn skills_append_to_instructions() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -727,7 +727,7 @@ async fn includes_configured_effort_in_request() -> anyhow::Result<()> {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -764,7 +764,7 @@ async fn includes_no_effort_in_request() -> anyhow::Result<()> {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -799,7 +799,7 @@ async fn includes_default_reasoning_effort_in_request_when_defined_by_model_info
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -838,7 +838,7 @@ async fn configured_reasoning_summary_is_sent() -> anyhow::Result<()> {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -877,7 +877,7 @@ async fn reasoning_summary_is_omitted_when_disabled() -> anyhow::Result<()> {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -910,7 +910,7 @@ async fn includes_default_verbosity_in_request() -> anyhow::Result<()> {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -950,7 +950,7 @@ async fn configured_verbosity_not_sent_for_models_without_support() -> anyhow::R
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -989,7 +989,7 @@ async fn configured_verbosity_is_sent() -> anyhow::Result<()> {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -1044,7 +1044,7 @@ async fn includes_developer_instructions_message_in_request() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let request = resp_mock.single_request();
 let request_body = request.body_json();
@@ -1381,7 +1381,7 @@ async fn token_count_includes_rate_limits_snapshot() {
 Some(1704069000)
 );

-wait_for_event(&codex, |msg| matches!(msg, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |msg| matches!(msg, EventMsg::TurnComplete(_))).await;
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -1509,7 +1509,7 @@ async fn context_window_error_sets_total_tokens_to_model_window() -> anyhow::Res
 })
 .await?;

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -1556,7 +1556,7 @@ async fn context_window_error_sets_total_tokens_to_model_window() -> anyhow::Res
 "expected context window error; got {error_event:?}"
 );

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 Ok(())
 }
@@ -1641,7 +1641,7 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -1724,7 +1724,7 @@ async fn env_var_overrides_loaded_auth() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 }

 fn create_dummy_codex_auth() -> CodexAuth {
@@ -1795,7 +1795,7 @@ async fn history_dedupes_streamed_and_final_messages_across_turns() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 // Turn 2: user sends U2; wait for completion.
 codex
@@ -1805,7 +1805,7 @@ async fn history_dedupes_streamed_and_final_messages_across_turns() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 // Turn 3: user sends U3; wait for completion.
 codex
@@ -1815,7 +1815,7 @@ async fn history_dedupes_streamed_and_final_messages_across_turns() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 // Inspect the three captured requests.
 let requests = request_log.requests();
@@ -80,7 +80,7 @@ async fn codex_delegate_forwards_exec_approval_and_proceeds_on_approval() {
 .await
 .expect("submit review");

-// Lifecycle: Entered -> ExecApprovalRequest -> Exited(Some) -> TaskComplete.
+// Lifecycle: Entered -> ExecApprovalRequest -> Exited(Some) -> TurnComplete.
 wait_for_event(&test.codex, |ev| {
 matches!(ev, EventMsg::EnteredReviewMode(_))
 })
@@ -105,7 +105,7 @@ async fn codex_delegate_forwards_exec_approval_and_proceeds_on_approval() {
 matches!(ev, EventMsg::ExitedReviewMode(_))
 })
 .await;
-wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 }

 /// Delegate should surface ApplyPatchApprovalRequest and honor parent decision
@@ -179,7 +179,7 @@ async fn codex_delegate_forwards_patch_approval_and_proceeds_on_decision() {
 matches!(ev, EventMsg::ExitedReviewMode(_))
 })
 .await;
-wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -221,7 +221,7 @@ async fn codex_delegate_ignores_legacy_deltas() {
 match ev {
 EventMsg::ReasoningContentDelta(_) => reasoning_delta_count += 1,
 EventMsg::AgentReasoningDelta(_) => legacy_reasoning_delta_count += 1,
-EventMsg::TaskComplete(_) => break,
+EventMsg::TurnComplete(_) => break,
 _ => {}
 }
 }
@@ -164,7 +164,7 @@ async fn summarize_context_three_requests_and_instructions() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 // 2) Summarize – second hit should include the summarization prompt.
 codex.submit(Op::Compact).await.unwrap();
@@ -173,7 +173,7 @@ async fn summarize_context_three_requests_and_instructions() {
 panic!("expected warning event after compact");
 };
 assert_eq!(message, COMPACT_WARNING_MESSAGE);
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 // 3) Next user input – third hit; history should include only the summary.
 codex
@@ -185,7 +185,7 @@ async fn summarize_context_three_requests_and_instructions() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 // Inspect the three captured requests.
 let requests = request_log.requests();
@@ -355,7 +355,7 @@ async fn manual_compact_uses_custom_prompt() {
 panic!("expected warning event after compact");
 };
 assert_eq!(message, COMPACT_WARNING_MESSAGE);
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let body = response_mock.single_request().body_json();

@@ -445,7 +445,7 @@ async fn manual_compact_emits_api_and_local_token_usage_events() {
 .await;

 // Ensure the compact task itself completes.
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 assert_eq!(
 first, 0,
@@ -578,7 +578,7 @@ async fn multiple_auto_compact_per_task_runs_after_token_limit_hit() {
 })
 .await
 .expect("submit user input");
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 // collect the requests payloads from the model
 let requests_payloads = request_log.requests();
@@ -1049,7 +1049,7 @@ async fn auto_compact_runs_after_token_limit_hit() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -1061,7 +1061,7 @@ async fn auto_compact_runs_after_token_limit_hit() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -1073,7 +1073,7 @@ async fn auto_compact_runs_after_token_limit_hit() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let requests = request_log.requests();
 let request_bodies: Vec<String> = requests
@@ -1284,7 +1284,7 @@ async fn auto_compact_runs_after_resume_when_token_usage_is_over_limit() {
 })
 .await;
 wait_for_event(&resumed.codex, |event| {
-matches!(event, EventMsg::TaskComplete(_))
+matches!(event, EventMsg::TurnComplete(_))
 })
 .await;

@@ -1381,7 +1381,7 @@ async fn auto_compact_persists_rollout_entries() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -1392,7 +1392,7 @@ async fn auto_compact_persists_rollout_entries() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -1403,7 +1403,7 @@ async fn auto_compact_persists_rollout_entries() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex.submit(Op::Shutdown).await.unwrap();
 wait_for_event(&codex, |ev| matches!(ev, EventMsg::ShutdownComplete)).await;
@@ -1495,7 +1495,7 @@ async fn manual_compact_retries_after_context_window_error() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex.submit(Op::Compact).await.unwrap();
 let EventMsg::BackgroundEvent(event) =
@@ -1513,7 +1513,7 @@ async fn manual_compact_retries_after_context_window_error() {
 panic!("expected warning event after compact retry");
 };
 assert_eq!(message, COMPACT_WARNING_MESSAGE);
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let requests = request_log.requests();
 assert_eq!(
@@ -1628,10 +1628,10 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex.submit(Op::Compact).await.unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -1642,10 +1642,10 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex.submit(Op::Compact).await.unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -1656,7 +1656,7 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let requests = responses_mock.requests();
 assert_eq!(
@@ -1838,13 +1838,13 @@ async fn auto_compact_allows_multiple_attempts_when_interleaved_with_other_turn_
 if event.id.starts_with("auto-compact-")
 && matches!(
 event.msg,
-EventMsg::TaskStarted(_) | EventMsg::TaskComplete(_)
+EventMsg::TurnStarted(_) | EventMsg::TurnComplete(_)
 )
 {
 auto_compact_lifecycle_events.push(event);
 continue;
 }
-if let EventMsg::TaskComplete(_) = &event.msg
+if let EventMsg::TurnComplete(_) = &event.msg
 && !event.id.starts_with("auto-compact-")
 {
 break;
@@ -1946,7 +1946,7 @@ async fn auto_compact_triggers_after_function_call_over_95_percent_usage() {
 .await
 .unwrap();

-wait_for_event(&codex, |msg| matches!(msg, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |msg| matches!(msg, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -1958,7 +1958,7 @@ async fn auto_compact_triggers_after_function_call_over_95_percent_usage() {
 .await
 .unwrap();

-wait_for_event(&codex, |msg| matches!(msg, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |msg| matches!(msg, EventMsg::TurnComplete(_))).await;

 // Assert first request captured expected user message that triggers function call.
 let first_request = first_turn_mock.single_request().input();
@@ -2072,7 +2072,7 @@ async fn auto_compact_counts_encrypted_reasoning_before_last_user() {
 })
 .await
 .unwrap();
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 if idx < 2 {
 assert!(
@@ -77,10 +77,10 @@ async fn remote_compact_replaces_history_for_followups() -> Result<()> {
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex.submit(Op::Compact).await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -90,7 +90,7 @@ async fn remote_compact_replaces_history_for_followups() -> Result<()> {
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let compact_request = compact_mock.single_request();
 assert_eq!(compact_request.path(), "/v1/responses/compact");
@@ -201,7 +201,7 @@ async fn remote_compact_runs_automatically() -> Result<()> {
 _ => None,
 })
 .await;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 assert!(message);
 assert_eq!(compact_mock.requests().len(), 1);
@@ -269,10 +269,10 @@ async fn remote_compact_persists_replacement_history_in_rollout() -> Result<()>
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex.submit(Op::Compact).await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex.submit(Op::Shutdown).await?;
 wait_for_event(&codex, |ev| matches!(ev, EventMsg::ShutdownComplete)).await;
@@ -896,7 +896,7 @@ async fn user_turn(conversation: &Arc<CodexThread>, text: &str) {
 })
 .await
 .expect("submit user turn");
-wait_for_event(conversation, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(conversation, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 }

 async fn compact_conversation(conversation: &Arc<CodexThread>) {
@@ -909,7 +909,7 @@ async fn compact_conversation(conversation: &Arc<CodexThread>) {
 panic!("expected warning event after compact");
 };
 assert_eq!(message, COMPACT_WARNING_MESSAGE);
-wait_for_event(conversation, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(conversation, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 }

 async fn fetch_conversation_path(conversation: &Arc<CodexThread>) -> std::path::PathBuf {
@@ -91,7 +91,7 @@ async fn execpolicy_blocks_shell_invocation() -> Result<()> {
 unreachable!()
 };
 wait_for_event(&test.codex, |event| {
-matches!(event, EventMsg::TaskComplete(_))
+matches!(event, EventMsg::TurnComplete(_))
 })
 .await;
@@ -75,7 +75,7 @@ async fn fork_thread_twice_drops_to_first_message() {
 })
 .await
 .unwrap();
-let _ = wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+let _ = wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 }

 // Request history from the base conversation to obtain rollout path.
codex-rs/core/tests/suite/hierarchical_agents.rs (new file, 71 lines)
@@ -0,0 +1,71 @@
use codex_core::features::Feature;
use core_test_support::load_sse_fixture_with_id;
use core_test_support::responses::mount_sse_once;
use core_test_support::responses::start_mock_server;
use core_test_support::test_codex::test_codex;

const HIERARCHICAL_AGENTS_SNIPPET: &str =
    "Files called AGENTS.md commonly appear in many places inside a container";

fn sse_completed(id: &str) -> String {
    load_sse_fixture_with_id("../fixtures/completed_template.json", id)
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn hierarchical_agents_appends_to_project_doc_in_user_instructions() {
    let server = start_mock_server().await;
    let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;

    let mut builder = test_codex().with_config(|config| {
        config.features.enable(Feature::HierarchicalAgents);
        std::fs::write(config.cwd.join("AGENTS.md"), "be nice").expect("write AGENTS.md");
    });
    let test = builder.build(&server).await.expect("build test codex");

    test.submit_turn("hello").await.expect("submit turn");

    let request = resp_mock.single_request();
    let user_messages = request.message_input_texts("user");
    let instructions = user_messages
        .iter()
        .find(|text| text.starts_with("# AGENTS.md instructions for "))
        .expect("instructions message");
    assert!(
        instructions.contains("be nice"),
        "expected AGENTS.md text included: {instructions}"
    );
    let snippet_pos = instructions
        .find(HIERARCHICAL_AGENTS_SNIPPET)
        .expect("expected hierarchical agents snippet");
    let base_pos = instructions
        .find("be nice")
        .expect("expected AGENTS.md text");
    assert!(
        snippet_pos > base_pos,
        "expected hierarchical agents message appended after base instructions: {instructions}"
    );
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn hierarchical_agents_emits_when_no_project_doc() {
    let server = start_mock_server().await;
    let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;

    let mut builder = test_codex().with_config(|config| {
        config.features.enable(Feature::HierarchicalAgents);
    });
    let test = builder.build(&server).await.expect("build test codex");

    test.submit_turn("hello").await.expect("submit turn");

    let request = resp_mock.single_request();
    let user_messages = request.message_input_texts("user");
    let instructions = user_messages
        .iter()
        .find(|text| text.starts_with("# AGENTS.md instructions for "))
        .expect("instructions message");
    assert!(
        instructions.contains(HIERARCHICAL_AGENTS_SNIPPET),
        "expected hierarchical agents message appended: {instructions}"
    );
}
codex-rs/core/tests/suite/mid_turn_input.rs (new file, 183 lines)
@@ -0,0 +1,183 @@
#![cfg(not(target_os = "windows"))]

use anyhow::Result;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
use codex_core::protocol::Op;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::user_input::UserInput;
use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::ev_response_created;
use core_test_support::responses::mount_sse_once;
use core_test_support::responses::sse;
use core_test_support::test_codex::TestCodexHarness;
use core_test_support::wait_for_event;
use core_test_support::wait_for_event_match;
use pretty_assertions::assert_eq;
use serde_json::Value;
use serde_json::json;

#[derive(Clone, Copy, Debug)]
enum MidTurnOp {
    UserInput,
    UserTurn,
}

fn message_contains_text(item: &Value, text: &str) -> bool {
    item.get("type").and_then(Value::as_str) == Some("message")
        && item.get("role").and_then(Value::as_str) == Some("user")
        && item
            .get("content")
            .and_then(Value::as_array)
            .map(|content| {
                content.iter().any(|span| {
                    span.get("type").and_then(Value::as_str) == Some("input_text")
                        && span.get("text").and_then(Value::as_str) == Some(text)
                })
            })
            .unwrap_or(false)
}

async fn run_mid_turn_injection_test(mid_turn_op: MidTurnOp) -> Result<()> {
    let harness = TestCodexHarness::new().await?;
    let test = harness.test();
    let codex = test.codex.clone();
    let session_model = test.session_configured.model.clone();
    let cwd = test.cwd_path().to_path_buf();

    let call_id = "shell-mid-turn";
    let first_message = "first message";
    let mid_turn_message = "mid-turn message";
    let workdir = cwd.to_string_lossy().to_string();

    let args = json!({
        "command": ["bash", "-lc", "sleep 2; echo finished"],
        "workdir": workdir,
        "timeout_ms": 10_000,
    });

    let first_response = sse(vec![
        ev_response_created("resp-1"),
        ev_function_call(call_id, "shell", &serde_json::to_string(&args)?),
        ev_completed("resp-1"),
    ]);
    let second_response = sse(vec![
        ev_response_created("resp-2"),
        ev_assistant_message("msg-1", "follow up"),
        ev_completed("resp-2"),
    ]);

    mount_sse_once(harness.server(), first_response).await;
    let request_log = mount_sse_once(harness.server(), second_response).await;

    codex
        .submit(Op::UserTurn {
            items: vec![UserInput::Text {
                text: first_message.to_string(),
            }],
            final_output_json_schema: None,
            cwd: cwd.clone(),
            approval_policy: AskForApproval::Never,
            sandbox_policy: SandboxPolicy::DangerFullAccess,
            model: session_model.clone(),
            effort: None,
            summary: ReasoningSummary::Auto,
        })
        .await?;
    let _ = wait_for_event_match(&codex, |event| match event {
        EventMsg::ExecCommandBegin(ev) if ev.call_id == call_id => Some(ev.clone()),
        _ => None,
    })
    .await;

    match mid_turn_op {
        MidTurnOp::UserInput => {
            codex
                .submit(Op::UserInput {
                    items: vec![UserInput::Text {
                        text: mid_turn_message.to_string(),
                    }],
                    final_output_json_schema: None,
                })
                .await?;
        }
        MidTurnOp::UserTurn => {
            codex
                .submit(Op::UserTurn {
                    items: vec![UserInput::Text {
                        text: mid_turn_message.to_string(),
                    }],
                    final_output_json_schema: None,
                    cwd: cwd.clone(),
                    approval_policy: AskForApproval::Never,
                    sandbox_policy: SandboxPolicy::DangerFullAccess,
                    model: session_model,
                    effort: None,
                    summary: ReasoningSummary::Auto,
                })
                .await?;
        }
    }

    let end_event = wait_for_event_match(&codex, |event| match event {
        EventMsg::ExecCommandEnd(ev) if ev.call_id == call_id => Some(ev.clone()),
        _ => None,
    })
    .await;
    assert_eq!(end_event.exit_code, 0);
    assert!(
        end_event.stdout.contains("finished"),
        "expected stdout to include finished: {}",
        end_event.stdout
    );

    wait_for_event(&codex, |event| matches!(event, EventMsg::TurnComplete(_))).await;

    let request = request_log.single_request();
    let user_messages = request.message_input_texts("user");
    assert_eq!(
        user_messages,
        vec![first_message.to_string(), mid_turn_message.to_string()]
    );

    let input = request.input();
    let tool_index = input
        .iter()
        .position(|item| {
            item.get("type").and_then(Value::as_str) == Some("function_call_output")
                && item.get("call_id").and_then(Value::as_str) == Some(call_id)
        })
        .expect("expected function_call_output in request");
    let mid_turn_index = input
        .iter()
        .position(|item| message_contains_text(item, mid_turn_message))
        .expect("expected mid-turn user message in request");
    assert!(
        tool_index < mid_turn_index,
        "expected tool output before mid-turn input"
    );

    let tool_output = request
        .function_call_output_text(call_id)
        .expect("expected function_call_output output text");
    assert!(
        tool_output.contains("finished"),
        "expected tool output to include finished: {tool_output}"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn mid_turn_input_inserts_user_input_after_tool_output() -> Result<()> {
    run_mid_turn_injection_test(MidTurnOp::UserInput).await
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn mid_turn_input_inserts_user_turn_after_tool_output() -> Result<()> {
    run_mid_turn_injection_test(MidTurnOp::UserTurn).await
}
@@ -30,6 +30,7 @@ mod exec;
 mod exec_policy;
 mod fork_thread;
 mod grep_files;
+mod hierarchical_agents;
 mod items;
 mod json_result;
 mod list_dir;
@@ -111,7 +111,7 @@ async fn refresh_models_on_models_etag_mismatch_and_avoid_duplicate_models_fetch
 })
 .await?;

-let _ = wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+let _ = wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 // Assert /models was refreshed exactly once after the X-Models-Etag mismatch.
 assert_eq!(refresh_models_mock.requests().len(), 1);
@@ -51,7 +51,7 @@ async fn responses_api_emits_api_request_event() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(|lines: &[&str]| {
 lines
@@ -93,7 +93,7 @@ async fn process_sse_emits_tracing_for_output_item() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(|lines: &[&str]| {
 lines
@@ -132,7 +132,7 @@ async fn process_sse_emits_failed_event_on_parse_error() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(|lines: &[&str]| {
 lines
@@ -172,7 +172,7 @@ async fn process_sse_records_failed_event_when_stream_closes_without_completed()
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(|lines: &[&str]| {
 lines
@@ -232,7 +232,7 @@ async fn process_sse_failed_event_records_response_error_message() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(|lines: &[&str]| {
 lines
@@ -290,7 +290,7 @@ async fn process_sse_failed_event_logs_parse_error() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(|lines: &[&str]| {
 lines
@@ -335,7 +335,7 @@ async fn process_sse_failed_event_logs_missing_error() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(|lines: &[&str]| {
 lines
@@ -389,7 +389,7 @@ async fn process_sse_failed_event_logs_response_completed_parse_error() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(|lines: &[&str]| {
 lines
@@ -440,7 +440,7 @@ async fn process_sse_emits_completed_telemetry() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(|lines: &[&str]| {
 lines
@@ -508,7 +508,7 @@ async fn handle_responses_span_records_response_kind_and_tool_name() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let logs = String::from_utf8(buffer.lock().unwrap().clone()).unwrap();

@@ -573,7 +573,7 @@ async fn record_responses_sets_span_fields_for_response_events() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let logs = String::from_utf8(buffer.lock().unwrap().clone()).unwrap();
@@ -966,7 +966,7 @@ async fn handle_container_exec_autoapprove_from_config_records_tool_decision() {
 .await
 .unwrap();

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 logs_assert(tool_decision_assertion(
 "auto_config_call",
@@ -106,7 +106,7 @@ async fn prompt_tools_are_consistent_across_requests() -> anyhow::Result<()> {
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -116,7 +116,7 @@ async fn prompt_tools_are_consistent_across_requests() -> anyhow::Result<()> {
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let expected_tools_names = vec![
 "shell_command",
@@ -178,7 +178,7 @@ async fn codex_mini_latest_tools() -> anyhow::Result<()> {
 })
 .await?;

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
 codex
 .submit(Op::UserInput {
 items: vec![UserInput::Text {
@@ -188,7 +188,7 @@ async fn codex_mini_latest_tools() -> anyhow::Result<()> {
 })
 .await?;

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let expected_instructions = [BASE_INSTRUCTIONS, APPLY_PATCH_TOOL_INSTRUCTIONS].join("\n");
@@ -238,7 +238,7 @@ async fn prefixes_context_and_instructions_once_and_consistently_across_requests
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserInput {
@@ -248,7 +248,7 @@ async fn prefixes_context_and_instructions_once_and_consistently_across_requests
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let body1 = req1.single_request().body_json();
 let input1 = body1["input"].as_array().expect("input array");
@@ -309,7 +309,7 @@ async fn overrides_turn_context_but_keeps_cached_prefix_and_key_constant() -> an
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let writable = TempDir::new().unwrap();
 codex
@@ -337,7 +337,7 @@ async fn overrides_turn_context_but_keeps_cached_prefix_and_key_constant() -> an
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let body1 = req1.single_request().body_json();
 let body2 = req2.single_request().body_json();
@@ -417,7 +417,7 @@ async fn override_before_first_turn_emits_environment_context() -> anyhow::Resul
 })
 .await?;

-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let body = req.single_request().body_json();
 let input = body["input"]
@@ -509,7 +509,7 @@ async fn per_turn_overrides_keep_cached_prefix_and_key_constant() -> anyhow::Res
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 // Second turn using per-turn overrides via UserTurn
 let new_cwd = TempDir::new().unwrap();
@@ -533,7 +533,7 @@ async fn per_turn_overrides_keep_cached_prefix_and_key_constant() -> anyhow::Res
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let body1 = req1.single_request().body_json();
 let body2 = req2.single_request().body_json();
@@ -627,7 +627,7 @@ async fn send_user_turn_with_no_changes_does_not_send_environment_context() -> a
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserTurn {
@@ -643,7 +643,7 @@ async fn send_user_turn_with_no_changes_does_not_send_environment_context() -> a
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let body1 = req1.single_request().body_json();
 let body2 = req2.single_request().body_json();
@@ -717,7 +717,7 @@ async fn send_user_turn_with_changes_sends_environment_context() -> anyhow::Resu
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 codex
 .submit(Op::UserTurn {
@@ -733,7 +733,7 @@ async fn send_user_turn_with_changes_sends_environment_context() -> anyhow::Resu
 final_output_json_schema: None,
 })
 .await?;
-wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
+wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

 let body1 = req1.single_request().body_json();
 let body2 = req2.single_request().body_json();
@@ -62,7 +62,7 @@ async fn quota_exceeded_emits_single_error_event() -> Result<()> {
 "Quota exceeded. Check your plan and billing details."
 );
 }
-EventMsg::TaskComplete(_) => break,
+EventMsg::TurnComplete(_) => break,
 _ => {}
 }
 }
Some files were not shown because too many files have changed in this diff.