Mirror of https://github.com/openai/codex.git (synced 2026-02-03 07:23:39 +00:00)

Compare commits: `dev/zhao/e` ... `codex-stat` (24 commits)
Commits in this comparison (by SHA1):

- e6016f5489
- 2721498ec9
- 47ef2cd9ca
- db70faab42
- 1e7796570a
- 11b13914a8
- 567844fe05
- 6eb0474455
- af1254fc4e
- 764aff6753
- e2a55921ec
- 08b6d9ef1f
- bbf536847c
- 0d0779d08a
- 087e571198
- 70b613be81
- 53ff941cf3
- 6b66534356
- 1938116a5d
- 075d50677d
- 5c40534e98
- f05492fc94
- 44ff9fcb69
- b4a1a500ec
.github/workflows/cla.yml (vendored), 5 changes:

```diff
@@ -46,4 +46,7 @@ jobs:
          path-to-document: https://github.com/openai/codex/blob/main/docs/CLA.md
          path-to-signatures: signatures/cla.json
          branch: cla-signatures
-         allowlist: codex,dependabot,dependabot[bot],github-actions[bot]
+         allowlist: |
+           codex
+           dependabot
+           dependabot[bot]
```
codex-rs/Cargo.lock (generated), 73 changes:

```diff
@@ -198,9 +198,9 @@ dependencies = [

 [[package]]
 name = "arboard"
-version = "3.6.1"
+version = "3.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0348a1c054491f4bfe6ab86a7b6ab1e44e45d899005de92f58b3df180b36ddaf"
+checksum = "55f533f8e0af236ffe5eb979b99381df3258853f00ba2e44b6e1955292c75227"
 dependencies = [
  "clipboard-win",
  "image",
@@ -212,7 +212,7 @@ dependencies = [
  "objc2-foundation",
  "parking_lot",
  "percent-encoding",
- "windows-sys 0.60.2",
+ "windows-sys 0.52.0",
  "wl-clipboard-rs",
  "x11rb",
 ]
@@ -1144,6 +1144,7 @@ dependencies = [
  "codex-apply-patch",
  "codex-arg0",
  "codex-async-utils",
+ "codex-client",
  "codex-core",
  "codex-execpolicy",
  "codex-file-search",
@@ -1254,7 +1255,6 @@ dependencies = [
  "async-trait",
  "clap",
  "codex-core",
  "codex-execpolicy",
  "libc",
  "path-absolutize",
  "pretty_assertions",
@@ -1264,7 +1264,6 @@ dependencies = [
  "shlex",
  "socket2 0.6.0",
  "tempfile",
  "thiserror 2.0.17",
  "tokio",
  "tokio-util",
  "tracing",
@@ -2538,7 +2537,7 @@ checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
 dependencies = [
  "cfg-if",
  "rustix 1.0.8",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -3295,9 +3294,9 @@ dependencies = [

 [[package]]
 name = "image"
-version = "0.25.9"
+version = "0.25.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6506c6c10786659413faa717ceebcb8f70731c0a60cbae39795fdf114519c1a"
+checksum = "529feb3e6769d234375c4cf1ee2ce713682b8e76538cb13f9fc23e1400a591e7"
 dependencies = [
  "bytemuck",
  "byteorder-lite",
@@ -3305,8 +3304,8 @@ dependencies = [
  "num-traits",
  "png",
  "tiff",
- "zune-core 0.5.0",
- "zune-jpeg 0.5.5",
+ "zune-core",
+ "zune-jpeg",
 ]

 [[package]]
@@ -3442,7 +3441,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
 dependencies = [
  "hermit-abi",
  "libc",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -5081,9 +5080,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"

 [[package]]
 name = "reqwest"
-version = "0.12.24"
+version = "0.12.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
+checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb"
 dependencies = [
  "base64",
  "bytes",
@@ -5219,7 +5218,7 @@ dependencies = [
  "errno",
  "libc",
  "linux-raw-sys 0.4.15",
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -5738,9 +5737,9 @@ dependencies = [

 [[package]]
 name = "serde_with"
-version = "3.16.1"
+version = "3.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7"
+checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5"
 dependencies = [
  "base64",
  "chrono",
@@ -5749,7 +5748,8 @@ dependencies = [
  "indexmap 2.12.0",
  "schemars 0.9.0",
  "schemars 1.0.4",
- "serde_core",
+ "serde",
  "serde_derive",
  "serde_json",
  "serde_with_macros",
  "time",
@@ -5757,11 +5757,11 @@ dependencies = [

 [[package]]
 name = "serde_with_macros"
-version = "3.16.1"
+version = "3.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c"
+checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f"
 dependencies = [
- "darling 0.21.3",
+ "darling 0.20.11",
  "proc-macro2",
  "quote",
  "syn 2.0.104",
@@ -6410,7 +6410,7 @@ dependencies = [
  "half",
  "quick-error",
  "weezl",
- "zune-jpeg 0.4.19",
+ "zune-jpeg",
 ]

 [[package]]
@@ -6715,9 +6715,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"

 [[package]]
 name = "tracing"
-version = "0.1.43"
+version = "0.1.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
+checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
 dependencies = [
  "log",
  "pin-project-lite",
@@ -6739,9 +6739,9 @@ dependencies = [

 [[package]]
 name = "tracing-attributes"
-version = "0.1.31"
+version = "0.1.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
+checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -6750,9 +6750,9 @@ dependencies = [

 [[package]]
 name = "tracing-core"
-version = "0.1.35"
+version = "0.1.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
+checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
 dependencies = [
  "once_cell",
  "valuable",
@@ -7370,7 +7370,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys 0.52.0",
 ]

 [[package]]
@@ -8095,28 +8095,13 @@ version = "0.4.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a"

-[[package]]
-name = "zune-core"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "111f7d9820f05fd715df3144e254d6fc02ee4088b0644c0ffd0efc9e6d9d2773"
-
 [[package]]
 name = "zune-jpeg"
 version = "0.4.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2c9e525af0a6a658e031e95f14b7f889976b74a11ba0eca5a5fc9ac8a1c43a6a"
 dependencies = [
  "zune-core 0.4.12",
 ]

 [[package]]
 name = "zune-jpeg"
 version = "0.5.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dc6fb7703e32e9a07fb3f757360338b3a567a5054f21b5f52a666752e333d58e"
 dependencies = [
- "zune-core 0.5.0",
+ "zune-core",
 ]

 [[package]]
```
```diff
@@ -136,7 +136,7 @@ icu_decimal = "2.1"
 icu_locale_core = "2.1"
 icu_provider = { version = "2.1", features = ["sync"] }
 ignore = "0.4.23"
-image = { version = "^0.25.9", default-features = false }
+image = { version = "^0.25.8", default-features = false }
 indexmap = "2.12.0"
 insta = "1.43.2"
 itertools = "0.14.0"
@@ -178,7 +178,7 @@ seccompiler = "0.5.0"
 sentry = "0.34.0"
 serde = "1"
 serde_json = "1"
-serde_with = "3.16"
+serde_with = "3.14"
 serial_test = "3.2.0"
 sha1 = "0.10.6"
 sha2 = "0.10"
@@ -203,7 +203,7 @@ tokio-util = "0.7.16"
 toml = "0.9.5"
 toml_edit = "0.23.5"
 tonic = "0.13.1"
-tracing = "0.1.43"
+tracing = "0.1.41"
 tracing-appender = "0.2.3"
 tracing-subscriber = "0.3.20"
 tracing-test = "0.2.5"
```
```diff
@@ -131,7 +131,7 @@ client_request_definitions! {
     },
     ReviewStart => "review/start" {
         params: v2::ReviewStartParams,
-        response: v2::ReviewStartResponse,
+        response: v2::TurnStartResponse,
     },

     ModelList => "model/list" {
@@ -506,7 +506,6 @@ server_notification_definitions! {
     TurnStarted => "turn/started" (v2::TurnStartedNotification),
     TurnCompleted => "turn/completed" (v2::TurnCompletedNotification),
     TurnDiffUpdated => "turn/diff/updated" (v2::TurnDiffUpdatedNotification),
-    TurnPlanUpdated => "turn/plan/updated" (v2::TurnPlanUpdatedNotification),
     ItemStarted => "item/started" (v2::ItemStartedNotification),
     ItemCompleted => "item/completed" (v2::ItemCompletedNotification),
     AgentMessageDelta => "item/agentMessage/delta" (v2::AgentMessageDeltaNotification),
```
```diff
@@ -11,8 +11,6 @@ use codex_protocol::items::AgentMessageContent as CoreAgentMessageContent;
 use codex_protocol::items::TurnItem as CoreTurnItem;
 use codex_protocol::models::ResponseItem;
 use codex_protocol::parse_command::ParsedCommand as CoreParsedCommand;
-use codex_protocol::plan_tool::PlanItemArg as CorePlanItemArg;
-use codex_protocol::plan_tool::StepStatus as CorePlanStepStatus;
 use codex_protocol::protocol::CodexErrorInfo as CoreCodexErrorInfo;
 use codex_protocol::protocol::CreditsSnapshot as CoreCreditsSnapshot;
 use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
@@ -132,12 +130,6 @@ v2_enum_from_core!(
     }
 );

-v2_enum_from_core!(
-    pub enum ReviewDelivery from codex_protocol::protocol::ReviewDelivery {
-        Inline, Detached
-    }
-);
-
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
@@ -916,22 +908,9 @@ pub struct ReviewStartParams {
     pub thread_id: String,
     pub target: ReviewTarget,

-    /// Where to run the review: inline (default) on the current thread or
-    /// detached on a new thread (returned in `reviewThreadId`).
     /// When true, also append the final review message to the original thread.
     #[serde(default)]
-    pub delivery: Option<ReviewDelivery>,
 }

-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
-#[serde(rename_all = "camelCase")]
-#[ts(export_to = "v2/")]
-pub struct ReviewStartResponse {
-    pub turn: Turn,
-    /// Identifies the thread where the review runs.
-    ///
-    /// For inline reviews, this is the original thread id.
-    /// For detached reviews, this is the id of the new review thread.
-    pub review_thread_id: String,
     pub append_to_original_thread: bool,
-}
-
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -1084,10 +1063,7 @@ pub enum ThreadItem {
     ImageView { id: String, path: String },
     #[serde(rename_all = "camelCase")]
     #[ts(rename_all = "camelCase")]
-    EnteredReviewMode { id: String, review: String },
-    #[serde(rename_all = "camelCase")]
-    #[ts(rename_all = "camelCase")]
-    ExitedReviewMode { id: String, review: String },
+    CodeReview { id: String, review: String },
 }

 impl From<CoreTurnItem> for ThreadItem {
@@ -1232,56 +1208,10 @@ pub struct TurnCompletedNotification {
 /// Notification that the turn-level unified diff has changed.
 /// Contains the latest aggregated diff across all file changes in the turn.
 pub struct TurnDiffUpdatedNotification {
     pub thread_id: String,
     pub turn_id: String,
     pub diff: String,
 }
-
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
-#[serde(rename_all = "camelCase")]
-#[ts(export_to = "v2/")]
-pub struct TurnPlanUpdatedNotification {
-    pub turn_id: String,
-    pub explanation: Option<String>,
-    pub plan: Vec<TurnPlanStep>,
-}
-
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
-#[serde(rename_all = "camelCase")]
-#[ts(export_to = "v2/")]
-pub struct TurnPlanStep {
-    pub step: String,
-    pub status: TurnPlanStepStatus,
-}
-
-#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
-#[serde(rename_all = "camelCase")]
-#[ts(export_to = "v2/")]
-pub enum TurnPlanStepStatus {
-    Pending,
-    InProgress,
-    Completed,
-}
-
-impl From<CorePlanItemArg> for TurnPlanStep {
-    fn from(value: CorePlanItemArg) -> Self {
-        Self {
-            step: value.step,
-            status: value.status.into(),
-        }
-    }
-}
-
-impl From<CorePlanStepStatus> for TurnPlanStepStatus {
-    fn from(value: CorePlanStepStatus) -> Self {
-        match value {
-            CorePlanStepStatus::Pending => Self::Pending,
-            CorePlanStepStatus::InProgress => Self::InProgress,
-            CorePlanStepStatus::Completed => Self::Completed,
-        }
-    }
-}

 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
@@ -1305,8 +1235,6 @@ pub struct ItemCompletedNotification {
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
 pub struct AgentMessageDeltaNotification {
     pub thread_id: String,
     pub turn_id: String,
     pub item_id: String,
     pub delta: String,
 }
@@ -1315,8 +1243,6 @@ pub struct AgentMessageDeltaNotification {
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
 pub struct ReasoningSummaryTextDeltaNotification {
     pub thread_id: String,
     pub turn_id: String,
     pub item_id: String,
     pub delta: String,
     pub summary_index: i64,
@@ -1326,8 +1252,6 @@ pub struct ReasoningSummaryTextDeltaNotification {
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
 pub struct ReasoningSummaryPartAddedNotification {
     pub thread_id: String,
     pub turn_id: String,
     pub item_id: String,
     pub summary_index: i64,
 }
@@ -1336,8 +1260,6 @@ pub struct ReasoningSummaryPartAddedNotification {
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
 pub struct ReasoningTextDeltaNotification {
     pub thread_id: String,
     pub turn_id: String,
     pub item_id: String,
     pub delta: String,
     pub content_index: i64,
@@ -1347,8 +1269,6 @@ pub struct ReasoningTextDeltaNotification {
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
 pub struct CommandExecutionOutputDeltaNotification {
     pub thread_id: String,
     pub turn_id: String,
     pub item_id: String,
     pub delta: String,
 }
@@ -1357,8 +1277,6 @@ pub struct CommandExecutionOutputDeltaNotification {
 #[serde(rename_all = "camelCase")]
 #[ts(export_to = "v2/")]
 pub struct McpToolCallProgressNotification {
     pub thread_id: String,
     pub turn_id: String,
     pub item_id: String,
     pub message: String,
 }
```
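The consolidated `codeReview` variant is what clients will see on the wire for both the start and completion of a review. As a minimal, hedged sketch (not the crate's actual definition, which carries many more variants and derives), the following local mirror enum shows the JSON shape implied by the `{"type":"codeReview", ...}` examples in the docs, assuming serde's internal `type` tagging with camelCase variant names:

```rust
// Illustrative stand-in for the consolidated review item; the real `ThreadItem`
// in codex-app-server-protocol has many more variants and extra derives.
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(tag = "type", rename_all = "camelCase")]
enum ThreadItemSketch {
    #[serde(rename_all = "camelCase")]
    CodeReview { id: String, review: String },
}

fn main() -> serde_json::Result<()> {
    // The same variant is used twice per review: once in `item/started` (with the
    // user-facing hint) and once in `item/completed` (with the final review text).
    let started = ThreadItemSketch::CodeReview {
        id: "turn_900".to_string(),
        review: "current changes".to_string(),
    };
    let json = serde_json::to_string(&started)?;
    assert_eq!(
        json,
        r#"{"type":"codeReview","id":"turn_900","review":"current changes"}"#
    );
    let round_trip: ThreadItemSketch = serde_json::from_str(&json)?;
    assert_eq!(round_trip, started);
    Ok(())
}
```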
````diff
@@ -65,7 +65,7 @@ The JSON-RPC API exposes dedicated methods for managing Codex conversations. Thr
 - `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
 - `turn/start` — add user input to a thread and begin Codex generation; responds with the initial `turn` object and streams `turn/started`, `item/*`, and `turn/completed` notifications.
 - `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
-- `review/start` — kick off Codex’s automated reviewer for a thread; responds like `turn/start` and emits `item/started`/`item/completed` notifications with `enteredReviewMode` and `exitedReviewMode` items, plus a final assistant `agentMessage` containing the review.
+- `review/start` — kick off Codex’s automated reviewer for a thread; responds like `turn/start` and emits a `item/completed` notification with a `codeReview` item when results are ready.

 ### 1) Start or resume a thread

@@ -190,56 +190,49 @@ Use `review/start` to run Codex’s reviewer on the currently checked-out projec
 - `{"type":"baseBranch","branch":"main"}` — diff against the provided branch’s upstream (see prompt for the exact `git merge-base`/`git diff` instructions Codex will run).
 - `{"type":"commit","sha":"abc1234","title":"Optional subject"}` — review a specific commit.
 - `{"type":"custom","instructions":"Free-form reviewer instructions"}` — fallback prompt equivalent to the legacy manual review request.
-- `delivery` (`"inline"` or `"detached"`, default `"inline"`) — where the review runs:
-  - `"inline"`: run the review as a new turn on the existing thread. The response’s `reviewThreadId` equals the original `threadId`, and no new `thread/started` notification is emitted.
-  - `"detached"`: fork a new review thread from the parent conversation and run the review there. The response’s `reviewThreadId` is the id of this new review thread, and the server emits a `thread/started` notification for it before streaming review items.
 - `appendToOriginalThread` (bool, default `false`) — when `true`, Codex also records a final assistant-style message with the review summary in the original thread. When `false`, only the `codeReview` item is emitted for the review run and no extra message is added to the original thread.

 Example request/response:

 ```json
 { "method": "review/start", "id": 40, "params": {
   "threadId": "thr_123",
-  "delivery": "inline",
   "appendToOriginalThread": true,
   "target": { "type": "commit", "sha": "1234567deadbeef", "title": "Polish tui colors" }
 } }
-{ "id": 40, "result": {
-  "turn": {
-    "id": "turn_900",
-    "status": "inProgress",
-    "items": [
-      { "type": "userMessage", "id": "turn_900", "content": [ { "type": "text", "text": "Review commit 1234567: Polish tui colors" } ] }
-    ],
-    "error": null
-  },
-  "reviewThreadId": "thr_123"
-} }
+{ "id": 40, "result": { "turn": {
+  "id": "turn_900",
+  "status": "inProgress",
+  "items": [
+    { "type": "userMessage", "id": "turn_900", "content": [ { "type": "text", "text": "Review commit 1234567: Polish tui colors" } ] }
+  ],
+  "error": null
+} } }
 ```

-For a detached review, use `"delivery": "detached"`. The response is the same shape, but `reviewThreadId` will be the id of the new review thread (different from the original `threadId`). The server also emits a `thread/started` notification for that new thread before streaming the review turn.
-
 Codex streams the usual `turn/started` notification followed by an `item/started`
-with an `enteredReviewMode` item so clients can show progress:
+with the same `codeReview` item id so clients can show progress:

 ```json
 { "method": "item/started", "params": { "item": {
-  "type": "enteredReviewMode",
+  "type": "codeReview",
   "id": "turn_900",
   "review": "current changes"
 } } }
 ```

-When the reviewer finishes, the server emits `item/started` and `item/completed`
-containing an `exitedReviewMode` item with the final review text:
+When the reviewer finishes, the server emits `item/completed` containing the same
+`codeReview` item with the final review text:

 ```json
 { "method": "item/completed", "params": { "item": {
-  "type": "exitedReviewMode",
+  "type": "codeReview",
   "id": "turn_900",
   "review": "Looks solid overall...\n\n- Prefer Stylize helpers — app.rs:10-20\n  ..."
 } } }
 ```

-The `review` string is plain text that already bundles the overall explanation plus a bullet list for each structured finding (matching `ThreadItem::ExitedReviewMode` in the generated schema). Use this notification to render the reviewer output in your client.
+The `review` string is plain text that already bundles the overall explanation plus a bullet list for each structured finding (matching `ThreadItem::CodeReview` in the generated schema). Use this notification to render the reviewer output in your client.

 ## Events (work-in-progress)

@@ -251,7 +244,6 @@ The app-server streams JSON-RPC notifications while a turn is running. Each turn

 - `turn/started` — `{ turn }` with the turn id, empty `items`, and `status: "inProgress"`.
 - `turn/completed` — `{ turn }` where `turn.status` is `completed`, `interrupted`, or `failed`; failures carry `{ error: { message, codexErrorInfo? } }`.
-- `turn/plan/updated` — `{ turnId, explanation?, plan }` whenever the agent shares or changes its plan; each `plan` entry is `{ step, status }` with `status` in `pending`, `inProgress`, or `completed`.

 Today both notifications carry an empty `items` array even when item events were streamed; rely on `item/*` notifications for the canonical item list until this is fixed.
````
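To make the documented flow concrete, here is a minimal client-side sketch built only from the JSON shapes shown in the examples above. It constructs a `review/start` request and extracts the final review text from an `item/completed` notification; the transport (stdio, sockets) and request-id bookkeeping are intentionally left out, and the helper names are ad hoc, not part of the app-server API.

```rust
// Client-side sketch of the review/start flow, assuming only the documented JSON shapes.
use serde_json::{json, Value};

/// Build a `review/start` request for reviewing a specific commit.
fn review_start_request(id: u64, thread_id: &str, sha: &str, title: &str) -> Value {
    json!({
        "method": "review/start",
        "id": id,
        "params": {
            "threadId": thread_id,
            "appendToOriginalThread": true,
            "target": { "type": "commit", "sha": sha, "title": title }
        }
    })
}

/// Pull the review text out of an `item/completed` notification, if it carries
/// the `codeReview` item; other item kinds are ignored.
fn code_review_text(notification: &Value) -> Option<String> {
    if notification.get("method")?.as_str()? != "item/completed" {
        return None;
    }
    let item = notification.get("params")?.get("item")?;
    if item.get("type")?.as_str()? == "codeReview" {
        Some(item.get("review")?.as_str()?.to_string())
    } else {
        None
    }
}

fn main() {
    let request = review_start_request(40, "thr_123", "1234567deadbeef", "Polish tui colors");
    println!("{request}");

    // Simulated notification matching the documented `item/completed` payload.
    let completed = json!({
        "method": "item/completed",
        "params": { "item": {
            "type": "codeReview",
            "id": "turn_900",
            "review": "Looks solid overall..."
        } }
    });
    assert_eq!(
        code_review_text(&completed).as_deref(),
        Some("Looks solid overall...")
    );
}
```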
```diff
@@ -43,8 +43,6 @@ use codex_app_server_protocol::TurnCompletedNotification;
 use codex_app_server_protocol::TurnDiffUpdatedNotification;
 use codex_app_server_protocol::TurnError;
 use codex_app_server_protocol::TurnInterruptResponse;
-use codex_app_server_protocol::TurnPlanStep;
-use codex_app_server_protocol::TurnPlanUpdatedNotification;
 use codex_app_server_protocol::TurnStatus;
 use codex_core::CodexConversation;
 use codex_core::parse_command::shlex_join;
@@ -62,7 +60,6 @@ use codex_core::protocol::TokenCountEvent;
 use codex_core::protocol::TurnDiffEvent;
 use codex_core::review_format::format_review_findings_block;
 use codex_protocol::ConversationId;
-use codex_protocol::plan_tool::UpdatePlanArgs;
 use codex_protocol::protocol::ReviewOutputEvent;
 use std::collections::HashMap;
 use std::convert::TryFrom;
@@ -260,8 +257,6 @@ pub(crate) async fn apply_bespoke_event_handling(
         }
         EventMsg::AgentMessageContentDelta(event) => {
             let notification = AgentMessageDeltaNotification {
                 thread_id: conversation_id.to_string(),
                 turn_id: event_turn_id.clone(),
                 item_id: event.item_id,
                 delta: event.delta,
             };
@@ -280,8 +275,6 @@ pub(crate) async fn apply_bespoke_event_handling(
         }
         EventMsg::ReasoningContentDelta(event) => {
             let notification = ReasoningSummaryTextDeltaNotification {
                 thread_id: conversation_id.to_string(),
                 turn_id: event_turn_id.clone(),
                 item_id: event.item_id,
                 delta: event.delta,
                 summary_index: event.summary_index,
@@ -294,8 +287,6 @@ pub(crate) async fn apply_bespoke_event_handling(
         }
         EventMsg::ReasoningRawContentDelta(event) => {
             let notification = ReasoningTextDeltaNotification {
                 thread_id: conversation_id.to_string(),
                 turn_id: event_turn_id.clone(),
                 item_id: event.item_id,
                 delta: event.delta,
                 content_index: event.content_index,
@@ -306,8 +297,6 @@ pub(crate) async fn apply_bespoke_event_handling(
         }
         EventMsg::AgentReasoningSectionBreak(event) => {
             let notification = ReasoningSummaryPartAddedNotification {
                 thread_id: conversation_id.to_string(),
                 turn_id: event_turn_id.clone(),
                 item_id: event.item_id,
                 summary_index: event.summary_index,
             };
@@ -351,26 +340,16 @@ pub(crate) async fn apply_bespoke_event_handling(
                 .await;
         }
         EventMsg::EnteredReviewMode(review_request) => {
-            let review = review_request.user_facing_hint;
-            let item = ThreadItem::EnteredReviewMode {
-                id: event_turn_id.clone(),
-                review,
-            };
-            let started = ItemStartedNotification {
+            let notification = ItemStartedNotification {
                 thread_id: conversation_id.to_string(),
                 turn_id: event_turn_id.clone(),
-                item: item.clone(),
+                item: ThreadItem::CodeReview {
+                    id: event_turn_id.clone(),
+                    review: review_request.user_facing_hint,
+                },
             };
             outgoing
-                .send_server_notification(ServerNotification::ItemStarted(started))
-                .await;
-            let completed = ItemCompletedNotification {
-                thread_id: conversation_id.to_string(),
-                turn_id: event_turn_id.clone(),
-                item,
-            };
-            outgoing
-                .send_server_notification(ServerNotification::ItemCompleted(completed))
+                .send_server_notification(ServerNotification::ItemStarted(notification))
                 .await;
         }
         EventMsg::ItemStarted(item_started_event) => {
@@ -396,29 +375,21 @@ pub(crate) async fn apply_bespoke_event_handling(
                 .await;
         }
         EventMsg::ExitedReviewMode(review_event) => {
-            let review = match review_event.review_output {
+            let review_text = match review_event.review_output {
                 Some(output) => render_review_output_text(&output),
                 None => REVIEW_FALLBACK_MESSAGE.to_string(),
             };
-            let item = ThreadItem::ExitedReviewMode {
-                id: event_turn_id.clone(),
-                review,
-            };
-            let started = ItemStartedNotification {
+            let review_item_id = event_turn_id.clone();
+            let notification = ItemCompletedNotification {
                 thread_id: conversation_id.to_string(),
                 turn_id: event_turn_id.clone(),
-                item: item.clone(),
+                item: ThreadItem::CodeReview {
+                    id: review_item_id,
+                    review: review_text,
+                },
             };
             outgoing
-                .send_server_notification(ServerNotification::ItemStarted(started))
-                .await;
-            let completed = ItemCompletedNotification {
-                thread_id: conversation_id.to_string(),
-                turn_id: event_turn_id.clone(),
-                item,
-            };
-            outgoing
-                .send_server_notification(ServerNotification::ItemCompleted(completed))
+                .send_server_notification(ServerNotification::ItemCompleted(notification))
                 .await;
         }
         EventMsg::PatchApplyBegin(patch_begin_event) => {
@@ -502,8 +473,6 @@ pub(crate) async fn apply_bespoke_event_handling(
         }
         EventMsg::ExecCommandOutputDelta(exec_command_output_delta_event) => {
             let notification = CommandExecutionOutputDeltaNotification {
                 thread_id: conversation_id.to_string(),
                 turn_id: event_turn_id.clone(),
                 item_id: exec_command_output_delta_event.call_id.clone(),
                 delta: String::from_utf8_lossy(&exec_command_output_delta_event.chunk).to_string(),
             };
@@ -598,7 +567,6 @@ pub(crate) async fn apply_bespoke_event_handling(
         }
         EventMsg::TurnDiff(turn_diff_event) => {
             handle_turn_diff(
                 conversation_id,
                 &event_turn_id,
                 turn_diff_event,
                 api_version,
@@ -606,22 +574,12 @@ pub(crate) async fn apply_bespoke_event_handling(
             )
             .await;
         }
-        EventMsg::PlanUpdate(plan_update_event) => {
-            handle_turn_plan_update(
-                &event_turn_id,
-                plan_update_event,
-                api_version,
-                outgoing.as_ref(),
-            )
-            .await;
-        }

         _ => {}
     }
 }

 async fn handle_turn_diff(
     conversation_id: ConversationId,
     event_turn_id: &str,
     turn_diff_event: TurnDiffEvent,
     api_version: ApiVersion,
@@ -629,7 +587,6 @@ async fn handle_turn_diff(
 ) {
     if let ApiVersion::V2 = api_version {
         let notification = TurnDiffUpdatedNotification {
             thread_id: conversation_id.to_string(),
             turn_id: event_turn_id.to_string(),
             diff: turn_diff_event.unified_diff,
         };
@@ -639,28 +596,6 @@ async fn handle_turn_diff(
     }
 }

-async fn handle_turn_plan_update(
-    event_turn_id: &str,
-    plan_update_event: UpdatePlanArgs,
-    api_version: ApiVersion,
-    outgoing: &OutgoingMessageSender,
-) {
-    if let ApiVersion::V2 = api_version {
-        let notification = TurnPlanUpdatedNotification {
-            turn_id: event_turn_id.to_string(),
-            explanation: plan_update_event.explanation,
-            plan: plan_update_event
-                .plan
-                .into_iter()
-                .map(TurnPlanStep::from)
-                .collect(),
-        };
-        outgoing
-            .send_server_notification(ServerNotification::TurnPlanUpdated(notification))
-            .await;
-    }
-}
-
 async fn emit_turn_completed_with_status(
     conversation_id: ConversationId,
     event_turn_id: String,
@@ -1180,15 +1115,12 @@ mod tests {
     use anyhow::Result;
     use anyhow::anyhow;
     use anyhow::bail;
-    use codex_app_server_protocol::TurnPlanStepStatus;
     use codex_core::protocol::CreditsSnapshot;
     use codex_core::protocol::McpInvocation;
     use codex_core::protocol::RateLimitSnapshot;
     use codex_core::protocol::RateLimitWindow;
     use codex_core::protocol::TokenUsage;
     use codex_core::protocol::TokenUsageInfo;
-    use codex_protocol::plan_tool::PlanItemArg;
-    use codex_protocol::plan_tool::StepStatus;
     use mcp_types::CallToolResult;
     use mcp_types::ContentBlock;
     use mcp_types::TextContent;
@@ -1348,46 +1280,6 @@ mod tests {
         Ok(())
     }

-    #[tokio::test]
-    async fn test_handle_turn_plan_update_emits_notification_for_v2() -> Result<()> {
-        let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
-        let outgoing = OutgoingMessageSender::new(tx);
-        let update = UpdatePlanArgs {
-            explanation: Some("need plan".to_string()),
-            plan: vec![
-                PlanItemArg {
-                    step: "first".to_string(),
-                    status: StepStatus::Pending,
-                },
-                PlanItemArg {
-                    step: "second".to_string(),
-                    status: StepStatus::Completed,
-                },
-            ],
-        };
-
-        handle_turn_plan_update("turn-123", update, ApiVersion::V2, &outgoing).await;
-
-        let msg = rx
-            .recv()
-            .await
-            .ok_or_else(|| anyhow!("should send one notification"))?;
-        match msg {
-            OutgoingMessage::AppServerNotification(ServerNotification::TurnPlanUpdated(n)) => {
-                assert_eq!(n.turn_id, "turn-123");
-                assert_eq!(n.explanation.as_deref(), Some("need plan"));
-                assert_eq!(n.plan.len(), 2);
-                assert_eq!(n.plan[0].step, "first");
-                assert_eq!(n.plan[0].status, TurnPlanStepStatus::Pending);
-                assert_eq!(n.plan[1].step, "second");
-                assert_eq!(n.plan[1].status, TurnPlanStepStatus::Completed);
-            }
-            other => bail!("unexpected message: {other:?}"),
-        }
-        assert!(rx.try_recv().is_err(), "no extra messages expected");
-        Ok(())
-    }
-
     #[tokio::test]
     async fn test_handle_token_count_event_emits_usage_and_rate_limits() -> Result<()> {
         let conversation_id = ConversationId::new();
@@ -1787,10 +1679,8 @@ mod tests {
         let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
         let outgoing = OutgoingMessageSender::new(tx);
         let unified_diff = "--- a\n+++ b\n".to_string();
         let conversation_id = ConversationId::new();

         handle_turn_diff(
             conversation_id,
             "turn-1",
             TurnDiffEvent {
                 unified_diff: unified_diff.clone(),
@@ -1808,7 +1698,6 @@ mod tests {
             OutgoingMessage::AppServerNotification(ServerNotification::TurnDiffUpdated(
                 notification,
             )) => {
                 assert_eq!(notification.thread_id, conversation_id.to_string());
                 assert_eq!(notification.turn_id, "turn-1");
                 assert_eq!(notification.diff, unified_diff);
             }
@@ -1822,10 +1711,8 @@ mod tests {
     async fn test_handle_turn_diff_is_noop_for_v1() -> Result<()> {
         let (tx, mut rx) = mpsc::channel(CHANNEL_CAPACITY);
         let outgoing = OutgoingMessageSender::new(tx);
         let conversation_id = ConversationId::new();

         handle_turn_diff(
             conversation_id,
             "turn-1",
             TurnDiffEvent {
                 unified_diff: "diff".to_string(),
```
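The handler above now sends the same `CodeReview` item id in both the `item/started` and `item/completed` notifications, so a client can correlate the in-progress hint with the final review text. As a hedged sketch of how a consumer might do that (the `ReviewItem` type and method names below are simplified stand-ins, not types from the protocol crates):

```rust
// Client-side sketch: correlate the two CodeReview notifications by item id.
use std::collections::HashMap;

#[derive(Debug)]
struct ReviewItem {
    id: String,
    review: String,
}

#[derive(Default)]
struct ReviewTracker {
    // Pending reviews keyed by item id; the value is the user-facing hint from
    // `item/started` (e.g. "current changes" or "commit 1234567").
    in_progress: HashMap<String, String>,
}

impl ReviewTracker {
    fn on_item_started(&mut self, item: ReviewItem) {
        self.in_progress.insert(item.id, item.review);
    }

    /// Returns `(hint, final_review)` once the matching `item/completed` arrives.
    fn on_item_completed(&mut self, item: ReviewItem) -> Option<(String, String)> {
        let hint = self.in_progress.remove(&item.id)?;
        Some((hint, item.review))
    }
}

fn main() {
    let mut tracker = ReviewTracker::default();
    tracker.on_item_started(ReviewItem {
        id: "turn_900".to_string(),
        review: "current changes".to_string(),
    });
    let done = tracker.on_item_completed(ReviewItem {
        id: "turn_900".to_string(),
        review: "Looks solid overall...".to_string(),
    });
    assert_eq!(
        done,
        Some((
            "current changes".to_string(),
            "Looks solid overall...".to_string()
        ))
    );
}
```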
```diff
@@ -61,9 +61,7 @@ use codex_app_server_protocol::RemoveConversationSubscriptionResponse;
 use codex_app_server_protocol::RequestId;
 use codex_app_server_protocol::ResumeConversationParams;
 use codex_app_server_protocol::ResumeConversationResponse;
-use codex_app_server_protocol::ReviewDelivery as ApiReviewDelivery;
 use codex_app_server_protocol::ReviewStartParams;
-use codex_app_server_protocol::ReviewStartResponse;
 use codex_app_server_protocol::ReviewTarget;
 use codex_app_server_protocol::SandboxMode;
 use codex_app_server_protocol::SendUserMessageParams;
@@ -122,7 +120,6 @@ use codex_core::git_info::git_diff_to_remote;
 use codex_core::parse_cursor;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::Op;
-use codex_core::protocol::ReviewDelivery as CoreReviewDelivery;
 use codex_core::protocol::ReviewRequest;
 use codex_core::protocol::SessionConfiguredEvent;
 use codex_core::read_head_for_summary;
@@ -256,6 +253,7 @@ impl CodexMessageProcessor {

     fn review_request_from_target(
         target: ReviewTarget,
+        append_to_original_thread: bool,
     ) -> Result<(ReviewRequest, String), JSONRPCErrorError> {
         fn invalid_request(message: String) -> JSONRPCErrorError {
             JSONRPCErrorError {
@@ -271,6 +269,7 @@ impl CodexMessageProcessor {
                 ReviewRequest {
                     prompt: "Review the current code changes (staged, unstaged, and untracked files) and provide prioritized findings.".to_string(),
                     user_facing_hint: "current changes".to_string(),
+                    append_to_original_thread,
                 },
                 "Review uncommitted changes".to_string(),
             )),
@@ -286,6 +285,7 @@ impl CodexMessageProcessor {
                 ReviewRequest {
                     prompt,
                     user_facing_hint: hint,
+                    append_to_original_thread,
                 },
                 display,
             ))
@@ -314,6 +314,7 @@ impl CodexMessageProcessor {
                 ReviewRequest {
                     prompt,
                     user_facing_hint: hint,
+                    append_to_original_thread,
                 },
                 display,
             ))
@@ -327,6 +328,7 @@ impl CodexMessageProcessor {
                 ReviewRequest {
                     prompt: trimmed.clone(),
                     user_facing_hint: trimmed.clone(),
+                    append_to_original_thread,
                 },
                 trimmed,
             ))
@@ -2495,220 +2497,60 @@ impl CodexMessageProcessor {
         }
     }

-    fn build_review_turn(turn_id: String, display_text: &str) -> Turn {
-        let items = if display_text.is_empty() {
-            Vec::new()
-        } else {
-            vec![ThreadItem::UserMessage {
-                id: turn_id.clone(),
-                content: vec![V2UserInput::Text {
-                    text: display_text.to_string(),
-                }],
-            }]
-        };
-
-        Turn {
-            id: turn_id,
-            items,
-            status: TurnStatus::InProgress,
-        }
-    }
-
-    async fn emit_review_started(
-        &self,
-        request_id: &RequestId,
-        turn: Turn,
-        parent_thread_id: String,
-        review_thread_id: String,
-    ) {
-        let response = ReviewStartResponse {
-            turn: turn.clone(),
-            review_thread_id,
-        };
-        self.outgoing
-            .send_response(request_id.clone(), response)
-            .await;
-
-        let notif = TurnStartedNotification {
-            thread_id: parent_thread_id,
-            turn,
-        };
-        self.outgoing
-            .send_server_notification(ServerNotification::TurnStarted(notif))
-            .await;
-    }
-
-    async fn start_inline_review(
-        &self,
-        request_id: &RequestId,
-        parent_conversation: Arc<CodexConversation>,
-        review_request: ReviewRequest,
-        display_text: &str,
-        parent_thread_id: String,
-    ) -> std::result::Result<(), JSONRPCErrorError> {
-        let turn_id = parent_conversation
-            .submit(Op::Review { review_request })
-            .await;
-
-        match turn_id {
-            Ok(turn_id) => {
-                let turn = Self::build_review_turn(turn_id, display_text);
-                self.emit_review_started(
-                    request_id,
-                    turn,
-                    parent_thread_id.clone(),
-                    parent_thread_id,
-                )
-                .await;
-                Ok(())
-            }
-            Err(err) => Err(JSONRPCErrorError {
-                code: INTERNAL_ERROR_CODE,
-                message: format!("failed to start review: {err}"),
-                data: None,
-            }),
-        }
-    }
-
-    async fn start_detached_review(
-        &mut self,
-        request_id: &RequestId,
-        parent_conversation_id: ConversationId,
-        review_request: ReviewRequest,
-        display_text: &str,
-    ) -> std::result::Result<(), JSONRPCErrorError> {
-        let rollout_path = find_conversation_path_by_id_str(
-            &self.config.codex_home,
-            &parent_conversation_id.to_string(),
-        )
-        .await
-        .map_err(|err| JSONRPCErrorError {
-            code: INTERNAL_ERROR_CODE,
-            message: format!("failed to locate conversation id {parent_conversation_id}: {err}"),
-            data: None,
-        })?
-        .ok_or_else(|| JSONRPCErrorError {
-            code: INVALID_REQUEST_ERROR_CODE,
-            message: format!("no rollout found for conversation id {parent_conversation_id}"),
-            data: None,
-        })?;
-
-        let mut config = self.config.as_ref().clone();
-        config.model = self.config.review_model.clone();
-
-        let NewConversation {
-            conversation_id,
-            conversation,
-            session_configured,
-            ..
-        } = self
-            .conversation_manager
-            .fork_conversation(usize::MAX, config, rollout_path)
-            .await
-            .map_err(|err| JSONRPCErrorError {
-                code: INTERNAL_ERROR_CODE,
-                message: format!("error creating detached review conversation: {err}"),
-                data: None,
-            })?;
-
-        if let Err(err) = self
-            .attach_conversation_listener(conversation_id, false, ApiVersion::V2)
-            .await
-        {
-            tracing::warn!(
-                "failed to attach listener for review conversation {}: {}",
-                conversation_id,
-                err.message
-            );
-        }
-
-        let rollout_path = conversation.rollout_path();
-        let fallback_provider = self.config.model_provider_id.as_str();
-        match read_summary_from_rollout(rollout_path.as_path(), fallback_provider).await {
-            Ok(summary) => {
-                let thread = summary_to_thread(summary);
-                let notif = ThreadStartedNotification { thread };
-                self.outgoing
-                    .send_server_notification(ServerNotification::ThreadStarted(notif))
-                    .await;
-            }
-            Err(err) => {
-                tracing::warn!(
-                    "failed to load summary for review conversation {}: {}",
-                    session_configured.session_id,
-                    err
-                );
-            }
-        }
-
-        let turn_id = conversation
-            .submit(Op::Review { review_request })
-            .await
-            .map_err(|err| JSONRPCErrorError {
-                code: INTERNAL_ERROR_CODE,
-                message: format!("failed to start detached review turn: {err}"),
-                data: None,
-            })?;
-
-        let turn = Self::build_review_turn(turn_id, display_text);
-        let review_thread_id = conversation_id.to_string();
-        self.emit_review_started(request_id, turn, review_thread_id.clone(), review_thread_id)
-            .await;
-
-        Ok(())
-    }

-    async fn review_start(&mut self, request_id: RequestId, params: ReviewStartParams) {
+    async fn review_start(&self, request_id: RequestId, params: ReviewStartParams) {
         let ReviewStartParams {
             thread_id,
             target,
-            delivery,
             append_to_original_thread,
         } = params;
-        let (parent_conversation_id, parent_conversation) =
-            match self.conversation_from_thread_id(&thread_id).await {
-                Ok(v) => v,
-                Err(error) => {
-                    self.outgoing.send_error(request_id, error).await;
-                    return;
-                }
-            };

-        let (review_request, display_text) = match Self::review_request_from_target(target) {
-            Ok(value) => value,
-            Err(err) => {
-                self.outgoing.send_error(request_id, err).await;
+        let (_, conversation) = match self.conversation_from_thread_id(&thread_id).await {
+            Ok(v) => v,
+            Err(error) => {
+                self.outgoing.send_error(request_id, error).await;
+                return;
+            }
+        };

-        let delivery = delivery.unwrap_or(ApiReviewDelivery::Inline).to_core();
-        match delivery {
-            CoreReviewDelivery::Inline => {
-                if let Err(err) = self
-                    .start_inline_review(
-                        &request_id,
-                        parent_conversation,
-                        review_request,
-                        display_text.as_str(),
-                        thread_id.clone(),
-                    )
-                    .await
-                {
+        let (review_request, display_text) =
+            match Self::review_request_from_target(target, append_to_original_thread) {
+                Ok(value) => value,
+                Err(err) => {
+                    self.outgoing.send_error(request_id, err).await;
+                    return;
+                }
+            };

+        let turn_id = conversation.submit(Op::Review { review_request }).await;

+        match turn_id {
+            Ok(turn_id) => {
+                let mut items = Vec::new();
+                if !display_text.is_empty() {
+                    items.push(ThreadItem::UserMessage {
+                        id: turn_id.clone(),
+                        content: vec![V2UserInput::Text { text: display_text }],
+                    });
+                }
+                let turn = Turn {
+                    id: turn_id.clone(),
+                    items,
+                    status: TurnStatus::InProgress,
+                };
+                let response = TurnStartResponse { turn: turn.clone() };
+                self.outgoing.send_response(request_id, response).await;

+                let notif = TurnStartedNotification { thread_id, turn };
+                self.outgoing
+                    .send_server_notification(ServerNotification::TurnStarted(notif))
+                    .await;
             }
-            CoreReviewDelivery::Detached => {
-                if let Err(err) = self
-                    .start_detached_review(
-                        &request_id,
-                        parent_conversation_id,
-                        review_request,
-                        display_text.as_str(),
-                    )
-                    .await
-                {
-                    self.outgoing.send_error(request_id, err).await;
-                }
+            Err(err) => {
+                let error = JSONRPCErrorError {
+                    code: INTERNAL_ERROR_CODE,
+                    message: format!("failed to start review: {err}"),
+                    data: None,
+                };
+                self.outgoing.send_error(request_id, error).await;
             }
         }
     }
```
```diff
@@ -9,13 +9,12 @@ use codex_app_server_protocol::JSONRPCError;
 use codex_app_server_protocol::JSONRPCNotification;
 use codex_app_server_protocol::JSONRPCResponse;
 use codex_app_server_protocol::RequestId;
-use codex_app_server_protocol::ReviewDelivery;
 use codex_app_server_protocol::ReviewStartParams;
-use codex_app_server_protocol::ReviewStartResponse;
 use codex_app_server_protocol::ReviewTarget;
 use codex_app_server_protocol::ThreadItem;
 use codex_app_server_protocol::ThreadStartParams;
 use codex_app_server_protocol::ThreadStartResponse;
+use codex_app_server_protocol::TurnStartResponse;
 use codex_app_server_protocol::TurnStatus;
 use serde_json::json;
 use tempfile::TempDir;
@@ -60,7 +59,7 @@ async fn review_start_runs_review_turn_and_emits_code_review_item() -> Result<()
     let review_req = mcp
         .send_review_start_request(ReviewStartParams {
             thread_id: thread_id.clone(),
-            delivery: Some(ReviewDelivery::Inline),
             append_to_original_thread: true,
             target: ReviewTarget::Commit {
                 sha: "1234567deadbeef".to_string(),
                 title: Some("Tidy UI colors".to_string()),
@@ -72,43 +71,43 @@ async fn review_start_runs_review_turn_and_emits_code_review_item() -> Result<()
         mcp.read_stream_until_response_message(RequestId::Integer(review_req)),
     )
     .await??;
-    let ReviewStartResponse {
-        turn,
-        review_thread_id,
-    } = to_response::<ReviewStartResponse>(review_resp)?;
-    assert_eq!(review_thread_id, thread_id.clone());
+    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(review_resp)?;
     let turn_id = turn.id.clone();
     assert_eq!(turn.status, TurnStatus::InProgress);

-    // Confirm we see the EnteredReviewMode marker on the main thread.
-    let mut saw_entered_review_mode = false;
-    for _ in 0..10 {
-        let item_started: JSONRPCNotification = timeout(
-            DEFAULT_READ_TIMEOUT,
-            mcp.read_stream_until_notification_message("item/started"),
-        )
-        .await??;
-        let started: ItemStartedNotification =
-            serde_json::from_value(item_started.params.expect("params must be present"))?;
-        match started.item {
-            ThreadItem::EnteredReviewMode { id, review } => {
-                assert_eq!(id, turn_id);
-                assert_eq!(review, "commit 1234567");
-                saw_entered_review_mode = true;
-                break;
-            }
-            _ => continue,
+    assert_eq!(turn.items.len(), 1);
+    match &turn.items[0] {
+        ThreadItem::UserMessage { content, .. } => {
+            assert_eq!(content.len(), 1);
+            assert!(matches!(
+                &content[0],
+                codex_app_server_protocol::UserInput::Text { .. }
+            ));
         }
+        other => panic!("expected user message, got {other:?}"),
     }

+    let _started: JSONRPCNotification = timeout(
+        DEFAULT_READ_TIMEOUT,
+        mcp.read_stream_until_notification_message("turn/started"),
+    )
+    .await??;
+    let item_started: JSONRPCNotification = timeout(
+        DEFAULT_READ_TIMEOUT,
+        mcp.read_stream_until_notification_message("item/started"),
+    )
+    .await??;
+    let started: ItemStartedNotification =
+        serde_json::from_value(item_started.params.expect("params must be present"))?;
+    match started.item {
+        ThreadItem::CodeReview { id, review } => {
+            assert_eq!(id, turn_id);
+            assert_eq!(review, "commit 1234567");
+        }
+        other => panic!("expected code review item, got {other:?}"),
+    }
-    assert!(
-        saw_entered_review_mode,
-        "did not observe enteredReviewMode item"
-    );

-    // Confirm we see the ExitedReviewMode marker (with review text)
-    // on the same turn. Ignore any other items the stream surfaces.
     let mut review_body: Option<String> = None;
-    for _ in 0..10 {
+    for _ in 0..5 {
         let review_notif: JSONRPCNotification = timeout(
             DEFAULT_READ_TIMEOUT,
             mcp.read_stream_until_notification_message("item/completed"),
@@ -117,12 +116,13 @@ async fn review_start_runs_review_turn_and_emits_code_review_item() -> Result<()
         let completed: ItemCompletedNotification =
             serde_json::from_value(review_notif.params.expect("params must be present"))?;
         match completed.item {
-            ThreadItem::ExitedReviewMode { id, review } => {
+            ThreadItem::CodeReview { id, review } => {
                 assert_eq!(id, turn_id);
                 review_body = Some(review);
                 break;
             }
-            _ => continue,
+            ThreadItem::UserMessage { .. } => continue,
+            other => panic!("unexpected item/completed payload: {other:?}"),
         }
     }

@@ -146,7 +146,7 @@ async fn review_start_rejects_empty_base_branch() -> Result<()>
     let request_id = mcp
         .send_review_start_request(ReviewStartParams {
             thread_id,
-            delivery: Some(ReviewDelivery::Inline),
             append_to_original_thread: true,
             target: ReviewTarget::BaseBranch {
                 branch: " ".to_string(),
             },
@@ -167,56 +167,6 @@
     Ok(())
 }

-#[tokio::test]
-async fn review_start_with_detached_delivery_returns_new_thread_id() -> Result<()> {
-    let review_payload = json!({
-        "findings": [],
-        "overall_correctness": "ok",
-        "overall_explanation": "detached review",
-        "overall_confidence_score": 0.5
-    })
-    .to_string();
-    let responses = vec![create_final_assistant_message_sse_response(
-        &review_payload,
-    )?];
-    let server = create_mock_chat_completions_server_unchecked(responses).await;
-
-    let codex_home = TempDir::new()?;
-    create_config_toml(codex_home.path(), &server.uri())?;
-
-    let mut mcp = McpProcess::new(codex_home.path()).await?;
-    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
-
-    let thread_id = start_default_thread(&mut mcp).await?;
-
-    let review_req = mcp
-        .send_review_start_request(ReviewStartParams {
-            thread_id: thread_id.clone(),
-            delivery: Some(ReviewDelivery::Detached),
-            target: ReviewTarget::Custom {
-                instructions: "detached review".to_string(),
-            },
-        })
-        .await?;
-    let review_resp: JSONRPCResponse = timeout(
-        DEFAULT_READ_TIMEOUT,
-        mcp.read_stream_until_response_message(RequestId::Integer(review_req)),
-    )
-    .await??;
-    let ReviewStartResponse {
-        turn,
-        review_thread_id,
-    } = to_response::<ReviewStartResponse>(review_resp)?;
-
-    assert_eq!(turn.status, TurnStatus::InProgress);
-    assert_ne!(
-        review_thread_id, thread_id,
-        "detached review should run on a different thread"
-    );
-
-    Ok(())
-}
-
 #[tokio::test]
 async fn review_start_rejects_empty_commit_sha() -> Result<()> {
     let server = create_mock_chat_completions_server_unchecked(vec![]).await;
@@ -230,7 +180,7 @@ async fn review_start_rejects_empty_commit_sha() -> Result<()>
     let request_id = mcp
         .send_review_start_request(ReviewStartParams {
             thread_id,
-            delivery: Some(ReviewDelivery::Inline),
             append_to_original_thread: true,
             target: ReviewTarget::Commit {
                 sha: "\t".to_string(),
                 title: None,
@@ -265,7 +215,7 @@ async fn review_start_rejects_empty_custom_instructions() -> Result<()>
     let request_id = mcp
         .send_review_start_request(ReviewStartParams {
             thread_id,
-            delivery: Some(ReviewDelivery::Inline),
             append_to_original_thread: true,
             target: ReviewTarget::Custom {
                 instructions: "\n\n".to_string(),
             },
```
206
codex-rs/client.md
Normal file
206
codex-rs/client.md
Normal file
@@ -0,0 +1,206 @@
|
||||
# Client Extraction Plan
|
||||
|
||||
## Goals
|
||||
- Split the HTTP transport/client code out of `codex-core` into a reusable crate that is agnostic of Codex/OpenAI business logic and API schemas.
|
||||
- Create a separate API library crate that houses typed requests/responses for well-known APIs (Responses, Chat Completions, Compact) and plugs into the transport crate via minimal traits.
|
||||
- Preserve current behaviour (auth headers, retries, SSE handling, rate-limit parsing, compaction, fixtures) while making the APIs symmetric and avoiding code duplication.
|
||||
- Keep existing consumers (`codex-core`, tests, and tools) stable by providing a small compatibility layer during the transition.
|
||||
|
||||
## Snapshot of Today
|
||||
- `core/src/client.rs (ModelClient)` owns config/auth/session state, chooses wire API, builds payloads, drives retries, parses SSE, compaction, and rate-limit headers.
|
||||
- `core/src/chat_completions.rs` implements the Chat Completions call + SSE parser + aggregation helper.
|
||||
- `core/src/client_common.rs` holds `Prompt`, tool specs, shared request structs (`ResponsesApiRequest`, `TextControls`), and `ResponseEvent`/`ResponseStream`.
|
||||
- `core/src/default_client.rs` wraps `reqwest` with Codex UA/originator defaults.
|
||||
- `core/src/model_provider_info.rs` models providers (base URL, headers, env keys, retry/timeout tuning) and builds `CodexRequestBuilder`s.
|
||||
- Current retry logic is co-located with API handling; streaming SSE parsing is duplicated across Responses/Chat.
|
||||
|
||||
## Target Crates (with interfaces)
|
||||
|
||||
- `codex-client` (generic transport)
|
||||
- Owns the generic HTTP machinery: a `CodexHttpClient`/`CodexRequestBuilder`-style wrapper, retry/backoff hooks, streaming connector (SSE framing + idle timeout), header injection, and optional telemetry callbacks.
|
||||
- Does **not** know about OpenAI/Codex-specific paths, headers, or error codes; it only exposes HTTP-level concepts (status, headers, bodies, connection errors).
|
||||
- Minimal surface:
|
||||
```rust
|
||||
pub trait HttpTransport {
|
||||
fn execute(&self, req: Request) -> Result<Response, TransportError>;
|
||||
fn stream(&self, req: Request) -> Result<ByteStream, TransportError>;
|
||||
}
|
||||
|
||||
pub struct Request {
|
||||
pub method: Method,
|
||||
pub url: String,
|
||||
pub headers: HeaderMap,
|
||||
pub body: Option<serde_json::Value>,
|
||||
pub timeout: Option<Duration>,
|
||||
}
|
||||
```
|
||||
- Generic client traits (request/response/chunk are abstract over the transport):
|
||||
```rust
|
||||
#[async_trait::async_trait]
|
||||
pub trait UnaryClient<Req, Resp> {
|
||||
async fn run(&self, req: Req) -> Result<Resp, TransportError>;
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
pub trait StreamClient<Req, Chunk> {
|
||||
async fn run(&self, req: Req) -> Result<ResponseStream<Chunk>, TransportError>;
|
||||
}
|
||||
|
||||
pub struct RetryPolicy {
|
||||
pub max_attempts: u64,
|
||||
pub base_delay: Duration,
|
||||
pub retry_on: RetryOn, // e.g., transport errors + 429/5xx
|
||||
}
|
||||
```
|
||||
- `RetryOn` lives in `codex-client` and captures HTTP status classes and transport failures that qualify for retry.
|
||||
- Implementations in `codex-api` plug in their own request types, parsers, and retry policies while reusing the transport’s backoff and error types.
|
||||
- Planned runtime helper:
|
||||
```rust
|
||||
pub async fn run_with_retry<T, F, Fut>(
|
||||
policy: RetryPolicy,
|
||||
make_req: impl Fn() -> Request,
|
||||
op: F,
|
||||
) -> Result<T, TransportError>
|
||||
where
|
||||
F: Fn(Request) -> Fut,
|
||||
Fut: Future<Output = Result<T, TransportError>>,
|
||||
{
|
||||
for attempt in 0..=policy.max_attempts {
|
||||
let req = make_req();
|
||||
match op(req).await {
|
||||
Ok(resp) => return Ok(resp),
|
||||
Err(err) if policy.retry_on.should_retry(&err, attempt) => {
|
||||
tokio::time::sleep(backoff(policy.base_delay, attempt + 1)).await;
|
||||
}
|
||||
Err(err) => return Err(err),
|
||||
}
|
||||
}
|
||||
Err(TransportError::RetryLimit)
|
||||
}
|
||||
```
|
||||
- Unary clients wrap `transport.execute` with this helper and then deserialize.
|
||||
- Stream clients wrap the **initial** `transport.stream` call with this helper. Mid-stream disconnects are surfaced as `StreamError`s; automatic resume/reconnect can be added later on top of this primitive if we introduce cursor support.
|
||||
  - Common helpers: `retry::backoff(attempt)`, `errors::{TransportError, StreamError}`.
  - Streaming utility (SSE framing only):

```rust
pub fn sse_stream<S>(
    bytes: S,
    idle_timeout: Duration,
    tx: mpsc::Sender<Result<String, StreamError>>,
    telemetry: Option<Box<dyn Telemetry>>,
)
where
    S: Stream<Item = Result<Bytes, TransportError>> + Unpin + Send + 'static;
```

  - `sse_stream` is responsible for timeouts, connection-level errors, and emitting raw `data:` chunks as UTF-8 strings; parsing those strings into structured events is done in `codex-api`.

- `codex-api` (OpenAI/Codex API library)
  - Owns typed models for Responses/Chat/Compact plus shared helpers (`Prompt`, tool specs, text controls, `ResponsesApiRequest`, etc.).
  - Knows about OpenAI/Codex semantics:
    - URL shapes (`/v1/responses`, `/v1/chat/completions`, `/responses/compact`).
    - Provider configuration (`WireApi`, base URLs, query params, per-provider retry knobs).
    - Rate-limit headers (`x-codex-*`) and their mapping into `RateLimitSnapshot` / `CreditsSnapshot`.
    - Error body formats (`{ error: { type, code, message, plan_type, resets_at } }`) and how they become API errors (context window exceeded, quota/usage limit, etc.).
    - SSE event names (`response.output_item.done`, `response.completed`, `response.failed`, etc.) and their mapping into high-level events.
  - Provides a provider abstraction (conceptually similar to `ModelProviderInfo`):

```rust
pub struct Provider {
    pub name: String,
    pub base_url: String,
    pub wire: WireApi, // Responses | Chat
    pub headers: HeaderMap,
    pub retry: RetryConfig,
    pub stream_idle_timeout: Duration,
}

pub trait AuthProvider {
    /// Returns a bearer token to use for this request (if any).
    /// Implementations are expected to be cheap and to surface already-refreshed tokens;
    /// higher layers (`codex-core`) remain responsible for token refresh flows.
    fn bearer_token(&self) -> Option<String>;

    /// Optional ChatGPT account id header for Chat mode.
    fn account_id(&self) -> Option<String>;
}
```
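For example, a client could assemble per-request headers from `Provider` and `AuthProvider` roughly like this (a sketch with imports elided; the `chatgpt-account-id` header name is an assumption, only the bearer-token/account-id split comes from the trait):

```rust
fn request_headers(provider: &Provider, auth: &impl AuthProvider) -> HeaderMap {
    // Start from the provider's static headers, then layer auth on top.
    let mut headers = provider.headers.clone();
    if let Some(token) = auth.bearer_token()
        && let Ok(value) = HeaderValue::from_str(&format!("Bearer {token}"))
    {
        headers.insert(http::header::AUTHORIZATION, value);
    }
    if let Some(account_id) = auth.account_id()
        && let Ok(value) = HeaderValue::from_str(&account_id)
    {
        // Header name is illustrative only.
        headers.insert(HeaderName::from_static("chatgpt-account-id"), value);
    }
    headers
}
```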
  - Ready-made clients built on `HttpTransport`:

```rust
pub struct ResponsesClient<T: HttpTransport, A: AuthProvider> { /* ... */ }
impl<T, A> ResponsesClient<T, A> {
    pub async fn stream(&self, prompt: &Prompt) -> ApiResult<ResponseStream<ApiEvent>>;
    pub async fn compact(&self, prompt: &Prompt) -> ApiResult<Vec<ResponseItem>>;
}

pub struct ChatClient<T: HttpTransport, A: AuthProvider> { /* ... */ }
impl<T, A> ChatClient<T, A> {
    pub async fn stream(&self, prompt: &Prompt) -> ApiResult<ResponseStream<ApiEvent>>;
}

pub struct CompactClient<T: HttpTransport, A: AuthProvider> { /* ... */ }
impl<T, A> CompactClient<T, A> {
    pub async fn compact(&self, prompt: &Prompt) -> ApiResult<Vec<ResponseItem>>;
}
```
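From `codex-core`'s point of view, the intended call shape is roughly the following sketch (imports elided; the constructor, `Prompt::default()`, `ApiResult<T>` being `Result<T, ApiError>`, and `ResponseStream` implementing `Stream` of `Result<ApiEvent, ApiError>` are all assumptions):

```rust
async fn stream_demo(provider: Provider, auth: impl AuthProvider) -> Result<(), ApiError> {
    // ReqwestTransport is the codex-client implementation of HttpTransport.
    let transport = ReqwestTransport::new(reqwest::Client::new());
    let client = ResponsesClient::new(transport, auth, provider); // assumed constructor

    let prompt = Prompt::default(); // placeholder prompt for illustration
    let mut events = client.stream(&prompt).await?;
    while let Some(event) = events.next().await {
        if let ApiEvent::OutputTextDelta(delta) = event? {
            print!("{delta}");
        }
    }
    Ok(())
}
```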
  - Streaming events unified across wire APIs (this can closely mirror `ResponseEvent` today, and we may type-alias one to the other during migration):

```rust
pub enum ApiEvent {
    Created,
    OutputItemAdded(ResponseItem),
    OutputItemDone(ResponseItem),
    OutputTextDelta(String),
    ReasoningContentDelta { delta: String, content_index: i64 },
    ReasoningSummaryDelta { delta: String, summary_index: i64 },
    RateLimits(RateLimitSnapshot),
    Completed { response_id: String, token_usage: Option<TokenUsage> },
}
```
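The Responses SSE parser that moves into `codex-api` then becomes a mapping from raw `data:` payloads (as framed by `sse_stream`) into these variants. A minimal sketch, assuming JSON payloads with a `type` discriminator and hypothetical serde shapes:

```rust
/// Map one raw SSE `data:` payload into a typed event.
/// Event names follow the Responses SSE names referenced above; unknown types are ignored.
fn parse_sse_event(data: &str) -> Result<Option<ApiEvent>, ApiError> {
    let value: serde_json::Value = serde_json::from_str(data).map_err(ApiError::Decode)?;

    let event = match value.get("type").and_then(|t| t.as_str()) {
        Some("response.created") => Some(ApiEvent::Created),
        Some("response.output_text.delta") => value
            .get("delta")
            .and_then(|d| d.as_str())
            .map(|d| ApiEvent::OutputTextDelta(d.to_string())),
        Some("response.output_item.done") => {
            let item = serde_json::from_value(value["item"].clone()).map_err(ApiError::Decode)?;
            Some(ApiEvent::OutputItemDone(item))
        }
        Some("response.completed") => Some(ApiEvent::Completed {
            response_id: value["response"]["id"].as_str().unwrap_or_default().to_string(),
            token_usage: None, // usage parsing omitted in this sketch
        }),
        _ => None,
    };
    Ok(event)
}
```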
  - Error layering:
    - `codex-client`: defines `TransportError` / `StreamError` (status codes, IO, timeouts).
    - `codex-api`: defines `ApiError` that wraps `TransportError` plus API-specific errors parsed from bodies and headers.
    - `codex-core`: maps `ApiError` into existing `CodexErr` variants so downstream callers remain unchanged.
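Concretely, the layering could look like the sketch below (variant names other than `CodexErr::Fatal` are assumptions; only the wrap-and-map relationships are fixed by this plan):

```rust
// codex-client
#[derive(Debug, thiserror::Error)]
pub enum TransportError {
    #[error("unexpected HTTP status {0}")]
    Status(u16),
    #[error("request timed out")]
    Timeout,
    #[error("io error: {0}")]
    Io(#[from] std::io::Error),
    #[error("retry limit reached")]
    RetryLimit,
}

// codex-api
#[derive(Debug, thiserror::Error)]
pub enum ApiError {
    #[error(transparent)]
    Transport(#[from] TransportError),
    #[error("context window exceeded")]
    ContextWindowExceeded,
    #[error("usage limit reached")]
    UsageLimit { plan_type: Option<String>, resets_at: Option<i64> },
    #[error("failed to decode response body")]
    Decode(#[source] serde_json::Error),
}

// codex-core keeps callers on CodexErr.
impl From<ApiError> for CodexErr {
    fn from(err: ApiError) -> Self {
        match err {
            // Map API-specific failures onto existing variants where they exist.
            ApiError::ContextWindowExceeded => CodexErr::ContextWindowExceeded, // assumed variant
            // Fall back to a fatal error otherwise.
            other => CodexErr::Fatal(other.to_string()),
        }
    }
}
```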
  - Aggregation strategies (today’s `AggregateStreamExt`) live here as adapters (`Aggregated`, `Streaming`) that transform `ResponseStream<ApiEvent>` into the higher-level views used by `codex-core`.

## Implementation Steps

1. **Create crates**: add `codex-client` and `codex-api` (names keep the `codex-` prefix). Stub the lib files, wire feature flags and tests into the workspace, and add both crates to the workspace `Cargo.toml`.
2. **Extract API-level SSE + rate limits into `codex-api`**:
   - Move the Responses SSE parser (`process_sse`), rate-limit parsing, and related tests from `core/src/client.rs` into `codex-api`, keeping the behavior identical.
   - Introduce `ApiEvent` (initially equivalent to `ResponseEvent`) and `ApiError`, and adjust the parser to emit those.
   - Provide test-only helpers for fixture streams (replacement for `CODEX_RS_SSE_FIXTURE`) in `codex-api`.
3. **Lift transport layer into `codex-client`**:
   - Move `CodexHttpClient`/`CodexRequestBuilder`, UA/originator plumbing, and backoff helpers from `core/src/default_client.rs` into `codex-client` (or a thin wrapper on top of it).
   - Introduce `HttpTransport`, `Request`, `RetryPolicy`, `RetryOn`, and `run_with_retry` as described above.
   - Keep sandbox/no-proxy toggles behind injected configuration so `codex-client` stays generic and does not depend on Codex-specific env vars.
4. **Model provider abstraction in `codex-api`**:
   - Relocate `ModelProviderInfo` (base URL, env/header resolution, retry knobs, wire API enum) into `codex-api`, expressed in terms of `Provider` and `AuthProvider`.
   - Ensure provider logic handles:
     - URL building for Responses/Chat/Compact (including Azure special cases).
     - Static and env-based headers.
     - Per-provider retry and idle-timeout settings that map cleanly into `RetryPolicy`/`RetryOn`.
5. **API crate wiring**:
   - Move `Prompt`, tool specs, `ResponsesApiRequest`, `TextControls`, and `ResponseEvent`/`ResponseStream` into `codex-api` under modules (`common`, `responses`, `chat`, `compact`), keeping public types stable or re-exported through `codex-core` as needed.
   - Rebuild the Responses and Chat clients on top of `HttpTransport` + `StreamClient`, reusing the shared retry + SSE helpers; keep aggregation adapters as reusable strategies instead of `ModelClient`-local logic.
   - Implement Compact on top of `UnaryClient` and the unary `execute` path with JSON deserialization, sharing the same retry policy.
   - Keep request builders symmetric: each client prepares a `Request` with a `serde_json::Value` body, attaches headers/auth via `AuthProvider`, and plugs in its parser (streaming clients) or deserializer (unary) while sharing retry/backoff configuration derived from `Provider`.
6. **Core integration layer**:
   - Replace `core::ModelClient` internals with thin adapters that construct `codex-api` clients using `Config`, `AuthManager`, and `OtelEventManager`.
   - Keep the public `ModelClient` API and `ResponseEvent`/`ResponseStream` types stable by re-exporting `codex-api` types or providing type aliases.
   - Preserve existing auth flows (including ChatGPT token refresh) inside `codex-core` or a thin adapter, using `AuthProvider` to surface bearer tokens to `codex-api` and handling 401/refresh semantics at this layer.
7. **Tests/migration**:
   - Move unit tests for SSE parsing, retry/backoff decisions, and provider/header behavior into the new crates; keep integration tests in `core` using the compatibility layer.
   - Update fixtures to be consumed via test-only adapters in `codex-api`.
   - Run targeted `just fmt` and `just fix -p` for the touched crates, plus scoped `cargo test -p codex-client`, `cargo test -p codex-api`, and the existing `codex-core` suites.

## Design Decisions

- **UA construction**
  - `codex-client` exposes an optional UA suffix/provider hook (tiny feature) and remains unaware of the CLI; `codex-core` / the CLI compute the full UA (including `terminal::user_agent()`) and pass the suffix or builder down.
- **Config vs provider**
  - Most configuration stays in `codex-core`. `codex-api::Provider` only contains what is strictly required for HTTP (base URLs, query params, retry/timeout knobs, wire API), while higher-level knobs (reasoning defaults, verbosity flags, etc.) remain core concerns.
- **Auth flow ownership**
  - Auth flows (including ChatGPT token refresh) remain in `codex-core`. `AuthProvider` simply exposes already-fresh tokens/account IDs; 401 handling and refresh retries stay in the existing auth layer.
- **Error enums**
  - `codex-client` continues to define `TransportError` / `StreamError`. `codex-api` defines an `ApiError` (deriving `thiserror::Error`) that wraps `TransportError` and API-specific failures, and `codex-core` maps `ApiError` into existing `CodexErr` variants for callers.
- **Streaming reconnection semantics**
  - For now, mid-stream SSE failures are surfaced as errors and only the initial connection is retried via `run_with_retry`. We will revisit mid-stream reconnect/resume once the underlying APIs support cursor/idempotent event semantics.

@@ -1,8 +1,8 @@
|
||||
[package]
|
||||
name = "codex-core"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
name = "codex-core"
|
||||
version.workspace = true
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
@@ -18,12 +18,13 @@ askama = { workspace = true }
|
||||
async-channel = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
chardetng = { workspace = true }
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
codex-api = { workspace = true }
|
||||
codex-app-server-protocol = { workspace = true }
|
||||
codex-apply-patch = { workspace = true }
|
||||
codex-async-utils = { workspace = true }
|
||||
codex-api = { workspace = true }
|
||||
codex-client = { workspace = true }
|
||||
codex-execpolicy = { workspace = true }
|
||||
codex-file-search = { workspace = true }
|
||||
codex-git = { workspace = true }
|
||||
@@ -37,8 +38,8 @@ codex-utils-string = { workspace = true }
|
||||
codex-windows-sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
|
||||
dirs = { workspace = true }
|
||||
dunce = { workspace = true }
|
||||
env-flags = { workspace = true }
|
||||
encoding_rs = { workspace = true }
|
||||
env-flags = { workspace = true }
|
||||
eventsource-stream = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
http = { workspace = true }
|
||||
@@ -46,8 +47,10 @@ indexmap = { workspace = true }
|
||||
keyring = { workspace = true, features = ["crypto-rust"] }
|
||||
libc = { workspace = true }
|
||||
mcp-types = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
os_info = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
regex-lite = { workspace = true }
|
||||
reqwest = { workspace = true, features = ["json", "stream"] }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
@@ -57,9 +60,6 @@ sha2 = { workspace = true }
|
||||
shlex = { workspace = true }
|
||||
similar = { workspace = true }
|
||||
strum_macros = { workspace = true }
|
||||
url = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
regex = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
test-case = "3.3.1"
|
||||
test-log = { workspace = true }
|
||||
@@ -83,6 +83,7 @@ toml_edit = { workspace = true }
|
||||
tracing = { workspace = true, features = ["log"] }
|
||||
tree-sitter = { workspace = true }
|
||||
tree-sitter-bash = { workspace = true }
|
||||
url = { workspace = true }
|
||||
uuid = { workspace = true, features = ["serde", "v4", "v5"] }
|
||||
which = { workspace = true }
|
||||
wildmatch = { workspace = true }
|
||||
@@ -92,9 +93,9 @@ deterministic_process_ids = []
|
||||
|
||||
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
keyring = { workspace = true, features = ["linux-native-async-persistent"] }
|
||||
landlock = { workspace = true }
|
||||
seccompiler = { workspace = true }
|
||||
keyring = { workspace = true, features = ["linux-native-async-persistent"] }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
core-foundation = "0.9"
|
||||
|
||||
@@ -102,11 +102,12 @@ use crate::protocol::TurnDiffEvent;
|
||||
use crate::protocol::WarningEvent;
|
||||
use crate::rollout::RolloutRecorder;
|
||||
use crate::rollout::RolloutRecorderParams;
|
||||
use crate::rollout::map_session_init_error;
|
||||
use crate::shell;
|
||||
use crate::state::ActiveTurn;
|
||||
use crate::state::SessionServices;
|
||||
use crate::state::SessionState;
|
||||
use crate::status::ComponentHealth;
|
||||
use crate::status::maybe_codex_status_warning;
|
||||
use crate::tasks::GhostSnapshotTask;
|
||||
use crate::tasks::ReviewTask;
|
||||
use crate::tasks::SessionTask;
|
||||
@@ -207,7 +208,7 @@ impl Codex {
|
||||
.await
|
||||
.map_err(|e| {
|
||||
error!("Failed to create session: {e:#}");
|
||||
map_session_init_error(&e, &config.codex_home)
|
||||
CodexErr::InternalAgentDied
|
||||
})?;
|
||||
let conversation_id = session.conversation_id;
|
||||
|
||||
@@ -452,6 +453,16 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn replace_codex_backend_status(
|
||||
&self,
|
||||
status: ComponentHealth,
|
||||
) -> Option<ComponentHealth> {
|
||||
let mut guard = self.services.codex_backend_status.lock().await;
|
||||
let previous = *guard;
|
||||
*guard = Some(status);
|
||||
previous
|
||||
}
|
||||
|
||||
async fn new(
|
||||
session_configuration: SessionConfiguration,
|
||||
config: Arc<Config>,
|
||||
@@ -509,7 +520,7 @@ impl Session {
|
||||
|
||||
let rollout_recorder = rollout_recorder.map_err(|e| {
|
||||
error!("failed to initialize rollout recorder: {e:#}");
|
||||
anyhow::Error::from(e)
|
||||
anyhow::anyhow!("failed to initialize rollout recorder: {e:#}")
|
||||
})?;
|
||||
let rollout_path = rollout_recorder.rollout_path.clone();
|
||||
|
||||
@@ -568,6 +579,7 @@ impl Session {
|
||||
auth_manager: Arc::clone(&auth_manager),
|
||||
otel_event_manager,
|
||||
tool_approvals: Mutex::new(ApprovalStore::default()),
|
||||
codex_backend_status: Mutex::new(None),
|
||||
};
|
||||
|
||||
let sess = Arc::new(Session {
|
||||
@@ -1190,17 +1202,22 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn record_response_item_and_emit_turn_item(
|
||||
/// Record a user input item to conversation history and also persist a
|
||||
/// corresponding UserMessage EventMsg to rollout.
|
||||
async fn record_input_and_rollout_usermsg(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
response_item: ResponseItem,
|
||||
response_input: &ResponseInputItem,
|
||||
) {
|
||||
// Add to conversation history and persist response item to rollout.
|
||||
let response_item: ResponseItem = response_input.clone().into();
|
||||
// Add to conversation history and persist response item to rollout
|
||||
self.record_conversation_items(turn_context, std::slice::from_ref(&response_item))
|
||||
.await;
|
||||
|
||||
// Derive a turn item and emit lifecycle events if applicable.
|
||||
if let Some(item) = parse_turn_item(&response_item) {
|
||||
// Derive user message events and persist only UserMessage to rollout
|
||||
let turn_item = parse_turn_item(&response_item);
|
||||
|
||||
if let Some(item @ TurnItem::UserMessage(_)) = turn_item {
|
||||
self.emit_turn_item_started(turn_context, &item).await;
|
||||
self.emit_turn_item_completed(turn_context, item).await;
|
||||
}
|
||||
@@ -1876,7 +1893,12 @@ async fn spawn_review_thread(
|
||||
text: review_prompt,
|
||||
}];
|
||||
let tc = Arc::new(review_turn_context);
|
||||
sess.spawn_task(tc.clone(), input, ReviewTask::new()).await;
|
||||
sess.spawn_task(
|
||||
tc.clone(),
|
||||
input,
|
||||
ReviewTask::new(review_request.append_to_original_thread),
|
||||
)
|
||||
.await;
|
||||
|
||||
// Announce entering review mode so UIs can switch modes.
|
||||
sess.send_event(&tc, EventMsg::EnteredReviewMode(review_request))
|
||||
@@ -1912,8 +1934,7 @@ pub(crate) async fn run_task(
|
||||
sess.send_event(&turn_context, event).await;
|
||||
|
||||
let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);
|
||||
let response_item: ResponseItem = initial_input_for_turn.clone().into();
|
||||
sess.record_response_item_and_emit_turn_item(turn_context.as_ref(), response_item)
|
||||
sess.record_input_and_rollout_usermsg(turn_context.as_ref(), &initial_input_for_turn)
|
||||
.await;
|
||||
|
||||
sess.maybe_start_ghost_snapshot(Arc::clone(&turn_context), cancellation_token.child_token())
|
||||
@@ -2172,6 +2193,19 @@ async fn try_run_turn(
|
||||
});
|
||||
|
||||
sess.persist_rollout_items(&[rollout_item]).await;
|
||||
let sess_clone = Arc::clone(&sess);
|
||||
let turn_context_clone = Arc::clone(&turn_context);
|
||||
tokio::spawn(async move {
|
||||
if let Some(message) = maybe_codex_status_warning(sess_clone.as_ref()).await {
|
||||
sess_clone
|
||||
.send_event(
|
||||
&turn_context_clone,
|
||||
EventMsg::Warning(WarningEvent { message }),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
});
|
||||
|
||||
let mut stream = turn_context
|
||||
.client
|
||||
.clone()
|
||||
@@ -2686,6 +2720,7 @@ mod tests {
|
||||
auth_manager: Arc::clone(&auth_manager),
|
||||
otel_event_manager: otel_event_manager.clone(),
|
||||
tool_approvals: Mutex::new(ApprovalStore::default()),
|
||||
codex_backend_status: Mutex::new(None),
|
||||
};
|
||||
|
||||
let turn_context = Session::make_turn_context(
|
||||
@@ -2764,6 +2799,7 @@ mod tests {
|
||||
auth_manager: Arc::clone(&auth_manager),
|
||||
otel_event_manager: otel_event_manager.clone(),
|
||||
tool_approvals: Mutex::new(ApprovalStore::default()),
|
||||
codex_backend_status: Mutex::new(None),
|
||||
};
|
||||
|
||||
let turn_context = Arc::new(Session::make_turn_context(
|
||||
@@ -2878,7 +2914,7 @@ mod tests {
|
||||
let input = vec![UserInput::Text {
|
||||
text: "start review".to_string(),
|
||||
}];
|
||||
sess.spawn_task(Arc::clone(&tc), input, ReviewTask::new())
|
||||
sess.spawn_task(Arc::clone(&tc), input, ReviewTask::new(true))
|
||||
.await;
|
||||
|
||||
sess.abort_all_tasks(TurnAbortReason::Interrupted).await;
|
||||
@@ -2906,8 +2942,6 @@ mod tests {
|
||||
.expect("event");
|
||||
match evt.msg {
|
||||
EventMsg::RawResponseItem(_) => continue,
|
||||
EventMsg::ItemStarted(_) | EventMsg::ItemCompleted(_) => continue,
|
||||
EventMsg::AgentMessage(_) => continue,
|
||||
EventMsg::TurnAborted(e) => {
|
||||
assert_eq!(TurnAbortReason::Interrupted, e.reason);
|
||||
break;
|
||||
@@ -2917,7 +2951,23 @@ mod tests {
|
||||
}
|
||||
|
||||
let history = sess.clone_history().await.get_history();
|
||||
let _ = history;
|
||||
let found = history.iter().any(|item| match item {
|
||||
ResponseItem::Message { role, content, .. } if role == "user" => {
|
||||
content.iter().any(|ci| match ci {
|
||||
ContentItem::InputText { text } => {
|
||||
text.contains("<user_action>")
|
||||
&& text.contains("review")
|
||||
&& text.contains("interrupted")
|
||||
}
|
||||
_ => false,
|
||||
})
|
||||
}
|
||||
_ => false,
|
||||
});
|
||||
assert!(
|
||||
found,
|
||||
"synthetic review interruption not recorded in history"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
|
||||
@@ -16,7 +16,7 @@ mod codex_conversation;
|
||||
mod compact_remote;
|
||||
pub use codex_conversation::CodexConversation;
|
||||
mod codex_delegate;
|
||||
pub mod command_safety;
|
||||
mod command_safety;
|
||||
pub mod config;
|
||||
pub mod config_loader;
|
||||
mod context_manager;
|
||||
@@ -42,6 +42,7 @@ pub mod parse_command;
|
||||
pub mod powershell;
|
||||
mod response_processing;
|
||||
pub mod sandboxing;
|
||||
pub mod status;
|
||||
mod text_encoding;
|
||||
pub mod token_data;
|
||||
mod truncate;
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use crate::protocol::ReviewFinding;
|
||||
use crate::protocol::ReviewOutputEvent;
|
||||
|
||||
// Note: We keep this module UI-agnostic. It returns plain strings that
|
||||
// higher layers (e.g., TUI) may style as needed.
|
||||
@@ -11,8 +10,6 @@ fn format_location(item: &ReviewFinding) -> String {
|
||||
format!("{path}:{start}-{end}")
|
||||
}
|
||||
|
||||
const REVIEW_FALLBACK_MESSAGE: &str = "Reviewer failed to output a response.";
|
||||
|
||||
/// Format a full review findings block as plain text lines.
|
||||
///
|
||||
/// - When `selection` is `Some`, each item line includes a checkbox marker:
|
||||
@@ -56,27 +53,3 @@ pub fn format_review_findings_block(
|
||||
|
||||
lines.join("\n")
|
||||
}
|
||||
|
||||
/// Render a human-readable review summary suitable for a user-facing message.
|
||||
///
|
||||
/// Returns either the explanation, the formatted findings block, or both
|
||||
/// separated by a blank line. If neither is present, emits a fallback message.
|
||||
pub fn render_review_output_text(output: &ReviewOutputEvent) -> String {
|
||||
let mut sections = Vec::new();
|
||||
let explanation = output.overall_explanation.trim();
|
||||
if !explanation.is_empty() {
|
||||
sections.push(explanation.to_string());
|
||||
}
|
||||
if !output.findings.is_empty() {
|
||||
let findings = format_review_findings_block(&output.findings, None);
|
||||
let trimmed = findings.trim();
|
||||
if !trimmed.is_empty() {
|
||||
sections.push(trimmed.to_string());
|
||||
}
|
||||
}
|
||||
if sections.is_empty() {
|
||||
REVIEW_FALLBACK_MESSAGE.to_string()
|
||||
} else {
|
||||
sections.join("\n\n")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
use std::io::ErrorKind;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::error::CodexErr;
|
||||
use crate::rollout::SESSIONS_SUBDIR;
|
||||
|
||||
pub(crate) fn map_session_init_error(err: &anyhow::Error, codex_home: &Path) -> CodexErr {
|
||||
if let Some(mapped) = err
|
||||
.chain()
|
||||
.filter_map(|cause| cause.downcast_ref::<std::io::Error>())
|
||||
.find_map(|io_err| map_rollout_io_error(io_err, codex_home))
|
||||
{
|
||||
return mapped;
|
||||
}
|
||||
|
||||
CodexErr::Fatal(format!("Failed to initialize session: {err:#}"))
|
||||
}
|
||||
|
||||
fn map_rollout_io_error(io_err: &std::io::Error, codex_home: &Path) -> Option<CodexErr> {
|
||||
let sessions_dir = codex_home.join(SESSIONS_SUBDIR);
|
||||
let hint = match io_err.kind() {
|
||||
ErrorKind::PermissionDenied => format!(
|
||||
"Codex cannot access session files at {} (permission denied). If sessions were created using sudo, fix ownership: sudo chown -R $(whoami) {}",
|
||||
sessions_dir.display(),
|
||||
codex_home.display()
|
||||
),
|
||||
ErrorKind::NotFound => format!(
|
||||
"Session storage missing at {}. Create the directory or choose a different Codex home.",
|
||||
sessions_dir.display()
|
||||
),
|
||||
ErrorKind::AlreadyExists => format!(
|
||||
"Session storage path {} is blocked by an existing file. Remove or rename it so Codex can create sessions.",
|
||||
sessions_dir.display()
|
||||
),
|
||||
ErrorKind::InvalidData | ErrorKind::InvalidInput => format!(
|
||||
"Session data under {} looks corrupt or unreadable. Clearing the sessions directory may help (this will remove saved conversations).",
|
||||
sessions_dir.display()
|
||||
),
|
||||
ErrorKind::IsADirectory | ErrorKind::NotADirectory => format!(
|
||||
"Session storage path {} has an unexpected type. Ensure it is a directory Codex can use for session files.",
|
||||
sessions_dir.display()
|
||||
),
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(CodexErr::Fatal(format!(
|
||||
"{hint} (underlying error: {io_err})"
|
||||
)))
|
||||
}
|
||||
@@ -7,13 +7,11 @@ pub const ARCHIVED_SESSIONS_SUBDIR: &str = "archived_sessions";
|
||||
pub const INTERACTIVE_SESSION_SOURCES: &[SessionSource] =
|
||||
&[SessionSource::Cli, SessionSource::VSCode];
|
||||
|
||||
pub(crate) mod error;
|
||||
pub mod list;
|
||||
pub(crate) mod policy;
|
||||
pub mod recorder;
|
||||
|
||||
pub use codex_protocol::protocol::SessionMeta;
|
||||
pub(crate) use error::map_session_init_error;
|
||||
pub use list::find_conversation_path_by_id_str;
|
||||
pub use recorder::RolloutRecorder;
|
||||
pub use recorder::RolloutRecorderParams;
|
||||
|
||||
@@ -14,7 +14,6 @@ use crate::protocol::SandboxPolicy;
|
||||
use askama::Template;
|
||||
use codex_otel::otel_event_manager::OtelEventManager;
|
||||
use codex_protocol::ConversationId;
|
||||
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::SandboxCommandAssessment;
|
||||
@@ -24,8 +23,7 @@ use serde_json::json;
|
||||
use tokio::time::timeout;
|
||||
use tracing::warn;
|
||||
|
||||
const SANDBOX_ASSESSMENT_TIMEOUT: Duration = Duration::from_secs(15);
|
||||
const SANDBOX_ASSESSMENT_REASONING_EFFORT: ReasoningEffortConfig = ReasoningEffortConfig::Medium;
|
||||
const SANDBOX_ASSESSMENT_TIMEOUT: Duration = Duration::from_secs(5);
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "sandboxing/assessment_prompt.md", escape = "none")]
|
||||
@@ -132,7 +130,7 @@ pub(crate) async fn assess_command(
|
||||
Some(auth_manager),
|
||||
child_otel,
|
||||
provider,
|
||||
Some(SANDBOX_ASSESSMENT_REASONING_EFFORT),
|
||||
config.model_reasoning_effort,
|
||||
config.model_reasoning_summary,
|
||||
conversation_id,
|
||||
session_source,
|
||||
|
||||
@@ -3,6 +3,7 @@ use std::sync::Arc;
|
||||
use crate::AuthManager;
|
||||
use crate::RolloutRecorder;
|
||||
use crate::mcp_connection_manager::McpConnectionManager;
|
||||
use crate::status::ComponentHealth;
|
||||
use crate::tools::sandboxing::ApprovalStore;
|
||||
use crate::unified_exec::UnifiedExecSessionManager;
|
||||
use crate::user_notification::UserNotifier;
|
||||
@@ -22,4 +23,5 @@ pub(crate) struct SessionServices {
|
||||
pub(crate) auth_manager: Arc<AuthManager>,
|
||||
pub(crate) otel_event_manager: OtelEventManager,
|
||||
pub(crate) tool_approvals: Mutex<ApprovalStore>,
|
||||
pub(crate) codex_backend_status: Mutex<Option<ComponentHealth>>,
|
||||
}
|
||||
|
||||
257
codex-rs/core/src/status.rs
Normal file
@@ -0,0 +1,257 @@
|
||||
use std::sync::OnceLock;
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::codex::Session;
|
||||
use anyhow::Context;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use anyhow::bail;
|
||||
use codex_client::HttpTransport;
|
||||
use codex_client::Request;
|
||||
use codex_client::ReqwestTransport;
|
||||
use codex_client::RetryOn;
|
||||
use codex_client::RetryPolicy;
|
||||
use codex_client::run_with_retry;
|
||||
use http::header::CONTENT_TYPE;
|
||||
use reqwest::Method;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use strum_macros::Display;
|
||||
|
||||
const STATUS_WIDGET_URL: &str = "https://status.openai.com/proxy/status.openai.com";
|
||||
const CODEX_COMPONENT_NAME: &str = "Codex";
|
||||
|
||||
static TEST_STATUS_WIDGET_URL: OnceLock<String> = OnceLock::new();
|
||||
|
||||
#[derive(Debug, Clone, Display, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub(crate) enum ComponentHealth {
|
||||
#[strum(to_string = "operational")]
|
||||
Operational,
|
||||
#[strum(to_string = "degraded performance")]
|
||||
DegradedPerformance,
|
||||
#[strum(to_string = "partial outage")]
|
||||
PartialOutage,
|
||||
#[strum(to_string = "major outage")]
|
||||
MajorOutage,
|
||||
#[strum(to_string = "under maintenance")]
|
||||
UnderMaintenance,
|
||||
#[serde(other)]
|
||||
#[strum(to_string = "unknown")]
|
||||
Unknown,
|
||||
}
|
||||
|
||||
impl ComponentHealth {
|
||||
fn operational() -> Self {
|
||||
Self::Operational
|
||||
}
|
||||
|
||||
pub(crate) fn is_operational(self) -> bool {
|
||||
self == Self::Operational
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn maybe_codex_status_warning(session: &Session) -> Option<String> {
|
||||
let Ok(status) = fetch_codex_health().await else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let previous = session.replace_codex_backend_status(status).await;
|
||||
if status.is_operational() || previous == Some(status) {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(format!(
|
||||
"Codex is experiencing a {status}. If a response stalls, try again later. You can follow incident updates at status.openai.com."
|
||||
))
|
||||
}
|
||||
|
||||
async fn fetch_codex_health() -> Result<ComponentHealth> {
|
||||
let status_widget_url = status_widget_url();
|
||||
|
||||
let client = reqwest::Client::builder()
|
||||
.connect_timeout(Duration::from_millis(200))
|
||||
.timeout(Duration::from_millis(300))
|
||||
.build()
|
||||
.context("building HTTP client")?;
|
||||
|
||||
let transport = ReqwestTransport::new(client);
|
||||
let policy = RetryPolicy {
|
||||
max_attempts: 0,
|
||||
base_delay: Duration::from_millis(100),
|
||||
retry_on: RetryOn {
|
||||
retry_429: true,
|
||||
retry_5xx: true,
|
||||
retry_transport: true,
|
||||
},
|
||||
};
|
||||
|
||||
let response = run_with_retry(
|
||||
policy,
|
||||
|| Request::new(Method::GET, status_widget_url.clone()),
|
||||
|req, _attempt| {
|
||||
let transport = transport.clone();
|
||||
async move { transport.execute(req).await }
|
||||
},
|
||||
)
|
||||
.await
|
||||
.context("requesting status widget")?;
|
||||
|
||||
let content_type = response
|
||||
.headers
|
||||
.get(CONTENT_TYPE)
|
||||
.and_then(|value| value.to_str().ok())
|
||||
.unwrap_or_default()
|
||||
.to_ascii_lowercase();
|
||||
|
||||
if !content_type.contains("json") {
|
||||
let snippet = String::from_utf8_lossy(&response.body)
|
||||
.chars()
|
||||
.take(200)
|
||||
.collect::<String>();
|
||||
|
||||
bail!(
|
||||
"Expected JSON from {status_widget_url}: Content-Type={content_type}. Body starts with: {snippet:?}"
|
||||
);
|
||||
}
|
||||
|
||||
let payload: StatusPayload =
|
||||
serde_json::from_slice(&response.body).context("parsing status widget JSON")?;
|
||||
|
||||
derive_component_health(&payload, CODEX_COMPONENT_NAME)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Default)]
|
||||
struct StatusPayload {
|
||||
#[serde(default)]
|
||||
summary: Summary,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Default)]
|
||||
struct Summary {
|
||||
#[serde(default)]
|
||||
components: Vec<Component>,
|
||||
#[serde(default)]
|
||||
affected_components: Vec<AffectedComponent>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
struct Component {
|
||||
id: String,
|
||||
name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
struct AffectedComponent {
|
||||
component_id: String,
|
||||
#[serde(default = "ComponentHealth::operational")]
|
||||
status: ComponentHealth,
|
||||
}
|
||||
|
||||
fn derive_component_health(
|
||||
payload: &StatusPayload,
|
||||
component_name: &str,
|
||||
) -> Result<ComponentHealth> {
|
||||
let component = payload
|
||||
.summary
|
||||
.components
|
||||
.iter()
|
||||
.find(|component| component.name == component_name)
|
||||
.ok_or_else(|| anyhow!("Component {component_name:?} not found in status summary"))?;
|
||||
|
||||
let status = payload
|
||||
.summary
|
||||
.affected_components
|
||||
.iter()
|
||||
.find(|affected| affected.component_id == component.id)
|
||||
.map(|affected| affected.status)
|
||||
.unwrap_or(ComponentHealth::Operational);
|
||||
|
||||
Ok(status)
|
||||
}
|
||||
|
||||
fn status_widget_url() -> String {
|
||||
TEST_STATUS_WIDGET_URL
|
||||
.get()
|
||||
.cloned()
|
||||
.unwrap_or_else(|| STATUS_WIDGET_URL.to_string())
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[cfg_attr(not(test), allow(dead_code))]
|
||||
pub fn set_test_status_widget_url(url: impl Into<String>) {
|
||||
let _ = TEST_STATUS_WIDGET_URL.set(url.into());
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn uses_affected_component_status() {
|
||||
let payload = serde_json::from_value::<StatusPayload>(json!({
|
||||
"summary": {
|
||||
"id": "sum-1",
|
||||
"name": "OpenAI",
|
||||
"components": [
|
||||
{"id": "cmp-1", "name": "Codex", "status_page_id": "page-1"}
|
||||
],
|
||||
"affected_components": [
|
||||
{"component_id": "cmp-1", "status": "major_outage"}
|
||||
]
|
||||
}
|
||||
}))
|
||||
.expect("valid payload");
|
||||
|
||||
let status = derive_component_health(&payload, "Codex").expect("codex component exists");
|
||||
|
||||
assert_eq!(status, ComponentHealth::MajorOutage);
|
||||
assert!(!status.is_operational());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unknown_status_is_preserved_as_unknown() {
|
||||
let payload = serde_json::from_value::<StatusPayload>(json!({
|
||||
"summary": {
|
||||
"id": "sum-1",
|
||||
"name": "OpenAI",
|
||||
"components": [
|
||||
{"id": "cmp-1", "name": "Codex", "status_page_id": "page-1"}
|
||||
],
|
||||
"affected_components": [
|
||||
{"component_id": "cmp-1", "status": "custom_status"}
|
||||
]
|
||||
}
|
||||
}))
|
||||
.expect("valid payload");
|
||||
|
||||
let status = derive_component_health(&payload, "Codex").expect("codex component exists");
|
||||
|
||||
assert_eq!(status, ComponentHealth::Unknown);
|
||||
assert!(!status.is_operational());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_component_returns_error() {
|
||||
let payload = serde_json::from_value::<StatusPayload>(json!({
|
||||
"summary": {
|
||||
"id": "sum-1",
|
||||
"name": "OpenAI",
|
||||
"components": [],
|
||||
"affected_components": []
|
||||
}
|
||||
}))
|
||||
.expect("valid payload");
|
||||
|
||||
let error =
|
||||
derive_component_health(&payload, "Codex").expect_err("missing component should error");
|
||||
|
||||
assert!(
|
||||
error
|
||||
.to_string()
|
||||
.contains("Component \"Codex\" not found in status summary")
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -17,7 +17,6 @@ use crate::codex::Session;
|
||||
use crate::codex::TurnContext;
|
||||
use crate::codex_delegate::run_codex_conversation_one_shot;
|
||||
use crate::review_format::format_review_findings_block;
|
||||
use crate::review_format::render_review_output_text;
|
||||
use crate::state::TaskKind;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
|
||||
@@ -25,11 +24,15 @@ use super::SessionTask;
|
||||
use super::SessionTaskContext;
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub(crate) struct ReviewTask;
|
||||
pub(crate) struct ReviewTask {
|
||||
append_to_original_thread: bool,
|
||||
}
|
||||
|
||||
impl ReviewTask {
|
||||
pub(crate) fn new() -> Self {
|
||||
Self
|
||||
pub(crate) fn new(append_to_original_thread: bool) -> Self {
|
||||
Self {
|
||||
append_to_original_thread,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,13 +62,25 @@ impl SessionTask for ReviewTask {
|
||||
None => None,
|
||||
};
|
||||
if !cancellation_token.is_cancelled() {
|
||||
exit_review_mode(session.clone_session(), output.clone(), ctx.clone()).await;
|
||||
exit_review_mode(
|
||||
session.clone_session(),
|
||||
output.clone(),
|
||||
ctx.clone(),
|
||||
self.append_to_original_thread,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
async fn abort(&self, session: Arc<SessionTaskContext>, ctx: Arc<TurnContext>) {
|
||||
exit_review_mode(session.clone_session(), None, ctx).await;
|
||||
exit_review_mode(
|
||||
session.clone_session(),
|
||||
None,
|
||||
ctx,
|
||||
self.append_to_original_thread,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -182,57 +197,39 @@ pub(crate) async fn exit_review_mode(
|
||||
session: Arc<Session>,
|
||||
review_output: Option<ReviewOutputEvent>,
|
||||
ctx: Arc<TurnContext>,
|
||||
append_to_original_thread: bool,
|
||||
) {
|
||||
const REVIEW_USER_MESSAGE_ID: &str = "review:rollout:user";
|
||||
const REVIEW_ASSISTANT_MESSAGE_ID: &str = "review:rollout:assistant";
|
||||
let (user_message, assistant_message) = if let Some(out) = review_output.clone() {
|
||||
let mut findings_str = String::new();
|
||||
let text = out.overall_explanation.trim();
|
||||
if !text.is_empty() {
|
||||
findings_str.push_str(text);
|
||||
}
|
||||
if !out.findings.is_empty() {
|
||||
let block = format_review_findings_block(&out.findings, None);
|
||||
findings_str.push_str(&format!("\n{block}"));
|
||||
}
|
||||
let rendered =
|
||||
crate::client_common::REVIEW_EXIT_SUCCESS_TMPL.replace("{results}", &findings_str);
|
||||
let assistant_message = render_review_output_text(&out);
|
||||
(rendered, assistant_message)
|
||||
} else {
|
||||
let rendered = crate::client_common::REVIEW_EXIT_INTERRUPTED_TMPL.to_string();
|
||||
let assistant_message =
|
||||
"Review was interrupted. Please re-run /review and wait for it to complete."
|
||||
.to_string();
|
||||
(rendered, assistant_message)
|
||||
};
|
||||
if append_to_original_thread {
|
||||
let user_message = if let Some(out) = review_output.clone() {
|
||||
let mut findings_str = String::new();
|
||||
let text = out.overall_explanation.trim();
|
||||
if !text.is_empty() {
|
||||
findings_str.push_str(text);
|
||||
}
|
||||
if !out.findings.is_empty() {
|
||||
let block = format_review_findings_block(&out.findings, None);
|
||||
findings_str.push_str(&format!("\n{block}"));
|
||||
}
|
||||
crate::client_common::REVIEW_EXIT_SUCCESS_TMPL.replace("{results}", &findings_str)
|
||||
} else {
|
||||
crate::client_common::REVIEW_EXIT_INTERRUPTED_TMPL.to_string()
|
||||
};
|
||||
|
||||
session
|
||||
.record_conversation_items(
|
||||
&ctx,
|
||||
&[ResponseItem::Message {
|
||||
id: Some(REVIEW_USER_MESSAGE_ID.to_string()),
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText { text: user_message }],
|
||||
}],
|
||||
)
|
||||
.await;
|
||||
session
|
||||
.record_conversation_items(
|
||||
&ctx,
|
||||
&[ResponseItem::Message {
|
||||
id: None,
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText { text: user_message }],
|
||||
}],
|
||||
)
|
||||
.await;
|
||||
}
|
||||
session
|
||||
.send_event(
|
||||
ctx.as_ref(),
|
||||
EventMsg::ExitedReviewMode(ExitedReviewModeEvent { review_output }),
|
||||
)
|
||||
.await;
|
||||
session
|
||||
.record_response_item_and_emit_turn_item(
|
||||
ctx.as_ref(),
|
||||
ResponseItem::Message {
|
||||
id: Some(REVIEW_ASSISTANT_MESSAGE_ID.to_string()),
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: assistant_message,
|
||||
}],
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
@@ -98,26 +98,8 @@ pub(crate) struct UnifiedExecResponse {
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct UnifiedExecSessionManager {
|
||||
session_store: Mutex<SessionStore>,
|
||||
}
|
||||
|
||||
// Required for mutex sharing.
|
||||
#[derive(Default)]
|
||||
pub(crate) struct SessionStore {
|
||||
sessions: HashMap<String, SessionEntry>,
|
||||
reserved_sessions_id: HashSet<String>,
|
||||
}
|
||||
|
||||
impl SessionStore {
|
||||
fn remove(&mut self, session_id: &str) -> Option<SessionEntry> {
|
||||
self.reserved_sessions_id.remove(session_id);
|
||||
self.sessions.remove(session_id)
|
||||
}
|
||||
|
||||
pub(crate) fn clear(&mut self) {
|
||||
self.reserved_sessions_id.clear();
|
||||
self.sessions.clear();
|
||||
}
|
||||
sessions: Mutex<HashMap<String, SessionEntry>>,
|
||||
used_session_ids: Mutex<HashSet<String>>,
|
||||
}
|
||||
|
||||
struct SessionEntry {
|
||||
@@ -402,10 +384,9 @@ mod tests {
|
||||
session
|
||||
.services
|
||||
.unified_exec_manager
|
||||
.session_store
|
||||
.sessions
|
||||
.lock()
|
||||
.await
|
||||
.sessions
|
||||
.is_empty()
|
||||
);
|
||||
|
||||
@@ -444,10 +425,9 @@ mod tests {
|
||||
session
|
||||
.services
|
||||
.unified_exec_manager
|
||||
.session_store
|
||||
.sessions
|
||||
.lock()
|
||||
.await
|
||||
.sessions
|
||||
.is_empty()
|
||||
);
|
||||
|
||||
|
||||
@@ -36,7 +36,6 @@ use crate::truncate::formatted_truncate_text;
|
||||
use super::ExecCommandRequest;
|
||||
use super::MAX_UNIFIED_EXEC_SESSIONS;
|
||||
use super::SessionEntry;
|
||||
use super::SessionStore;
|
||||
use super::UnifiedExecContext;
|
||||
use super::UnifiedExecError;
|
||||
use super::UnifiedExecResponse;
|
||||
@@ -82,7 +81,7 @@ struct PreparedSessionHandles {
|
||||
impl UnifiedExecSessionManager {
|
||||
pub(crate) async fn allocate_process_id(&self) -> String {
|
||||
loop {
|
||||
let mut store = self.session_store.lock().await;
|
||||
let mut store = self.used_session_ids.lock().await;
|
||||
|
||||
let process_id = if !cfg!(test) && !cfg!(feature = "deterministic_process_ids") {
|
||||
// production mode → random
|
||||
@@ -90,7 +89,6 @@ impl UnifiedExecSessionManager {
|
||||
} else {
|
||||
// test or deterministic mode
|
||||
let next = store
|
||||
.reserved_sessions_id
|
||||
.iter()
|
||||
.filter_map(|s| s.parse::<i32>().ok())
|
||||
.max()
|
||||
@@ -100,11 +98,11 @@ impl UnifiedExecSessionManager {
|
||||
next.to_string()
|
||||
};
|
||||
|
||||
if store.reserved_sessions_id.contains(&process_id) {
|
||||
if store.contains(&process_id) {
|
||||
continue;
|
||||
}
|
||||
|
||||
store.reserved_sessions_id.insert(process_id.clone());
|
||||
store.insert(process_id.clone());
|
||||
return process_id;
|
||||
}
|
||||
}
|
||||
@@ -333,8 +331,8 @@ impl UnifiedExecSessionManager {
|
||||
}
|
||||
|
||||
async fn refresh_session_state(&self, process_id: &str) -> SessionStatus {
|
||||
let mut store = self.session_store.lock().await;
|
||||
let Some(entry) = store.sessions.get(process_id) else {
|
||||
let mut sessions = self.sessions.lock().await;
|
||||
let Some(entry) = sessions.get(process_id) else {
|
||||
return SessionStatus::Unknown;
|
||||
};
|
||||
|
||||
@@ -342,7 +340,7 @@ impl UnifiedExecSessionManager {
|
||||
let process_id = entry.process_id.clone();
|
||||
|
||||
if entry.session.has_exited() {
|
||||
let Some(entry) = store.remove(&process_id) else {
|
||||
let Some(entry) = sessions.remove(&process_id) else {
|
||||
return SessionStatus::Unknown;
|
||||
};
|
||||
SessionStatus::Exited {
|
||||
@@ -362,14 +360,12 @@ impl UnifiedExecSessionManager {
|
||||
&self,
|
||||
process_id: &str,
|
||||
) -> Result<PreparedSessionHandles, UnifiedExecError> {
|
||||
let mut store = self.session_store.lock().await;
|
||||
let entry =
|
||||
store
|
||||
.sessions
|
||||
.get_mut(process_id)
|
||||
.ok_or(UnifiedExecError::UnknownSessionId {
|
||||
process_id: process_id.to_string(),
|
||||
})?;
|
||||
let mut sessions = self.sessions.lock().await;
|
||||
let entry = sessions
|
||||
.get_mut(process_id)
|
||||
.ok_or(UnifiedExecError::UnknownSessionId {
|
||||
process_id: process_id.to_string(),
|
||||
})?;
|
||||
entry.last_used = Instant::now();
|
||||
let OutputHandles {
|
||||
output_buffer,
|
||||
@@ -421,9 +417,9 @@ impl UnifiedExecSessionManager {
|
||||
started_at,
|
||||
last_used: started_at,
|
||||
};
|
||||
let mut store = self.session_store.lock().await;
|
||||
Self::prune_sessions_if_needed(&mut store);
|
||||
store.sessions.insert(process_id, entry);
|
||||
let mut sessions = self.sessions.lock().await;
|
||||
Self::prune_sessions_if_needed(&mut sessions);
|
||||
sessions.insert(process_id, entry);
|
||||
}
|
||||
|
||||
async fn emit_exec_end_from_entry(
|
||||
@@ -633,19 +629,18 @@ impl UnifiedExecSessionManager {
|
||||
collected
|
||||
}
|
||||
|
||||
fn prune_sessions_if_needed(store: &mut SessionStore) {
|
||||
if store.sessions.len() < MAX_UNIFIED_EXEC_SESSIONS {
|
||||
fn prune_sessions_if_needed(sessions: &mut HashMap<String, SessionEntry>) {
|
||||
if sessions.len() < MAX_UNIFIED_EXEC_SESSIONS {
|
||||
return;
|
||||
}
|
||||
|
||||
let meta: Vec<(String, Instant, bool)> = store
|
||||
.sessions
|
||||
let meta: Vec<(String, Instant, bool)> = sessions
|
||||
.iter()
|
||||
.map(|(id, entry)| (id.clone(), entry.last_used, entry.session.has_exited()))
|
||||
.collect();
|
||||
|
||||
if let Some(session_id) = Self::session_id_to_prune_from_meta(&meta) {
|
||||
store.remove(&session_id);
|
||||
sessions.remove(&session_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -679,7 +674,7 @@ impl UnifiedExecSessionManager {
|
||||
}
|
||||
|
||||
pub(crate) async fn terminate_all_sessions(&self) {
|
||||
let mut sessions = self.session_store.lock().await;
|
||||
let mut sessions = self.sessions.lock().await;
|
||||
sessions.clear();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Result;
|
||||
use base64::Engine;
|
||||
@@ -546,6 +547,19 @@ pub async fn mount_sse_once(server: &MockServer, body: String) -> ResponseMock {
|
||||
response_mock
|
||||
}
|
||||
|
||||
pub async fn mount_sse_once_with_delay(
|
||||
server: &MockServer,
|
||||
body: String,
|
||||
delay: Duration,
|
||||
) -> ResponseMock {
|
||||
let (mock, response_mock) = base_mock();
|
||||
mock.respond_with(sse_response(body).set_delay(delay))
|
||||
.up_to_n_times(1)
|
||||
.mount(server)
|
||||
.await;
|
||||
response_mock
|
||||
}
|
||||
|
||||
pub async fn mount_compact_json_once_match<M>(
|
||||
server: &MockServer,
|
||||
matcher: M,
|
||||
|
||||
@@ -70,6 +70,7 @@ async fn codex_delegate_forwards_exec_approval_and_proceeds_on_approval() {
|
||||
review_request: ReviewRequest {
|
||||
prompt: "Please review".to_string(),
|
||||
user_facing_hint: "review".to_string(),
|
||||
append_to_original_thread: true,
|
||||
},
|
||||
})
|
||||
.await
|
||||
@@ -145,6 +146,7 @@ async fn codex_delegate_forwards_patch_approval_and_proceeds_on_decision() {
|
||||
review_request: ReviewRequest {
|
||||
prompt: "Please review".to_string(),
|
||||
user_facing_hint: "review".to_string(),
|
||||
append_to_original_thread: true,
|
||||
},
|
||||
})
|
||||
.await
|
||||
@@ -199,6 +201,7 @@ async fn codex_delegate_ignores_legacy_deltas() {
|
||||
review_request: ReviewRequest {
|
||||
prompt: "Please review".to_string(),
|
||||
user_facing_hint: "review".to_string(),
|
||||
append_to_original_thread: true,
|
||||
},
|
||||
})
|
||||
.await
|
||||
|
||||
197
codex-rs/core/tests/suite/idle_warning.rs
Normal file
@@ -0,0 +1,197 @@
|
||||
#![allow(clippy::unwrap_used, clippy::expect_used)]
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::time::Duration;
|
||||
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_core::status::set_test_status_widget_url;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use core_test_support::responses::ev_assistant_message;
|
||||
use core_test_support::responses::ev_completed;
|
||||
use core_test_support::responses::ev_response_created;
|
||||
use core_test_support::responses::mount_sse_once_with_delay;
|
||||
use core_test_support::responses::sse;
|
||||
use core_test_support::responses::start_mock_server;
|
||||
use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::wait_for_event;
|
||||
use pretty_assertions::assert_eq;
|
||||
use wiremock::Mock;
|
||||
use wiremock::Request;
|
||||
use wiremock::Respond;
|
||||
use wiremock::ResponseTemplate;
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::path;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn emits_warning_when_status_is_degraded_at_turn_start() {
|
||||
let status_server = start_mock_server().await;
|
||||
let status_path = "/proxy/status.openai.com";
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path(status_path))
|
||||
.respond_with(status_payload("major_outage"))
|
||||
.mount(&status_server)
|
||||
.await;
|
||||
|
||||
set_test_status_widget_url(format!("{}{}", status_server.uri(), status_path));
|
||||
let responses_server = start_mock_server().await;
|
||||
let stalled_response = sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_assistant_message("msg-1", "finally"),
|
||||
ev_completed("resp-1"),
|
||||
]);
|
||||
|
||||
let _responses_mock = mount_sse_once_with_delay(
|
||||
&responses_server,
|
||||
stalled_response,
|
||||
Duration::from_millis(10),
|
||||
)
|
||||
.await;
|
||||
|
||||
let test_codex = test_codex().build(&responses_server).await.unwrap();
|
||||
let codex = test_codex.codex;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![UserInput::Text { text: "hi".into() }],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let warning = wait_for_event(&codex, |event| matches!(event, EventMsg::Warning(_))).await;
|
||||
let EventMsg::Warning(WarningEvent { message }) = warning else {
|
||||
panic!("expected warning event");
|
||||
};
|
||||
assert_eq!(
|
||||
message,
|
||||
"Codex is experiencing a major outage. If a response stalls, try again later. You can follow incident updates at status.openai.com.",
|
||||
"unexpected warning message"
|
||||
);
|
||||
|
||||
wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn warns_once_per_status_change_only_when_unhealthy() {
|
||||
let status_server = start_mock_server().await;
|
||||
let status_path = "/proxy/status.openai.com";
|
||||
|
||||
let responder = SequenceResponder::new(vec!["major_outage", "partial_outage"]);
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path(status_path))
|
||||
.respond_with(responder)
|
||||
.mount(&status_server)
|
||||
.await;
|
||||
|
||||
set_test_status_widget_url(format!("{}{}", status_server.uri(), status_path));
|
||||
let responses_server = start_mock_server().await;
|
||||
let stalled_response = sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_assistant_message("msg-1", "finally"),
|
||||
ev_completed("resp-1"),
|
||||
]);
|
||||
|
||||
let _responses_mock = mount_sse_once_with_delay(
|
||||
&responses_server,
|
||||
stalled_response,
|
||||
Duration::from_millis(300),
|
||||
)
|
||||
.await;
|
||||
|
||||
let test_codex = test_codex().build(&responses_server).await.unwrap();
|
||||
let codex = test_codex.codex;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![UserInput::Text { text: "hi".into() }],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let first_warning = wait_for_event(&codex, |event| matches!(event, EventMsg::Warning(_))).await;
|
||||
let EventMsg::Warning(WarningEvent { message }) = first_warning else {
|
||||
panic!("expected warning event");
|
||||
};
|
||||
assert_eq!(
|
||||
message,
|
||||
"Codex is experiencing a major outage. If a response stalls, try again later. You can follow incident updates at status.openai.com.",
|
||||
);
|
||||
wait_for_event(&codex, |event| matches!(event, EventMsg::TaskComplete(_))).await;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
items: vec![UserInput::Text {
|
||||
text: "second".into(),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut saw_warning = false;
|
||||
let mut saw_complete = false;
|
||||
while !(saw_warning && saw_complete) {
|
||||
let event = codex.next_event().await.expect("event");
|
||||
match event.msg {
|
||||
EventMsg::Warning(WarningEvent { message }) => {
|
||||
assert_eq!(
|
||||
message,
|
||||
"Codex is experiencing a partial outage. If a response stalls, try again later. You can follow incident updates at status.openai.com.",
|
||||
);
|
||||
saw_warning = true;
|
||||
}
|
||||
EventMsg::TaskComplete(_) => {
|
||||
saw_complete = true;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn status_payload(status: &str) -> ResponseTemplate {
|
||||
ResponseTemplate::new(200)
|
||||
.insert_header("content-type", "application/json")
|
||||
.set_body_json(serde_json::json!({
|
||||
"summary": {
|
||||
"components": [
|
||||
{"id": "cmp-1", "name": "Codex", "status_page_id": "page-1"}
|
||||
],
|
||||
"affected_components": [
|
||||
{"component_id": "cmp-1", "status": status}
|
||||
]
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct SequenceResponder {
|
||||
statuses: Vec<&'static str>,
|
||||
calls: Arc<AtomicUsize>,
|
||||
}
|
||||
|
||||
impl SequenceResponder {
|
||||
fn new(statuses: Vec<&'static str>) -> Self {
|
||||
Self {
|
||||
statuses,
|
||||
calls: Arc::new(AtomicUsize::new(0)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Respond for SequenceResponder {
|
||||
fn respond(&self, _request: &Request) -> ResponseTemplate {
|
||||
let call = self.calls.fetch_add(1, Ordering::SeqCst);
|
||||
let idx = usize::try_from(call).unwrap_or(0);
|
||||
let status = self
|
||||
.statuses
|
||||
.get(idx)
|
||||
.copied()
|
||||
.or_else(|| self.statuses.last().copied())
|
||||
.unwrap_or("operational");
|
||||
status_payload(status)
|
||||
}
|
||||
}
|
||||
@@ -31,6 +31,7 @@ mod exec;
|
||||
mod exec_policy;
|
||||
mod fork_conversation;
|
||||
mod grep_files;
|
||||
mod idle_warning;
|
||||
mod items;
|
||||
mod json_result;
|
||||
mod list_dir;
|
||||
|
||||
@@ -18,7 +18,6 @@ use codex_core::protocol::ReviewOutputEvent;
|
||||
use codex_core::protocol::ReviewRequest;
use codex_core::protocol::RolloutItem;
use codex_core::protocol::RolloutLine;
use codex_core::review_format::render_review_output_text;
use codex_protocol::user_input::UserInput;
use core_test_support::load_default_config_for_test;
use core_test_support::load_sse_fixture_with_id_from_str;
@@ -83,6 +82,7 @@ async fn review_op_emits_lifecycle_and_review_output() {
review_request: ReviewRequest {
prompt: "Please review my changes".to_string(),
user_facing_hint: "my changes".to_string(),
append_to_original_thread: true,
},
})
.await
@@ -124,36 +124,22 @@ async fn review_op_emits_lifecycle_and_review_output() {

let mut saw_header = false;
let mut saw_finding_line = false;
let expected_assistant_text = render_review_output_text(&expected);
let mut saw_assistant_plain = false;
let mut saw_assistant_xml = false;
for line in text.lines() {
if line.trim().is_empty() {
continue;
}
let v: serde_json::Value = serde_json::from_str(line).expect("jsonl line");
let rl: RolloutLine = serde_json::from_value(v).expect("rollout line");
if let RolloutItem::ResponseItem(ResponseItem::Message { role, content, .. }) = rl.item {
if role == "user" {
for c in content {
if let ContentItem::InputText { text } = c {
if text.contains("full review output from reviewer model") {
saw_header = true;
}
if text.contains("- Prefer Stylize helpers — /tmp/file.rs:10-20") {
saw_finding_line = true;
}
if let RolloutItem::ResponseItem(ResponseItem::Message { role, content, .. }) = rl.item
&& role == "user"
{
for c in content {
if let ContentItem::InputText { text } = c {
if text.contains("full review output from reviewer model") {
saw_header = true;
}
}
} else if role == "assistant" {
for c in content {
if let ContentItem::OutputText { text } = c {
if text.contains("<user_action>") {
saw_assistant_xml = true;
}
if text == expected_assistant_text {
saw_assistant_plain = true;
}
if text.contains("- Prefer Stylize helpers — /tmp/file.rs:10-20") {
saw_finding_line = true;
}
}
}
@@ -164,14 +150,6 @@ async fn review_op_emits_lifecycle_and_review_output() {
saw_finding_line,
"formatted finding line missing from rollout"
);
assert!(
saw_assistant_plain,
"assistant review output missing from rollout"
);
assert!(
!saw_assistant_xml,
"assistant review output contains user_action markup"
);

server.verify().await;
}
@@ -201,6 +179,7 @@ async fn review_op_with_plain_text_emits_review_fallback() {
review_request: ReviewRequest {
prompt: "Plain text review".to_string(),
user_facing_hint: "plain text review".to_string(),
append_to_original_thread: true,
},
})
.await
@@ -259,6 +238,7 @@ async fn review_filters_agent_message_related_events() {
review_request: ReviewRequest {
prompt: "Filter streaming events".to_string(),
user_facing_hint: "Filter streaming events".to_string(),
append_to_original_thread: true,
},
})
.await
@@ -267,7 +247,7 @@ async fn review_filters_agent_message_related_events() {
let mut saw_entered = false;
let mut saw_exited = false;

// Drain until TaskComplete; assert streaming-related events never surface.
// Drain until TaskComplete; assert filtered events never surface.
wait_for_event(&codex, |event| match event {
EventMsg::TaskComplete(_) => true,
EventMsg::EnteredReviewMode(_) => {
@@ -285,6 +265,12 @@ async fn review_filters_agent_message_related_events() {
EventMsg::AgentMessageDelta(_) => {
panic!("unexpected AgentMessageDelta surfaced during review")
}
EventMsg::ItemCompleted(ev) => match &ev.item {
codex_protocol::items::TurnItem::AgentMessage(_) => {
panic!("unexpected ItemCompleted for TurnItem::AgentMessage surfaced during review")
}
_ => false,
},
_ => false,
})
.await;
@@ -293,9 +279,8 @@ async fn review_filters_agent_message_related_events() {
server.verify().await;
}

/// When the model returns structured JSON in a review, ensure only a single
/// non-streaming AgentMessage is emitted; the UI consumes the structured
/// result via ExitedReviewMode plus a final assistant message.
/// When the model returns structured JSON in a review, ensure no AgentMessage
/// is emitted; the UI consumes the structured result via ExitedReviewMode.
// Windows CI only: bump to 4 workers to prevent SSE/event starvation and test timeouts.
#[cfg_attr(windows, tokio::test(flavor = "multi_thread", worker_threads = 4))]
#[cfg_attr(not(windows), tokio::test(flavor = "multi_thread", worker_threads = 2))]
@@ -338,21 +323,19 @@ async fn review_does_not_emit_agent_message_on_structured_output() {
review_request: ReviewRequest {
prompt: "check structured".to_string(),
user_facing_hint: "check structured".to_string(),
append_to_original_thread: true,
},
})
.await
.unwrap();

// Drain events until TaskComplete; ensure we only see a final
// AgentMessage (no streaming assistant messages).
// Drain events until TaskComplete; ensure none are AgentMessage.
let mut saw_entered = false;
let mut saw_exited = false;
let mut agent_messages = 0;
wait_for_event(&codex, |event| match event {
EventMsg::TaskComplete(_) => true,
EventMsg::AgentMessage(_) => {
agent_messages += 1;
false
panic!("unexpected AgentMessage during review with structured output")
}
EventMsg::EnteredReviewMode(_) => {
saw_entered = true;
@@ -365,7 +348,6 @@ async fn review_does_not_emit_agent_message_on_structured_output() {
_ => false,
})
.await;
assert_eq!(1, agent_messages, "expected exactly one AgentMessage event");
assert!(saw_entered && saw_exited, "missing review lifecycle events");

server.verify().await;
@@ -395,6 +377,7 @@ async fn review_uses_custom_review_model_from_config() {
review_request: ReviewRequest {
prompt: "use custom model".to_string(),
user_facing_hint: "use custom model".to_string(),
append_to_original_thread: true,
},
})
.await
@@ -512,6 +495,7 @@ async fn review_input_isolated_from_parent_history() {
review_request: ReviewRequest {
prompt: review_prompt.clone(),
user_facing_hint: review_prompt.clone(),
append_to_original_thread: true,
},
})
.await
@@ -599,10 +583,11 @@ async fn review_input_isolated_from_parent_history() {
server.verify().await;
}

/// After a review thread finishes, its conversation should be visible in the
/// parent session so later turns can reference the results.
/// After a review thread finishes, its conversation should not leak into the
/// parent session. A subsequent parent turn must not include any review
/// messages in its request `input`.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn review_history_surfaces_in_parent_session() {
async fn review_history_does_not_leak_into_parent_session() {
skip_if_no_network!();

// Respond to both the review request and the subsequent parent request.
@@ -623,6 +608,7 @@ async fn review_history_surfaces_in_parent_session() {
review_request: ReviewRequest {
prompt: "Start a review".to_string(),
user_facing_hint: "Start a review".to_string(),
append_to_original_thread: true,
},
})
.await
@@ -665,26 +651,20 @@ async fn review_history_surfaces_in_parent_session() {
let last_text = last["content"][0]["text"].as_str().unwrap();
assert_eq!(last_text, followup);

// Ensure review-thread content is present for downstream turns.
let contains_review_rollout_user = input.iter().any(|msg| {
msg["content"][0]["text"]
.as_str()
.unwrap_or_default()
.contains("User initiated a review task.")
});
// Ensure no review-thread content leaked into the parent request
let contains_review_prompt = input
.iter()
.any(|msg| msg["content"][0]["text"].as_str().unwrap_or_default() == "Start a review");
let contains_review_assistant = input.iter().any(|msg| {
msg["content"][0]["text"]
.as_str()
.unwrap_or_default()
.contains("review assistant output")
msg["content"][0]["text"].as_str().unwrap_or_default() == "review assistant output"
});
assert!(
contains_review_rollout_user,
"review rollout user message missing from parent turn input"
!contains_review_prompt,
"review prompt leaked into parent turn input"
);
assert!(
contains_review_assistant,
"review assistant output missing from parent turn input"
!contains_review_assistant,
"review assistant output leaked into parent turn input"
);

server.verify().await;

@@ -24,7 +24,6 @@ anyhow = { workspace = true }
async-trait = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-core = { workspace = true }
codex-execpolicy = { workspace = true }
libc = { workspace = true }
path-absolutize = { workspace = true }
rmcp = { workspace = true, default-features = false, features = [
@@ -52,7 +51,6 @@ tokio = { workspace = true, features = [
"signal",
] }
tokio-util = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }

@@ -55,21 +55,19 @@
//! | |
//! o<-----x
//!
use std::path::Path;
use std::path::PathBuf;

use clap::Parser;
use tracing_subscriber::EnvFilter;
use tracing_subscriber::{self};

use crate::posix::exec_policy::ExecPolicyEvaluator;
use crate::posix::exec_policy::load_policy_from_codex_home;
use crate::posix::mcp_escalation_policy::SharedExecPolicy;
use crate::posix::mcp_escalation_policy::ExecPolicyOutcome;

mod escalate_client;
mod escalate_protocol;
mod escalate_server;
mod escalation_policy;
mod exec_policy;
mod mcp;
mod mcp_escalation_policy;
mod socket;
@@ -116,10 +114,7 @@ pub async fn main_mcp_server() -> anyhow::Result<()> {
};

tracing::info!("Starting MCP server");
let policy = load_policy_from_codex_home().await?;
let policy: SharedExecPolicy = std::sync::Arc::new(ExecPolicyEvaluator::new(policy));

let service = mcp::serve(bash_path, execve_wrapper, policy)
let service = mcp::serve(bash_path, execve_wrapper, dummy_exec_policy)
.await
.inspect_err(|e| {
tracing::error!("serving error: {:?}", e);
@@ -149,3 +144,27 @@ pub async fn main_execve_wrapper() -> anyhow::Result<()> {
let exit_code = escalate_client::run(file, argv).await?;
std::process::exit(exit_code);
}

// TODO: replace with execpolicy

fn dummy_exec_policy(file: &Path, argv: &[String], _workdir: &Path) -> ExecPolicyOutcome {
if file.ends_with("rm") {
ExecPolicyOutcome::Forbidden
} else if file.ends_with("git") {
ExecPolicyOutcome::Prompt {
run_with_escalated_permissions: false,
}
} else if file == Path::new("/opt/homebrew/bin/gh")
&& let [_, arg1, arg2, ..] = argv
&& arg1 == "issue"
&& arg2 == "list"
{
ExecPolicyOutcome::Allow {
run_with_escalated_permissions: true,
}
} else {
ExecPolicyOutcome::Allow {
run_with_escalated_permissions: false,
}
}
}

@@ -1,266 +0,0 @@
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;

use codex_core::command_safety::is_dangerous_command::command_might_be_dangerous;
use codex_execpolicy::Decision;
use codex_execpolicy::Evaluation;
use codex_execpolicy::Policy;
use codex_execpolicy::PolicyParser;
use thiserror::Error;
use tokio::fs;

use crate::posix::mcp_escalation_policy::ExecPolicy;
use crate::posix::mcp_escalation_policy::ExecPolicyOutcome;

const POLICY_DIR_NAME: &str = "policy";
const POLICY_EXTENSION: &str = "codexpolicy";

#[derive(Debug, Error)]
pub(crate) enum ExecPolicyError {
#[error("failed to resolve CODEX_HOME: {source}")]
CodexHome { source: std::io::Error },

#[error("failed to read execpolicy files from {dir}: {source}")]
ReadDir {
dir: PathBuf,
source: std::io::Error,
},

#[error("failed to read execpolicy file {path}: {source}")]
ReadFile {
path: PathBuf,
source: std::io::Error,
},

#[error("failed to parse execpolicy file {path}: {source}")]
ParsePolicy {
path: String,
source: codex_execpolicy::Error,
},
}

pub(crate) async fn load_policy_from_codex_home() -> Result<Arc<Policy>, ExecPolicyError> {
let codex_home = codex_core::config::find_codex_home()
.map_err(|source| ExecPolicyError::CodexHome { source })?;

let policy_dir = codex_home.join(POLICY_DIR_NAME);
let policy_paths = collect_policy_files(&policy_dir).await?;

let mut parser = PolicyParser::new();
for policy_path in &policy_paths {
let contents =
fs::read_to_string(policy_path)
.await
.map_err(|source| ExecPolicyError::ReadFile {
path: policy_path.clone(),
source,
})?;
let identifier = policy_path.to_string_lossy().to_string();
parser
.parse(&identifier, &contents)
.map_err(|source| ExecPolicyError::ParsePolicy {
path: identifier,
source,
})?;
}

let policy = Arc::new(parser.build());
tracing::debug!(
"loaded execpolicy from {} files in {}",
policy_paths.len(),
policy_dir.display()
);
Ok(policy)
}

pub(crate) struct ExecPolicyEvaluator {
policy: Arc<Policy>,
}

impl ExecPolicyEvaluator {
pub(crate) fn new(policy: Arc<Policy>) -> Self {
Self { policy }
}

fn command_for(file: &Path, argv: &[String]) -> Vec<String> {
let cmd0 = argv
.first()
.and_then(|s| Path::new(s).file_name())
.and_then(|s| s.to_str())
.filter(|s| !s.is_empty())
.map(std::string::ToString::to_string)
.or_else(|| {
file.file_name()
.and_then(|s| s.to_str())
.filter(|s| !s.is_empty())
.map(std::string::ToString::to_string)
})
.unwrap_or_else(|| file.display().to_string());

let mut command = Vec::with_capacity(argv.len().max(1));
command.push(cmd0);
if argv.len() > 1 {
command.extend(argv.iter().skip(1).cloned());
}
command
}
}

impl ExecPolicy for ExecPolicyEvaluator {
fn evaluate(&self, file: &Path, argv: &[String], _workdir: &Path) -> ExecPolicyOutcome {
let command = Self::command_for(file, argv);

match self.policy.check_multiple(std::iter::once(&command)) {
Evaluation::Match { decision, .. } => match decision {
Decision::Forbidden => ExecPolicyOutcome::Forbidden,
Decision::Prompt => ExecPolicyOutcome::Prompt {
run_with_escalated_permissions: true,
},
Decision::Allow => ExecPolicyOutcome::Allow {
run_with_escalated_permissions: true,
},
},
Evaluation::NoMatch { .. } => {
if command_might_be_dangerous(&command) {
ExecPolicyOutcome::Prompt {
run_with_escalated_permissions: false,
}
} else {
ExecPolicyOutcome::Allow {
run_with_escalated_permissions: false,
}
}
}
}
}
}

async fn collect_policy_files(dir: &Path) -> Result<Vec<PathBuf>, ExecPolicyError> {
let mut read_dir = match fs::read_dir(dir).await {
Ok(read_dir) => read_dir,
Err(err) if err.kind() == ErrorKind::NotFound => return Ok(Vec::new()),
Err(source) => {
return Err(ExecPolicyError::ReadDir {
dir: dir.to_path_buf(),
source,
});
}
};

let mut policy_paths = Vec::new();
while let Some(entry) =
read_dir
.next_entry()
.await
.map_err(|source| ExecPolicyError::ReadDir {
dir: dir.to_path_buf(),
source,
})?
{
let path = entry.path();
let file_type = entry
.file_type()
.await
.map_err(|source| ExecPolicyError::ReadDir {
dir: dir.to_path_buf(),
source,
})?;

if path
.extension()
.and_then(|ext| ext.to_str())
.is_some_and(|ext| ext == POLICY_EXTENSION)
&& file_type.is_file()
{
policy_paths.push(path);
}
}

policy_paths.sort();
Ok(policy_paths)
}

#[cfg(test)]
mod tests {
use super::*;
use tempfile::tempdir;

struct EnvVarGuard {
key: &'static str,
original: Option<std::ffi::OsString>,
}

impl EnvVarGuard {
fn set(key: &'static str, value: &std::ffi::OsStr) -> Self {
let original = std::env::var_os(key);
unsafe {
std::env::set_var(key, value);
}
Self { key, original }
}
}

impl Drop for EnvVarGuard {
fn drop(&mut self) {
unsafe {
match &self.original {
Some(value) => std::env::set_var(self.key, value),
None => std::env::remove_var(self.key),
}
}
}
}

#[test]
fn allow_policy_bypasses_sandbox() {
let mut parser = PolicyParser::new();
parser
.parse(
"test.codexpolicy",
r#"prefix_rule(pattern=["echo"], decision="allow")"#,
)
.expect("parse policy");
let evaluator = ExecPolicyEvaluator::new(Arc::new(parser.build()));

let outcome = evaluator.evaluate(
Path::new("/bin/echo"),
&["echo".to_string()],
Path::new("/"),
);
assert!(matches!(
outcome,
ExecPolicyOutcome::Allow {
run_with_escalated_permissions: true
}
));
}

#[test]
fn no_match_dangerous_command_prompts() {
let evaluator = ExecPolicyEvaluator::new(Arc::new(Policy::empty()));
let outcome = evaluator.evaluate(
Path::new("/bin/rm"),
&["rm".to_string(), "-rf".to_string(), "/".to_string()],
Path::new("/"),
);
assert!(matches!(
outcome,
ExecPolicyOutcome::Prompt {
run_with_escalated_permissions: false
}
));
}

#[tokio::test]
async fn missing_policy_dir_loads_empty() {
let dir = tempdir().expect("tempdir");
let _guard = EnvVarGuard::set("CODEX_HOME", dir.path().as_os_str());
let policy = load_policy_from_codex_home().await.expect("load policy");
assert!(matches!(
policy.check_multiple(std::iter::once(&vec!["rm".to_string()])),
Evaluation::NoMatch { .. }
));
}
}
@@ -27,8 +27,8 @@ use tracing::debug;

use crate::posix::escalate_server::EscalateServer;
use crate::posix::escalate_server::{self};
use crate::posix::mcp_escalation_policy::ExecPolicy;
use crate::posix::mcp_escalation_policy::McpEscalationPolicy;
use crate::posix::mcp_escalation_policy::SharedExecPolicy;
use crate::posix::stopwatch::Stopwatch;

/// Path to our patched bash.
@@ -78,13 +78,13 @@ pub struct ExecTool {
tool_router: ToolRouter<ExecTool>,
bash_path: PathBuf,
execve_wrapper: PathBuf,
policy: SharedExecPolicy,
policy: ExecPolicy,
sandbox_state: Arc<RwLock<Option<SandboxState>>>,
}

#[tool_router]
impl ExecTool {
pub fn new(bash_path: PathBuf, execve_wrapper: PathBuf, policy: SharedExecPolicy) -> Self {
pub fn new(bash_path: PathBuf, execve_wrapper: PathBuf, policy: ExecPolicy) -> Self {
Self {
tool_router: Self::tool_router(),
bash_path,
@@ -121,7 +121,7 @@ impl ExecTool {
let escalate_server = EscalateServer::new(
self.bash_path.clone(),
self.execve_wrapper.clone(),
McpEscalationPolicy::new(self.policy.clone(), context, stopwatch.clone()),
McpEscalationPolicy::new(self.policy, context, stopwatch.clone()),
);

let result = escalate_server
@@ -198,7 +198,7 @@ impl ServerHandler for ExecTool {
pub(crate) async fn serve(
bash_path: PathBuf,
execve_wrapper: PathBuf,
policy: SharedExecPolicy,
policy: ExecPolicy,
) -> Result<RunningService<RoleServer, ExecTool>, rmcp::service::ServerInitializeError> {
let tool = ExecTool::new(bash_path, execve_wrapper, policy);
tool.serve(stdio()).await

@@ -1,5 +1,4 @@
use std::path::Path;
use std::sync::Arc;

use rmcp::ErrorData as McpError;
use rmcp::RoleServer;
@@ -19,11 +18,7 @@ use crate::posix::stopwatch::Stopwatch;
/// `argv` is the argv, including the program name (`argv[0]`).
/// `workdir` is the absolute, canonical path to the working directory in which to execute the
/// command.
pub(crate) trait ExecPolicy: Send + Sync {
fn evaluate(&self, file: &Path, argv: &[String], workdir: &Path) -> ExecPolicyOutcome;
}

pub(crate) type SharedExecPolicy = Arc<dyn ExecPolicy>;
pub(crate) type ExecPolicy = fn(file: &Path, argv: &[String], workdir: &Path) -> ExecPolicyOutcome;

pub(crate) enum ExecPolicyOutcome {
Allow {
@@ -38,14 +33,14 @@ pub(crate) enum ExecPolicyOutcome {
/// ExecPolicy with access to the MCP RequestContext so that it can leverage
/// elicitations.
pub(crate) struct McpEscalationPolicy {
policy: SharedExecPolicy,
policy: ExecPolicy,
context: RequestContext<RoleServer>,
stopwatch: Stopwatch,
}

impl McpEscalationPolicy {
pub(crate) fn new(
policy: SharedExecPolicy,
policy: ExecPolicy,
context: RequestContext<RoleServer>,
stopwatch: Stopwatch,
) -> Self {
@@ -108,7 +103,7 @@ impl EscalationPolicy for McpEscalationPolicy {
argv: &[String],
workdir: &Path,
) -> Result<EscalateAction, rmcp::ErrorData> {
let outcome = self.policy.evaluate(file, argv, workdir);
let outcome = (self.policy)(file, argv, workdir);
let action = match outcome {
ExecPolicyOutcome::Allow {
run_with_escalated_permissions,

@@ -14,7 +14,7 @@ codex-core = { path = "../core" }
reqwest = { version = "0.12", features = ["json", "stream"] }
serde_json = "1"
tokio = { version = "1", features = ["rt"] }
tracing = { version = "0.1.43", features = ["log"] }
tracing = { version = "0.1.41", features = ["log"] }
which = "6.0"

[dev-dependencies]

@@ -1256,18 +1256,13 @@ pub struct GitInfo {
pub repository_url: Option<String>,
}

#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
pub enum ReviewDelivery {
Inline,
Detached,
}

#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema, TS)]
/// Review request sent to the review session.
pub struct ReviewRequest {
pub prompt: String,
pub user_facing_hint: String,
#[serde(default)]
pub append_to_original_thread: bool,
}

/// Structured review result produced by a child review session.

@@ -2895,6 +2895,7 @@ impl ChatWidget {
review_request: ReviewRequest {
prompt: "Review the current code changes (staged, unstaged, and untracked files) and provide prioritized findings.".to_string(),
user_facing_hint: "current changes".to_string(),
append_to_original_thread: true,
},
}));
},
@@ -2951,6 +2952,7 @@ impl ChatWidget {
"Review the code changes against the base branch '{branch}'. Start by finding the merge diff between the current branch and {branch}'s upstream e.g. (`git merge-base HEAD \"$(git rev-parse --abbrev-ref \"{branch}@{{upstream}}\")\"`), then run `git diff` against that SHA to see what changes we would merge into the {branch} branch. Provide prioritized, actionable findings."
),
user_facing_hint: format!("changes against '{branch}'"),
append_to_original_thread: true,
},
}));
})],
@@ -2991,6 +2993,7 @@ impl ChatWidget {
review_request: ReviewRequest {
prompt,
user_facing_hint: hint,
append_to_original_thread: true,
},
}));
})],
@@ -3025,6 +3028,7 @@ impl ChatWidget {
review_request: ReviewRequest {
prompt: trimmed.clone(),
user_facing_hint: trimmed,
append_to_original_thread: true,
},
}));
}),
@@ -3240,6 +3244,7 @@ pub(crate) fn show_review_commit_picker_with_entries(
review_request: ReviewRequest {
prompt,
user_facing_hint: hint,
append_to_original_thread: true,
},
}));
})],

@@ -155,6 +155,7 @@ fn entered_review_mode_uses_request_hint() {
msg: EventMsg::EnteredReviewMode(ReviewRequest {
prompt: "Review the latest changes".to_string(),
user_facing_hint: "feature branch".to_string(),
append_to_original_thread: true,
}),
});

@@ -174,6 +175,7 @@ fn entered_review_mode_defaults_to_current_changes_banner() {
msg: EventMsg::EnteredReviewMode(ReviewRequest {
prompt: "Review the current changes".to_string(),
user_facing_hint: "current changes".to_string(),
append_to_original_thread: true,
}),
});

@@ -241,6 +243,7 @@ fn review_restores_context_window_indicator() {
msg: EventMsg::EnteredReviewMode(ReviewRequest {
prompt: "Review the latest changes".to_string(),
user_facing_hint: "feature branch".to_string(),
append_to_original_thread: true,
}),
});

@@ -2,11 +2,11 @@

If you already lean on Codex every day and just need a little more control, this page collects the knobs you are most likely to reach for: tweak defaults in [Config](./config.md), add extra tools through [Model Context Protocol support](#model-context-protocol), and script full runs with [`codex exec`](./exec.md). Jump to the section you need and keep building.

## Config quickstart
## Config quickstart {#config-quickstart}

Most day-to-day tuning lives in `config.toml`: set approval + sandbox presets, pin model defaults, and add MCP server launchers. The [Config guide](./config.md) walks through every option and provides copy-paste examples for common setups.

## Tracing / verbose logging
## Tracing / verbose logging {#tracing-verbose-logging}

Because Codex is written in Rust, it honors the `RUST_LOG` environment variable to configure its logging behavior.

@@ -20,15 +20,15 @@ By comparison, the non-interactive mode (`codex exec`) defaults to `RUST_LOG=err

See the Rust documentation on [`RUST_LOG`](https://docs.rs/env_logger/latest/env_logger/#enabling-logging) for more information on the configuration options.

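For instance, a more verbose run can be requested by setting `RUST_LOG` for a single invocation. A minimal sketch (the filter values are illustrative assumptions, not part of this diff):

```shell
# Interactive session with debug-level logging (noisy, useful when reporting issues).
RUST_LOG=debug codex

# Non-interactive run that only reports warnings and errors.
RUST_LOG=warn codex exec "summarize the staged changes"
```
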
## Model Context Protocol (MCP)
## Model Context Protocol (MCP) {#model-context-protocol}

The Codex CLI and IDE extension is a MCP client which means that it can be configured to connect to MCP servers. For more information, refer to the [`config docs`](./config.md#mcp-integration).

## Using Codex as an MCP Server
## Using Codex as an MCP Server {#mcp-server}

The Codex CLI can also be run as an MCP _server_ via `codex mcp-server`. For example, you can use `codex mcp-server` to make Codex available as a tool inside of a multi-agent framework like the OpenAI [Agents SDK](https://platform.openai.com/docs/guides/agents). Use `codex mcp` separately to add/list/get/remove MCP server launchers in your configuration.

### Codex MCP Server Quickstart
### Codex MCP Server Quickstart {#mcp-server-quickstart}

You can launch a Codex MCP server with the [Model Context Protocol Inspector](https://modelcontextprotocol.io/legacy/tools/inspector):

@@ -58,7 +58,7 @@ Send a `tools/list` request and you will see that there are two tools available:
| **`prompt`** (required) | string | The next user prompt to continue the Codex conversation. |
| **`conversationId`** (required) | string | The id of the conversation to continue. |

### Trying it Out
### Trying it Out {#mcp-server-trying-it-out}

> [!TIP]
> Codex often takes a few minutes to run. To accommodate this, adjust the MCP inspector's Request and Total timeouts to 600000ms (10 minutes) under ⛭ Configuration.

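As a concrete illustration of the quickstart above (a sketch, not part of this diff: it assumes `codex` is on your `PATH` and uses the Inspector's standard `npx` entry point):

```shell
# Start the MCP Inspector and have it spawn Codex as an MCP server over stdio.
npx @modelcontextprotocol/inspector codex mcp-server
```
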
@@ -180,9 +180,6 @@ chatgpt_base_url = "https://chatgpt.com/backend-api/"
# Allowed values: chatgpt | api
# forced_login_method = "chatgpt"

# Preferred store for MCP OAuth credentials: auto (default) | file | keyring
mcp_oauth_credentials_store = "auto"

################################################################################
# Project Documentation Controls
################################################################################
@@ -235,6 +232,16 @@ experimental_use_rmcp_client = false
# Include apply_patch via freeform editing path (affects default tool set). Default: false
experimental_use_freeform_apply_patch = false

# Enable model-based sandbox command assessment. Default: false
experimental_sandbox_command_assessment = false

################################################################################
# MCP (Model Context Protocol) servers
################################################################################

# Preferred store for MCP OAuth credentials: auto (default) | file | keyring
mcp_oauth_credentials_store = "auto"

# Define MCP servers under this table. Leave empty to disable.
[mcp_servers]

@@ -113,7 +113,3 @@ Paste images directly into the composer (Ctrl+V / Cmd+V) to attach them to your
codex -i screenshot.png "Explain this error"
codex --image img1.png,img2.jpg "Summarize these diagrams"
```

#### Environment variables and executables

Make sure your environment is already set up before launching Codex so it does not spend tokens probing what to activate. For example, source your Python virtualenv (or other language runtimes), start any required daemons, and export the env vars you expect to use ahead of time.

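For example, a session might be prepared like this before starting Codex (an illustrative sketch; the virtualenv path and variable name are placeholders, not taken from this diff):

```shell
# Activate the project's runtime and export any variables Codex will need,
# then launch Codex from the same shell so it inherits that environment.
source .venv/bin/activate
export API_BASE_URL="http://localhost:8080"   # placeholder value
codex
```
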
@@ -1,3 +1,3 @@
## Platform sandboxing

This content now lives alongside the rest of the sandbox guidance. See [Sandbox mechanics by platform](./sandbox.md#sandbox-mechanics-by-platform) for up-to-date details.
This content now lives alongside the rest of the sandbox guidance. See [Sandbox mechanics by platform](./sandbox.md#platform-sandboxing-details) for up-to-date details.