mirror of
https://github.com/openai/codex.git
synced 2026-02-03 23:43:39 +00:00
Compare commits
3 Commits
rust-v0.92
...
remove/doc
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
200f07f1ec | ||
|
|
737654923f | ||
|
|
a9a56081d0 |
@@ -11,7 +11,6 @@ In the codex-rs folder where the rust code lives:
|
||||
- Always collapse if statements per https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_if
|
||||
- Always inline format! args when possible per https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args
|
||||
- Use method references over closures when possible per https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure_for_method_calls
|
||||
- When possible, make `match` statements exhaustive and avoid wildcard arms.
|
||||
- When writing tests, prefer comparing the equality of entire objects over fields one by one.
|
||||
- When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
|
||||
- If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
|
||||
|
||||
56
codex-rs/Cargo.lock
generated
56
codex-rs/Cargo.lock
generated
@@ -361,7 +361,7 @@ dependencies = [
|
||||
"objc2-foundation",
|
||||
"parking_lot",
|
||||
"percent-encoding",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.60.2",
|
||||
"wl-clipboard-rs",
|
||||
"x11rb",
|
||||
]
|
||||
@@ -616,9 +616,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
|
||||
|
||||
[[package]]
|
||||
name = "axum"
|
||||
version = "0.8.8"
|
||||
version = "0.8.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
|
||||
checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5"
|
||||
dependencies = [
|
||||
"axum-core",
|
||||
"bytes",
|
||||
@@ -634,7 +634,8 @@ dependencies = [
|
||||
"mime",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"serde_core",
|
||||
"rustversion",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_path_to_error",
|
||||
"sync_wrapper",
|
||||
@@ -646,9 +647,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "axum-core"
|
||||
version = "0.5.6"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
|
||||
checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-core",
|
||||
@@ -657,6 +658,7 @@ dependencies = [
|
||||
"http-body-util",
|
||||
"mime",
|
||||
"pin-project-lite",
|
||||
"rustversion",
|
||||
"sync_wrapper",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
@@ -2904,7 +2906,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3004,7 +3006,7 @@ checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"rustix 1.0.8",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3309,7 +3311,7 @@ dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"rustversion",
|
||||
"windows-link 0.1.3",
|
||||
"windows-link 0.2.0",
|
||||
"windows-result 0.3.4",
|
||||
]
|
||||
|
||||
@@ -3383,9 +3385,9 @@ checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
|
||||
|
||||
[[package]]
|
||||
name = "globset"
|
||||
version = "0.4.18"
|
||||
version = "0.4.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "52dfc19153a48bde0cbd630453615c8151bce3a5adfac7a0aebfbf0a1e1f57e3"
|
||||
checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"bstr",
|
||||
@@ -3714,7 +3716,7 @@ dependencies = [
|
||||
"libc",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"socket2 0.6.1",
|
||||
"socket2 0.5.10",
|
||||
"system-configuration",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
@@ -4091,7 +4093,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
|
||||
dependencies = [
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5630,7 +5632,7 @@ dependencies = [
|
||||
"quinn-udp",
|
||||
"rustc-hash",
|
||||
"rustls",
|
||||
"socket2 0.6.1",
|
||||
"socket2 0.5.10",
|
||||
"thiserror 2.0.17",
|
||||
"tokio",
|
||||
"tracing",
|
||||
@@ -5667,9 +5669,9 @@ dependencies = [
|
||||
"cfg_aliases 0.2.1",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"socket2 0.6.1",
|
||||
"socket2 0.5.10",
|
||||
"tracing",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6364,7 +6366,7 @@ dependencies = [
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys 0.4.15",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6377,7 +6379,7 @@ dependencies = [
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys 0.9.4",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -7464,7 +7466,7 @@ dependencies = [
|
||||
"getrandom 0.3.3",
|
||||
"once_cell",
|
||||
"rustix 1.0.8",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.61.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -7795,10 +7797,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tokio-test"
|
||||
version = "0.4.5"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3f6d24790a10a7af737693a3e8f1d03faef7e6ca0cc99aae5066f533766de545"
|
||||
checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
|
||||
dependencies = [
|
||||
"async-stream",
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
@@ -7997,9 +8001,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
|
||||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.44"
|
||||
version = "0.1.43"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
|
||||
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
|
||||
dependencies = [
|
||||
"log",
|
||||
"pin-project-lite",
|
||||
@@ -8032,9 +8036,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-core"
|
||||
version = "0.1.36"
|
||||
version = "0.1.35"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
|
||||
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"valuable",
|
||||
@@ -8746,7 +8750,7 @@ version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||
dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -51,7 +51,7 @@ members = [
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
version = "0.92.0"
|
||||
version = "0.0.0"
|
||||
# Track the edition for all workspace crates in one place. Individual
|
||||
# crates can still override this value, but keeping it here means new
|
||||
# crates created with `cargo new -w ...` automatically inherit the 2024
|
||||
@@ -216,7 +216,7 @@ tokio-tungstenite = { version = "0.28.0", features = ["proxy", "rustls-tls-nativ
|
||||
tokio-util = "0.7.18"
|
||||
toml = "0.9.5"
|
||||
toml_edit = "0.24.0"
|
||||
tracing = "0.1.44"
|
||||
tracing = "0.1.43"
|
||||
tracing-appender = "0.2.3"
|
||||
tracing-subscriber = "0.3.22"
|
||||
tracing-test = "0.2.5"
|
||||
|
||||
@@ -15,8 +15,8 @@ You can also install via Homebrew (`brew install --cask codex`) or download a pl
|
||||
|
||||
## Documentation quickstart
|
||||
|
||||
- First run with Codex? Start with [`docs/getting-started.md`](../docs/getting-started.md) (links to the walkthrough for prompts, keyboard shortcuts, and session management).
|
||||
- Want deeper control? See [`docs/config.md`](../docs/config.md) and [`docs/install.md`](../docs/install.md).
|
||||
- First run with Codex? Start with the [Getting Started guide](https://developers.openai.com/codex) (links to the walkthrough for prompts, keyboard shortcuts, and session management).
|
||||
- Want deeper control? See [Configuration documentation](https://developers.openai.com/codex/config-advanced/).
|
||||
|
||||
## What's new in the Rust CLI
|
||||
|
||||
@@ -24,13 +24,13 @@ The Rust implementation is now the maintained Codex CLI and serves as the defaul
|
||||
|
||||
### Config
|
||||
|
||||
Codex supports a rich set of configuration options. Note that the Rust CLI uses `config.toml` instead of `config.json`. See [`docs/config.md`](../docs/config.md) for details.
|
||||
Codex supports a rich set of configuration options. Note that the Rust CLI uses `config.toml` instead of `config.json`. See [Configuration documentation](https://developers.openai.com/codex/config-advanced/) for details.
|
||||
|
||||
### Model Context Protocol Support
|
||||
|
||||
#### MCP client
|
||||
|
||||
Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [`configuration documentation`](../docs/config.md#connecting-to-mcp-servers) for details.
|
||||
Codex CLI functions as an MCP client that allows the Codex CLI and IDE extension to connect to MCP servers on startup. See the [configuration documentation](https://developers.openai.com/codex/config-advanced/) for details.
|
||||
|
||||
#### MCP server (experimental)
|
||||
|
||||
@@ -46,7 +46,7 @@ Use `codex mcp` to add/list/get/remove MCP server launchers defined in `config.t
|
||||
|
||||
### Notifications
|
||||
|
||||
You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS. When Codex detects that it is running under WSL 2 inside Windows Terminal (`WT_SESSION` is set), the TUI automatically falls back to native Windows toast notifications so approval prompts and completed turns surface even though Windows Terminal does not implement OSC 9.
|
||||
You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](https://developers.openai.com/codex/config-advanced/#notifications) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS. When Codex detects that it is running under WSL 2 inside Windows Terminal (`WT_SESSION` is set), the TUI automatically falls back to native Windows toast notifications so approval prompts and completed turns surface even though Windows Terminal does not implement OSC 9.
|
||||
|
||||
### `codex exec` to run Codex programmatically/non-interactively
|
||||
|
||||
|
||||
@@ -2621,7 +2621,6 @@ mod tests {
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::items::UserMessageItem;
|
||||
use codex_protocol::items::WebSearchItem;
|
||||
use codex_protocol::models::WebSearchAction;
|
||||
use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
|
||||
use codex_protocol::user_input::UserInput as CoreUserInput;
|
||||
use pretty_assertions::assert_eq;
|
||||
@@ -2729,9 +2728,6 @@ mod tests {
|
||||
let search_item = TurnItem::WebSearch(WebSearchItem {
|
||||
id: "search-1".to_string(),
|
||||
query: "docs".to_string(),
|
||||
action: WebSearchAction::Search {
|
||||
query: Some("docs".to_string()),
|
||||
},
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
|
||||
@@ -147,7 +147,7 @@ struct ResumeCommand {
|
||||
session_id: Option<String>,
|
||||
|
||||
/// Continue the most recent session without showing the picker.
|
||||
#[arg(long = "last", default_value_t = false)]
|
||||
#[arg(long = "last", default_value_t = false, conflicts_with = "session_id")]
|
||||
last: bool,
|
||||
|
||||
/// Show all sessions (disables cwd filtering and shows CWD column).
|
||||
@@ -932,24 +932,6 @@ mod tests {
|
||||
finalize_fork_interactive(interactive, root_overrides, session_id, last, all, fork_cli)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exec_resume_last_accepts_prompt_positional() {
|
||||
let cli =
|
||||
MultitoolCli::try_parse_from(["codex", "exec", "--json", "resume", "--last", "2+2"])
|
||||
.expect("parse should succeed");
|
||||
|
||||
let Some(Subcommand::Exec(exec)) = cli.subcommand else {
|
||||
panic!("expected exec subcommand");
|
||||
};
|
||||
let Some(codex_exec::Command::Resume(args)) = exec.command else {
|
||||
panic!("expected exec resume");
|
||||
};
|
||||
|
||||
assert!(args.last);
|
||||
assert_eq!(args.session_id, None);
|
||||
assert_eq!(args.prompt.as_deref(), Some("2+2"));
|
||||
}
|
||||
|
||||
fn app_server_from_args(args: &[&str]) -> AppServerCommand {
|
||||
let cli = MultitoolCli::try_parse_from(args).expect("parse");
|
||||
let Subcommand::AppServer(app_server) = cli.subcommand.expect("app-server present") else {
|
||||
|
||||
@@ -291,7 +291,7 @@ pub fn process_responses_event(
|
||||
if let Ok(item) = serde_json::from_value::<ResponseItem>(item_val) {
|
||||
return Ok(Some(ResponseEvent::OutputItemAdded(item)));
|
||||
}
|
||||
debug!("failed to parse ResponseItem from output_item.added");
|
||||
debug!("failed to parse ResponseItem from output_item.done");
|
||||
}
|
||||
}
|
||||
"response.reasoning_summary_part.added" => {
|
||||
|
||||
@@ -2,5 +2,4 @@
|
||||
|
||||
This file has moved. Please see the latest configuration documentation here:
|
||||
|
||||
- Full config docs: [docs/config.md](../docs/config.md)
|
||||
- MCP servers section: [docs/config.md#connecting-to-mcp-servers](../docs/config.md#connecting-to-mcp-servers)
|
||||
- Configuration documentation: https://developers.openai.com/codex/config-advanced/
|
||||
|
||||
@@ -1465,10 +1465,6 @@
|
||||
],
|
||||
"description": "User-level skill config entries keyed by SKILL.md path."
|
||||
},
|
||||
"suppress_unstable_features_warning": {
|
||||
"description": "Suppress warnings about unstable (under development) features.",
|
||||
"type": "boolean"
|
||||
},
|
||||
"tool_output_token_limit": {
|
||||
"description": "Token budget applied when storing tool/function outputs in the context manager.",
|
||||
"format": "uint",
|
||||
|
||||
@@ -655,11 +655,13 @@ fn build_responses_headers(
|
||||
let mut headers = experimental_feature_headers(config);
|
||||
headers.insert(
|
||||
WEB_SEARCH_ELIGIBLE_HEADER,
|
||||
HeaderValue::from_static(if config.web_search_mode == WebSearchMode::Disabled {
|
||||
"false"
|
||||
} else {
|
||||
"true"
|
||||
}),
|
||||
HeaderValue::from_static(
|
||||
if matches!(config.web_search_mode, Some(WebSearchMode::Disabled)) {
|
||||
"false"
|
||||
} else {
|
||||
"true"
|
||||
},
|
||||
),
|
||||
);
|
||||
if let Some(turn_state) = turn_state
|
||||
&& let Some(state) = turn_state.get()
|
||||
|
||||
@@ -22,7 +22,6 @@ use crate::connectors;
|
||||
use crate::exec_policy::ExecPolicyManager;
|
||||
use crate::features::Feature;
|
||||
use crate::features::Features;
|
||||
use crate::features::maybe_push_unstable_features_warning;
|
||||
use crate::models_manager::manager::ModelsManager;
|
||||
use crate::parse_command::parse_command;
|
||||
use crate::parse_turn_item;
|
||||
@@ -733,7 +732,7 @@ impl Session {
|
||||
None
|
||||
} else {
|
||||
Some(format!(
|
||||
"Enable it with `--enable {canonical}` or `[features].{canonical}` in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
|
||||
"Enable it with `--enable {canonical}` or `[features].{canonical}` in config.toml. See https://developers.openai.com/codex/config-advanced/ for details."
|
||||
))
|
||||
};
|
||||
post_session_configured_events.push(Event {
|
||||
@@ -755,7 +754,6 @@ impl Session {
|
||||
});
|
||||
}
|
||||
maybe_push_chat_wire_api_deprecation(&config, &mut post_session_configured_events);
|
||||
maybe_push_unstable_features_warning(&config, &mut post_session_configured_events);
|
||||
|
||||
let auth = auth.as_ref();
|
||||
let otel_manager = OtelManager::new(
|
||||
@@ -2824,7 +2822,7 @@ async fn spawn_review_thread(
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &review_model_info,
|
||||
features: &review_features,
|
||||
web_search_mode: review_web_search_mode,
|
||||
web_search_mode: Some(review_web_search_mode),
|
||||
});
|
||||
|
||||
let review_prompt = resolved.prompt.clone();
|
||||
@@ -2836,7 +2834,7 @@ async fn spawn_review_thread(
|
||||
let mut per_turn_config = (*config).clone();
|
||||
per_turn_config.model = Some(model.clone());
|
||||
per_turn_config.features = review_features.clone();
|
||||
per_turn_config.web_search_mode = review_web_search_mode;
|
||||
per_turn_config.web_search_mode = Some(review_web_search_mode);
|
||||
|
||||
let otel_manager = parent_turn_context
|
||||
.client
|
||||
@@ -3439,8 +3437,10 @@ async fn try_run_sampling_request(
|
||||
}
|
||||
ResponseEvent::OutputItemAdded(item) => {
|
||||
if let Some(turn_item) = handle_non_tool_response_item(&item).await {
|
||||
let tracked_item = turn_item.clone();
|
||||
sess.emit_turn_item_started(&turn_context, &turn_item).await;
|
||||
active_item = Some(turn_item);
|
||||
|
||||
active_item = Some(tracked_item);
|
||||
}
|
||||
}
|
||||
ResponseEvent::ServerReasoningIncluded(included) => {
|
||||
|
||||
@@ -304,8 +304,8 @@ pub struct Config {
|
||||
/// model info's default preference.
|
||||
pub include_apply_patch_tool: bool,
|
||||
|
||||
/// Explicit or feature-derived web search mode. Defaults to cached.
|
||||
pub web_search_mode: WebSearchMode,
|
||||
/// Explicit or feature-derived web search mode.
|
||||
pub web_search_mode: Option<WebSearchMode>,
|
||||
|
||||
/// If set to `true`, used only the experimental unified exec tool.
|
||||
pub use_experimental_unified_exec_tool: bool,
|
||||
@@ -316,9 +316,6 @@ pub struct Config {
|
||||
/// Centralized feature flags; source of truth for feature gating.
|
||||
pub features: Features,
|
||||
|
||||
/// When `true`, suppress warnings about unstable (under development) features.
|
||||
pub suppress_unstable_features_warning: bool,
|
||||
|
||||
/// The active profile name used to derive this `Config` (if any).
|
||||
pub active_profile: Option<String>,
|
||||
|
||||
@@ -909,9 +906,6 @@ pub struct ConfigToml {
|
||||
#[schemars(schema_with = "crate::config::schema::features_schema")]
|
||||
pub features: Option<FeaturesToml>,
|
||||
|
||||
/// Suppress warnings about unstable (under development) features.
|
||||
pub suppress_unstable_features_warning: Option<bool>,
|
||||
|
||||
/// Settings for ghost snapshots (used for undo).
|
||||
#[serde(default)]
|
||||
pub ghost_snapshot: Option<GhostSnapshotToml>,
|
||||
@@ -1205,17 +1199,17 @@ fn resolve_web_search_mode(
|
||||
config_toml: &ConfigToml,
|
||||
config_profile: &ConfigProfile,
|
||||
features: &Features,
|
||||
) -> WebSearchMode {
|
||||
) -> Option<WebSearchMode> {
|
||||
if let Some(mode) = config_profile.web_search.or(config_toml.web_search) {
|
||||
return mode;
|
||||
return Some(mode);
|
||||
}
|
||||
if features.enabled(Feature::WebSearchCached) {
|
||||
return WebSearchMode::Cached;
|
||||
return Some(WebSearchMode::Cached);
|
||||
}
|
||||
if features.enabled(Feature::WebSearchRequest) {
|
||||
return WebSearchMode::Live;
|
||||
return Some(WebSearchMode::Live);
|
||||
}
|
||||
WebSearchMode::Cached
|
||||
None
|
||||
}
|
||||
|
||||
impl Config {
|
||||
@@ -1570,9 +1564,6 @@ impl Config {
|
||||
use_experimental_unified_exec_tool,
|
||||
ghost_snapshot,
|
||||
features,
|
||||
suppress_unstable_features_warning: cfg
|
||||
.suppress_unstable_features_warning
|
||||
.unwrap_or(false),
|
||||
active_profile: active_profile_name,
|
||||
active_project,
|
||||
windows_wsl_setup_acknowledged: cfg.windows_wsl_setup_acknowledged.unwrap_or(false),
|
||||
@@ -2264,15 +2255,12 @@ trust_level = "trusted"
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_mode_defaults_to_cached_if_unset() {
|
||||
fn web_search_mode_uses_none_if_unset() {
|
||||
let cfg = ConfigToml::default();
|
||||
let profile = ConfigProfile::default();
|
||||
let features = Features::with_defaults();
|
||||
|
||||
assert_eq!(
|
||||
resolve_web_search_mode(&cfg, &profile, &features),
|
||||
WebSearchMode::Cached
|
||||
);
|
||||
assert_eq!(resolve_web_search_mode(&cfg, &profile, &features), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -2287,7 +2275,7 @@ trust_level = "trusted"
|
||||
|
||||
assert_eq!(
|
||||
resolve_web_search_mode(&cfg, &profile, &features),
|
||||
WebSearchMode::Live
|
||||
Some(WebSearchMode::Live)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -2303,7 +2291,7 @@ trust_level = "trusted"
|
||||
|
||||
assert_eq!(
|
||||
resolve_web_search_mode(&cfg, &profile, &features),
|
||||
WebSearchMode::Disabled
|
||||
Some(WebSearchMode::Disabled)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -3740,11 +3728,10 @@ model_verbosity = "high"
|
||||
forced_chatgpt_workspace_id: None,
|
||||
forced_login_method: None,
|
||||
include_apply_patch_tool: false,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: None,
|
||||
use_experimental_unified_exec_tool: false,
|
||||
ghost_snapshot: GhostSnapshotConfig::default(),
|
||||
features: Features::with_defaults(),
|
||||
suppress_unstable_features_warning: false,
|
||||
active_profile: Some("o3".to_string()),
|
||||
active_project: ProjectConfig { trust_level: None },
|
||||
windows_wsl_setup_acknowledged: false,
|
||||
@@ -3823,11 +3810,10 @@ model_verbosity = "high"
|
||||
forced_chatgpt_workspace_id: None,
|
||||
forced_login_method: None,
|
||||
include_apply_patch_tool: false,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: None,
|
||||
use_experimental_unified_exec_tool: false,
|
||||
ghost_snapshot: GhostSnapshotConfig::default(),
|
||||
features: Features::with_defaults(),
|
||||
suppress_unstable_features_warning: false,
|
||||
active_profile: Some("gpt3".to_string()),
|
||||
active_project: ProjectConfig { trust_level: None },
|
||||
windows_wsl_setup_acknowledged: false,
|
||||
@@ -3921,11 +3907,10 @@ model_verbosity = "high"
|
||||
forced_chatgpt_workspace_id: None,
|
||||
forced_login_method: None,
|
||||
include_apply_patch_tool: false,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: None,
|
||||
use_experimental_unified_exec_tool: false,
|
||||
ghost_snapshot: GhostSnapshotConfig::default(),
|
||||
features: Features::with_defaults(),
|
||||
suppress_unstable_features_warning: false,
|
||||
active_profile: Some("zdr".to_string()),
|
||||
active_project: ProjectConfig { trust_level: None },
|
||||
windows_wsl_setup_acknowledged: false,
|
||||
@@ -4005,11 +3990,10 @@ model_verbosity = "high"
|
||||
forced_chatgpt_workspace_id: None,
|
||||
forced_login_method: None,
|
||||
include_apply_patch_tool: false,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: None,
|
||||
use_experimental_unified_exec_tool: false,
|
||||
ghost_snapshot: GhostSnapshotConfig::default(),
|
||||
features: Features::with_defaults(),
|
||||
suppress_unstable_features_warning: false,
|
||||
active_profile: Some("gpt5".to_string()),
|
||||
active_project: ProjectConfig { trust_level: None },
|
||||
windows_wsl_setup_acknowledged: false,
|
||||
|
||||
@@ -21,7 +21,6 @@ use crate::instructions::SkillInstructions;
|
||||
use crate::instructions::UserInstructions;
|
||||
use crate::session_prefix::is_session_prefix;
|
||||
use crate::user_shell_command::is_user_shell_command_text;
|
||||
use crate::web_search::web_search_action_detail;
|
||||
|
||||
fn parse_user_message(message: &[ContentItem]) -> Option<UserMessageItem> {
|
||||
if UserInstructions::is_user_instructions(message)
|
||||
@@ -128,17 +127,14 @@ pub fn parse_turn_item(item: &ResponseItem) -> Option<TurnItem> {
|
||||
raw_content,
|
||||
}))
|
||||
}
|
||||
ResponseItem::WebSearchCall { id, action, .. } => {
|
||||
let (action, query) = match action {
|
||||
Some(action) => (action.clone(), web_search_action_detail(action)),
|
||||
None => (WebSearchAction::Other, String::new()),
|
||||
};
|
||||
Some(TurnItem::WebSearch(WebSearchItem {
|
||||
id: id.clone().unwrap_or_default(),
|
||||
query,
|
||||
action,
|
||||
}))
|
||||
}
|
||||
ResponseItem::WebSearchCall {
|
||||
id,
|
||||
action: WebSearchAction::Search { query },
|
||||
..
|
||||
} => Some(TurnItem::WebSearch(WebSearchItem {
|
||||
id: id.clone().unwrap_or_default(),
|
||||
query: query.clone().unwrap_or_default(),
|
||||
})),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@@ -148,7 +144,6 @@ mod tests {
|
||||
use super::parse_turn_item;
|
||||
use codex_protocol::items::AgentMessageContent;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::items::WebSearchItem;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ReasoningItemContent;
|
||||
use codex_protocol::models::ReasoningItemReasoningSummary;
|
||||
@@ -424,102 +419,18 @@ mod tests {
|
||||
let item = ResponseItem::WebSearchCall {
|
||||
id: Some("ws_1".to_string()),
|
||||
status: Some("completed".to_string()),
|
||||
action: Some(WebSearchAction::Search {
|
||||
action: WebSearchAction::Search {
|
||||
query: Some("weather".to_string()),
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected web search turn item");
|
||||
|
||||
match turn_item {
|
||||
TurnItem::WebSearch(search) => assert_eq!(
|
||||
search,
|
||||
WebSearchItem {
|
||||
id: "ws_1".to_string(),
|
||||
query: "weather".to_string(),
|
||||
action: WebSearchAction::Search {
|
||||
query: Some("weather".to_string()),
|
||||
},
|
||||
}
|
||||
),
|
||||
other => panic!("expected TurnItem::WebSearch, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_web_search_open_page_call() {
|
||||
let item = ResponseItem::WebSearchCall {
|
||||
id: Some("ws_open".to_string()),
|
||||
status: Some("completed".to_string()),
|
||||
action: Some(WebSearchAction::OpenPage {
|
||||
url: Some("https://example.com".to_string()),
|
||||
}),
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected web search turn item");
|
||||
|
||||
match turn_item {
|
||||
TurnItem::WebSearch(search) => assert_eq!(
|
||||
search,
|
||||
WebSearchItem {
|
||||
id: "ws_open".to_string(),
|
||||
query: "https://example.com".to_string(),
|
||||
action: WebSearchAction::OpenPage {
|
||||
url: Some("https://example.com".to_string()),
|
||||
},
|
||||
}
|
||||
),
|
||||
other => panic!("expected TurnItem::WebSearch, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_web_search_find_in_page_call() {
|
||||
let item = ResponseItem::WebSearchCall {
|
||||
id: Some("ws_find".to_string()),
|
||||
status: Some("completed".to_string()),
|
||||
action: Some(WebSearchAction::FindInPage {
|
||||
url: Some("https://example.com".to_string()),
|
||||
pattern: Some("needle".to_string()),
|
||||
}),
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected web search turn item");
|
||||
|
||||
match turn_item {
|
||||
TurnItem::WebSearch(search) => assert_eq!(
|
||||
search,
|
||||
WebSearchItem {
|
||||
id: "ws_find".to_string(),
|
||||
query: "'needle' in https://example.com".to_string(),
|
||||
action: WebSearchAction::FindInPage {
|
||||
url: Some("https://example.com".to_string()),
|
||||
pattern: Some("needle".to_string()),
|
||||
},
|
||||
}
|
||||
),
|
||||
other => panic!("expected TurnItem::WebSearch, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_partial_web_search_call_without_action_as_other() {
|
||||
let item = ResponseItem::WebSearchCall {
|
||||
id: Some("ws_partial".to_string()),
|
||||
status: Some("in_progress".to_string()),
|
||||
action: None,
|
||||
};
|
||||
|
||||
let turn_item = parse_turn_item(&item).expect("expected web search turn item");
|
||||
match turn_item {
|
||||
TurnItem::WebSearch(search) => assert_eq!(
|
||||
search,
|
||||
WebSearchItem {
|
||||
id: "ws_partial".to_string(),
|
||||
query: String::new(),
|
||||
action: WebSearchAction::Other,
|
||||
}
|
||||
),
|
||||
TurnItem::WebSearch(search) => {
|
||||
assert_eq!(search.id, "ws_1");
|
||||
assert_eq!(search.query, "weather");
|
||||
}
|
||||
other => panic!("expected TurnItem::WebSearch, got {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -312,7 +312,20 @@ async fn exec_windows_sandbox(
|
||||
text: stderr_text,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let aggregated_output = aggregate_output(&stdout, &stderr);
|
||||
// Best-effort aggregate: stdout then stderr (capped).
|
||||
let mut aggregated = Vec::with_capacity(
|
||||
stdout
|
||||
.text
|
||||
.len()
|
||||
.saturating_add(stderr.text.len())
|
||||
.min(EXEC_OUTPUT_MAX_BYTES),
|
||||
);
|
||||
append_capped(&mut aggregated, &stdout.text, EXEC_OUTPUT_MAX_BYTES);
|
||||
append_capped(&mut aggregated, &stderr.text, EXEC_OUTPUT_MAX_BYTES);
|
||||
let aggregated_output = StreamOutput {
|
||||
text: aggregated,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
Ok(RawExecToolCallOutput {
|
||||
exit_status,
|
||||
@@ -506,39 +519,6 @@ fn append_capped(dst: &mut Vec<u8>, src: &[u8], max_bytes: usize) {
|
||||
dst.extend_from_slice(&src[..take]);
|
||||
}
|
||||
|
||||
fn aggregate_output(
|
||||
stdout: &StreamOutput<Vec<u8>>,
|
||||
stderr: &StreamOutput<Vec<u8>>,
|
||||
) -> StreamOutput<Vec<u8>> {
|
||||
let total_len = stdout.text.len().saturating_add(stderr.text.len());
|
||||
let max_bytes = EXEC_OUTPUT_MAX_BYTES;
|
||||
let mut aggregated = Vec::with_capacity(total_len.min(max_bytes));
|
||||
|
||||
if total_len <= max_bytes {
|
||||
aggregated.extend_from_slice(&stdout.text);
|
||||
aggregated.extend_from_slice(&stderr.text);
|
||||
return StreamOutput {
|
||||
text: aggregated,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
}
|
||||
|
||||
// Under contention, reserve 1/3 for stdout and 2/3 for stderr; rebalance unused stderr to stdout.
|
||||
let want_stdout = stdout.text.len().min(max_bytes / 3);
|
||||
let want_stderr = stderr.text.len();
|
||||
let stderr_take = want_stderr.min(max_bytes.saturating_sub(want_stdout));
|
||||
let remaining = max_bytes.saturating_sub(want_stdout + stderr_take);
|
||||
let stdout_take = want_stdout + remaining.min(stdout.text.len().saturating_sub(want_stdout));
|
||||
|
||||
aggregated.extend_from_slice(&stdout.text[..stdout_take]);
|
||||
aggregated.extend_from_slice(&stderr.text[..stderr_take]);
|
||||
|
||||
StreamOutput {
|
||||
text: aggregated,
|
||||
truncated_after_lines: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ExecToolCallOutput {
|
||||
pub exit_code: i32,
|
||||
@@ -703,7 +683,20 @@ async fn consume_truncated_output(
|
||||
Duration::from_millis(IO_DRAIN_TIMEOUT_MS),
|
||||
)
|
||||
.await?;
|
||||
let aggregated_output = aggregate_output(&stdout, &stderr);
|
||||
// Best-effort aggregate: stdout then stderr (capped).
|
||||
let mut aggregated = Vec::with_capacity(
|
||||
stdout
|
||||
.text
|
||||
.len()
|
||||
.saturating_add(stderr.text.len())
|
||||
.min(EXEC_OUTPUT_MAX_BYTES),
|
||||
);
|
||||
append_capped(&mut aggregated, &stdout.text, EXEC_OUTPUT_MAX_BYTES);
|
||||
append_capped(&mut aggregated, &stderr.text, EXEC_OUTPUT_MAX_BYTES * 2);
|
||||
let aggregated_output = StreamOutput {
|
||||
text: aggregated,
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
Ok(RawExecToolCallOutput {
|
||||
exit_status,
|
||||
@@ -778,7 +771,6 @@ fn synthetic_exit_status(code: i32) -> ExitStatus {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::time::Duration;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
@@ -854,85 +846,6 @@ mod tests {
|
||||
assert_eq!(out.text.len(), EXEC_OUTPUT_MAX_BYTES);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aggregate_output_prefers_stderr_on_contention() {
|
||||
let stdout = StreamOutput {
|
||||
text: vec![b'a'; EXEC_OUTPUT_MAX_BYTES],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let stderr = StreamOutput {
|
||||
text: vec![b'b'; EXEC_OUTPUT_MAX_BYTES],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
let aggregated = aggregate_output(&stdout, &stderr);
|
||||
let stdout_cap = EXEC_OUTPUT_MAX_BYTES / 3;
|
||||
let stderr_cap = EXEC_OUTPUT_MAX_BYTES.saturating_sub(stdout_cap);
|
||||
|
||||
assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
|
||||
assert_eq!(aggregated.text[..stdout_cap], vec![b'a'; stdout_cap]);
|
||||
assert_eq!(aggregated.text[stdout_cap..], vec![b'b'; stderr_cap]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aggregate_output_fills_remaining_capacity_with_stderr() {
|
||||
let stdout_len = EXEC_OUTPUT_MAX_BYTES / 10;
|
||||
let stdout = StreamOutput {
|
||||
text: vec![b'a'; stdout_len],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let stderr = StreamOutput {
|
||||
text: vec![b'b'; EXEC_OUTPUT_MAX_BYTES],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
let aggregated = aggregate_output(&stdout, &stderr);
|
||||
let stderr_cap = EXEC_OUTPUT_MAX_BYTES.saturating_sub(stdout_len);
|
||||
|
||||
assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
|
||||
assert_eq!(aggregated.text[..stdout_len], vec![b'a'; stdout_len]);
|
||||
assert_eq!(aggregated.text[stdout_len..], vec![b'b'; stderr_cap]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aggregate_output_rebalances_when_stderr_is_small() {
|
||||
let stdout = StreamOutput {
|
||||
text: vec![b'a'; EXEC_OUTPUT_MAX_BYTES],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let stderr = StreamOutput {
|
||||
text: vec![b'b'; 1],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
let aggregated = aggregate_output(&stdout, &stderr);
|
||||
let stdout_len = EXEC_OUTPUT_MAX_BYTES.saturating_sub(1);
|
||||
|
||||
assert_eq!(aggregated.text.len(), EXEC_OUTPUT_MAX_BYTES);
|
||||
assert_eq!(aggregated.text[..stdout_len], vec![b'a'; stdout_len]);
|
||||
assert_eq!(aggregated.text[stdout_len..], vec![b'b'; 1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn aggregate_output_keeps_stdout_then_stderr_when_under_cap() {
|
||||
let stdout = StreamOutput {
|
||||
text: vec![b'a'; 4],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
let stderr = StreamOutput {
|
||||
text: vec![b'b'; 3],
|
||||
truncated_after_lines: None,
|
||||
};
|
||||
|
||||
let aggregated = aggregate_output(&stdout, &stderr);
|
||||
let mut expected = Vec::new();
|
||||
expected.extend_from_slice(&stdout.text);
|
||||
expected.extend_from_slice(&stderr.text);
|
||||
|
||||
assert_eq!(aggregated.text, expected);
|
||||
assert_eq!(aggregated.truncated_after_lines, None);
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
#[test]
|
||||
fn sandbox_detection_flags_sigsys_exit_code() {
|
||||
|
||||
@@ -5,20 +5,14 @@
|
||||
//! booleans through multiple types, call sites consult a single `Features`
|
||||
//! container attached to `Config`.
|
||||
|
||||
use crate::config::CONFIG_TOML_FILE;
|
||||
use crate::config::Config;
|
||||
use crate::config::ConfigToml;
|
||||
use crate::config::profile::ConfigProfile;
|
||||
use crate::protocol::Event;
|
||||
use crate::protocol::EventMsg;
|
||||
use crate::protocol::WarningEvent;
|
||||
use codex_otel::OtelManager;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::BTreeSet;
|
||||
use toml::Value as TomlValue;
|
||||
|
||||
mod legacy;
|
||||
pub(crate) use legacy::LegacyFeatureToggles;
|
||||
@@ -472,54 +466,3 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
default_enabled: false,
|
||||
},
|
||||
];
|
||||
|
||||
/// Push a warning event if any under-development features are enabled.
|
||||
pub fn maybe_push_unstable_features_warning(
|
||||
config: &Config,
|
||||
post_session_configured_events: &mut Vec<Event>,
|
||||
) {
|
||||
if config.suppress_unstable_features_warning {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut under_development_feature_keys = Vec::new();
|
||||
if let Some(table) = config
|
||||
.config_layer_stack
|
||||
.effective_config()
|
||||
.get("features")
|
||||
.and_then(TomlValue::as_table)
|
||||
{
|
||||
for (key, value) in table {
|
||||
if value.as_bool() != Some(true) {
|
||||
continue;
|
||||
}
|
||||
let Some(spec) = FEATURES.iter().find(|spec| spec.key == key.as_str()) else {
|
||||
continue;
|
||||
};
|
||||
if !config.features.enabled(spec.id) {
|
||||
continue;
|
||||
}
|
||||
if matches!(spec.stage, Stage::UnderDevelopment) {
|
||||
under_development_feature_keys.push(spec.key.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if under_development_feature_keys.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let under_development_feature_keys = under_development_feature_keys.join(", ");
|
||||
let config_path = config
|
||||
.codex_home
|
||||
.join(CONFIG_TOML_FILE)
|
||||
.display()
|
||||
.to_string();
|
||||
let message = format!(
|
||||
"Under-development features enabled: {under_development_feature_keys}. Under-development features are incomplete and may behave unpredictably. To suppress this warning, set `suppress_unstable_features_warning = true` in {config_path}."
|
||||
);
|
||||
post_session_configured_events.push(Event {
|
||||
id: "".to_owned(),
|
||||
msg: EventMsg::Warning(WarningEvent { message }),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -69,7 +69,6 @@ mod event_mapping;
|
||||
pub mod review_format;
|
||||
pub mod review_prompts;
|
||||
mod thread_manager;
|
||||
pub mod web_search;
|
||||
pub use codex_protocol::protocol::InitialHistory;
|
||||
pub use thread_manager::NewThread;
|
||||
pub use thread_manager::ThreadManager;
|
||||
|
||||
@@ -28,7 +28,7 @@ fn plan_preset() -> CollaborationModeMask {
|
||||
name: "Plan".to_string(),
|
||||
mode: Some(ModeKind::Plan),
|
||||
model: None,
|
||||
reasoning_effort: Some(Some(ReasoningEffort::Medium)),
|
||||
reasoning_effort: Some(Some(ReasoningEffort::High)),
|
||||
developer_instructions: Some(Some(COLLABORATION_MODE_PLAN.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ struct SkillFrontmatterMetadata {
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Deserialize)]
|
||||
struct SkillMetadataFile {
|
||||
struct SkillToml {
|
||||
#[serde(default)]
|
||||
interface: Option<Interface>,
|
||||
}
|
||||
@@ -51,7 +51,6 @@ struct Interface {
|
||||
}
|
||||
|
||||
const SKILLS_FILENAME: &str = "SKILL.md";
|
||||
const SKILLS_JSON_FILENAME: &str = "SKILL.json";
|
||||
const SKILLS_TOML_FILENAME: &str = "SKILL.toml";
|
||||
const SKILLS_DIR_NAME: &str = "skills";
|
||||
const MAX_NAME_LEN: usize = 64;
|
||||
@@ -371,94 +370,62 @@ fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, Ski
|
||||
}
|
||||
|
||||
fn load_skill_interface(skill_path: &Path) -> Option<SkillInterface> {
|
||||
// Fail open: optional interface metadata should not block loading SKILL.md.
|
||||
// Fail open: optional SKILL.toml metadata should not block loading SKILL.md.
|
||||
let skill_dir = skill_path.parent()?;
|
||||
let interface_paths = [
|
||||
(skill_dir.join(SKILLS_JSON_FILENAME), InterfaceFormat::Json),
|
||||
(skill_dir.join(SKILLS_TOML_FILENAME), InterfaceFormat::Toml),
|
||||
];
|
||||
|
||||
for (interface_path, format) in interface_paths {
|
||||
if !interface_path.exists() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let contents = match fs::read_to_string(&interface_path) {
|
||||
Ok(contents) => contents,
|
||||
Err(error) => {
|
||||
tracing::warn!(
|
||||
"ignoring {path}: failed to read {label}: {error}",
|
||||
path = interface_path.display(),
|
||||
label = format.label()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let parsed: SkillMetadataFile = match format.parse(&contents) {
|
||||
Ok(parsed) => parsed,
|
||||
Err(error) => {
|
||||
tracing::warn!(
|
||||
"ignoring {path}: invalid {label}: {error}",
|
||||
path = interface_path.display(),
|
||||
label = format.label()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let interface = parsed.interface?;
|
||||
|
||||
let interface = SkillInterface {
|
||||
display_name: resolve_str(
|
||||
interface.display_name,
|
||||
MAX_NAME_LEN,
|
||||
"interface.display_name",
|
||||
),
|
||||
short_description: resolve_str(
|
||||
interface.short_description,
|
||||
MAX_SHORT_DESCRIPTION_LEN,
|
||||
"interface.short_description",
|
||||
),
|
||||
icon_small: resolve_asset_path(skill_dir, "interface.icon_small", interface.icon_small),
|
||||
icon_large: resolve_asset_path(skill_dir, "interface.icon_large", interface.icon_large),
|
||||
brand_color: resolve_color_str(interface.brand_color, "interface.brand_color"),
|
||||
default_prompt: resolve_str(
|
||||
interface.default_prompt,
|
||||
MAX_DEFAULT_PROMPT_LEN,
|
||||
"interface.default_prompt",
|
||||
),
|
||||
};
|
||||
let has_fields = interface.display_name.is_some()
|
||||
|| interface.short_description.is_some()
|
||||
|| interface.icon_small.is_some()
|
||||
|| interface.icon_large.is_some()
|
||||
|| interface.brand_color.is_some()
|
||||
|| interface.default_prompt.is_some();
|
||||
return if has_fields { Some(interface) } else { None };
|
||||
let interface_path = skill_dir.join(SKILLS_TOML_FILENAME);
|
||||
if !interface_path.exists() {
|
||||
return None;
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
enum InterfaceFormat {
|
||||
Json,
|
||||
Toml,
|
||||
}
|
||||
|
||||
impl InterfaceFormat {
|
||||
fn label(self) -> &'static str {
|
||||
match self {
|
||||
InterfaceFormat::Json => "SKILL.json",
|
||||
InterfaceFormat::Toml => "SKILL.toml",
|
||||
let contents = match fs::read_to_string(&interface_path) {
|
||||
Ok(contents) => contents,
|
||||
Err(error) => {
|
||||
tracing::warn!(
|
||||
"ignoring {path}: failed to read SKILL.toml: {error}",
|
||||
path = interface_path.display()
|
||||
);
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
fn parse(self, contents: &str) -> Result<SkillMetadataFile, String> {
|
||||
match self {
|
||||
InterfaceFormat::Json => serde_json::from_str(contents).map_err(|err| err.to_string()),
|
||||
InterfaceFormat::Toml => toml::from_str(contents).map_err(|err| err.to_string()),
|
||||
};
|
||||
let parsed: SkillToml = match toml::from_str(&contents) {
|
||||
Ok(parsed) => parsed,
|
||||
Err(error) => {
|
||||
tracing::warn!(
|
||||
"ignoring {path}: invalid TOML: {error}",
|
||||
path = interface_path.display()
|
||||
);
|
||||
return None;
|
||||
}
|
||||
}
|
||||
};
|
||||
let interface = parsed.interface?;
|
||||
|
||||
let interface = SkillInterface {
|
||||
display_name: resolve_str(
|
||||
interface.display_name,
|
||||
MAX_NAME_LEN,
|
||||
"interface.display_name",
|
||||
),
|
||||
short_description: resolve_str(
|
||||
interface.short_description,
|
||||
MAX_SHORT_DESCRIPTION_LEN,
|
||||
"interface.short_description",
|
||||
),
|
||||
icon_small: resolve_asset_path(skill_dir, "interface.icon_small", interface.icon_small),
|
||||
icon_large: resolve_asset_path(skill_dir, "interface.icon_large", interface.icon_large),
|
||||
brand_color: resolve_color_str(interface.brand_color, "interface.brand_color"),
|
||||
default_prompt: resolve_str(
|
||||
interface.default_prompt,
|
||||
MAX_DEFAULT_PROMPT_LEN,
|
||||
"interface.default_prompt",
|
||||
),
|
||||
};
|
||||
let has_fields = interface.display_name.is_some()
|
||||
|| interface.short_description.is_some()
|
||||
|| interface.icon_small.is_some()
|
||||
|| interface.icon_large.is_some()
|
||||
|| interface.brand_color.is_some()
|
||||
|| interface.default_prompt.is_some();
|
||||
if has_fields { Some(interface) } else { None }
|
||||
}
|
||||
|
||||
fn resolve_asset_path(
|
||||
@@ -794,12 +761,6 @@ mod tests {
|
||||
path
|
||||
}
|
||||
|
||||
fn write_skill_interface_json_at(skill_dir: &Path, contents: &str) -> PathBuf {
|
||||
let path = skill_dir.join(SKILLS_JSON_FILENAME);
|
||||
fs::write(&path, contents).unwrap();
|
||||
path
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn loads_skill_interface_metadata_happy_path() {
|
||||
let codex_home = tempfile::tempdir().expect("tempdir");
|
||||
@@ -848,57 +809,6 @@ default_prompt = " default prompt "
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn loads_skill_interface_metadata_from_json() {
|
||||
let codex_home = tempfile::tempdir().expect("tempdir");
|
||||
let skill_path = write_skill(&codex_home, "demo", "ui-skill", "from json");
|
||||
let skill_dir = skill_path.parent().expect("skill dir");
|
||||
let normalized_skill_dir = normalized(skill_dir);
|
||||
|
||||
write_skill_interface_json_at(
|
||||
skill_dir,
|
||||
r##"
|
||||
{
|
||||
"interface": {
|
||||
"display_name": "UI Skill",
|
||||
"short_description": " short desc ",
|
||||
"icon_small": "./assets/small-400px.png",
|
||||
"icon_large": "./assets/large-logo.svg",
|
||||
"brand_color": "#3B82F6",
|
||||
"default_prompt": " default prompt "
|
||||
}
|
||||
}
|
||||
"##,
|
||||
);
|
||||
|
||||
let cfg = make_config(&codex_home).await;
|
||||
let outcome = load_skills(&cfg);
|
||||
|
||||
assert!(
|
||||
outcome.errors.is_empty(),
|
||||
"unexpected errors: {:?}",
|
||||
outcome.errors
|
||||
);
|
||||
assert_eq!(
|
||||
outcome.skills,
|
||||
vec![SkillMetadata {
|
||||
name: "ui-skill".to_string(),
|
||||
description: "from json".to_string(),
|
||||
short_description: None,
|
||||
interface: Some(SkillInterface {
|
||||
display_name: Some("UI Skill".to_string()),
|
||||
short_description: Some("short desc".to_string()),
|
||||
icon_small: Some(normalized_skill_dir.join("assets/small-400px.png")),
|
||||
icon_large: Some(normalized_skill_dir.join("assets/large-logo.svg")),
|
||||
brand_color: Some("#3B82F6".to_string()),
|
||||
default_prompt: Some("default prompt".to_string()),
|
||||
}),
|
||||
path: normalized(skill_path.as_path()),
|
||||
scope: SkillScope::User,
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn accepts_icon_paths_under_assets_dir() {
|
||||
let codex_home = tempfile::tempdir().expect("tempdir");
|
||||
|
||||
@@ -86,7 +86,7 @@ async fn start_review_conversation(
|
||||
let mut sub_agent_config = config.as_ref().clone();
|
||||
// Carry over review-only feature restrictions so the delegate cannot
|
||||
// re-enable blocked tools (web search, view image).
|
||||
sub_agent_config.web_search_mode = WebSearchMode::Disabled;
|
||||
sub_agent_config.web_search_mode = Some(WebSearchMode::Disabled);
|
||||
|
||||
// Set explicit review rubric for the sub-agent
|
||||
sub_agent_config.base_instructions = Some(crate::REVIEW_PROMPT.to_string());
|
||||
|
||||
@@ -36,14 +36,12 @@ impl ToolHandler for RequestUserInputHandler {
|
||||
}
|
||||
};
|
||||
|
||||
let mode = session.collaboration_mode().await.mode;
|
||||
if !matches!(mode, ModeKind::Plan | ModeKind::PairProgramming) {
|
||||
let mode_name = match mode {
|
||||
ModeKind::Code => "Code",
|
||||
ModeKind::Execute => "Execute",
|
||||
ModeKind::Custom => "Custom",
|
||||
ModeKind::Plan | ModeKind::PairProgramming => unreachable!(),
|
||||
};
|
||||
let disallowed_mode = match session.collaboration_mode().await.mode {
|
||||
ModeKind::Execute => Some("Execute"),
|
||||
ModeKind::Custom => Some("Custom"),
|
||||
_ => None,
|
||||
};
|
||||
if let Some(mode_name) = disallowed_mode {
|
||||
return Err(FunctionCallError::RespondToModel(format!(
|
||||
"request_user_input is unavailable in {mode_name} mode"
|
||||
)));
|
||||
|
||||
@@ -27,7 +27,7 @@ use std::collections::HashMap;
|
||||
pub(crate) struct ToolsConfig {
|
||||
pub shell_type: ConfigShellToolType,
|
||||
pub apply_patch_tool_type: Option<ApplyPatchToolType>,
|
||||
pub web_search_mode: WebSearchMode,
|
||||
pub web_search_mode: Option<WebSearchMode>,
|
||||
pub collab_tools: bool,
|
||||
pub collaboration_modes_tools: bool,
|
||||
pub experimental_supported_tools: Vec<String>,
|
||||
@@ -36,7 +36,7 @@ pub(crate) struct ToolsConfig {
|
||||
pub(crate) struct ToolsConfigParams<'a> {
|
||||
pub(crate) model_info: &'a ModelInfo,
|
||||
pub(crate) features: &'a Features,
|
||||
pub(crate) web_search_mode: WebSearchMode,
|
||||
pub(crate) web_search_mode: Option<WebSearchMode>,
|
||||
}
|
||||
|
||||
impl ToolsConfig {
|
||||
@@ -1374,17 +1374,17 @@ pub(crate) fn build_specs(
|
||||
}
|
||||
|
||||
match config.web_search_mode {
|
||||
WebSearchMode::Cached => {
|
||||
Some(WebSearchMode::Cached) => {
|
||||
builder.push_spec(ToolSpec::WebSearch {
|
||||
external_web_access: Some(false),
|
||||
});
|
||||
}
|
||||
WebSearchMode::Live => {
|
||||
Some(WebSearchMode::Live) => {
|
||||
builder.push_spec(ToolSpec::WebSearch {
|
||||
external_web_access: Some(true),
|
||||
});
|
||||
}
|
||||
WebSearchMode::Disabled => {}
|
||||
Some(WebSearchMode::Disabled) | None => {}
|
||||
}
|
||||
|
||||
builder.push_spec_with_parallel_support(create_view_image_tool(), true);
|
||||
@@ -1546,7 +1546,7 @@ mod tests {
|
||||
let config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Live,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
});
|
||||
let (tools, _) = build_specs(&config, None, &[]).build();
|
||||
|
||||
@@ -1610,7 +1610,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
let (tools, _) = build_specs(&tools_config, None, &[]).build();
|
||||
assert_contains_tool_names(
|
||||
@@ -1628,7 +1628,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
let (tools, _) = build_specs(&tools_config, None, &[]).build();
|
||||
assert!(
|
||||
@@ -1640,7 +1640,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
let (tools, _) = build_specs(&tools_config, None, &[]).build();
|
||||
assert_contains_tool_names(&tools, &["request_user_input"]);
|
||||
@@ -1649,7 +1649,7 @@ mod tests {
|
||||
fn assert_model_tools(
|
||||
model_slug: &str,
|
||||
features: &Features,
|
||||
web_search_mode: WebSearchMode,
|
||||
web_search_mode: Option<WebSearchMode>,
|
||||
expected_tools: &[&str],
|
||||
) {
|
||||
let config = test_config();
|
||||
@@ -1673,7 +1673,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
let (tools, _) = build_specs(&tools_config, None, &[]).build();
|
||||
|
||||
@@ -1695,7 +1695,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Live,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
});
|
||||
let (tools, _) = build_specs(&tools_config, None, &[]).build();
|
||||
|
||||
@@ -1715,7 +1715,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"gpt-5-codex",
|
||||
&features,
|
||||
WebSearchMode::Cached,
|
||||
Some(WebSearchMode::Cached),
|
||||
&[
|
||||
"shell_command",
|
||||
"list_mcp_resources",
|
||||
@@ -1737,7 +1737,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"gpt-5.1-codex",
|
||||
&features,
|
||||
WebSearchMode::Cached,
|
||||
Some(WebSearchMode::Cached),
|
||||
&[
|
||||
"shell_command",
|
||||
"list_mcp_resources",
|
||||
@@ -1760,7 +1760,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"gpt-5-codex",
|
||||
&features,
|
||||
WebSearchMode::Live,
|
||||
Some(WebSearchMode::Live),
|
||||
&[
|
||||
"exec_command",
|
||||
"write_stdin",
|
||||
@@ -1784,7 +1784,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"gpt-5.1-codex",
|
||||
&features,
|
||||
WebSearchMode::Live,
|
||||
Some(WebSearchMode::Live),
|
||||
&[
|
||||
"exec_command",
|
||||
"write_stdin",
|
||||
@@ -1807,7 +1807,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"codex-mini-latest",
|
||||
&features,
|
||||
WebSearchMode::Cached,
|
||||
Some(WebSearchMode::Cached),
|
||||
&[
|
||||
"local_shell",
|
||||
"list_mcp_resources",
|
||||
@@ -1828,7 +1828,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"gpt-5.1-codex-mini",
|
||||
&features,
|
||||
WebSearchMode::Cached,
|
||||
Some(WebSearchMode::Cached),
|
||||
&[
|
||||
"shell_command",
|
||||
"list_mcp_resources",
|
||||
@@ -1850,7 +1850,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"gpt-5",
|
||||
&features,
|
||||
WebSearchMode::Cached,
|
||||
Some(WebSearchMode::Cached),
|
||||
&[
|
||||
"shell",
|
||||
"list_mcp_resources",
|
||||
@@ -1871,7 +1871,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"gpt-5.1",
|
||||
&features,
|
||||
WebSearchMode::Cached,
|
||||
Some(WebSearchMode::Cached),
|
||||
&[
|
||||
"shell_command",
|
||||
"list_mcp_resources",
|
||||
@@ -1893,7 +1893,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"exp-5.1",
|
||||
&features,
|
||||
WebSearchMode::Cached,
|
||||
Some(WebSearchMode::Cached),
|
||||
&[
|
||||
"exec_command",
|
||||
"write_stdin",
|
||||
@@ -1917,7 +1917,7 @@ mod tests {
|
||||
assert_model_tools(
|
||||
"codex-mini-latest",
|
||||
&features,
|
||||
WebSearchMode::Live,
|
||||
Some(WebSearchMode::Live),
|
||||
&[
|
||||
"exec_command",
|
||||
"write_stdin",
|
||||
@@ -1941,7 +1941,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Live,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
});
|
||||
let (tools, _) = build_specs(&tools_config, Some(HashMap::new()), &[]).build();
|
||||
|
||||
@@ -1963,7 +1963,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
let (tools, _) = build_specs(&tools_config, None, &[]).build();
|
||||
|
||||
@@ -1982,7 +1982,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
let (tools, _) = build_specs(&tools_config, None, &[]).build();
|
||||
|
||||
@@ -2013,7 +2013,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Live,
|
||||
web_search_mode: Some(WebSearchMode::Live),
|
||||
});
|
||||
let (tools, _) = build_specs(
|
||||
&tools_config,
|
||||
@@ -2109,7 +2109,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
|
||||
// Intentionally construct a map with keys that would sort alphabetically.
|
||||
@@ -2186,7 +2186,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
|
||||
let (tools, _) = build_specs(
|
||||
@@ -2244,7 +2244,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
|
||||
let (tools, _) = build_specs(
|
||||
@@ -2299,7 +2299,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
|
||||
let (tools, _) = build_specs(
|
||||
@@ -2356,7 +2356,7 @@ mod tests {
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
|
||||
let (tools, _) = build_specs(
|
||||
@@ -2469,7 +2469,7 @@ Examples of valid command strings:
|
||||
let tools_config = ToolsConfig::new(&ToolsConfigParams {
|
||||
model_info: &model_info,
|
||||
features: &features,
|
||||
web_search_mode: WebSearchMode::Cached,
|
||||
web_search_mode: Some(WebSearchMode::Cached),
|
||||
});
|
||||
let (tools, _) = build_specs(
|
||||
&tools_config,
|
||||
|
||||
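The hunks above repeatedly pair `WebSearchMode::Cached` with `Some(WebSearchMode::Cached)`, which reads as the tests tracking a config field that changed from a bare `WebSearchMode` to an `Option<WebSearchMode>`. The sketch below is a standalone illustration of that shape, not the real codex-core types, and the fallback-to-a-default behavior is an assumption about why the field would become optional.

```rust
// Standalone sketch only: models the shape of the change above, where
// `web_search_mode` goes from a bare enum to an Option so an unset value
// can fall back to some default chosen elsewhere.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum WebSearchMode {
    Disabled,
    Cached,
    Live,
}

struct ToolsConfigParams {
    // Hypothetical stand-in for the real struct: `None` means "use the default".
    web_search_mode: Option<WebSearchMode>,
}

fn effective_web_search_mode(params: &ToolsConfigParams, default: WebSearchMode) -> WebSearchMode {
    params.web_search_mode.unwrap_or(default)
}

fn main() {
    let explicit = ToolsConfigParams {
        web_search_mode: Some(WebSearchMode::Cached),
    };
    let unset = ToolsConfigParams {
        web_search_mode: None,
    };
    // An explicit value wins; an unset value falls back to the supplied default.
    assert_eq!(effective_web_search_mode(&explicit, WebSearchMode::Live), WebSearchMode::Cached);
    assert_eq!(effective_web_search_mode(&unset, WebSearchMode::Live), WebSearchMode::Live);
    assert_ne!(effective_web_search_mode(&unset, WebSearchMode::Live), WebSearchMode::Disabled);
    println!("ok");
}
```

Wrapping the field also explains why the tests pin `Some(WebSearchMode::Cached)` explicitly: it keeps tool expectations stable even if the unset default changes.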
@@ -1,24 +0,0 @@
use codex_protocol::models::WebSearchAction;

pub fn web_search_action_detail(action: &WebSearchAction) -> String {
match action {
WebSearchAction::Search { query } => query.clone().unwrap_or_default(),
WebSearchAction::OpenPage { url } => url.clone().unwrap_or_default(),
WebSearchAction::FindInPage { url, pattern } => match (pattern, url) {
(Some(pattern), Some(url)) => format!("'{pattern}' in {url}"),
(Some(pattern), None) => format!("'{pattern}'"),
(None, Some(url)) => url.clone(),
(None, None) => String::new(),
},
WebSearchAction::Other => String::new(),
}
}

pub fn web_search_detail(action: Option<&WebSearchAction>, query: &str) -> String {
let detail = action.map(web_search_action_detail).unwrap_or_default();
if detail.is_empty() {
query.to_string()
} else {
detail
}
}
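For reference, the two helpers removed above are small enough to run in isolation. The sketch below mirrors them with a local stand-in for `codex_protocol::models::WebSearchAction` (the real enum lives in codex-protocol), mainly to show the one non-obvious behavior: `web_search_detail` falls back to the raw query string whenever the action yields no detail.

```rust
// Standalone sketch of the removed helpers' behavior, using a local stand-in
// enum instead of the real codex_protocol type.
#[allow(dead_code)] // variants mirror the removed helper even when unused here
enum WebSearchAction {
    Search { query: Option<String> },
    OpenPage { url: Option<String> },
    FindInPage { url: Option<String>, pattern: Option<String> },
    Other,
}

fn web_search_action_detail(action: &WebSearchAction) -> String {
    match action {
        WebSearchAction::Search { query } => query.clone().unwrap_or_default(),
        WebSearchAction::OpenPage { url } => url.clone().unwrap_or_default(),
        WebSearchAction::FindInPage { url, pattern } => match (pattern, url) {
            (Some(pattern), Some(url)) => format!("'{pattern}' in {url}"),
            (Some(pattern), None) => format!("'{pattern}'"),
            (None, Some(url)) => url.clone(),
            (None, None) => String::new(),
        },
        WebSearchAction::Other => String::new(),
    }
}

fn web_search_detail(action: Option<&WebSearchAction>, query: &str) -> String {
    let detail = action.map(web_search_action_detail).unwrap_or_default();
    if detail.is_empty() { query.to_string() } else { detail }
}

fn main() {
    let find = WebSearchAction::FindInPage {
        url: Some("https://example.com/docs".into()),
        pattern: Some("TODO".into()),
    };
    assert_eq!(web_search_detail(Some(&find), "fallback"), "'TODO' in https://example.com/docs");
    // With no usable detail (e.g. `Other`), the original query string is used instead.
    assert_eq!(web_search_detail(Some(&WebSearchAction::Other), "fallback"), "fallback");
    println!("ok");
}
```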
@@ -1,108 +1,46 @@
# Plan Mode (Conversational)

You work in 3 phases, and you should *chat your way* to a great plan before finalizing it. A great plan is very detailed—intent- and implementation-wise—so that it can be handed to another engineer or agent to be implemented right away. It must be **decision complete**, where the implementer does not need to make any decisions.
You work in 2 phases and you should *chat your way* to a great plan before finalizing it.

## Mode rules (strict)
While in **Plan Mode**, you must not perform any mutating or execution actions. Once you enter Plan Mode, you remain there until you are **explicitly instructed otherwise**. Plan Mode may continue across multiple user messages unless a developer message ends it.

You are in **Plan Mode** until a developer message explicitly ends it.
User intent, tone, or imperative language does **not** trigger a mode change. If a user asks for execution while you are still in Plan Mode, you must treat that request as a prompt to **plan the execution**, not to carry it out.

Plan Mode is not changed by user intent, tone, or imperative language. If a user asks for execution while still in Plan Mode, treat it as a request to **plan the execution**, not perform it.
PHASE 1 — Intent chat (what they actually want)
- Keep asking until you can clearly state: goal + success criteria, audience, in/out of scope, constraints, current state, and the key preferences/tradeoffs.
- Bias toward questions over guessing: if any high‑impact ambiguity remains, do NOT plan yet—ask.
- Include a “Confirm my understanding” question in each round (so the user can correct you early).

## Execution vs. mutation in Plan Mode

You may explore and execute **non-mutating** actions that improve the plan. You must not perform **mutating** actions.

### Allowed (non-mutating, plan-improving)

Actions that gather truth, reduce ambiguity, or validate feasibility without changing repo-tracked state. Examples:

* Reading or searching files, configs, schemas, types, manifests, and docs
* Static analysis, inspection, and repo exploration
* Dry-run style commands when they do not edit repo-tracked files
* Tests, builds, or checks that may write to caches or build artifacts (for example, `target/`, `.cache/`, or snapshots) so long as they do not edit repo-tracked files

### Not allowed (mutating, plan-executing)

Actions that implement the plan or change repo-tracked state. Examples:

* Editing or writing files
* Generating, updating, or accepting snapshots
* Running formatters or linters that rewrite files
* Applying patches, migrations, or codegen that updates repo-tracked files
* Side-effectful commands whose purpose is to carry out the plan rather than refine it

When in doubt: if the action would reasonably be described as "doing the work" rather than "planning the work," do not do it.

## PHASE 1 — Ground in the environment (explore first, ask second)

Begin by grounding yourself in the actual environment. Eliminate unknowns in the prompt by discovering facts, not by asking the user. Resolve all questions that can be answered through exploration or inspection. Identify missing or ambiguous details only if they cannot be derived from the environment. Silent exploration between turns is allowed and encouraged.

Do not ask questions that can be answered from the repo or system (for example, "where is this struct?" or "which UI component should we use?" when exploration can make it clear). Only ask once you have exhausted reasonable non-mutating exploration.

## PHASE 2 — Intent chat (what they actually want)

* Keep asking until you can clearly state: goal + success criteria, audience, in/out of scope, constraints, current state, and the key preferences/tradeoffs.
* Bias toward questions over guessing: if any high-impact ambiguity remains, do NOT plan yet—ask.

## PHASE 3 — Implementation chat (what/how we’ll build)

* Once intent is stable, keep asking until the spec is decision complete: approach, interfaces (APIs/schemas/I/O), data flow, edge cases/failure modes, testing + acceptance criteria, rollout/monitoring, and any migrations/compat constraints.
PHASE 2 — Implementation chat (what/how we’ll build)
- Once intent is stable, keep asking until the spec is decision‑complete: approach, interfaces (APIs/schemas/I/O), data flow, edge cases/failure modes, testing + acceptance criteria, rollout/monitoring, and any migrations/compat constraints.

## Hard interaction rule (critical)

Every assistant turn MUST be exactly one of:
A) a `request_user_input` tool call (questions/options only), OR
B) a non-final status update with no questions and no plan content, OR
C) the final output: a titled, plan-only document.

B) the final output: a titled, plan‑only document.
Rules:

* No questions in free text (only via `request_user_input`).
* Never mix a `request_user_input` call with plan content.
* Status updates must not include questions or plan content.
* Internal tool/repo exploration is allowed privately before A, B, or C.

Status updates should be frequent during exploration. Provide 1-2 sentence updates that summarize discoveries, assumption changes, or why you are changing direction. Use Parallel tools for exploration.
- No questions in free text (only via `request_user_input`).
- Never mix a `request_user_input` call with plan content.
- Internal tool/repo exploration is allowed privately before A or B.

## Ask a lot, but never ask trivia

You SHOULD ask many questions, but each question must:

* materially change the spec/plan, OR
* confirm/lock an assumption, OR
* choose between meaningful tradeoffs.
* not be answerable by non-mutating commands.

Use the `request_user_input` tool only for decisions that materially change the plan, for confirming important assumptions, or for information that cannot be discovered via non-mutating exploration.
- materially change the spec/plan, OR
- confirm/lock an assumption, OR
- choose between meaningful tradeoffs.
- not be answerable by non-mutating commands
Batch questions (e.g., 4–10) per `request_user_input` call to keep momentum.

## Two kinds of unknowns (treat differently)
1) Discoverable facts (repo/system truth): explore first.
- Before asking, run ≥2 targeted searches (exact + variant) and check likely sources of truth (configs/manifests/entrypoints/schemas/types/constants).
- Ask only if: multiple plausible candidates; nothing found but you need a missing identifier/context; or ambiguity is actually product intent.
- If asking, present concrete candidates (paths/service names) + recommend one.

1. **Discoverable facts** (repo/system truth): explore first.

* Before asking, run targeted searches and check likely sources of truth (configs/manifests/entrypoints/schemas/types/constants).
* Ask only if: multiple plausible candidates; nothing found but you need a missing identifier/context; or ambiguity is actually product intent.
* If asking, present concrete candidates (paths/service names) + recommend one.
* Never ask questions you can answer from your environment (e.g., “where is this struct”).

2. **Preferences/tradeoffs** (not discoverable): ask early.

* These are intent or implementation preferences that cannot be derived from exploration.
* Provide 2–4 mutually exclusive options + a recommended default.
* If unanswered, proceed with the recommended option and record it as an assumption in the final plan.
2) Preferences/tradeoffs (not discoverable): ask early.
- Provide 2–4 mutually exclusive options + a recommended default.
- If unanswered, proceed with the recommended option and record it as an assumption in the final plan.

## Finalization rule

Only output the final plan when it is decision complete and leaves no decisions to the implementer.

The final plan must be plan-only and include:

* A clear title
* Exact file paths to change
* Exact structures or shapes to introduce or modify
* Exact function, method, type, and variable names and signatures
* Test cases
* Explicit assumptions and defaults chosen where needed

Do not ask "should I proceed?" in the final output.

Only produce the final answer when you are presenting the complete spec.
Only output the final plan when remaining unknowns are low‑impact and explicitly listed as assumptions.
Final output must be plan‑only with a good title (no “should I proceed?”).
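The Plan Mode prompt above is plain instruction text, but its hard interaction rule is essentially a contract on what a single turn may contain (three turn kinds in the longer version, two in the shorter revision). The enum below is a purely hypothetical illustration of that contract; it does not correspond to any type in this repository.

```rust
// Hypothetical illustration only: a way to picture the Plan Mode turn contract.
#[allow(dead_code)] // not all variants are exercised in this tiny demo
enum PlanModeTurn {
    // A `request_user_input` tool call carrying only questions/options.
    RequestUserInput { questions: Vec<String> },
    // A short, non-final status update with no questions and no plan content.
    StatusUpdate(String),
    // The final output: a titled, plan-only document.
    FinalPlan { title: String, body: String },
}

fn describe(turn: &PlanModeTurn) -> String {
    match turn {
        PlanModeTurn::RequestUserInput { questions } => {
            format!("asking {} batched question(s)", questions.len())
        }
        PlanModeTurn::StatusUpdate(note) => format!("status: {note}"),
        PlanModeTurn::FinalPlan { title, .. } => format!("final plan: {title}"),
    }
}

fn main() {
    // Batching several questions in one call matches the "keep momentum" advice above.
    let turn = PlanModeTurn::RequestUserInput {
        questions: vec![
            "Confirm my understanding of the goal?".into(),
            "Prefer option A (recommended) or option B?".into(),
        ],
    };
    println!("{}", describe(&turn));
}
```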
@@ -494,13 +494,14 @@ pub fn ev_reasoning_text_delta(delta: &str) -> Value {
})
}

pub fn ev_web_search_call_added_partial(id: &str, status: &str) -> Value {
pub fn ev_web_search_call_added(id: &str, status: &str, query: &str) -> Value {
serde_json::json!({
"type": "response.output_item.added",
"item": {
"type": "web_search_call",
"id": id,
"status": status
"status": status,
"action": {"type": "search", "query": query}
}
})
}
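The updated helper above now attaches a search `action` to the `response.output_item.added` event, so tests can observe the query as soon as the web search item appears rather than only when it completes. Below is a minimal standalone sketch of the same construction, assuming only the `serde_json` crate; the id, status, and query values are placeholders.

```rust
// Standalone sketch of the event shape built by the test-support helper above.
use serde_json::Value;

fn ev_web_search_call_added(id: &str, status: &str, query: &str) -> Value {
    serde_json::json!({
        "type": "response.output_item.added",
        "item": {
            "type": "web_search_call",
            "id": id,
            "status": status,
            "action": {"type": "search", "query": query}
        }
    })
}

fn main() {
    let event = ev_web_search_call_added("web-search-1", "in_progress", "weather seattle");
    // The extra `action` field is what lets assertions read the search query
    // from the "item added" event instead of waiting for the "done" event.
    println!("{}", serde_json::to_string_pretty(&event).expect("serialize event"));
}
```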
@@ -57,7 +57,6 @@ pub struct TestCodexBuilder {
config_mutators: Vec<Box<ConfigMutator>>,
auth: CodexAuth,
pre_build_hooks: Vec<Box<PreBuildHook>>,
home: Option<Arc<TempDir>>,
}

impl TestCodexBuilder {
@@ -89,16 +88,8 @@ impl TestCodexBuilder {
self
}

pub fn with_home(mut self, home: Arc<TempDir>) -> Self {
self.home = Some(home);
self
}

pub async fn build(&mut self, server: &wiremock::MockServer) -> anyhow::Result<TestCodex> {
let home = match self.home.clone() {
Some(home) => home,
None => Arc::new(TempDir::new()?),
};
let home = Arc::new(TempDir::new()?);
self.build_with_home(server, home, None).await
}

@@ -107,10 +98,7 @@ impl TestCodexBuilder {
server: &StreamingSseServer,
) -> anyhow::Result<TestCodex> {
let base_url = server.uri();
let home = match self.home.clone() {
Some(home) => home,
None => Arc::new(TempDir::new()?),
};
let home = Arc::new(TempDir::new()?);
self.build_with_home_and_base_url(format!("{base_url}/v1"), home, None)
.await
}
@@ -120,10 +108,7 @@ impl TestCodexBuilder {
server: &WebSocketTestServer,
) -> anyhow::Result<TestCodex> {
let base_url = format!("{}/v1", server.uri());
let home = match self.home.clone() {
Some(home) => home,
None => Arc::new(TempDir::new()?),
};
let home = Arc::new(TempDir::new()?);
let base_url_clone = base_url.clone();
self.config_mutators.push(Box::new(move |config| {
config.model_provider.base_url = Some(base_url_clone);
@@ -447,6 +432,5 @@ pub fn test_codex() -> TestCodexBuilder {
config_mutators: vec![],
auth: CodexAuth::from_api_key("dummy"),
pre_build_hooks: vec![],
home: None,
}
}
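The `with_home` method and `home: Option<Arc<TempDir>>` field shown above let several test sessions share one codex home directory instead of each `build` call creating a fresh temp dir. Here is a standalone sketch of that pattern, using only `tempfile` and `Arc`; the real `TestCodexBuilder` carries far more state, so treat the names as illustrative.

```rust
// Standalone sketch of the "share a temp home across builds" pattern.
use std::sync::Arc;
use tempfile::TempDir;

#[derive(Default)]
struct BuilderSketch {
    home: Option<Arc<TempDir>>,
}

impl BuilderSketch {
    fn with_home(mut self, home: Arc<TempDir>) -> Self {
        self.home = Some(home);
        self
    }

    fn build(&mut self) -> std::io::Result<Arc<TempDir>> {
        // Reuse the shared home when one was provided; otherwise make a fresh one.
        match self.home.clone() {
            Some(home) => Ok(home),
            None => Ok(Arc::new(TempDir::new()?)),
        }
    }
}

fn main() -> std::io::Result<()> {
    let shared = Arc::new(TempDir::new()?);
    let first = BuilderSketch::default().with_home(shared.clone()).build()?;
    let second = BuilderSketch::default().with_home(shared.clone()).build()?;
    // Both builds see the same path, which is what lets a resume-style test
    // write a rollout file once and read it back from a second session.
    assert_eq!(first.path(), second.path());
    Ok(())
}
```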
@@ -264,7 +264,7 @@ async fn responses_stream_includes_web_search_eligible_header_false_when_disable

let test = test_codex()
.with_config(|config| {
config.web_search_mode = WebSearchMode::Disabled;
config.web_search_mode = Some(WebSearchMode::Disabled);
})
.build(&server)
.await
@@ -257,19 +257,31 @@ async fn resume_includes_initial_messages_and_sends_prior_items() {
|
||||
let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
|
||||
|
||||
// Configure Codex to resume from our file
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let mut builder = test_codex()
|
||||
.with_home(codex_home.clone())
|
||||
.with_config(|config| {
|
||||
// Ensure user instructions are NOT delivered on resume.
|
||||
config.user_instructions = Some("be nice".to_string());
|
||||
});
|
||||
let test = builder
|
||||
.resume(&server, codex_home, session_path.clone())
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = model_provider;
|
||||
// Also configure user instructions to ensure they are NOT delivered on resume.
|
||||
config.user_instructions = Some("be nice".to_string());
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
CodexAuth::from_api_key("Test API Key"),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let auth_manager =
|
||||
codex_core::AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
|
||||
let NewThread {
|
||||
thread: codex,
|
||||
session_configured,
|
||||
..
|
||||
} = thread_manager
|
||||
.resume_thread_from_rollout(config, session_path.clone(), auth_manager)
|
||||
.await
|
||||
.expect("resume conversation");
|
||||
let codex = test.codex.clone();
|
||||
let session_configured = test.session_configured;
|
||||
|
||||
// 1) Assert initial_messages only includes existing EventMsg entries; response items are not converted
|
||||
let initial_msgs = session_configured
|
||||
@@ -355,13 +367,30 @@ async fn includes_conversation_id_and_model_headers_in_request() {
|
||||
|
||||
let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
|
||||
|
||||
let mut builder = test_codex().with_auth(CodexAuth::from_api_key("Test API Key"));
|
||||
let test = builder
|
||||
.build(&server)
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
|
||||
// Init session
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = model_provider;
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
CodexAuth::from_api_key("Test API Key"),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let NewThread {
|
||||
thread: codex,
|
||||
thread_id: session_id,
|
||||
session_configured: _,
|
||||
..
|
||||
} = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation");
|
||||
let codex = test.codex.clone();
|
||||
let session_id = test.session_configured.session_id;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -396,16 +425,26 @@ async fn includes_base_instructions_override_in_request() {
|
||||
let server = MockServer::start().await;
|
||||
let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
|
||||
|
||||
let mut builder = test_codex()
|
||||
.with_auth(CodexAuth::from_api_key("Test API Key"))
|
||||
.with_config(|config| {
|
||||
config.base_instructions = Some("test instructions".to_string());
|
||||
});
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
|
||||
config.base_instructions = Some("test instructions".to_string());
|
||||
config.model_provider = model_provider;
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
CodexAuth::from_api_key("Test API Key"),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation")
|
||||
.codex;
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -440,19 +479,29 @@ async fn chatgpt_auth_sends_correct_request() {
|
||||
|
||||
let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
|
||||
|
||||
let mut model_provider = built_in_model_providers()["openai"].clone();
|
||||
model_provider.base_url = Some(format!("{}/api/codex", server.uri()));
|
||||
let mut builder = test_codex()
|
||||
.with_auth(create_dummy_codex_auth())
|
||||
.with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
});
|
||||
let test = builder
|
||||
.build(&server)
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/api/codex", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
|
||||
// Init session
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = model_provider;
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
create_dummy_codex_auth(),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let NewThread {
|
||||
thread: codex,
|
||||
thread_id,
|
||||
session_configured: _,
|
||||
..
|
||||
} = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation");
|
||||
let codex = test.codex.clone();
|
||||
let thread_id = test.session_configured.session_id;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -568,16 +617,26 @@ async fn includes_user_instructions_message_in_request() {
|
||||
|
||||
let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
|
||||
|
||||
let mut builder = test_codex()
|
||||
.with_auth(CodexAuth::from_api_key("Test API Key"))
|
||||
.with_config(|config| {
|
||||
config.user_instructions = Some("be nice".to_string());
|
||||
});
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = model_provider;
|
||||
config.user_instructions = Some("be nice".to_string());
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
CodexAuth::from_api_key("Test API Key"),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation")
|
||||
.codex;
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -630,7 +689,12 @@ async fn skills_append_to_instructions() {
|
||||
|
||||
let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
|
||||
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let skill_dir = codex_home.path().join("skills/demo");
|
||||
std::fs::create_dir_all(&skill_dir).expect("create skill dir");
|
||||
std::fs::write(
|
||||
@@ -639,18 +703,20 @@ async fn skills_append_to_instructions() {
|
||||
)
|
||||
.expect("write skill");
|
||||
|
||||
let codex_home_path = codex_home.path().to_path_buf();
|
||||
let mut builder = test_codex()
|
||||
.with_home(codex_home.clone())
|
||||
.with_auth(CodexAuth::from_api_key("Test API Key"))
|
||||
.with_config(move |config| {
|
||||
config.cwd = codex_home_path;
|
||||
});
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = model_provider;
|
||||
config.cwd = codex_home.path().to_path_buf();
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
CodexAuth::from_api_key("Test API Key"),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation")
|
||||
.codex;
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -1065,17 +1131,28 @@ async fn includes_developer_instructions_message_in_request() {
|
||||
let server = MockServer::start().await;
|
||||
|
||||
let resp_mock = mount_sse_once(&server, sse_completed("resp1")).await;
|
||||
let mut builder = test_codex()
|
||||
.with_auth(CodexAuth::from_api_key("Test API Key"))
|
||||
.with_config(|config| {
|
||||
config.user_instructions = Some("be nice".to_string());
|
||||
config.developer_instructions = Some("be useful".to_string());
|
||||
});
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = model_provider;
|
||||
config.user_instructions = Some("be nice".to_string());
|
||||
config.developer_instructions = Some("be useful".to_string());
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
CodexAuth::from_api_key("Test API Key"),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation")
|
||||
.codex;
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -1211,9 +1288,9 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {
|
||||
prompt.input.push(ResponseItem::WebSearchCall {
|
||||
id: Some("web-search-id".into()),
|
||||
status: Some("completed".into()),
|
||||
action: Some(WebSearchAction::Search {
|
||||
action: WebSearchAction::Search {
|
||||
query: Some("weather".into()),
|
||||
}),
|
||||
},
|
||||
});
|
||||
prompt.input.push(ResponseItem::FunctionCall {
|
||||
id: Some("function-id".into()),
|
||||
@@ -1313,16 +1390,20 @@ async fn token_count_includes_rate_limits_snapshot() {
|
||||
let mut provider = built_in_model_providers()["openai"].clone();
|
||||
provider.base_url = Some(format!("{}/v1", server.uri()));
|
||||
|
||||
let mut builder = test_codex()
|
||||
.with_auth(CodexAuth::from_api_key("test"))
|
||||
.with_config(move |config| {
|
||||
config.model_provider = provider;
|
||||
});
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = provider;
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
CodexAuth::from_api_key("test"),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create conversation")
|
||||
.codex;
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -1672,16 +1753,20 @@ async fn azure_overrides_assign_properties_used_for_responses_url() {
|
||||
};
|
||||
|
||||
// Init session
|
||||
let mut builder = test_codex()
|
||||
.with_auth(create_dummy_codex_auth())
|
||||
.with_config(move |config| {
|
||||
config.model_provider = provider;
|
||||
});
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = provider;
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
create_dummy_codex_auth(),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation")
|
||||
.codex;
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -1752,16 +1837,20 @@ async fn env_var_overrides_loaded_auth() {
|
||||
};
|
||||
|
||||
// Init session
|
||||
let mut builder = test_codex()
|
||||
.with_auth(create_dummy_codex_auth())
|
||||
.with_config(move |config| {
|
||||
config.model_provider = provider;
|
||||
});
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = provider;
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
create_dummy_codex_auth(),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation")
|
||||
.codex;
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -1816,12 +1905,26 @@ async fn history_dedupes_streamed_and_final_messages_across_turns() {
|
||||
|
||||
let request_log = mount_sse_sequence(&server, vec![sse1.clone(), sse1.clone(), sse1]).await;
|
||||
|
||||
let mut builder = test_codex().with_auth(CodexAuth::from_api_key("Test API Key"));
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
// Configure provider to point to mock server (Responses API) and use API key auth.
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
|
||||
// Init session with isolated codex home.
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = model_provider;
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider_and_home(
|
||||
CodexAuth::from_api_key("Test API Key"),
|
||||
config.model_provider.clone(),
|
||||
config.codex_home.clone(),
|
||||
);
|
||||
let NewThread { thread: codex, .. } = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation")
|
||||
.codex;
|
||||
.expect("create new conversation");
|
||||
|
||||
// Turn 1: user sends U1; wait for completion.
|
||||
codex
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
#![allow(clippy::expect_used)]
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::ModelProviderInfo;
|
||||
use codex_core::NewThread;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::built_in_model_providers;
|
||||
use codex_core::compact::SUMMARIZATION_PROMPT;
|
||||
use codex_core::compact::SUMMARY_PREFIX;
|
||||
@@ -15,6 +17,7 @@ use codex_core::protocol::SandboxPolicy;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_protocol::config_types::ReasoningSummary;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::responses::ev_local_shell_call;
|
||||
use core_test_support::responses::ev_reasoning_item;
|
||||
use core_test_support::skip_if_no_network;
|
||||
@@ -22,6 +25,7 @@ use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::wait_for_event;
|
||||
use core_test_support::wait_for_event_match;
|
||||
use std::collections::VecDeque;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use core_test_support::responses::ev_assistant_message;
|
||||
use core_test_support::responses::ev_completed;
|
||||
@@ -136,14 +140,21 @@ async fn summarize_context_three_requests_and_instructions() {
|
||||
|
||||
// Build config pointing to the mock server and spawn Codex.
|
||||
let model_provider = non_openai_model_provider(&server);
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(config);
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
});
|
||||
let test = builder.build(&server).await.unwrap();
|
||||
let codex = test.codex.clone();
|
||||
let rollout_path = test.session_configured.rollout_path.expect("rollout path");
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(&mut config);
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let NewThread {
|
||||
thread: codex,
|
||||
session_configured,
|
||||
..
|
||||
} = thread_manager.start_thread(config).await.unwrap();
|
||||
let rollout_path = session_configured.rollout_path.expect("rollout path");
|
||||
|
||||
// 1) Normal user input – should hit server once.
|
||||
codex
|
||||
@@ -327,15 +338,20 @@ async fn manual_compact_uses_custom_prompt() {
|
||||
let custom_prompt = "Use this compact prompt instead";
|
||||
|
||||
let model_provider = non_openai_model_provider(&server);
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
config.compact_prompt = Some(custom_prompt.to_string());
|
||||
});
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
config.compact_prompt = Some(custom_prompt.to_string());
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create conversation")
|
||||
.codex;
|
||||
.thread;
|
||||
|
||||
codex.submit(Op::Compact).await.expect("trigger compact");
|
||||
let warning_event = wait_for_event(&codex, |ev| matches!(ev, EventMsg::Warning(_))).await;
|
||||
@@ -398,11 +414,16 @@ async fn manual_compact_emits_api_and_local_token_usage_events() {
|
||||
mount_sse_once(&server, sse_compact).await;
|
||||
|
||||
let model_provider = non_openai_model_provider(&server);
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(config);
|
||||
});
|
||||
let codex = builder.build(&server).await.unwrap().codex;
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(&mut config);
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let NewThread { thread: codex, .. } = thread_manager.start_thread(config).await.unwrap();
|
||||
|
||||
// Trigger manual compact and collect TokenCount events for the compact turn.
|
||||
codex.submit(Op::Compact).await.unwrap();
|
||||
@@ -1018,12 +1039,16 @@ async fn auto_compact_runs_after_token_limit_hit() {
|
||||
|
||||
let model_provider = non_openai_model_provider(&server);
|
||||
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(config);
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
});
|
||||
let codex = builder.build(&server).await.unwrap().codex;
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(&mut config);
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let codex = thread_manager.start_thread(config).await.unwrap().thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -1354,14 +1379,20 @@ async fn auto_compact_persists_rollout_entries() {
|
||||
|
||||
let model_provider = non_openai_model_provider(&server);
|
||||
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(config);
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
});
|
||||
let test = builder.build(&server).await.unwrap();
|
||||
let codex = test.codex.clone();
|
||||
let session_configured = test.session_configured;
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(&mut config);
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let NewThread {
|
||||
thread: codex,
|
||||
session_configured,
|
||||
..
|
||||
} = thread_manager.start_thread(config).await.unwrap();
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -1466,12 +1497,19 @@ async fn manual_compact_retries_after_context_window_error() {
|
||||
|
||||
let model_provider = non_openai_model_provider(&server);
|
||||
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(config);
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
});
|
||||
let codex = builder.build(&server).await.unwrap().codex;
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(&mut config);
|
||||
config.model_auto_compact_token_limit = Some(200_000);
|
||||
let codex = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
)
|
||||
.start_thread(config)
|
||||
.await
|
||||
.unwrap()
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -1594,11 +1632,18 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
|
||||
|
||||
let model_provider = non_openai_model_provider(&server);
|
||||
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(config);
|
||||
});
|
||||
let codex = builder.build(&server).await.unwrap().codex;
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(&mut config);
|
||||
let codex = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
)
|
||||
.start_thread(config)
|
||||
.await
|
||||
.unwrap()
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
@@ -1655,11 +1700,12 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
|
||||
&& item
|
||||
.get("content")
|
||||
.and_then(|v| v.as_array())
|
||||
.is_some_and(|arr| {
|
||||
.map(|arr| {
|
||||
arr.iter().any(|entry| {
|
||||
entry.get("text").and_then(|v| v.as_str()) == Some(expected)
|
||||
})
|
||||
})
|
||||
.unwrap_or(false)
|
||||
})
|
||||
};
|
||||
|
||||
@@ -1797,12 +1843,16 @@ async fn auto_compact_allows_multiple_attempts_when_interleaved_with_other_turn_
|
||||
|
||||
let model_provider = non_openai_model_provider(&server);
|
||||
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(config);
|
||||
config.model_auto_compact_token_limit = Some(200);
|
||||
});
|
||||
let codex = builder.build(&server).await.unwrap().codex;
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(&mut config);
|
||||
config.model_auto_compact_token_limit = Some(200);
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let codex = thread_manager.start_thread(config).await.unwrap().thread;
|
||||
|
||||
let mut auto_compact_lifecycle_events = Vec::new();
|
||||
for user in [MULTI_AUTO_MSG, follow_up_user, final_user] {
|
||||
@@ -1904,13 +1954,21 @@ async fn auto_compact_triggers_after_function_call_over_95_percent_usage() {
|
||||
|
||||
let model_provider = non_openai_model_provider(&server);
|
||||
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(config);
|
||||
config.model_context_window = Some(context_window);
|
||||
config.model_auto_compact_token_limit = Some(limit);
|
||||
});
|
||||
let codex = builder.build(&server).await.unwrap().codex;
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
set_test_compact_prompt(&mut config);
|
||||
config.model_context_window = Some(context_window);
|
||||
config.model_auto_compact_token_limit = Some(limit);
|
||||
|
||||
let codex = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
)
|
||||
.start_thread(config)
|
||||
.await
|
||||
.unwrap()
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::UserInput {
|
||||
|
||||
@@ -10,8 +10,12 @@
|
||||
use super::compact::COMPACT_WARNING_MESSAGE;
|
||||
use super::compact::FIRST_REPLY;
|
||||
use super::compact::SUMMARY_TEXT;
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::CodexThread;
|
||||
use codex_core::ModelProviderInfo;
|
||||
use codex_core::NewThread;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::built_in_model_providers;
|
||||
use codex_core::compact::SUMMARIZATION_PROMPT;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::protocol::EventMsg;
|
||||
@@ -19,12 +23,12 @@ use codex_core::protocol::Op;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::responses::ResponseMock;
|
||||
use core_test_support::responses::ev_assistant_message;
|
||||
use core_test_support::responses::ev_completed;
|
||||
use core_test_support::responses::mount_sse_once_match;
|
||||
use core_test_support::responses::sse;
|
||||
use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::wait_for_event;
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::Value;
|
||||
@@ -95,7 +99,8 @@ fn extract_summary_message(request: &Value, summary_text: &str) -> Value {
|
||||
.and_then(|arr| arr.first())
|
||||
.and_then(|entry| entry.get("text"))
|
||||
.and_then(Value::as_str)
|
||||
.is_some_and(|text| text.contains(summary_text))
|
||||
.map(|text| text.contains(summary_text))
|
||||
.unwrap_or(false)
|
||||
})
|
||||
})
|
||||
.cloned()
|
||||
@@ -112,18 +117,21 @@ fn normalize_compact_prompts(requests: &mut [Value]) {
|
||||
{
|
||||
return true;
|
||||
}
|
||||
let Some(content) = item.get("content").and_then(Value::as_array) else {
|
||||
return false;
|
||||
};
|
||||
let Some(first) = content.first() else {
|
||||
return false;
|
||||
};
|
||||
let text = first
|
||||
.get("text")
|
||||
.and_then(Value::as_str)
|
||||
let content = item
|
||||
.get("content")
|
||||
.and_then(Value::as_array)
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
let normalized_text = normalize_line_endings_str(text);
|
||||
!(text.is_empty() || normalized_text == normalized_summary_prompt)
|
||||
if let Some(first) = content.first() {
|
||||
let text = first
|
||||
.get("text")
|
||||
.and_then(Value::as_str)
|
||||
.unwrap_or_default();
|
||||
let normalized_text = normalize_line_endings_str(text);
|
||||
!(text.is_empty() || normalized_text == normalized_summary_prompt)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -866,7 +874,9 @@ fn gather_request_bodies(request_log: &[ResponseMock]) -> Vec<Value> {
|
||||
.flat_map(ResponseMock::requests)
|
||||
.map(|request| request.body_json())
|
||||
.collect::<Vec<_>>();
|
||||
bodies.iter_mut().for_each(normalize_line_endings);
|
||||
for body in &mut bodies {
|
||||
normalize_line_endings(body);
|
||||
}
|
||||
bodies
|
||||
}
|
||||
|
||||
@@ -950,19 +960,29 @@ async fn mount_second_compact_flow(server: &MockServer) -> Vec<ResponseMock> {
|
||||
async fn start_test_conversation(
|
||||
server: &MockServer,
|
||||
model: Option<&str>,
|
||||
) -> (Arc<TempDir>, Config, Arc<ThreadManager>, Arc<CodexThread>) {
|
||||
let base_url = format!("{}/v1", server.uri());
|
||||
let model = model.map(str::to_string);
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.model_provider.name = "Non-OpenAI Model provider".to_string();
|
||||
config.model_provider.base_url = Some(base_url);
|
||||
config.compact_prompt = Some(SUMMARIZATION_PROMPT.to_string());
|
||||
if let Some(model) = model {
|
||||
config.model = Some(model);
|
||||
}
|
||||
});
|
||||
let test = builder.build(server).await.expect("create conversation");
|
||||
(test.home, test.config, test.thread_manager, test.codex)
|
||||
) -> (TempDir, Config, ThreadManager, Arc<CodexThread>) {
|
||||
let model_provider = ModelProviderInfo {
|
||||
name: "Non-OpenAI Model provider".into(),
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
let home = TempDir::new().expect("create temp dir");
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider;
|
||||
config.compact_prompt = Some(SUMMARIZATION_PROMPT.to_string());
|
||||
if let Some(model) = model {
|
||||
config.model = Some(model.to_string());
|
||||
}
|
||||
let manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let NewThread { thread, .. } = manager
|
||||
.start_thread(config.clone())
|
||||
.await
|
||||
.expect("create conversation");
|
||||
|
||||
(home, config, manager, thread)
|
||||
}
|
||||
|
||||
async fn user_turn(conversation: &Arc<CodexThread>, text: &str) {
|
||||
@@ -1001,14 +1021,13 @@ async fn resume_conversation(
|
||||
config: &Config,
|
||||
path: std::path::PathBuf,
|
||||
) -> Arc<CodexThread> {
|
||||
let auth_manager = codex_core::AuthManager::from_auth_for_testing(
|
||||
codex_core::CodexAuth::from_api_key("dummy"),
|
||||
);
|
||||
manager
|
||||
let auth_manager =
|
||||
codex_core::AuthManager::from_auth_for_testing(CodexAuth::from_api_key("dummy"));
|
||||
let NewThread { thread, .. } = manager
|
||||
.resume_thread_from_rollout(config.clone(), path, auth_manager)
|
||||
.await
|
||||
.expect("resume conversation")
|
||||
.thread
|
||||
.expect("resume conversation");
|
||||
thread
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -1018,9 +1037,9 @@ async fn fork_thread(
|
||||
path: std::path::PathBuf,
|
||||
nth_user_message: usize,
|
||||
) -> Arc<CodexThread> {
|
||||
manager
|
||||
let NewThread { thread, .. } = manager
|
||||
.fork_thread(nth_user_message, config.clone(), path)
|
||||
.await
|
||||
.expect("fork conversation")
|
||||
.thread
|
||||
.expect("fork conversation");
|
||||
thread
|
||||
}
|
||||
|
||||
@@ -49,7 +49,7 @@ async fn emits_deprecation_notice_for_legacy_feature_flag() -> anyhow::Result<()
assert_eq!(
details.as_deref(),
Some(
"Enable it with `--enable unified_exec` or `[features].unified_exec` in config.toml. See https://github.com/openai/codex/blob/main/docs/config.md#feature-flags for details."
"Enable it with `--enable unified_exec` or `[features].unified_exec` in config.toml. See https://developers.openai.com/codex/config-advanced/ for details."
),
);
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::ModelProviderInfo;
|
||||
use codex_core::NewThread;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::built_in_model_providers;
|
||||
use codex_core::parse_turn_item;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Op;
|
||||
@@ -6,9 +10,10 @@ use codex_core::protocol::RolloutItem;
|
||||
use codex_core::protocol::RolloutLine;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::wait_for_event;
|
||||
use tempfile::TempDir;
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
@@ -39,11 +44,25 @@ async fn fork_thread_twice_drops_to_first_message() {
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let mut builder = test_codex();
|
||||
let test = builder.build(&server).await.expect("create conversation");
|
||||
let codex = test.codex.clone();
|
||||
let thread_manager = test.thread_manager.clone();
|
||||
let config_for_fork = test.config.clone();
|
||||
// Configure Codex to use the mock server.
|
||||
let model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
|
||||
let home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.model_provider = model_provider.clone();
|
||||
let config_for_fork = config.clone();
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let NewThread { thread: codex, .. } = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create conversation");
|
||||
|
||||
// Send three user messages; wait for three completed turns.
|
||||
for text in ["first", "second", "third"] {
|
||||
|
||||
@@ -6,7 +6,6 @@ use codex_core::protocol::ItemCompletedEvent;
use codex_core::protocol::ItemStartedEvent;
use codex_core::protocol::Op;
use codex_protocol::items::TurnItem;
use codex_protocol::models::WebSearchAction;
use codex_protocol::user_input::ByteRange;
use codex_protocol::user_input::TextElement;
use codex_protocol::user_input::UserInput;
@@ -19,7 +18,7 @@ use core_test_support::responses::ev_reasoning_item_added;
use core_test_support::responses::ev_reasoning_summary_text_delta;
use core_test_support::responses::ev_reasoning_text_delta;
use core_test_support::responses::ev_response_created;
use core_test_support::responses::ev_web_search_call_added_partial;
use core_test_support::responses::ev_web_search_call_added;
use core_test_support::responses::ev_web_search_call_done;
use core_test_support::responses::mount_sse_once;
use core_test_support::responses::sse;
@@ -209,7 +208,8 @@ async fn web_search_item_is_emitted() -> anyhow::Result<()> {

let TestCodex { codex, .. } = test_codex().build(&server).await?;

let web_search_added = ev_web_search_call_added_partial("web-search-1", "in_progress");
let web_search_added =
ev_web_search_call_added("web-search-1", "in_progress", "weather seattle");
let web_search_done = ev_web_search_call_done("web-search-1", "completed", "weather seattle");

let first_response = sse(vec![
@@ -230,8 +230,11 @@ async fn web_search_item_is_emitted() -> anyhow::Result<()> {
})
.await?;

let begin = wait_for_event_match(&codex, |ev| match ev {
EventMsg::WebSearchBegin(event) => Some(event.clone()),
let started = wait_for_event_match(&codex, |ev| match ev {
EventMsg::ItemStarted(ItemStartedEvent {
item: TurnItem::WebSearch(item),
..
}) => Some(item.clone()),
_ => None,
})
.await;
@@ -244,14 +247,8 @@ async fn web_search_item_is_emitted() -> anyhow::Result<()> {
})
.await;

assert_eq!(begin.call_id, "web-search-1");
assert_eq!(completed.id, begin.call_id);
assert_eq!(
completed.action,
WebSearchAction::Search {
query: Some("weather seattle".to_string()),
}
);
assert_eq!(started.id, completed.id);
assert_eq!(completed.query, "weather seattle");

Ok(())
}
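The rewritten assertions above rely on `wait_for_event_match`, where a closure maps each incoming event to an `Option` and the first `Some` wins. The snippet below is a small standalone model of that matching pattern with stand-in event types; it is not the real `EventMsg`/`TurnItem` machinery.

```rust
// Standalone model of the match-to-Option event filtering used in the test above.
#[derive(Clone, Debug, PartialEq)]
enum Event {
    ItemStarted { id: String },
    ItemCompleted { id: String, query: String },
    Other,
}

fn first_match<T>(events: &[Event], mut matcher: impl FnMut(&Event) -> Option<T>) -> Option<T> {
    events.iter().find_map(|ev| matcher(ev))
}

fn main() {
    let events = vec![
        Event::Other,
        Event::ItemStarted { id: "web-search-1".into() },
        Event::ItemCompleted { id: "web-search-1".into(), query: "weather seattle".into() },
    ];

    // Grab the id from the "started" event, then wait for the matching completion.
    let started_id = first_match(&events, |ev| match ev {
        Event::ItemStarted { id } => Some(id.clone()),
        _ => None,
    })
    .expect("started item");

    let completed_query = first_match(&events, |ev| match ev {
        Event::ItemCompleted { id, query } if *id == started_id => Some(query.clone()),
        _ => None,
    })
    .expect("completed item");

    assert_eq!(completed_query, "weather seattle");
}
```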
@@ -74,7 +74,6 @@ mod tools;
mod truncation;
mod undo;
mod unified_exec;
mod unstable_features_warning;
mod user_notification;
mod user_shell_cmd;
mod view_image;
@@ -1,28 +1,36 @@
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::Op;
|
||||
use codex_protocol::openai_models::ReasoningEffort;
|
||||
use core_test_support::responses::start_mock_server;
|
||||
use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::wait_for_event;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
|
||||
const CONFIG_TOML: &str = "config.toml";
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn override_turn_context_does_not_persist_when_config_exists() {
|
||||
let server = start_mock_server().await;
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let config_path = codex_home.path().join(CONFIG_TOML);
|
||||
let initial_contents = "model = \"gpt-4o\"\n";
|
||||
let mut builder = test_codex()
|
||||
.with_pre_build_hook(move |home| {
|
||||
let config_path = home.join(CONFIG_TOML);
|
||||
std::fs::write(config_path, initial_contents).expect("seed config.toml");
|
||||
})
|
||||
.with_config(|config| {
|
||||
config.model = Some("gpt-4o".to_string());
|
||||
});
|
||||
let test = builder.build(&server).await.expect("create conversation");
|
||||
let codex = test.codex.clone();
|
||||
let config_path = test.home.path().join(CONFIG_TOML);
|
||||
tokio::fs::write(&config_path, initial_contents)
|
||||
.await
|
||||
.expect("seed config.toml");
|
||||
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model = Some("gpt-4o".to_string());
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("Test API Key"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create conversation")
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::OverrideTurnContext {
|
||||
@@ -49,16 +57,25 @@ async fn override_turn_context_does_not_persist_when_config_exists() {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn override_turn_context_does_not_create_config_file() {
|
||||
let server = start_mock_server().await;
|
||||
let mut builder = test_codex();
|
||||
let test = builder.build(&server).await.expect("create conversation");
|
||||
let codex = test.codex.clone();
|
||||
let config_path = test.home.path().join(CONFIG_TOML);
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let config_path = codex_home.path().join(CONFIG_TOML);
|
||||
assert!(
|
||||
!config_path.exists(),
|
||||
"test setup should start without config"
|
||||
);
|
||||
|
||||
let config = load_default_config_for_test(&codex_home).await;
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("Test API Key"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let codex = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create conversation")
|
||||
.thread;
|
||||
|
||||
codex
|
||||
.submit(Op::OverrideTurnContext {
|
||||
cwd: None,
|
||||
|
||||
@@ -38,7 +38,7 @@ async fn collect_tool_identifiers_for_model(model: &str) -> Vec<String> {
.with_model(model)
// Keep tool expectations stable when the default web_search mode changes.
.with_config(|config| {
config.web_search_mode = WebSearchMode::Cached;
config.web_search_mode = Some(WebSearchMode::Cached);
config.features.enable(Feature::CollaborationModes);
});
let test = builder

@@ -92,7 +92,7 @@ async fn prompt_tools_are_consistent_across_requests() -> anyhow::Result<()> {
config.user_instructions = Some("be consistent and helpful".to_string());
config.model = Some("gpt-5.1-codex-max".to_string());
// Keep tool expectations stable when the default web_search mode changes.
config.web_search_mode = WebSearchMode::Cached;
config.web_search_mode = Some(WebSearchMode::Cached);
config.features.enable(Feature::CollaborationModes);
})
.build(&server)
@@ -286,19 +286,6 @@ async fn request_user_input_rejected_in_execute_mode() -> anyhow::Result<()> {
.await
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_rejected_in_code_mode() -> anyhow::Result<()> {
assert_request_user_input_rejected("Code", |model| CollaborationMode {
mode: ModeKind::Code,
settings: Settings {
model,
reasoning_effort: None,
developer_instructions: None,
},
})
.await
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_rejected_in_custom_mode() -> anyhow::Result<()> {
assert_request_user_input_rejected("Custom", |model| CollaborationMode {

@@ -1,7 +1,11 @@
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::CodexThread;
|
||||
use codex_core::ContentItem;
|
||||
use codex_core::ModelProviderInfo;
|
||||
use codex_core::REVIEW_PROMPT;
|
||||
use codex_core::ResponseItem;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::built_in_model_providers;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::protocol::ENVIRONMENT_CONTEXT_OPEN_TAG;
|
||||
use codex_core::protocol::EventMsg;
|
||||
@@ -17,11 +21,11 @@ use codex_core::protocol::RolloutItem;
|
||||
use codex_core::protocol::RolloutLine;
|
||||
use codex_core::review_format::render_review_output_text;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::load_sse_fixture_with_id_from_str;
|
||||
use core_test_support::responses::ResponseMock;
|
||||
use core_test_support::responses::mount_sse_sequence;
|
||||
use core_test_support::skip_if_no_network;
|
||||
use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::wait_for_event;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::PathBuf;
|
||||
@@ -69,8 +73,8 @@ async fn review_op_emits_lifecycle_and_review_output() {
|
||||
let review_json_escaped = serde_json::to_string(&review_json).unwrap();
|
||||
let sse_raw = sse_template.replace("__REVIEW__", &review_json_escaped);
|
||||
let (server, _request_log) = start_responses_server_with_sse(&sse_raw, 1).await;
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
|
||||
|
||||
// Submit review request.
|
||||
codex
|
||||
@@ -170,7 +174,6 @@ async fn review_op_emits_lifecycle_and_review_output() {
|
||||
"assistant review output contains user_action markup"
|
||||
);
|
||||
|
||||
let _codex_home_guard = codex_home;
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
@@ -191,8 +194,8 @@ async fn review_op_with_plain_text_emits_review_fallback() {
|
||||
{"type":"response.completed", "response": {"id": "__ID__"}}
|
||||
]"#;
|
||||
let (server, _request_log) = start_responses_server_with_sse(sse_raw, 1).await;
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
|
||||
|
||||
codex
|
||||
.submit(Op::Review {
|
||||
@@ -223,7 +226,6 @@ async fn review_op_with_plain_text_emits_review_fallback() {
|
||||
assert_eq!(expected, review);
|
||||
let _complete = wait_for_event(&codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
|
||||
|
||||
let _codex_home_guard = codex_home;
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
@@ -252,8 +254,8 @@ async fn review_filters_agent_message_related_events() {
|
||||
{"type":"response.completed", "response": {"id": "__ID__"}}
|
||||
]"#;
|
||||
let (server, _request_log) = start_responses_server_with_sse(sse_raw, 1).await;
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
|
||||
|
||||
codex
|
||||
.submit(Op::Review {
|
||||
@@ -293,7 +295,6 @@ async fn review_filters_agent_message_related_events() {
|
||||
.await;
|
||||
assert!(saw_entered && saw_exited, "missing review lifecycle events");
|
||||
|
||||
let _codex_home_guard = codex_home;
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
@@ -334,8 +335,8 @@ async fn review_does_not_emit_agent_message_on_structured_output() {
|
||||
let review_json_escaped = serde_json::to_string(&review_json).unwrap();
|
||||
let sse_raw = sse_template.replace("__REVIEW__", &review_json_escaped);
|
||||
let (server, _request_log) = start_responses_server_with_sse(&sse_raw, 1).await;
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
|
||||
|
||||
codex
|
||||
.submit(Op::Review {
|
||||
@@ -374,7 +375,6 @@ async fn review_does_not_emit_agent_message_on_structured_output() {
|
||||
assert_eq!(1, agent_messages, "expected exactly one AgentMessage event");
|
||||
assert!(saw_entered && saw_exited, "missing review lifecycle events");
|
||||
|
||||
let _codex_home_guard = codex_home;
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
@@ -389,9 +389,9 @@ async fn review_uses_custom_review_model_from_config() {
|
||||
{"type":"response.completed", "response": {"id": "__ID__"}}
|
||||
]"#;
|
||||
let (server, request_log) = start_responses_server_with_sse(sse_raw, 1).await;
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
// Choose a review model different from the main model; ensure it is used.
|
||||
let codex = new_conversation_for_server(&server, codex_home.clone(), |cfg| {
|
||||
let codex = new_conversation_for_server(&server, &codex_home, |cfg| {
|
||||
cfg.model = Some("gpt-4.1".to_string());
|
||||
cfg.review_model = Some("gpt-5.1".to_string());
|
||||
})
|
||||
@@ -428,7 +428,6 @@ async fn review_uses_custom_review_model_from_config() {
|
||||
let body = request.body_json();
|
||||
assert_eq!(body["model"].as_str().unwrap(), "gpt-5.1");
|
||||
|
||||
let _codex_home_guard = codex_home;
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
@@ -443,8 +442,8 @@ async fn review_uses_session_model_when_review_model_unset() {
|
||||
{"type":"response.completed", "response": {"id": "__ID__"}}
|
||||
]"#;
|
||||
let (server, request_log) = start_responses_server_with_sse(sse_raw, 1).await;
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let codex = new_conversation_for_server(&server, codex_home.clone(), |cfg| {
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let codex = new_conversation_for_server(&server, &codex_home, |cfg| {
|
||||
cfg.model = Some("gpt-4.1".to_string());
|
||||
cfg.review_model = None;
|
||||
})
|
||||
@@ -479,7 +478,6 @@ async fn review_uses_session_model_when_review_model_unset() {
|
||||
let body = request.body_json();
|
||||
assert_eq!(body["model"].as_str().unwrap(), "gpt-4.1");
|
||||
|
||||
let _codex_home_guard = codex_home;
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
@@ -499,7 +497,12 @@ async fn review_input_isolated_from_parent_history() {
|
||||
let (server, request_log) = start_responses_server_with_sse(sse_raw, 1).await;
|
||||
|
||||
// Seed a parent session history via resume file with both user + assistant items.
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.model_provider = ModelProviderInfo {
|
||||
base_url: Some(format!("{}/v1", server.uri())),
|
||||
..built_in_model_providers()["openai"].clone()
|
||||
};
|
||||
|
||||
let session_file = codex_home.path().join("resume.jsonl");
|
||||
{
|
||||
@@ -561,8 +564,7 @@ async fn review_input_isolated_from_parent_history() {
|
||||
.unwrap();
|
||||
}
|
||||
let codex =
|
||||
resume_conversation_for_server(&server, codex_home.clone(), session_file.clone(), |_| {})
|
||||
.await;
|
||||
resume_conversation_for_server(&server, &codex_home, session_file.clone(), |_| {}).await;
|
||||
|
||||
// Submit review request; it must start fresh (no parent history in `input`).
|
||||
let review_prompt = "Please review only this".to_string();
|
||||
@@ -655,7 +657,6 @@ async fn review_input_isolated_from_parent_history() {
|
||||
"expected user interruption message in rollout"
|
||||
);
|
||||
|
||||
let _codex_home_guard = codex_home;
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
@@ -674,8 +675,8 @@ async fn review_history_surfaces_in_parent_session() {
|
||||
{"type":"response.completed", "response": {"id": "__ID__"}}
|
||||
]"#;
|
||||
let (server, request_log) = start_responses_server_with_sse(sse_raw, 2).await;
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let codex = new_conversation_for_server(&server, codex_home.clone(), |_| {}).await;
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let codex = new_conversation_for_server(&server, &codex_home, |_| {}).await;
|
||||
|
||||
// 1) Run a review turn that produces an assistant message (isolated in child).
|
||||
codex
|
||||
@@ -754,7 +755,6 @@ async fn review_history_surfaces_in_parent_session() {
|
||||
"review assistant output missing from parent turn input"
|
||||
);
|
||||
|
||||
let _codex_home_guard = codex_home;
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
@@ -807,10 +807,9 @@ async fn review_uses_overridden_cwd_for_base_branch_merge_base() {
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
let codex_home = Arc::new(TempDir::new().unwrap());
|
||||
let initial_cwd_path = initial_cwd.path().to_path_buf();
|
||||
let codex = new_conversation_for_server(&server, codex_home.clone(), move |config| {
|
||||
config.cwd = initial_cwd_path;
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let codex = new_conversation_for_server(&server, &codex_home, |config| {
|
||||
config.cwd = initial_cwd.path().to_path_buf();
|
||||
})
|
||||
.await;
|
||||
|
||||
@@ -860,7 +859,6 @@ async fn review_uses_overridden_cwd_for_base_branch_merge_base() {
|
||||
"expected review prompt to include merge-base sha {head_sha}"
|
||||
);
|
||||
|
||||
let _codex_home_guard = codex_home;
|
||||
server.verify().await;
|
||||
}
|
||||
|
||||
@@ -880,47 +878,57 @@ async fn start_responses_server_with_sse(
#[expect(clippy::expect_used)]
async fn new_conversation_for_server<F>(
server: &MockServer,
codex_home: Arc<TempDir>,
codex_home: &TempDir,
mutator: F,
) -> Arc<CodexThread>
where
F: FnOnce(&mut Config) + Send + 'static,
F: FnOnce(&mut Config),
{
let base_url = format!("{}/v1", server.uri());
let mut builder = test_codex()
.with_home(codex_home)
.with_config(move |config| {
config.model_provider.base_url = Some(base_url.clone());
mutator(config);
});
builder
.build(server)
let model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let mut config = load_default_config_for_test(codex_home).await;
config.model_provider = model_provider;
mutator(&mut config);
let thread_manager = ThreadManager::with_models_provider(
CodexAuth::from_api_key("Test API Key"),
config.model_provider.clone(),
);
thread_manager
.start_thread(config)
.await
.expect("create conversation")
.codex
.thread
}

/// Create a conversation resuming from a rollout file, configured to talk to the provided mock server.
#[expect(clippy::expect_used)]
async fn resume_conversation_for_server<F>(
server: &MockServer,
codex_home: Arc<TempDir>,
codex_home: &TempDir,
resume_path: std::path::PathBuf,
mutator: F,
) -> Arc<CodexThread>
where
F: FnOnce(&mut Config) + Send + 'static,
F: FnOnce(&mut Config),
{
let base_url = format!("{}/v1", server.uri());
let mut builder = test_codex()
.with_home(codex_home.clone())
.with_config(move |config| {
config.model_provider.base_url = Some(base_url.clone());
mutator(config);
});
builder
.resume(server, codex_home, resume_path)
let model_provider = ModelProviderInfo {
base_url: Some(format!("{}/v1", server.uri())),
..built_in_model_providers()["openai"].clone()
};
let mut config = load_default_config_for_test(codex_home).await;
config.model_provider = model_provider;
mutator(&mut config);
let thread_manager = ThreadManager::with_models_provider(
CodexAuth::from_api_key("Test API Key"),
config.model_provider.clone(),
);
let auth_manager =
codex_core::AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
thread_manager
.resume_thread_from_rollout(config, resume_path, auth_manager)
.await
.expect("resume conversation")
.codex
.thread
}

@@ -1,90 +0,0 @@
|
||||
#![allow(clippy::unwrap_used, clippy::expect_used)]
|
||||
|
||||
use codex_core::AuthManager;
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::NewThread;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::config::CONFIG_TOML_FILE;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::InitialHistory;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use core::time::Duration;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::wait_for_event;
|
||||
use tempfile::TempDir;
|
||||
use tokio::time::timeout;
|
||||
use toml::toml;
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn emits_warning_when_unstable_features_enabled_via_config() {
|
||||
let home = TempDir::new().expect("tempdir");
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.features.enable(Feature::ChildAgentsMd);
|
||||
let user_config_path =
|
||||
AbsolutePathBuf::from_absolute_path(config.codex_home.join(CONFIG_TOML_FILE))
|
||||
.expect("absolute user config path");
|
||||
config.config_layer_stack = config.config_layer_stack.with_user_config(
|
||||
&user_config_path,
|
||||
toml! { features = { child_agents_md = true } }.into(),
|
||||
);
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("test"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
|
||||
|
||||
let NewThread {
|
||||
thread: conversation,
|
||||
..
|
||||
} = thread_manager
|
||||
.resume_thread_with_history(config, InitialHistory::New, auth_manager)
|
||||
.await
|
||||
.expect("spawn conversation");
|
||||
|
||||
let warning = wait_for_event(&conversation, |ev| matches!(ev, EventMsg::Warning(_))).await;
|
||||
let EventMsg::Warning(WarningEvent { message }) = warning else {
|
||||
panic!("expected warning event");
|
||||
};
|
||||
assert!(message.contains("child_agents_md"));
|
||||
assert!(message.contains("Under-development features enabled"));
|
||||
assert!(message.contains("suppress_unstable_features_warning = true"));
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn suppresses_warning_when_configured() {
|
||||
let home = TempDir::new().expect("tempdir");
|
||||
let mut config = load_default_config_for_test(&home).await;
|
||||
config.features.enable(Feature::ChildAgentsMd);
|
||||
config.suppress_unstable_features_warning = true;
|
||||
let user_config_path =
|
||||
AbsolutePathBuf::from_absolute_path(config.codex_home.join(CONFIG_TOML_FILE))
|
||||
.expect("absolute user config path");
|
||||
config.config_layer_stack = config.config_layer_stack.with_user_config(
|
||||
&user_config_path,
|
||||
toml! { features = { child_agents_md = true } }.into(),
|
||||
);
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
CodexAuth::from_api_key("test"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
|
||||
|
||||
let NewThread {
|
||||
thread: conversation,
|
||||
..
|
||||
} = thread_manager
|
||||
.resume_thread_with_history(config, InitialHistory::New, auth_manager)
|
||||
.await
|
||||
.expect("spawn conversation");
|
||||
|
||||
let warning = timeout(
|
||||
Duration::from_millis(150),
|
||||
wait_for_event(&conversation, |ev| matches!(ev, EventMsg::Warning(_))),
|
||||
)
|
||||
.await;
|
||||
assert!(warning.is_err());
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
use anyhow::Context;
|
||||
use codex_core::NewThread;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::protocol::EventMsg;
|
||||
use codex_core::protocol::ExecCommandEndEvent;
|
||||
@@ -8,6 +10,7 @@ use codex_core::protocol::Op;
|
||||
use codex_core::protocol::SandboxPolicy;
|
||||
use codex_core::protocol::TurnAbortReason;
|
||||
use core_test_support::assert_regex_match;
|
||||
use core_test_support::load_default_config_for_test;
|
||||
use core_test_support::responses;
|
||||
use core_test_support::responses::ev_assistant_message;
|
||||
use core_test_support::responses::ev_completed;
|
||||
@@ -35,17 +38,19 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
|
||||
.await
|
||||
.expect("write temp file");
|
||||
|
||||
// Pin cwd to the temp dir so ls/cat operate there.
|
||||
let server = start_mock_server().await;
|
||||
let cwd_path = cwd.path().to_path_buf();
|
||||
let mut builder = test_codex().with_config(move |config| {
|
||||
config.cwd = cwd_path;
|
||||
});
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
// Load config and pin cwd to the temp dir so ls/cat operate there.
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.cwd = cwd.path().to_path_buf();
|
||||
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
codex_core::CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let NewThread { thread: codex, .. } = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation")
|
||||
.codex;
|
||||
.expect("create new conversation");
|
||||
|
||||
// 1) shell command should list the file
|
||||
let list_cmd = "ls".to_string();
|
||||
@@ -92,13 +97,16 @@ async fn user_shell_cmd_ls_and_cat_in_temp_dir() {
|
||||
#[tokio::test]
|
||||
async fn user_shell_cmd_can_be_interrupted() {
|
||||
// Set up isolated config and conversation.
|
||||
let server = start_mock_server().await;
|
||||
let mut builder = test_codex();
|
||||
let codex = builder
|
||||
.build(&server)
|
||||
let codex_home = TempDir::new().unwrap();
|
||||
let config = load_default_config_for_test(&codex_home).await;
|
||||
let thread_manager = ThreadManager::with_models_provider(
|
||||
codex_core::CodexAuth::from_api_key("dummy"),
|
||||
config.model_provider.clone(),
|
||||
);
|
||||
let NewThread { thread: codex, .. } = thread_manager
|
||||
.start_thread(config)
|
||||
.await
|
||||
.expect("create new conversation")
|
||||
.codex;
|
||||
.expect("create new conversation");
|
||||
|
||||
// Start a long-running command and then interrupt it.
|
||||
let sleep_cmd = "sleep 5".to_string();
|
||||
|
||||
@@ -35,7 +35,7 @@ async fn web_search_mode_cached_sets_external_web_access_false_in_request_body()
let mut builder = test_codex()
.with_model("gpt-5-codex")
.with_config(|config| {
config.web_search_mode = WebSearchMode::Cached;
config.web_search_mode = Some(WebSearchMode::Cached);
});
let test = builder
.build(&server)
@@ -67,7 +67,7 @@ async fn web_search_mode_takes_precedence_over_legacy_flags_in_request_body() {
.with_model("gpt-5-codex")
.with_config(|config| {
config.features.enable(Feature::WebSearchRequest);
config.web_search_mode = WebSearchMode::Cached;
config.web_search_mode = Some(WebSearchMode::Cached);
});
let test = builder
.build(&server)

@@ -1,5 +1,3 @@
use clap::Args;
use clap::FromArgMatches;
use clap::Parser;
use clap::ValueEnum;
use codex_common::CliConfigOverrides;
@@ -110,22 +108,20 @@ pub enum Command {
Review(ReviewArgs),
}

#[derive(Args, Debug)]
struct ResumeArgsRaw {
// Note: This is the direct clap shape. We reinterpret the positional when --last is set
// so "codex resume --last <prompt>" treats the positional as a prompt, not a session id.
#[derive(Parser, Debug)]
pub struct ResumeArgs {
/// Conversation/session id (UUID). When provided, resumes this session.
/// If omitted, use --last to pick the most recent recorded session.
#[arg(value_name = "SESSION_ID")]
session_id: Option<String>,
pub session_id: Option<String>,

/// Resume the most recent recorded session (newest) without specifying an id.
#[arg(long = "last", default_value_t = false)]
last: bool,
pub last: bool,

/// Show all sessions (disables cwd filtering).
#[arg(long = "all", default_value_t = false)]
all: bool,
pub all: bool,

/// Optional image(s) to attach to the prompt sent after resuming.
#[arg(
@@ -135,72 +131,13 @@ struct ResumeArgsRaw {
value_delimiter = ',',
num_args = 1
)]
images: Vec<PathBuf>,

/// Prompt to send after resuming the session. If `-` is used, read from stdin.
#[arg(value_name = "PROMPT", value_hint = clap::ValueHint::Other)]
prompt: Option<String>,
}

#[derive(Debug)]
pub struct ResumeArgs {
/// Conversation/session id (UUID). When provided, resumes this session.
/// If omitted, use --last to pick the most recent recorded session.
pub session_id: Option<String>,

/// Resume the most recent recorded session (newest) without specifying an id.
pub last: bool,

/// Show all sessions (disables cwd filtering).
pub all: bool,

/// Optional image(s) to attach to the prompt sent after resuming.
pub images: Vec<PathBuf>,

/// Prompt to send after resuming the session. If `-` is used, read from stdin.
#[arg(value_name = "PROMPT", value_hint = clap::ValueHint::Other)]
pub prompt: Option<String>,
}

impl From<ResumeArgsRaw> for ResumeArgs {
fn from(raw: ResumeArgsRaw) -> Self {
// When --last is used without an explicit prompt, treat the positional as the prompt
// (clap can’t express this conditional positional meaning cleanly).
let (session_id, prompt) = if raw.last && raw.prompt.is_none() {
(None, raw.session_id)
} else {
(raw.session_id, raw.prompt)
};
Self {
session_id,
last: raw.last,
all: raw.all,
images: raw.images,
prompt,
}
}
}

impl Args for ResumeArgs {
fn augment_args(cmd: clap::Command) -> clap::Command {
ResumeArgsRaw::augment_args(cmd)
}

fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
ResumeArgsRaw::augment_args_for_update(cmd)
}
}

impl FromArgMatches for ResumeArgs {
fn from_arg_matches(matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
ResumeArgsRaw::from_arg_matches(matches).map(Self::from)
}

fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> {
*self = ResumeArgsRaw::from_arg_matches(matches).map(Self::from)?;
Ok(())
}
}

#[derive(Parser, Debug)]
pub struct ReviewArgs {
/// Review staged, unstaged, and untracked changes.

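The `--last` reinterpretation above is easiest to see end to end. A minimal, self-contained sketch of the same post-parse trick (the `ResumeCli` type and field names here are hypothetical stand-ins, not the CLI's real structs; assumes `clap` with the `derive` feature):

```rust
use clap::Parser;

/// Illustrative only: a lone positional plus a --last flag, reinterpreted after parsing.
#[derive(Parser, Debug)]
struct ResumeCli {
    /// Session id, or (when --last is set and no prompt follows) the prompt.
    positional: Option<String>,

    /// Resume the most recent session instead of naming one.
    #[arg(long)]
    last: bool,

    /// Explicit prompt positional.
    prompt: Option<String>,
}

fn main() {
    // `resume --last "tidy up"`: the lone positional is really a prompt, not a session id.
    let raw = ResumeCli::parse_from(["resume", "--last", "tidy up"]);
    let (session_id, prompt) = if raw.last && raw.prompt.is_none() {
        (None, raw.positional)
    } else {
        (raw.positional, raw.prompt)
    };
    assert_eq!(session_id, None);
    assert_eq!(prompt.as_deref(), Some("tidy up"));
}
```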
@@ -32,7 +32,6 @@ use codex_core::protocol::TurnCompleteEvent;
|
||||
use codex_core::protocol::TurnDiffEvent;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_core::protocol::WebSearchEndEvent;
|
||||
use codex_core::web_search::web_search_detail;
|
||||
use codex_protocol::num_format::format_with_separators;
|
||||
use owo_colors::OwoColorize;
|
||||
use owo_colors::Style;
|
||||
@@ -371,20 +370,8 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
}
|
||||
}
|
||||
}
|
||||
EventMsg::WebSearchBegin(_) => {
|
||||
ts_msg!(self, "🌐 Searching the web...");
|
||||
}
|
||||
EventMsg::WebSearchEnd(WebSearchEndEvent {
|
||||
call_id: _,
|
||||
query,
|
||||
action,
|
||||
}) => {
|
||||
let detail = web_search_detail(Some(&action), &query);
|
||||
if detail.is_empty() {
|
||||
ts_msg!(self, "🌐 Searched the web");
|
||||
} else {
|
||||
ts_msg!(self, "🌐 Searched: {detail}");
|
||||
}
|
||||
EventMsg::WebSearchEnd(WebSearchEndEvent { call_id: _, query }) => {
|
||||
ts_msg!(self, "🌐 Searched: {query}");
|
||||
}
|
||||
EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
|
||||
call_id,
|
||||
@@ -750,7 +737,8 @@ impl EventProcessor for EventProcessorWithHumanOutput {
|
||||
);
|
||||
}
|
||||
EventMsg::ShutdownComplete => return CodexStatus::Shutdown,
|
||||
EventMsg::ExecApprovalRequest(_)
|
||||
EventMsg::WebSearchBegin(_)
|
||||
| EventMsg::ExecApprovalRequest(_)
|
||||
| EventMsg::ApplyPatchApprovalRequest(_)
|
||||
| EventMsg::TerminalInteraction(_)
|
||||
| EventMsg::ExecCommandOutputDelta(_)
@@ -49,7 +49,6 @@ use codex_core::protocol::CollabCloseBeginEvent;
use codex_core::protocol::CollabCloseEndEvent;
use codex_core::protocol::CollabWaitingBeginEvent;
use codex_core::protocol::CollabWaitingEndEvent;
use codex_protocol::models::WebSearchAction;
use codex_protocol::plan_tool::StepStatus;
use codex_protocol::plan_tool::UpdatePlanArgs;
use serde_json::Value as JsonValue;
@@ -67,7 +66,6 @@ pub struct EventProcessorWithJsonOutput {
last_total_token_usage: Option<codex_core::protocol::TokenUsage>,
running_mcp_tool_calls: HashMap<String, RunningMcpToolCall>,
running_collab_tool_calls: HashMap<String, RunningCollabToolCall>,
running_web_search_calls: HashMap<String, String>,
last_critical_error: Option<ThreadErrorEvent>,
}

@@ -109,7 +107,6 @@ impl EventProcessorWithJsonOutput {
last_total_token_usage: None,
running_mcp_tool_calls: HashMap::new(),
running_collab_tool_calls: HashMap::new(),
running_web_search_calls: HashMap::new(),
last_critical_error: None,
}
}
@@ -141,7 +138,7 @@ impl EventProcessorWithJsonOutput {
protocol::EventMsg::CollabCloseEnd(ev) => self.handle_collab_close_end(ev),
protocol::EventMsg::PatchApplyBegin(ev) => self.handle_patch_apply_begin(ev),
protocol::EventMsg::PatchApplyEnd(ev) => self.handle_patch_apply_end(ev),
protocol::EventMsg::WebSearchBegin(ev) => self.handle_web_search_begin(ev),
protocol::EventMsg::WebSearchBegin(_) => Vec::new(),
protocol::EventMsg::WebSearchEnd(ev) => self.handle_web_search_end(ev),
protocol::EventMsg::TokenCount(ev) => {
if let Some(info) = &ev.info {
@@ -198,36 +195,11 @@ impl EventProcessorWithJsonOutput {
})]
}

fn handle_web_search_begin(&mut self, ev: &protocol::WebSearchBeginEvent) -> Vec<ThreadEvent> {
if self.running_web_search_calls.contains_key(&ev.call_id) {
return Vec::new();
}
let item_id = self.get_next_item_id();
self.running_web_search_calls
.insert(ev.call_id.clone(), item_id.clone());
fn handle_web_search_end(&self, ev: &protocol::WebSearchEndEvent) -> Vec<ThreadEvent> {
let item = ThreadItem {
id: item_id,
id: self.get_next_item_id(),
details: ThreadItemDetails::WebSearch(WebSearchItem {
id: ev.call_id.clone(),
query: String::new(),
action: WebSearchAction::Other,
}),
};

vec![ThreadEvent::ItemStarted(ItemStartedEvent { item })]
}

fn handle_web_search_end(&mut self, ev: &protocol::WebSearchEndEvent) -> Vec<ThreadEvent> {
let item_id = self
.running_web_search_calls
.remove(&ev.call_id)
.unwrap_or_else(|| self.get_next_item_id());
let item = ThreadItem {
id: item_id,
details: ThreadItemDetails::WebSearch(WebSearchItem {
id: ev.call_id.clone(),
query: ev.query.clone(),
action: ev.action.clone(),
}),
};

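The removed `handle_web_search_begin` and reworked `handle_web_search_end` above hinge on a small call-id to item-id map. A standalone sketch of that begin/end correlation pattern (simplified names, not the crate's real types):

```rust
use std::collections::HashMap;

#[derive(Default)]
struct WebSearchTracker {
    running: HashMap<String, String>, // call_id -> item_id
    next_item: usize,
}

impl WebSearchTracker {
    fn next_item_id(&mut self) -> String {
        let id = format!("item_{}", self.next_item);
        self.next_item += 1;
        id
    }

    /// Begin: allocate an item id once per call_id, ignoring duplicate begins.
    fn on_begin(&mut self, call_id: &str) -> Option<String> {
        if self.running.contains_key(call_id) {
            return None;
        }
        let item_id = self.next_item_id();
        self.running.insert(call_id.to_string(), item_id.clone());
        Some(item_id)
    }

    /// End: reuse the id allocated at begin, or mint a fresh one if begin was never seen.
    fn on_end(&mut self, call_id: &str) -> String {
        self.running
            .remove(call_id)
            .unwrap_or_else(|| self.next_item_id())
    }
}

fn main() {
    let mut tracker = WebSearchTracker::default();
    let started = tracker.on_begin("call-1").unwrap();
    let completed = tracker.on_end("call-1");
    assert_eq!(started, completed); // ItemStarted/ItemCompleted share one id
    assert_eq!(tracker.on_end("call-2"), "item_1"); // end without begin still gets an id
}
```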
@@ -1,4 +1,3 @@
|
||||
use codex_protocol::models::WebSearchAction;
|
||||
use mcp_types::ContentBlock as McpContentBlock;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
@@ -281,9 +280,7 @@ pub struct McpToolCallItem {
|
||||
/// A web search request.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, TS)]
|
||||
pub struct WebSearchItem {
|
||||
pub id: String,
|
||||
pub query: String,
|
||||
pub action: WebSearchAction,
|
||||
}
|
||||
|
||||
/// An error notification.
|
||||
|
||||
@@ -20,7 +20,6 @@ use codex_core::protocol::PatchApplyEndEvent;
|
||||
use codex_core::protocol::SandboxPolicy;
|
||||
use codex_core::protocol::SessionConfiguredEvent;
|
||||
use codex_core::protocol::WarningEvent;
|
||||
use codex_core::protocol::WebSearchBeginEvent;
|
||||
use codex_core::protocol::WebSearchEndEvent;
|
||||
use codex_exec::event_processor_with_jsonl_output::EventProcessorWithJsonOutput;
|
||||
use codex_exec::exec_events::AgentMessageItem;
|
||||
@@ -55,7 +54,6 @@ use codex_exec::exec_events::TurnStartedEvent;
|
||||
use codex_exec::exec_events::Usage;
|
||||
use codex_exec::exec_events::WebSearchItem;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::models::WebSearchAction;
|
||||
use codex_protocol::plan_tool::PlanItemArg;
|
||||
use codex_protocol::plan_tool::StepStatus;
|
||||
use codex_protocol::plan_tool::UpdatePlanArgs;
|
||||
@@ -126,15 +124,11 @@ fn task_started_produces_turn_started_event() {
|
||||
fn web_search_end_emits_item_completed() {
|
||||
let mut ep = EventProcessorWithJsonOutput::new(None);
|
||||
let query = "rust async await".to_string();
|
||||
let action = WebSearchAction::Search {
|
||||
query: Some(query.clone()),
|
||||
};
|
||||
let out = ep.collect_thread_events(&event(
|
||||
"w1",
|
||||
EventMsg::WebSearchEnd(WebSearchEndEvent {
|
||||
call_id: "call-123".to_string(),
|
||||
query: query.clone(),
|
||||
action: action.clone(),
|
||||
}),
|
||||
));
|
||||
|
||||
@@ -143,82 +137,12 @@ fn web_search_end_emits_item_completed() {
|
||||
vec![ThreadEvent::ItemCompleted(ItemCompletedEvent {
|
||||
item: ThreadItem {
|
||||
id: "item_0".to_string(),
|
||||
details: ThreadItemDetails::WebSearch(WebSearchItem {
|
||||
id: "call-123".to_string(),
|
||||
query,
|
||||
action,
|
||||
}),
|
||||
details: ThreadItemDetails::WebSearch(WebSearchItem { query }),
|
||||
},
|
||||
})]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_begin_emits_item_started() {
|
||||
let mut ep = EventProcessorWithJsonOutput::new(None);
|
||||
let out = ep.collect_thread_events(&event(
|
||||
"w0",
|
||||
EventMsg::WebSearchBegin(WebSearchBeginEvent {
|
||||
call_id: "call-0".to_string(),
|
||||
}),
|
||||
));
|
||||
|
||||
assert_eq!(out.len(), 1);
|
||||
let ThreadEvent::ItemStarted(ItemStartedEvent { item }) = &out[0] else {
|
||||
panic!("expected ItemStarted");
|
||||
};
|
||||
assert!(item.id.starts_with("item_"));
|
||||
assert_eq!(
|
||||
item.details,
|
||||
ThreadItemDetails::WebSearch(WebSearchItem {
|
||||
id: "call-0".to_string(),
|
||||
query: String::new(),
|
||||
action: WebSearchAction::Other,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_begin_then_end_reuses_item_id() {
|
||||
let mut ep = EventProcessorWithJsonOutput::new(None);
|
||||
let begin = ep.collect_thread_events(&event(
|
||||
"w0",
|
||||
EventMsg::WebSearchBegin(WebSearchBeginEvent {
|
||||
call_id: "call-1".to_string(),
|
||||
}),
|
||||
));
|
||||
let ThreadEvent::ItemStarted(ItemStartedEvent { item: started_item }) = &begin[0] else {
|
||||
panic!("expected ItemStarted");
|
||||
};
|
||||
let action = WebSearchAction::Search {
|
||||
query: Some("rust async await".to_string()),
|
||||
};
|
||||
let end = ep.collect_thread_events(&event(
|
||||
"w1",
|
||||
EventMsg::WebSearchEnd(WebSearchEndEvent {
|
||||
call_id: "call-1".to_string(),
|
||||
query: "rust async await".to_string(),
|
||||
action: action.clone(),
|
||||
}),
|
||||
));
|
||||
let ThreadEvent::ItemCompleted(ItemCompletedEvent {
|
||||
item: completed_item,
|
||||
}) = &end[0]
|
||||
else {
|
||||
panic!("expected ItemCompleted");
|
||||
};
|
||||
|
||||
assert_eq!(completed_item.id, started_item.id);
|
||||
assert_eq!(
|
||||
completed_item.details,
|
||||
ThreadItemDetails::WebSearch(WebSearchItem {
|
||||
id: "call-1".to_string(),
|
||||
query: "rust async await".to_string(),
|
||||
action,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn plan_update_emits_todo_list_started_updated_and_completed() {
|
||||
let mut ep = EventProcessorWithJsonOutput::new(None);
|
||||
|
||||
@@ -14,7 +14,7 @@ codex-core = { path = "../core" }
reqwest = { version = "0.12", features = ["json", "stream"] }
serde_json = "1"
tokio = { version = "1", features = ["rt"] }
tracing = { version = "0.1.44", features = ["log"] }
tracing = { version = "0.1.43", features = ["log"] }
which = "8.0"

[dev-dependencies]

@@ -94,8 +94,8 @@ pub enum Personality {
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum WebSearchMode {
Disabled,
#[default]
Disabled,
Cached,
Live,
}

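The hunk above moves the `#[default]` marker onto `Disabled`, while other hunks in this diff switch the config field to `Option<WebSearchMode>`. A minimal sketch of why both matter, assuming a TOML config with a `web_search_mode` key (the `ConfigToml` struct here is illustrative, not the crate's actual config type; assumes `serde` with derive plus `toml`):

```rust
use serde::Deserialize;

#[derive(Debug, Clone, Copy, Default, PartialEq, Deserialize)]
#[serde(rename_all = "lowercase")]
enum WebSearchMode {
    #[default]
    Disabled,
    Cached,
    Live,
}

#[derive(Debug, Deserialize)]
struct ConfigToml {
    // `Option` distinguishes "key absent" (caller may pick a model-specific default)
    // from an explicit `web_search_mode = "disabled"` in config.toml.
    web_search_mode: Option<WebSearchMode>,
}

fn main() {
    let unset: ConfigToml = toml::from_str("").unwrap();
    assert_eq!(unset.web_search_mode, None);

    let cached: ConfigToml = toml::from_str(r#"web_search_mode = "cached""#).unwrap();
    assert_eq!(cached.web_search_mode, Some(WebSearchMode::Cached));

    // When the key is absent, callers can still fall back to the enum default.
    assert_eq!(unset.web_search_mode.unwrap_or_default(), WebSearchMode::Disabled);
}
```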
@@ -1,4 +1,3 @@
|
||||
use crate::models::WebSearchAction;
|
||||
use crate::protocol::AgentMessageEvent;
|
||||
use crate::protocol::AgentReasoningEvent;
|
||||
use crate::protocol::AgentReasoningRawContentEvent;
|
||||
@@ -50,11 +49,10 @@ pub struct ReasoningItem {
|
||||
pub raw_content: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, TS, JsonSchema, PartialEq)]
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, TS, JsonSchema)]
|
||||
pub struct WebSearchItem {
|
||||
pub id: String,
|
||||
pub query: String,
|
||||
pub action: WebSearchAction,
|
||||
}
|
||||
|
||||
impl UserMessageItem {
|
||||
@@ -183,7 +181,6 @@ impl WebSearchItem {
|
||||
EventMsg::WebSearchEnd(WebSearchEndEvent {
|
||||
call_id: self.id.clone(),
|
||||
query: self.query.clone(),
|
||||
action: self.action.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -157,9 +157,7 @@ pub enum ResponseItem {
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
status: Option<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
action: Option<WebSearchAction>,
|
||||
action: WebSearchAction,
|
||||
},
|
||||
// Generated by the harness but considered exactly as a model response.
|
||||
GhostSnapshot {
|
||||
@@ -1036,12 +1034,10 @@ mod tests {
|
||||
"query": "weather seattle"
|
||||
}
|
||||
}"#,
|
||||
None,
|
||||
Some(WebSearchAction::Search {
|
||||
WebSearchAction::Search {
|
||||
query: Some("weather seattle".into()),
|
||||
}),
|
||||
},
|
||||
Some("completed".into()),
|
||||
true,
|
||||
),
|
||||
(
|
||||
r#"{
|
||||
@@ -1052,12 +1048,10 @@ mod tests {
|
||||
"url": "https://example.com"
|
||||
}
|
||||
}"#,
|
||||
None,
|
||||
Some(WebSearchAction::OpenPage {
|
||||
WebSearchAction::OpenPage {
|
||||
url: Some("https://example.com".into()),
|
||||
}),
|
||||
},
|
||||
Some("open".into()),
|
||||
true,
|
||||
),
|
||||
(
|
||||
r#"{
|
||||
@@ -1069,43 +1063,26 @@ mod tests {
|
||||
"pattern": "installation"
|
||||
}
|
||||
}"#,
|
||||
None,
|
||||
Some(WebSearchAction::FindInPage {
|
||||
WebSearchAction::FindInPage {
|
||||
url: Some("https://example.com/docs".into()),
|
||||
pattern: Some("installation".into()),
|
||||
}),
|
||||
},
|
||||
Some("in_progress".into()),
|
||||
true,
|
||||
),
|
||||
(
|
||||
r#"{
|
||||
"type": "web_search_call",
|
||||
"status": "in_progress",
|
||||
"id": "ws_partial"
|
||||
}"#,
|
||||
Some("ws_partial".into()),
|
||||
None,
|
||||
Some("in_progress".into()),
|
||||
false,
|
||||
),
|
||||
];
|
||||
|
||||
for (json_literal, expected_id, expected_action, expected_status, expect_roundtrip) in cases
|
||||
{
|
||||
for (json_literal, expected_action, expected_status) in cases {
|
||||
let parsed: ResponseItem = serde_json::from_str(json_literal)?;
|
||||
let expected = ResponseItem::WebSearchCall {
|
||||
id: expected_id.clone(),
|
||||
id: None,
|
||||
status: expected_status.clone(),
|
||||
action: expected_action.clone(),
|
||||
};
|
||||
assert_eq!(parsed, expected);
|
||||
|
||||
let serialized = serde_json::to_value(&parsed)?;
|
||||
let mut expected_serialized: serde_json::Value = serde_json::from_str(json_literal)?;
|
||||
if !expect_roundtrip && let Some(obj) = expected_serialized.as_object_mut() {
|
||||
obj.remove("id");
|
||||
}
|
||||
assert_eq!(serialized, expected_serialized);
|
||||
let original_value: serde_json::Value = serde_json::from_str(json_literal)?;
|
||||
assert_eq!(serialized, original_value);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -24,7 +24,6 @@ use crate::message_history::HistoryEntry;
|
||||
use crate::models::BaseInstructions;
|
||||
use crate::models::ContentItem;
|
||||
use crate::models::ResponseItem;
|
||||
use crate::models::WebSearchAction;
|
||||
use crate::num_format::format_with_separators;
|
||||
use crate::openai_models::ReasoningEffort as ReasoningEffortConfig;
|
||||
use crate::parse_command::ParsedCommand;
|
||||
@@ -1042,7 +1041,6 @@ impl HasLegacyEvent for ReasoningRawContentDeltaEvent {
|
||||
impl HasLegacyEvent for EventMsg {
|
||||
fn as_legacy_events(&self, show_raw_agent_reasoning: bool) -> Vec<EventMsg> {
|
||||
match self {
|
||||
EventMsg::ItemStarted(event) => event.as_legacy_events(show_raw_agent_reasoning),
|
||||
EventMsg::ItemCompleted(event) => event.as_legacy_events(show_raw_agent_reasoning),
|
||||
EventMsg::AgentMessageContentDelta(event) => {
|
||||
event.as_legacy_events(show_raw_agent_reasoning)
|
||||
@@ -1404,7 +1402,6 @@ pub struct WebSearchBeginEvent {
|
||||
pub struct WebSearchEndEvent {
|
||||
pub call_id: String,
|
||||
pub query: String,
|
||||
pub action: WebSearchAction,
|
||||
}
|
||||
|
||||
// Conversation kept for backward compatibility.
|
||||
@@ -2378,9 +2375,6 @@ mod tests {
|
||||
item: TurnItem::WebSearch(WebSearchItem {
|
||||
id: "search-1".into(),
|
||||
query: "find docs".into(),
|
||||
action: WebSearchAction::Search {
|
||||
query: Some("find docs".into()),
|
||||
},
|
||||
}),
|
||||
};
@@ -93,12 +93,11 @@ use tokio::sync::Mutex;
use tokio::sync::broadcast;
use tokio::sync::mpsc;
use tokio::sync::mpsc::error::TryRecvError;
use tokio::sync::mpsc::error::TrySendError;
use tokio::sync::mpsc::unbounded_channel;
use toml::Value as TomlValue;

const EXTERNAL_EDITOR_HINT: &str = "Save and close external editor to continue.";
const THREAD_EVENT_CHANNEL_CAPACITY: usize = 32768;
const THREAD_EVENT_CHANNEL_CAPACITY: usize = 1024;

#[derive(Debug, Clone)]
pub struct AppExitInfo {
@@ -715,23 +714,8 @@ impl App {
guard.active
};

if should_send {
// Never await a bounded channel send on the main TUI loop: if the receiver falls behind,
// `send().await` can block and the UI stops drawing. If the channel is full, wait in a
// spawned task instead.
match sender.try_send(event) {
Ok(()) => {}
Err(TrySendError::Full(event)) => {
tokio::spawn(async move {
if let Err(err) = sender.send(event).await {
tracing::warn!("thread {thread_id} event channel closed: {err}");
}
});
}
Err(TrySendError::Closed(_)) => {
tracing::warn!("thread {thread_id} event channel closed");
}
}
if should_send && let Err(err) = sender.send(event).await {
tracing::warn!("thread {thread_id} event channel closed: {err}");
}
Ok(())
}
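Since the hunk above replaces an awaited `send` with a `try_send`-plus-spawn fallback, here is a self-contained sketch of that pattern on a plain `tokio::sync::mpsc` channel (the channel type, capacity, and event values are illustrative, not the TUI's real ones):

```rust
use tokio::sync::mpsc::{self, error::TrySendError};

#[tokio::main]
async fn main() {
    // try_send first, and only fall back to an awaited send inside a spawned task,
    // so the caller (e.g. a render loop) never blocks on a full channel.
    let (tx, mut rx) = mpsc::channel::<u32>(1);

    for event in 0..3u32 {
        match tx.try_send(event) {
            Ok(()) => {}
            Err(TrySendError::Full(event)) => {
                let tx = tx.clone();
                tokio::spawn(async move {
                    // Waits for capacity without stalling the caller.
                    if let Err(err) = tx.send(event).await {
                        eprintln!("event channel closed: {err}");
                    }
                });
            }
            Err(TrySendError::Closed(_)) => eprintln!("event channel closed"),
        }
    }

    // Drain: all three events eventually arrive, in no guaranteed order for the deferred ones.
    drop(tx);
    let mut seen = Vec::new();
    while let Some(value) = rx.recv().await {
        seen.push(value);
    }
    assert_eq!(seen.len(), 3);
}
```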
@@ -2416,7 +2400,6 @@ mod tests {
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use tempfile::tempdir;
|
||||
use tokio::time;
|
||||
|
||||
#[test]
|
||||
fn normalize_harness_overrides_resolves_relative_add_dirs() -> Result<()> {
|
||||
@@ -2437,47 +2420,6 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn enqueue_thread_event_does_not_block_when_channel_full() -> Result<()> {
|
||||
let mut app = make_test_app().await;
|
||||
let thread_id = ThreadId::new();
|
||||
app.thread_event_channels
|
||||
.insert(thread_id, ThreadEventChannel::new(1));
|
||||
app.set_thread_active(thread_id, true).await;
|
||||
|
||||
let event = Event {
|
||||
id: String::new(),
|
||||
msg: EventMsg::ShutdownComplete,
|
||||
};
|
||||
|
||||
app.enqueue_thread_event(thread_id, event.clone()).await?;
|
||||
time::timeout(
|
||||
Duration::from_millis(50),
|
||||
app.enqueue_thread_event(thread_id, event),
|
||||
)
|
||||
.await
|
||||
.expect("enqueue_thread_event blocked on a full channel")?;
|
||||
|
||||
let mut rx = app
|
||||
.thread_event_channels
|
||||
.get_mut(&thread_id)
|
||||
.expect("missing thread channel")
|
||||
.receiver
|
||||
.take()
|
||||
.expect("missing receiver");
|
||||
|
||||
time::timeout(Duration::from_millis(50), rx.recv())
|
||||
.await
|
||||
.expect("timed out waiting for first event")
|
||||
.expect("channel closed unexpectedly");
|
||||
time::timeout(Duration::from_millis(50), rx.recv())
|
||||
.await
|
||||
.expect("timed out waiting for second event")
|
||||
.expect("channel closed unexpectedly");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn make_test_app() -> App {
|
||||
let (chat_widget, app_event_tx, _rx, _op_rx) = make_chatwidget_manual_with_sender().await;
|
||||
let config = chat_widget.config_ref().clone();
|
||||
|
||||
@@ -27,22 +27,6 @@ pub(crate) trait BottomPaneView: Renderable {
|
||||
false
|
||||
}
|
||||
|
||||
/// Flush any pending paste-burst state. Return true if state changed.
|
||||
///
|
||||
/// This lets a modal that reuses `ChatComposer` participate in the same
|
||||
/// time-based paste burst flushing as the primary composer.
|
||||
fn flush_paste_burst_if_due(&mut self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
/// Whether the view is currently holding paste-burst transient state.
|
||||
///
|
||||
/// When `true`, the bottom pane will schedule a short delayed redraw to
|
||||
/// give the burst time window a chance to flush.
|
||||
fn is_in_paste_burst(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
/// Try to handle approval request; return the original value if not
|
||||
/// consumed.
|
||||
fn try_consume_approval_request(
|
||||
|
||||
@@ -214,20 +214,6 @@ impl Default for ChatComposerConfig {
|
||||
}
|
||||
}
|
||||
|
||||
impl ChatComposerConfig {
|
||||
/// A minimal preset for plain-text inputs embedded in other surfaces.
|
||||
///
|
||||
/// This disables popups, slash commands, and image-path attachment behavior
|
||||
/// so the composer behaves like a simple notes field.
|
||||
pub(crate) const fn plain_text() -> Self {
|
||||
Self {
|
||||
popups_enabled: false,
|
||||
slash_commands_enabled: false,
|
||||
image_paste_enabled: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct ChatComposer {
|
||||
textarea: TextArea,
|
||||
textarea_state: RefCell<TextAreaState>,
|
||||
@@ -683,16 +669,6 @@ impl ChatComposer {
|
||||
self.sync_popups();
|
||||
}
|
||||
|
||||
/// Update the placeholder text without changing input enablement.
|
||||
pub(crate) fn set_placeholder_text(&mut self, placeholder: String) {
|
||||
self.placeholder_text = placeholder;
|
||||
}
|
||||
|
||||
/// Move the cursor to the end of the current text buffer.
|
||||
pub(crate) fn move_cursor_to_end(&mut self) {
|
||||
self.textarea.set_cursor(self.textarea.text().len());
|
||||
}
|
||||
|
||||
pub(crate) fn clear_for_ctrl_c(&mut self) -> Option<String> {
|
||||
if self.is_empty() {
|
||||
return None;
|
||||
|
||||
@@ -105,7 +105,6 @@ pub(crate) enum CancellationEvent {
|
||||
}
|
||||
|
||||
pub(crate) use chat_composer::ChatComposer;
|
||||
pub(crate) use chat_composer::ChatComposerConfig;
|
||||
pub(crate) use chat_composer::InputResult;
|
||||
use codex_protocol::custom_prompts::CustomPrompt;
|
||||
|
||||
@@ -132,8 +131,6 @@ pub(crate) struct BottomPane {
|
||||
frame_requester: FrameRequester,
|
||||
|
||||
has_input_focus: bool,
|
||||
enhanced_keys_supported: bool,
|
||||
disable_paste_burst: bool,
|
||||
is_task_running: bool,
|
||||
esc_backtrack_hint: bool,
|
||||
animations_enabled: bool,
|
||||
@@ -186,8 +183,6 @@ impl BottomPane {
|
||||
app_event_tx,
|
||||
frame_requester,
|
||||
has_input_focus,
|
||||
enhanced_keys_supported,
|
||||
disable_paste_burst,
|
||||
is_task_running: false,
|
||||
status: None,
|
||||
unified_exec_footer: UnifiedExecFooter::new(),
|
||||
@@ -256,37 +251,19 @@ impl BottomPane {
|
||||
/// Forward a key event to the active view or the composer.
|
||||
pub fn handle_key_event(&mut self, key_event: KeyEvent) -> InputResult {
|
||||
// If a modal/view is active, handle it here; otherwise forward to composer.
|
||||
if !self.view_stack.is_empty() {
|
||||
// We need three pieces of information after routing the key:
|
||||
// whether Esc completed the view, whether the view finished for any
|
||||
// reason, and whether a paste-burst timer should be scheduled.
|
||||
let (ctrl_c_completed, view_complete, view_in_paste_burst) = {
|
||||
let last_index = self.view_stack.len() - 1;
|
||||
let view = &mut self.view_stack[last_index];
|
||||
let ctrl_c_completed = key_event.code == KeyCode::Esc
|
||||
&& matches!(view.on_ctrl_c(), CancellationEvent::Handled)
|
||||
&& view.is_complete();
|
||||
if ctrl_c_completed {
|
||||
(true, true, false)
|
||||
} else {
|
||||
view.handle_key_event(key_event);
|
||||
(false, view.is_complete(), view.is_in_paste_burst())
|
||||
}
|
||||
};
|
||||
|
||||
if ctrl_c_completed {
|
||||
if let Some(view) = self.view_stack.last_mut() {
|
||||
if key_event.code == KeyCode::Esc
|
||||
&& matches!(view.on_ctrl_c(), CancellationEvent::Handled)
|
||||
&& view.is_complete()
|
||||
{
|
||||
self.view_stack.pop();
|
||||
self.on_active_view_complete();
|
||||
if let Some(next_view) = self.view_stack.last()
|
||||
&& next_view.is_in_paste_burst()
|
||||
{
|
||||
self.request_redraw_in(ChatComposer::recommended_paste_flush_delay());
|
||||
} else {
|
||||
view.handle_key_event(key_event);
|
||||
if view.is_complete() {
|
||||
self.view_stack.clear();
|
||||
self.on_active_view_complete();
|
||||
}
|
||||
} else if view_complete {
|
||||
self.view_stack.clear();
|
||||
self.on_active_view_complete();
|
||||
} else if view_in_paste_burst {
|
||||
self.request_redraw_in(ChatComposer::recommended_paste_flush_delay());
|
||||
}
|
||||
self.request_redraw();
|
||||
InputResult::None
|
||||
@@ -652,13 +629,7 @@ impl BottomPane {
|
||||
request
|
||||
};
|
||||
|
||||
let modal = RequestUserInputOverlay::new(
|
||||
request,
|
||||
self.app_event_tx.clone(),
|
||||
self.has_input_focus,
|
||||
self.enhanced_keys_supported,
|
||||
self.disable_paste_burst,
|
||||
);
|
||||
let modal = RequestUserInputOverlay::new(request, self.app_event_tx.clone());
|
||||
self.pause_status_timer_for_modal();
|
||||
self.set_composer_input_enabled(
|
||||
false,
|
||||
@@ -700,23 +671,11 @@ impl BottomPane {
|
||||
}
|
||||
|
||||
pub(crate) fn flush_paste_burst_if_due(&mut self) -> bool {
|
||||
// Give the active view the first chance to flush paste-burst state so
|
||||
// overlays that reuse the composer behave consistently.
|
||||
if let Some(view) = self.view_stack.last_mut()
|
||||
&& view.flush_paste_burst_if_due()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
self.composer.flush_paste_burst_if_due()
|
||||
}
|
||||
|
||||
pub(crate) fn is_in_paste_burst(&self) -> bool {
|
||||
// A view can hold paste-burst state independently of the primary
|
||||
// composer, so check it first.
|
||||
self.view_stack
|
||||
.last()
|
||||
.is_some_and(|view| view.is_in_paste_burst())
|
||||
|| self.composer.is_in_paste_burst()
|
||||
self.composer.is_in_paste_burst()
|
||||
}
|
||||
|
||||
pub(crate) fn on_history_entry_response(
|
||||
|
||||
@@ -19,58 +19,122 @@ pub(super) struct LayoutSections {
|
||||
impl RequestUserInputOverlay {
|
||||
/// Compute layout sections, collapsing notes and hints as space shrinks.
|
||||
pub(super) fn layout_sections(&self, area: Rect) -> LayoutSections {
|
||||
let question_lines = self
|
||||
.current_question()
|
||||
.map(|q| {
|
||||
textwrap::wrap(&q.question, area.width.max(1) as usize)
|
||||
.into_iter()
|
||||
.map(|line| line.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let question_text_height = question_lines.len() as u16;
|
||||
let has_options = self.has_options();
|
||||
let footer_pref = if self.unanswered_count() > 0 { 2 } else { 1 };
|
||||
let notes_pref_height = self.notes_input_height(area.width);
|
||||
let mut question_lines = self.wrapped_question_lines(area.width);
|
||||
let question_height = question_lines.len() as u16;
|
||||
let mut notes_input_height = self.notes_input_height(area.width);
|
||||
// Keep the question + options visible first; notes and hints collapse as space shrinks.
|
||||
let footer_lines = if self.unanswered_count() > 0 { 2 } else { 1 };
|
||||
let mut notes_title_height = if has_options { 1 } else { 0 };
|
||||
|
||||
let (
|
||||
question_height,
|
||||
progress_height,
|
||||
answer_title_height,
|
||||
notes_title_height,
|
||||
notes_height,
|
||||
options_height,
|
||||
footer_lines,
|
||||
) = if has_options {
|
||||
self.layout_with_options(
|
||||
area.height,
|
||||
area.width,
|
||||
question_height,
|
||||
notes_pref_height,
|
||||
footer_pref,
|
||||
&mut question_lines,
|
||||
)
|
||||
} else {
|
||||
self.layout_without_options(
|
||||
area.height,
|
||||
question_height,
|
||||
notes_pref_height,
|
||||
footer_pref,
|
||||
&mut question_lines,
|
||||
)
|
||||
let mut cursor_y = area.y;
|
||||
let progress_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: 1,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(1);
|
||||
let header_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: 1,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(1);
|
||||
let question_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: question_text_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(question_text_height);
|
||||
// Remaining height after progress/header/question areas.
|
||||
let remaining = area.height.saturating_sub(cursor_y.saturating_sub(area.y));
|
||||
let mut answer_title_height = if has_options { 1 } else { 0 };
|
||||
let mut options_height = 0;
|
||||
if has_options {
|
||||
let remaining_content = remaining.saturating_sub(footer_lines);
|
||||
let options_len = self.options_len() as u16;
|
||||
if remaining_content == 0 {
|
||||
answer_title_height = 0;
|
||||
notes_title_height = 0;
|
||||
notes_input_height = 0;
|
||||
options_height = 0;
|
||||
} else {
|
||||
let min_notes = 1u16;
|
||||
let full_notes = 3u16;
|
||||
// Prefer to keep all options visible, then allocate notes height.
|
||||
if remaining_content
|
||||
>= options_len + answer_title_height + notes_title_height + full_notes
|
||||
{
|
||||
let max_notes = remaining_content
|
||||
.saturating_sub(options_len)
|
||||
.saturating_sub(answer_title_height)
|
||||
.saturating_sub(notes_title_height);
|
||||
notes_input_height = notes_input_height.min(max_notes).max(full_notes);
|
||||
} else if remaining_content > options_len + answer_title_height + min_notes {
|
||||
notes_title_height = 0;
|
||||
notes_input_height = min_notes;
|
||||
} else {
|
||||
// Tight layout: hide section titles and shrink notes to one line.
|
||||
answer_title_height = 0;
|
||||
notes_title_height = 0;
|
||||
notes_input_height = min_notes;
|
||||
}
|
||||
|
||||
let (
|
||||
progress_area,
|
||||
header_area,
|
||||
question_area,
|
||||
answer_title_area,
|
||||
options_area,
|
||||
notes_title_area,
|
||||
notes_area,
|
||||
) = self.build_layout_areas(
|
||||
area,
|
||||
LayoutHeights {
|
||||
progress_height,
|
||||
question_height,
|
||||
answer_title_height,
|
||||
options_height,
|
||||
notes_title_height,
|
||||
notes_height,
|
||||
},
|
||||
);
|
||||
// Reserve notes/answer title area so options are scrollable if needed.
|
||||
let reserved = answer_title_height
|
||||
.saturating_add(notes_title_height)
|
||||
.saturating_add(notes_input_height);
|
||||
options_height = remaining_content.saturating_sub(reserved);
|
||||
}
|
||||
} else {
|
||||
let max_notes = remaining.saturating_sub(footer_lines);
|
||||
if max_notes == 0 {
|
||||
notes_input_height = 0;
|
||||
} else {
|
||||
// When no options exist, notes are the primary input.
|
||||
notes_input_height = notes_input_height.min(max_notes).max(3.min(max_notes));
|
||||
}
|
||||
}
|
||||
|
||||
let answer_title_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: answer_title_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(answer_title_height);
|
||||
let options_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: options_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(options_height);
|
||||
|
||||
let notes_title_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: notes_title_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(notes_title_height);
|
||||
let notes_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: notes_input_height,
|
||||
};
|
||||
|
||||
LayoutSections {
|
||||
progress_area,
|
||||
@@ -84,281 +148,4 @@ impl RequestUserInputOverlay {
|
||||
footer_lines,
|
||||
}
|
||||
}
|
||||
|
||||
/// Layout calculation when options are present.
|
||||
///
|
||||
/// Handles both tight layout (when space is constrained) and normal layout
|
||||
/// (when there's sufficient space for all elements).
|
||||
///
|
||||
/// Returns: (question_height, progress_height, answer_title_height, notes_title_height, notes_height, options_height, footer_lines)
|
||||
fn layout_with_options(
|
||||
&self,
|
||||
available_height: u16,
|
||||
width: u16,
|
||||
question_height: u16,
|
||||
notes_pref_height: u16,
|
||||
footer_pref: u16,
|
||||
question_lines: &mut Vec<String>,
|
||||
) -> (u16, u16, u16, u16, u16, u16, u16) {
|
||||
let options_required_height = self.options_required_height(width);
|
||||
let min_options_height = 1u16;
|
||||
let required = 1u16
|
||||
.saturating_add(question_height)
|
||||
.saturating_add(options_required_height);
|
||||
|
||||
if required > available_height {
|
||||
self.layout_with_options_tight(
|
||||
available_height,
|
||||
question_height,
|
||||
min_options_height,
|
||||
question_lines,
|
||||
)
|
||||
} else {
|
||||
self.layout_with_options_normal(
|
||||
available_height,
|
||||
question_height,
|
||||
options_required_height,
|
||||
notes_pref_height,
|
||||
footer_pref,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Tight layout for options case: allocate header + question + options first
|
||||
/// and drop everything else when space is constrained.
|
||||
fn layout_with_options_tight(
|
||||
&self,
|
||||
available_height: u16,
|
||||
question_height: u16,
|
||||
min_options_height: u16,
|
||||
question_lines: &mut Vec<String>,
|
||||
) -> (u16, u16, u16, u16, u16, u16, u16) {
|
||||
let max_question_height =
|
||||
available_height.saturating_sub(1u16.saturating_add(min_options_height));
|
||||
let adjusted_question_height = question_height.min(max_question_height);
|
||||
question_lines.truncate(adjusted_question_height as usize);
|
||||
let options_height =
|
||||
available_height.saturating_sub(1u16.saturating_add(adjusted_question_height));
|
||||
|
||||
(adjusted_question_height, 0, 0, 0, 0, options_height, 0)
|
||||
}
|
||||
|
||||
/// Normal layout for options case: allocate space for all elements with
|
||||
/// preference order: notes, footer, labels, then progress.
|
||||
fn layout_with_options_normal(
|
||||
&self,
|
||||
available_height: u16,
|
||||
question_height: u16,
|
||||
options_required_height: u16,
|
||||
notes_pref_height: u16,
|
||||
footer_pref: u16,
|
||||
) -> (u16, u16, u16, u16, u16, u16, u16) {
|
||||
let options_height = options_required_height;
|
||||
let used = 1u16
|
||||
.saturating_add(question_height)
|
||||
.saturating_add(options_height);
|
||||
let mut remaining = available_height.saturating_sub(used);
|
||||
|
||||
// Prefer notes next, then footer, then labels, with progress last.
|
||||
let mut notes_height = notes_pref_height.min(remaining);
|
||||
remaining = remaining.saturating_sub(notes_height);
|
||||
|
||||
let footer_lines = footer_pref.min(remaining);
|
||||
remaining = remaining.saturating_sub(footer_lines);
|
||||
|
||||
let mut answer_title_height = 0;
|
||||
if remaining > 0 {
|
||||
answer_title_height = 1;
|
||||
remaining = remaining.saturating_sub(1);
|
||||
}
|
||||
|
||||
let mut notes_title_height = 0;
|
||||
if remaining > 0 {
|
||||
notes_title_height = 1;
|
||||
remaining = remaining.saturating_sub(1);
|
||||
}
|
||||
|
||||
let mut progress_height = 0;
|
||||
if remaining > 0 {
|
||||
progress_height = 1;
|
||||
remaining = remaining.saturating_sub(1);
|
||||
}
|
||||
|
||||
// Expand the notes composer with any leftover rows.
|
||||
notes_height = notes_height.saturating_add(remaining);
|
||||
|
||||
(
|
||||
question_height,
|
||||
progress_height,
|
||||
answer_title_height,
|
||||
notes_title_height,
|
||||
notes_height,
|
||||
options_height,
|
||||
footer_lines,
|
||||
)
|
||||
}
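
A worked example may make the preference order above concrete. This is a standalone re-statement of the arithmetic in `layout_with_options_normal` with hypothetical inputs; it does not call into the overlay itself:

```rust
// Sketch only: mirrors the allocation order (notes, footer, titles, progress, leftover to notes).
fn sketch_layout(available: u16, question: u16, options: u16, notes_pref: u16, footer_pref: u16) -> (u16, u16, u16, u16, u16) {
    let used = 1u16.saturating_add(question).saturating_add(options);
    let mut remaining = available.saturating_sub(used);
    let mut notes = notes_pref.min(remaining);
    remaining -= notes;
    let footer = footer_pref.min(remaining);
    remaining -= footer;
    let answer_title = remaining.min(1);
    remaining -= answer_title;
    let notes_title = remaining.min(1);
    remaining -= notes_title;
    let progress = remaining.min(1);
    remaining -= progress;
    // Any leftover rows widen the notes composer.
    notes += remaining;
    (notes, footer, answer_title, notes_title, progress)
}
// e.g. a 20-row area with a 4-row question and 6-row options:
// used = 11, remaining = 9 -> notes 5, footer 2, both titles 1, progress 0.
```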

/// Layout calculation when no options are present.
///
/// Handles both tight layout (when space is constrained) and normal layout
/// (when there's sufficient space for all elements).
///
/// Returns: (question_height, progress_height, answer_title_height, notes_title_height, notes_height, options_height, footer_lines)
fn layout_without_options(
|
||||
&self,
|
||||
available_height: u16,
|
||||
question_height: u16,
|
||||
notes_pref_height: u16,
|
||||
footer_pref: u16,
|
||||
question_lines: &mut Vec<String>,
|
||||
) -> (u16, u16, u16, u16, u16, u16, u16) {
|
||||
let required = 1u16.saturating_add(question_height);
|
||||
if required > available_height {
|
||||
self.layout_without_options_tight(available_height, question_height, question_lines)
|
||||
} else {
|
||||
self.layout_without_options_normal(
|
||||
available_height,
|
||||
question_height,
|
||||
notes_pref_height,
|
||||
footer_pref,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Tight layout for no-options case: truncate question to fit available space.
fn layout_without_options_tight(
|
||||
&self,
|
||||
available_height: u16,
|
||||
question_height: u16,
|
||||
question_lines: &mut Vec<String>,
|
||||
) -> (u16, u16, u16, u16, u16, u16, u16) {
|
||||
let max_question_height = available_height.saturating_sub(1);
|
||||
let adjusted_question_height = question_height.min(max_question_height);
|
||||
question_lines.truncate(adjusted_question_height as usize);
|
||||
|
||||
(adjusted_question_height, 0, 0, 0, 0, 0, 0)
|
||||
}
|
||||
|
||||
/// Normal layout for no-options case: allocate space for notes, footer, and progress.
fn layout_without_options_normal(
|
||||
&self,
|
||||
available_height: u16,
|
||||
question_height: u16,
|
||||
notes_pref_height: u16,
|
||||
footer_pref: u16,
|
||||
) -> (u16, u16, u16, u16, u16, u16, u16) {
|
||||
let required = 1u16.saturating_add(question_height);
|
||||
let mut remaining = available_height.saturating_sub(required);
|
||||
let mut notes_height = notes_pref_height.min(remaining);
|
||||
remaining = remaining.saturating_sub(notes_height);
|
||||
|
||||
let footer_lines = footer_pref.min(remaining);
|
||||
remaining = remaining.saturating_sub(footer_lines);
|
||||
|
||||
let mut progress_height = 0;
|
||||
if remaining > 0 {
|
||||
progress_height = 1;
|
||||
remaining = remaining.saturating_sub(1);
|
||||
}
|
||||
|
||||
notes_height = notes_height.saturating_add(remaining);
|
||||
|
||||
(
|
||||
question_height,
|
||||
progress_height,
|
||||
0,
|
||||
0,
|
||||
notes_height,
|
||||
0,
|
||||
footer_lines,
|
||||
)
|
||||
}
|
||||
|
||||
/// Build the final layout areas from computed heights.
fn build_layout_areas(
|
||||
&self,
|
||||
area: Rect,
|
||||
heights: LayoutHeights,
|
||||
) -> (
|
||||
Rect, // progress_area
|
||||
Rect, // header_area
|
||||
Rect, // question_area
|
||||
Rect, // answer_title_area
|
||||
Rect, // options_area
|
||||
Rect, // notes_title_area
|
||||
Rect, // notes_area
|
||||
) {
|
||||
let mut cursor_y = area.y;
|
||||
let progress_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: heights.progress_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(heights.progress_height);
|
||||
let header_height = area.height.saturating_sub(heights.progress_height).min(1);
|
||||
let header_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: header_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(header_height);
|
||||
let question_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: heights.question_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(heights.question_height);
|
||||
|
||||
let answer_title_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: heights.answer_title_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(heights.answer_title_height);
|
||||
let options_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: heights.options_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(heights.options_height);
|
||||
|
||||
let notes_title_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: heights.notes_title_height,
|
||||
};
|
||||
cursor_y = cursor_y.saturating_add(heights.notes_title_height);
|
||||
let notes_area = Rect {
|
||||
x: area.x,
|
||||
y: cursor_y,
|
||||
width: area.width,
|
||||
height: heights.notes_height,
|
||||
};
|
||||
|
||||
(
|
||||
progress_area,
|
||||
header_area,
|
||||
question_area,
|
||||
answer_title_area,
|
||||
options_area,
|
||||
notes_title_area,
|
||||
notes_area,
|
||||
)
|
||||
}
}

#[derive(Clone, Copy, Debug)]
struct LayoutHeights {
progress_height: u16,
question_height: u16,
answer_title_height: u16,
options_height: u16,
notes_title_height: u16,
notes_height: u16,
}

@@ -2,13 +2,13 @@
//!
//! Core behaviors:
//! - Each question can be answered by selecting one option and/or providing notes.
//! - Notes are stored per question and appended as extra answers.
//! - When options exist, notes are stored per selected option and appended as extra answers.
//! - Typing while focused on options jumps into notes to keep freeform input fast.
//! - Enter advances to the next question; the last question submits all answers.
//! - Freeform-only questions submit an empty answer list when empty.
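
A minimal sketch of the answer-flattening rules listed above, using plain strings rather than the real protocol types (assumption: the actual overlay builds `RequestUserInputAnswer` values; this standalone helper only illustrates the ordering and the empty-answer case):

```rust
// Illustrative only: selected option label first, trimmed notes appended after.
fn flatten_answer(selected_label: Option<&str>, notes: &str) -> Vec<String> {
    let mut answers = Vec::new();
    if let Some(label) = selected_label {
        answers.push(label.to_string());
    }
    let notes = notes.trim();
    if !notes.is_empty() {
        // Notes are appended as an extra answer.
        answers.push(notes.to_string());
    }
    // Freeform-only questions with no notes submit an empty list.
    answers
}
```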
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::path::PathBuf;

use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
@@ -19,25 +19,18 @@ mod render;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::bottom_pane::CancellationEvent;
use crate::bottom_pane::ChatComposer;
use crate::bottom_pane::ChatComposerConfig;
use crate::bottom_pane::InputResult;
use crate::bottom_pane::bottom_pane_view::BottomPaneView;
use crate::bottom_pane::scroll_state::ScrollState;
use crate::bottom_pane::selection_popup_common::GenericDisplayRow;
use crate::bottom_pane::selection_popup_common::measure_rows_height;
use crate::render::renderable::Renderable;
use crate::bottom_pane::textarea::TextArea;
use crate::bottom_pane::textarea::TextAreaState;

use codex_core::protocol::Op;
use codex_protocol::request_user_input::RequestUserInputAnswer;
use codex_protocol::request_user_input::RequestUserInputEvent;
use codex_protocol::request_user_input::RequestUserInputResponse;
use codex_protocol::user_input::TextElement;

const NOTES_PLACEHOLDER: &str = "Add notes (optional)";
const ANSWER_PLACEHOLDER: &str = "Type your answer (optional)";
// Keep in sync with ChatComposer's minimum composer height.
const MIN_COMPOSER_HEIGHT: u16 = 3;
const SELECT_OPTION_PLACEHOLDER: &str = "Select an option to add notes (optional)";

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
@@ -46,11 +39,18 @@ enum Focus {
|
||||
Notes,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone)]
|
||||
struct ComposerDraft {
|
||||
text: String,
|
||||
text_elements: Vec<TextElement>,
|
||||
local_image_paths: Vec<PathBuf>,
|
||||
struct NotesEntry {
|
||||
text: TextArea,
|
||||
state: RefCell<TextAreaState>,
|
||||
}
|
||||
|
||||
impl NotesEntry {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
text: TextArea::new(),
|
||||
state: RefCell::new(TextAreaState::default()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct AnswerState {
|
||||
@@ -58,8 +58,10 @@ struct AnswerState {
|
||||
selected: Option<usize>,
|
||||
// Scrollable cursor state for option navigation/highlight.
|
||||
option_state: ScrollState,
|
||||
// Per-question notes draft.
|
||||
draft: ComposerDraft,
|
||||
// Notes for freeform-only questions.
|
||||
notes: NotesEntry,
|
||||
// Per-option notes for option questions.
|
||||
option_notes: Vec<NotesEntry>,
|
||||
}
|
||||
|
||||
pub(crate) struct RequestUserInputOverlay {
|
||||
@@ -67,10 +69,6 @@ pub(crate) struct RequestUserInputOverlay {
|
||||
request: RequestUserInputEvent,
|
||||
// Queue of incoming requests to process after the current one.
|
||||
queue: VecDeque<RequestUserInputEvent>,
|
||||
// Reuse the shared chat composer so notes/freeform answers match the
|
||||
// primary input styling and behavior.
|
||||
composer: ChatComposer,
|
||||
// One entry per question: selection state plus a stored notes draft.
|
||||
answers: Vec<AnswerState>,
|
||||
current_idx: usize,
|
||||
focus: Focus,
|
||||
@@ -78,30 +76,11 @@ pub(crate) struct RequestUserInputOverlay {
|
||||
}
|
||||
|
||||
impl RequestUserInputOverlay {
|
||||
pub(crate) fn new(
|
||||
request: RequestUserInputEvent,
|
||||
app_event_tx: AppEventSender,
|
||||
has_input_focus: bool,
|
||||
enhanced_keys_supported: bool,
|
||||
disable_paste_burst: bool,
|
||||
) -> Self {
|
||||
// Use the same composer widget, but disable popups/slash-commands and
|
||||
// image-path attachment so it behaves like a focused notes field.
|
||||
let mut composer = ChatComposer::new_with_config(
|
||||
has_input_focus,
|
||||
app_event_tx.clone(),
|
||||
enhanced_keys_supported,
|
||||
ANSWER_PLACEHOLDER.to_string(),
|
||||
disable_paste_burst,
|
||||
ChatComposerConfig::plain_text(),
|
||||
);
|
||||
// The overlay renders its own footer hints, so keep the composer footer empty.
|
||||
composer.set_footer_hint_override(Some(Vec::new()));
|
||||
pub(crate) fn new(request: RequestUserInputEvent, app_event_tx: AppEventSender) -> Self {
|
||||
let mut overlay = Self {
|
||||
app_event_tx,
|
||||
request,
|
||||
queue: VecDeque::new(),
|
||||
composer,
|
||||
answers: Vec::new(),
|
||||
current_idx: 0,
|
||||
focus: Focus::Options,
|
||||
@@ -109,7 +88,6 @@ impl RequestUserInputOverlay {
|
||||
};
|
||||
overlay.reset_for_request();
|
||||
overlay.ensure_focus_available();
|
||||
overlay.restore_current_draft();
|
||||
overlay
|
||||
}
|
||||
|
||||
@@ -166,96 +144,28 @@ impl RequestUserInputOverlay {
|
||||
.map(|option| option.label.as_str())
|
||||
}
|
||||
|
||||
pub(super) fn wrapped_question_lines(&self, width: u16) -> Vec<String> {
|
||||
self.current_question()
|
||||
.map(|q| {
|
||||
textwrap::wrap(&q.question, width.max(1) as usize)
|
||||
.into_iter()
|
||||
.map(|line| line.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub(super) fn option_rows(&self) -> Vec<GenericDisplayRow> {
|
||||
self.current_question()
|
||||
.and_then(|question| question.options.as_ref())
|
||||
.map(|options| {
|
||||
options
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, opt)| {
|
||||
let selected = self
|
||||
.current_answer()
|
||||
.and_then(|answer| answer.selected)
|
||||
.is_some_and(|sel| sel == idx);
|
||||
let prefix = if selected { "(x)" } else { "( )" };
|
||||
GenericDisplayRow {
|
||||
name: format!("{prefix} {}", opt.label),
|
||||
description: Some(opt.description.clone()),
|
||||
..Default::default()
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub(super) fn options_required_height(&self, width: u16) -> u16 {
|
||||
fn current_notes_entry(&self) -> Option<&NotesEntry> {
|
||||
let answer = self.current_answer()?;
|
||||
if !self.has_options() {
|
||||
return 0;
|
||||
return Some(&answer.notes);
|
||||
}
|
||||
|
||||
let rows = self.option_rows();
|
||||
if rows.is_empty() {
|
||||
return 1;
|
||||
}
|
||||
|
||||
let mut state = self
|
||||
.current_answer()
|
||||
.map(|answer| answer.option_state)
|
||||
.unwrap_or_default();
|
||||
if state.selected_idx.is_none() {
|
||||
state.selected_idx = Some(0);
|
||||
}
|
||||
|
||||
measure_rows_height(&rows, &state, rows.len(), width.max(1))
|
||||
let idx = self
|
||||
.selected_option_index()
|
||||
.or(answer.option_state.selected_idx)?;
|
||||
answer.option_notes.get(idx)
|
||||
}
|
||||
|
||||
fn capture_composer_draft(&self) -> ComposerDraft {
|
||||
ComposerDraft {
|
||||
text: self.composer.current_text_with_pending(),
|
||||
text_elements: self.composer.text_elements(),
|
||||
local_image_paths: self
|
||||
.composer
|
||||
.local_images()
|
||||
.into_iter()
|
||||
.map(|img| img.path)
|
||||
.collect(),
|
||||
fn current_notes_entry_mut(&mut self) -> Option<&mut NotesEntry> {
|
||||
let has_options = self.has_options();
|
||||
let answer = self.current_answer_mut()?;
|
||||
if !has_options {
|
||||
return Some(&mut answer.notes);
|
||||
}
|
||||
}
|
||||
|
||||
fn save_current_draft(&mut self) {
|
||||
let draft = self.capture_composer_draft();
|
||||
if let Some(answer) = self.current_answer_mut() {
|
||||
answer.draft = draft;
|
||||
}
|
||||
}
|
||||
|
||||
fn restore_current_draft(&mut self) {
|
||||
self.composer
|
||||
.set_placeholder_text(self.notes_placeholder().to_string());
|
||||
self.composer.set_footer_hint_override(Some(Vec::new()));
|
||||
let Some(answer) = self.current_answer() else {
|
||||
self.composer
|
||||
.set_text_content(String::new(), Vec::new(), Vec::new());
|
||||
self.composer.move_cursor_to_end();
|
||||
return;
|
||||
};
|
||||
let draft = answer.draft.clone();
|
||||
self.composer
|
||||
.set_text_content(draft.text, draft.text_elements, draft.local_image_paths);
|
||||
self.composer.move_cursor_to_end();
|
||||
let idx = answer
|
||||
.selected
|
||||
.or(answer.option_state.selected_idx)
|
||||
.or_else(|| answer.option_notes.is_empty().then_some(0))?;
|
||||
answer.option_notes.get_mut(idx)
|
||||
}
|
||||
|
||||
fn notes_placeholder(&self) -> &'static str {
|
||||
@@ -290,23 +200,24 @@ impl RequestUserInputOverlay {
|
||||
.iter()
|
||||
.map(|question| {
|
||||
let mut option_state = ScrollState::new();
|
||||
let mut option_notes = Vec::new();
|
||||
if let Some(options) = question.options.as_ref()
|
||||
&& !options.is_empty()
|
||||
{
|
||||
option_state.selected_idx = Some(0);
|
||||
option_notes = (0..options.len()).map(|_| NotesEntry::new()).collect();
|
||||
}
|
||||
AnswerState {
|
||||
selected: option_state.selected_idx,
|
||||
option_state,
|
||||
draft: ComposerDraft::default(),
|
||||
notes: NotesEntry::new(),
|
||||
option_notes,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
self.current_idx = 0;
|
||||
self.focus = Focus::Options;
|
||||
self.composer
|
||||
.set_text_content(String::new(), Vec::new(), Vec::new());
|
||||
}
|
||||
|
||||
/// Move to the next/previous question, wrapping in either direction.
|
||||
@@ -315,11 +226,9 @@ impl RequestUserInputOverlay {
|
||||
if len == 0 {
|
||||
return;
|
||||
}
|
||||
self.save_current_draft();
|
||||
let offset = if next { 1 } else { len.saturating_sub(1) };
|
||||
self.current_idx = (self.current_idx + offset) % len;
|
||||
self.ensure_focus_available();
|
||||
self.restore_current_draft();
|
||||
}
|
||||
|
||||
/// Synchronize selection state to the currently focused option.
|
||||
@@ -357,7 +266,6 @@ impl RequestUserInputOverlay {
|
||||
|
||||
/// Build the response payload and dispatch it to the app.
|
||||
fn submit_answers(&mut self) {
|
||||
self.save_current_draft();
|
||||
let mut answers = HashMap::new();
|
||||
for (idx, question) in self.request.questions.iter().enumerate() {
|
||||
let answer_state = &self.answers[idx];
|
||||
@@ -370,8 +278,15 @@ impl RequestUserInputOverlay {
|
||||
} else {
|
||||
answer_state.selected
|
||||
};
|
||||
// Notes are appended as extra answers.
|
||||
let notes = answer_state.draft.text.trim().to_string();
|
||||
// Notes are appended as extra answers. When options exist, notes are per selected option.
|
||||
let notes = if options.is_some_and(|opts| !opts.is_empty()) {
|
||||
selected_idx
|
||||
.and_then(|selected| answer_state.option_notes.get(selected))
|
||||
.map(|entry| entry.text.text().trim().to_string())
|
||||
.unwrap_or_default()
|
||||
} else {
|
||||
answer_state.notes.text.text().trim().to_string()
|
||||
};
|
||||
let selected_label = selected_idx.and_then(|selected_idx| {
|
||||
question
|
||||
.options
|
||||
@@ -399,7 +314,6 @@ impl RequestUserInputOverlay {
|
||||
self.request = next;
|
||||
self.reset_for_request();
|
||||
self.ensure_focus_available();
|
||||
self.restore_current_draft();
|
||||
} else {
|
||||
self.done = true;
|
||||
}
|
||||
@@ -407,7 +321,6 @@ impl RequestUserInputOverlay {
|
||||
|
||||
/// Count freeform-only questions that have no notes.
|
||||
fn unanswered_count(&self) -> usize {
|
||||
let current_text = self.composer.current_text();
|
||||
self.request
|
||||
.questions
|
||||
.iter()
|
||||
@@ -418,12 +331,7 @@ impl RequestUserInputOverlay {
|
||||
if options.is_some_and(|opts| !opts.is_empty()) {
|
||||
false
|
||||
} else {
|
||||
let notes = if *idx == self.current_index() {
|
||||
current_text.as_str()
|
||||
} else {
|
||||
answer.draft.text.as_str()
|
||||
};
|
||||
notes.trim().is_empty()
|
||||
answer.notes.text.text().trim().is_empty()
|
||||
}
|
||||
})
|
||||
.count()
|
||||
@@ -431,48 +339,12 @@ impl RequestUserInputOverlay {
|
||||
|
||||
/// Compute the preferred notes input height for the current question.
|
||||
fn notes_input_height(&self, width: u16) -> u16 {
|
||||
let min_height = MIN_COMPOSER_HEIGHT;
|
||||
self.composer
|
||||
.desired_height(width.max(1))
|
||||
.clamp(min_height, min_height.saturating_add(5))
|
||||
}
|
||||
|
||||
fn apply_submission_to_draft(&mut self, text: String, text_elements: Vec<TextElement>) {
|
||||
let local_image_paths = self
|
||||
.composer
|
||||
.local_images()
|
||||
.into_iter()
|
||||
.map(|img| img.path)
|
||||
.collect::<Vec<_>>();
|
||||
if let Some(answer) = self.current_answer_mut() {
|
||||
answer.draft = ComposerDraft {
|
||||
text: text.clone(),
|
||||
text_elements: text_elements.clone(),
|
||||
local_image_paths: local_image_paths.clone(),
|
||||
};
|
||||
}
|
||||
self.composer
|
||||
.set_text_content(text, text_elements, local_image_paths);
|
||||
self.composer.move_cursor_to_end();
|
||||
self.composer.set_footer_hint_override(Some(Vec::new()));
|
||||
}
|
||||
|
||||
fn handle_composer_input_result(&mut self, result: InputResult) -> bool {
|
||||
match result {
|
||||
InputResult::Submitted {
|
||||
text,
|
||||
text_elements,
|
||||
}
|
||||
| InputResult::Queued {
|
||||
text,
|
||||
text_elements,
|
||||
} => {
|
||||
self.apply_submission_to_draft(text, text_elements);
|
||||
self.go_next_or_submit();
|
||||
true
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
let Some(entry) = self.current_notes_entry() else {
|
||||
return 3;
|
||||
};
|
||||
let usable_width = width.saturating_sub(2);
|
||||
let text_height = entry.text.desired_height(usable_width).clamp(1, 6);
|
||||
text_height.saturating_add(2).clamp(3, 8)
|
||||
}
|
||||
}
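
For reference, the clamp above means the framed notes box is the wrapped text height plus two border rows, never shorter than 3 rows or taller than 8. A hypothetical helper that restates just that arithmetic:

```rust
// Standalone restatement of the height clamp (the wrapped row count is an assumed input).
fn sketch_notes_box_height(wrapped_text_rows: u16) -> u16 {
    let text_height = wrapped_text_rows.clamp(1, 6);
    text_height.saturating_add(2).clamp(3, 8) // +2 for the top/bottom border rows
}
// 1 row of text -> 3 total; 6 or more rows of text -> capped at 8 total.
```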
@@ -504,19 +376,18 @@ impl BottomPaneView for RequestUserInputOverlay {
|
||||
match self.focus {
|
||||
Focus::Options => {
|
||||
let options_len = self.options_len();
|
||||
let Some(answer) = self.current_answer_mut() else {
|
||||
return;
|
||||
};
|
||||
// Keep selection synchronized as the user moves.
|
||||
match key_event.code {
|
||||
KeyCode::Up => {
|
||||
if let Some(answer) = self.current_answer_mut() {
|
||||
answer.option_state.move_up_wrap(options_len);
|
||||
answer.selected = answer.option_state.selected_idx;
|
||||
}
|
||||
answer.option_state.move_up_wrap(options_len);
|
||||
answer.selected = answer.option_state.selected_idx;
|
||||
}
|
||||
KeyCode::Down => {
|
||||
if let Some(answer) = self.current_answer_mut() {
|
||||
answer.option_state.move_down_wrap(options_len);
|
||||
answer.selected = answer.option_state.selected_idx;
|
||||
}
|
||||
answer.option_state.move_down_wrap(options_len);
|
||||
answer.selected = answer.option_state.selected_idx;
|
||||
}
|
||||
KeyCode::Char(' ') => {
|
||||
self.select_current_option();
|
||||
@@ -529,43 +400,41 @@ impl BottomPaneView for RequestUserInputOverlay {
|
||||
// Any typing while in options switches to notes for fast freeform input.
|
||||
self.focus = Focus::Notes;
|
||||
self.ensure_selected_for_notes();
|
||||
let (result, _) = self.composer.handle_key_event(key_event);
|
||||
self.handle_composer_input_result(result);
|
||||
if let Some(entry) = self.current_notes_entry_mut() {
|
||||
entry.text.input(key_event);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Focus::Notes => {
|
||||
if matches!(key_event.code, KeyCode::Enter) {
|
||||
self.ensure_selected_for_notes();
|
||||
let (result, _) = self.composer.handle_key_event(key_event);
|
||||
if !self.handle_composer_input_result(result) {
|
||||
self.go_next_or_submit();
|
||||
}
|
||||
self.go_next_or_submit();
|
||||
return;
|
||||
}
|
||||
if self.has_options() && matches!(key_event.code, KeyCode::Up | KeyCode::Down) {
|
||||
let options_len = self.options_len();
|
||||
let Some(answer) = self.current_answer_mut() else {
|
||||
return;
|
||||
};
|
||||
match key_event.code {
|
||||
KeyCode::Up => {
|
||||
if let Some(answer) = self.current_answer_mut() {
|
||||
answer.option_state.move_up_wrap(options_len);
|
||||
answer.selected = answer.option_state.selected_idx;
|
||||
}
|
||||
answer.option_state.move_up_wrap(options_len);
|
||||
answer.selected = answer.option_state.selected_idx;
|
||||
}
|
||||
KeyCode::Down => {
|
||||
if let Some(answer) = self.current_answer_mut() {
|
||||
answer.option_state.move_down_wrap(options_len);
|
||||
answer.selected = answer.option_state.selected_idx;
|
||||
}
|
||||
answer.option_state.move_down_wrap(options_len);
|
||||
answer.selected = answer.option_state.selected_idx;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
return;
|
||||
}
|
||||
// Notes are per option when options exist.
|
||||
self.ensure_selected_for_notes();
|
||||
let (result, _) = self.composer.handle_key_event(key_event);
|
||||
self.handle_composer_input_result(result);
|
||||
if let Some(entry) = self.current_notes_entry_mut() {
|
||||
entry.text.input(key_event);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -584,20 +453,25 @@ impl BottomPaneView for RequestUserInputOverlay {
|
||||
if pasted.is_empty() {
|
||||
return false;
|
||||
}
|
||||
if matches!(self.focus, Focus::Notes) {
|
||||
self.ensure_selected_for_notes();
|
||||
if let Some(entry) = self.current_notes_entry_mut() {
|
||||
entry.text.insert_str(&pasted);
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if matches!(self.focus, Focus::Options) {
|
||||
// Treat pastes the same as typing: switch into notes.
|
||||
self.focus = Focus::Notes;
|
||||
self.ensure_selected_for_notes();
|
||||
if let Some(entry) = self.current_notes_entry_mut() {
|
||||
entry.text.insert_str(&pasted);
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
self.ensure_selected_for_notes();
|
||||
self.composer.handle_paste(pasted)
|
||||
}
|
||||
|
||||
fn flush_paste_burst_if_due(&mut self) -> bool {
|
||||
self.composer.flush_paste_burst_if_due()
|
||||
}
|
||||
|
||||
fn is_in_paste_burst(&self) -> bool {
|
||||
self.composer.is_in_paste_burst()
|
||||
false
|
||||
}
|
||||
|
||||
fn try_consume_user_input_request(
|
||||
@@ -652,35 +526,6 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
fn question_with_wrapped_options(id: &str, header: &str) -> RequestUserInputQuestion {
|
||||
RequestUserInputQuestion {
|
||||
id: id.to_string(),
|
||||
header: header.to_string(),
|
||||
question: "Choose the next step for this task.".to_string(),
|
||||
is_other: false,
|
||||
options: Some(vec![
|
||||
RequestUserInputQuestionOption {
|
||||
label: "Discuss a code change".to_string(),
|
||||
description:
|
||||
"Walk through a plan, then implement it together with careful checks."
|
||||
.to_string(),
|
||||
},
|
||||
RequestUserInputQuestionOption {
|
||||
label: "Run targeted tests".to_string(),
|
||||
description:
|
||||
"Pick the most relevant crate and validate the current behavior first."
|
||||
.to_string(),
|
||||
},
|
||||
RequestUserInputQuestionOption {
|
||||
label: "Review the diff".to_string(),
|
||||
description:
|
||||
"Summarize the changes and highlight the most important risks and gaps."
|
||||
.to_string(),
|
||||
},
|
||||
]),
|
||||
}
|
||||
}
|
||||
|
||||
fn question_without_options(id: &str, header: &str) -> RequestUserInputQuestion {
|
||||
RequestUserInputQuestion {
|
||||
id: id.to_string(),
|
||||
@@ -726,9 +571,6 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "First")]),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
overlay.try_consume_user_input_request(request_event(
|
||||
"turn-2",
|
||||
@@ -752,9 +594,6 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Pick one")]),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
|
||||
overlay.submit_answers();
|
||||
@@ -774,9 +613,6 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_without_options("q1", "Notes")]),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
|
||||
overlay.submit_answers();
|
||||
@@ -795,9 +631,6 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Pick one")]),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
|
||||
{
|
||||
@@ -806,9 +639,10 @@ mod tests {
|
||||
}
|
||||
overlay.select_current_option();
|
||||
overlay
|
||||
.composer
|
||||
.set_text_content("Notes for option 2".to_string(), Vec::new(), Vec::new());
|
||||
overlay.composer.move_cursor_to_end();
|
||||
.current_notes_entry_mut()
|
||||
.expect("notes entry missing")
|
||||
.text
|
||||
.insert_str("Notes for option 2");
|
||||
|
||||
overlay.submit_answers();
|
||||
|
||||
@@ -826,39 +660,12 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn large_paste_is_preserved_when_switching_questions() {
|
||||
let (tx, _rx) = test_sender();
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event(
|
||||
"turn-1",
|
||||
vec![
|
||||
question_without_options("q1", "First"),
|
||||
question_without_options("q2", "Second"),
|
||||
],
|
||||
),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
|
||||
let large = "x".repeat(1_500);
|
||||
overlay.composer.handle_paste(large.clone());
|
||||
overlay.move_question(true);
|
||||
|
||||
assert_eq!(overlay.answers[0].draft.text, large);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn request_user_input_options_snapshot() {
|
||||
let (tx, _rx) = test_sender();
|
||||
let overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Area")]),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
let area = Rect::new(0, 0, 64, 16);
|
||||
insta::assert_snapshot!(
|
||||
@@ -873,9 +680,6 @@ mod tests {
|
||||
let overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Area")]),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
let area = Rect::new(0, 0, 60, 8);
|
||||
insta::assert_snapshot!(
|
||||
@@ -884,60 +688,6 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn layout_allocates_all_wrapped_options_when_space_allows() {
|
||||
let (tx, _rx) = test_sender();
|
||||
let overlay = RequestUserInputOverlay::new(
|
||||
request_event(
|
||||
"turn-1",
|
||||
vec![question_with_wrapped_options("q1", "Next Step")],
|
||||
),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
|
||||
let width = 48u16;
|
||||
let question_height = overlay.wrapped_question_lines(width).len() as u16;
|
||||
let options_height = overlay.options_required_height(width);
|
||||
let height = 1u16
|
||||
.saturating_add(question_height)
|
||||
.saturating_add(options_height)
|
||||
.saturating_add(4);
|
||||
let sections = overlay.layout_sections(Rect::new(0, 0, width, height));
|
||||
|
||||
assert_eq!(sections.options_area.height, options_height);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn request_user_input_wrapped_options_snapshot() {
|
||||
let (tx, _rx) = test_sender();
|
||||
let overlay = RequestUserInputOverlay::new(
|
||||
request_event(
|
||||
"turn-1",
|
||||
vec![question_with_wrapped_options("q1", "Next Step")],
|
||||
),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
|
||||
let width = 52u16;
|
||||
let question_height = overlay.wrapped_question_lines(width).len() as u16;
|
||||
let options_height = overlay.options_required_height(width);
|
||||
let height = 1u16
|
||||
.saturating_add(question_height)
|
||||
.saturating_add(options_height)
|
||||
.saturating_add(4);
|
||||
let area = Rect::new(0, 0, width, height);
|
||||
insta::assert_snapshot!(
|
||||
"request_user_input_wrapped_options",
|
||||
render_snapshot(&overlay, area)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn request_user_input_scroll_options_snapshot() {
|
||||
let (tx, _rx) = test_sender();
|
||||
@@ -974,9 +724,6 @@ mod tests {
|
||||
}],
|
||||
),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
{
|
||||
let answer = overlay.current_answer_mut().expect("answer missing");
|
||||
@@ -996,9 +743,6 @@ mod tests {
|
||||
let overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_without_options("q1", "Goal")]),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
let area = Rect::new(0, 0, 64, 10);
|
||||
insta::assert_snapshot!(
|
||||
@@ -1013,15 +757,13 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Pick one")]),
|
||||
tx,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
overlay.focus = Focus::Notes;
|
||||
overlay
|
||||
.composer
|
||||
.set_text_content("Notes".to_string(), Vec::new(), Vec::new());
|
||||
overlay.composer.move_cursor_to_end();
|
||||
.current_notes_entry_mut()
|
||||
.expect("notes entry missing")
|
||||
.text
|
||||
.insert_str("Notes");
|
||||
|
||||
overlay.handle_key_event(KeyEvent::from(KeyCode::Down));
|
||||
|
||||
|
||||
@@ -3,12 +3,12 @@ use ratatui::buffer::Buffer;
|
||||
use ratatui::layout::Rect;
|
||||
use ratatui::style::Stylize;
|
||||
use ratatui::text::Line;
|
||||
use ratatui::widgets::Clear;
|
||||
use ratatui::widgets::Paragraph;
|
||||
use ratatui::widgets::StatefulWidgetRef;
|
||||
use ratatui::widgets::Widget;
|
||||
|
||||
use crate::bottom_pane::selection_popup_common::menu_surface_inset;
|
||||
use crate::bottom_pane::selection_popup_common::menu_surface_padding_height;
|
||||
use crate::bottom_pane::selection_popup_common::render_menu_surface;
|
||||
use crate::bottom_pane::selection_popup_common::GenericDisplayRow;
|
||||
use crate::bottom_pane::selection_popup_common::render_rows;
|
||||
use crate::key_hint;
|
||||
use crate::render::renderable::Renderable;
|
||||
@@ -17,28 +17,18 @@ use super::RequestUserInputOverlay;
|
||||
|
||||
impl Renderable for RequestUserInputOverlay {
|
||||
fn desired_height(&self, width: u16) -> u16 {
|
||||
let outer = Rect::new(0, 0, width, u16::MAX);
|
||||
let inner = menu_surface_inset(outer);
|
||||
let inner_width = inner.width.max(1);
|
||||
let question_height = self.wrapped_question_lines(inner_width).len();
|
||||
let options_height = self.options_required_height(inner_width) as usize;
|
||||
let notes_height = self.notes_input_height(inner_width) as usize;
|
||||
let footer_height = if self.unanswered_count() > 0 { 2 } else { 1 };
|
||||
|
||||
// Tight minimum height: progress + header + question + (optional) titles/options
|
||||
// + notes composer + footer + menu padding.
|
||||
let mut height = question_height
|
||||
.saturating_add(options_height)
|
||||
.saturating_add(notes_height)
|
||||
.saturating_add(footer_height)
|
||||
.saturating_add(2); // progress + header
|
||||
let sections = self.layout_sections(Rect::new(0, 0, width, u16::MAX));
|
||||
let mut height = sections
|
||||
.question_lines
|
||||
.len()
|
||||
.saturating_add(5)
|
||||
.saturating_add(self.notes_input_height(width) as usize)
|
||||
.saturating_add(sections.footer_lines as usize);
|
||||
if self.has_options() {
|
||||
height = height
|
||||
.saturating_add(1) // answer title
|
||||
.saturating_add(1); // notes title
|
||||
height = height.saturating_add(2);
|
||||
}
|
||||
height = height.saturating_add(menu_surface_padding_height() as usize);
|
||||
height.max(8) as u16
|
||||
height = height.max(8);
|
||||
height as u16
|
||||
}
|
||||
|
||||
fn render(&self, area: Rect, buf: &mut Buffer) {
|
||||
@@ -56,13 +46,7 @@ impl RequestUserInputOverlay {
|
||||
if area.width == 0 || area.height == 0 {
|
||||
return;
|
||||
}
|
||||
// Paint the same menu surface used by other bottom-pane overlays and
|
||||
// then render the overlay content inside its inset area.
|
||||
let content_area = render_menu_surface(area, buf);
|
||||
if content_area.width == 0 || content_area.height == 0 {
|
||||
return;
|
||||
}
|
||||
let sections = self.layout_sections(content_area);
|
||||
let sections = self.layout_sections(area);
|
||||
|
||||
// Progress header keeps the user oriented across multiple questions.
|
||||
let progress_line = if self.question_count() > 0 {
|
||||
@@ -112,7 +96,28 @@ impl RequestUserInputOverlay {
|
||||
}
|
||||
|
||||
// Build rows with selection markers for the shared selection renderer.
|
||||
let option_rows = self.option_rows();
|
||||
let option_rows = self
|
||||
.current_question()
|
||||
.and_then(|question| question.options.as_ref())
|
||||
.map(|options| {
|
||||
options
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, opt)| {
|
||||
let selected = self
|
||||
.current_answer()
|
||||
.and_then(|answer| answer.selected)
|
||||
.is_some_and(|sel| sel == idx);
|
||||
let prefix = if selected { "(x)" } else { "( )" };
|
||||
GenericDisplayRow {
|
||||
name: format!("{prefix} {}", opt.label),
|
||||
description: Some(opt.description.clone()),
|
||||
..Default::default()
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
if self.has_options() {
|
||||
let mut option_state = self
|
||||
@@ -172,9 +177,9 @@ impl RequestUserInputOverlay {
|
||||
);
|
||||
Paragraph::new(Line::from(warning.dim())).render(
|
||||
Rect {
|
||||
x: content_area.x,
|
||||
x: area.x,
|
||||
y: footer_y,
|
||||
width: content_area.width,
|
||||
width: area.width,
|
||||
height: 1,
|
||||
},
|
||||
buf,
|
||||
@@ -213,9 +218,9 @@ impl RequestUserInputOverlay {
|
||||
]);
|
||||
Paragraph::new(Line::from(hint_spans).dim()).render(
|
||||
Rect {
|
||||
x: content_area.x,
|
||||
x: area.x,
|
||||
y: hint_y,
|
||||
width: content_area.width,
|
||||
width: area.width,
|
||||
height: 1,
|
||||
},
|
||||
buf,
|
||||
@@ -227,24 +232,129 @@ impl RequestUserInputOverlay {
|
||||
if !self.focus_is_notes() {
|
||||
return None;
|
||||
}
|
||||
let content_area = menu_surface_inset(area);
|
||||
if content_area.width == 0 || content_area.height == 0 {
|
||||
return None;
|
||||
}
|
||||
let sections = self.layout_sections(content_area);
|
||||
let sections = self.layout_sections(area);
|
||||
let entry = self.current_notes_entry()?;
|
||||
let input_area = sections.notes_area;
|
||||
if input_area.width == 0 || input_area.height == 0 {
|
||||
if input_area.width <= 2 || input_area.height == 0 {
|
||||
return None;
|
||||
}
|
||||
self.composer.cursor_pos(input_area)
|
||||
if input_area.height < 3 {
|
||||
// Inline notes layout uses a prefix and a single-line text area.
|
||||
let prefix = notes_prefix();
|
||||
let prefix_width = prefix.len() as u16;
|
||||
if input_area.width <= prefix_width {
|
||||
return None;
|
||||
}
|
||||
let textarea_rect = Rect {
|
||||
x: input_area.x.saturating_add(prefix_width),
|
||||
y: input_area.y,
|
||||
width: input_area.width.saturating_sub(prefix_width),
|
||||
height: 1,
|
||||
};
|
||||
let state = *entry.state.borrow();
|
||||
return entry.text.cursor_pos_with_state(textarea_rect, state);
|
||||
}
|
||||
let text_area_height = input_area.height.saturating_sub(2);
|
||||
let textarea_rect = Rect {
|
||||
x: input_area.x.saturating_add(1),
|
||||
y: input_area.y.saturating_add(1),
|
||||
width: input_area.width.saturating_sub(2),
|
||||
height: text_area_height,
|
||||
};
|
||||
let state = *entry.state.borrow();
|
||||
entry.text.cursor_pos_with_state(textarea_rect, state)
|
||||
}
|
||||
|
||||
/// Render the notes composer.
|
||||
/// Render the notes input box or inline notes field.
|
||||
fn render_notes_input(&self, area: Rect, buf: &mut Buffer) {
|
||||
if area.width == 0 || area.height == 0 {
|
||||
let Some(entry) = self.current_notes_entry() else {
|
||||
return;
|
||||
};
|
||||
if area.width < 2 || area.height == 0 {
|
||||
return;
|
||||
}
|
||||
self.composer.render(area, buf);
|
||||
if area.height < 3 {
|
||||
// Inline notes field for tight layouts.
|
||||
let prefix = notes_prefix();
|
||||
let prefix_width = prefix.len() as u16;
|
||||
if area.width <= prefix_width {
|
||||
Paragraph::new(Line::from(prefix.dim())).render(area, buf);
|
||||
return;
|
||||
}
|
||||
Paragraph::new(Line::from(prefix.dim())).render(
|
||||
Rect {
|
||||
x: area.x,
|
||||
y: area.y,
|
||||
width: prefix_width,
|
||||
height: 1,
|
||||
},
|
||||
buf,
|
||||
);
|
||||
let textarea_rect = Rect {
|
||||
x: area.x.saturating_add(prefix_width),
|
||||
y: area.y,
|
||||
width: area.width.saturating_sub(prefix_width),
|
||||
height: 1,
|
||||
};
|
||||
let mut state = entry.state.borrow_mut();
|
||||
Clear.render(textarea_rect, buf);
|
||||
StatefulWidgetRef::render_ref(&(&entry.text), textarea_rect, buf, &mut state);
|
||||
if entry.text.text().is_empty() {
|
||||
Paragraph::new(Line::from(self.notes_placeholder().dim()))
|
||||
.render(textarea_rect, buf);
|
||||
}
|
||||
return;
|
||||
}
|
||||
// Draw a light ASCII frame around the notes area.
|
||||
let top_border = format!("+{}+", "-".repeat(area.width.saturating_sub(2) as usize));
|
||||
let bottom_border = top_border.clone();
|
||||
Paragraph::new(Line::from(top_border)).render(
|
||||
Rect {
|
||||
x: area.x,
|
||||
y: area.y,
|
||||
width: area.width,
|
||||
height: 1,
|
||||
},
|
||||
buf,
|
||||
);
|
||||
Paragraph::new(Line::from(bottom_border)).render(
|
||||
Rect {
|
||||
x: area.x,
|
||||
y: area.y.saturating_add(area.height.saturating_sub(1)),
|
||||
width: area.width,
|
||||
height: 1,
|
||||
},
|
||||
buf,
|
||||
);
|
||||
for row in 1..area.height.saturating_sub(1) {
|
||||
Line::from(vec![
|
||||
"|".into(),
|
||||
" ".repeat(area.width.saturating_sub(2) as usize).into(),
|
||||
"|".into(),
|
||||
])
|
||||
.render(
|
||||
Rect {
|
||||
x: area.x,
|
||||
y: area.y.saturating_add(row),
|
||||
width: area.width,
|
||||
height: 1,
|
||||
},
|
||||
buf,
|
||||
);
|
||||
}
|
||||
let text_area_height = area.height.saturating_sub(2);
|
||||
let textarea_rect = Rect {
|
||||
x: area.x.saturating_add(1),
|
||||
y: area.y.saturating_add(1),
|
||||
width: area.width.saturating_sub(2),
|
||||
height: text_area_height,
|
||||
};
|
||||
let mut state = entry.state.borrow_mut();
|
||||
Clear.render(textarea_rect, buf);
|
||||
StatefulWidgetRef::render_ref(&(&entry.text), textarea_rect, buf, &mut state);
|
||||
if entry.text.text().is_empty() {
|
||||
Paragraph::new(Line::from(self.notes_placeholder().dim())).render(textarea_rect, buf);
|
||||
}
|
||||
}
|
||||
|
||||
fn focus_is_options(&self) -> bool {
|
||||
@@ -259,3 +369,7 @@ impl RequestUserInputOverlay {
|
||||
!self.has_options() && self.focus_is_notes()
|
||||
}
|
||||
}
|
||||
|
||||
fn notes_prefix() -> &'static str {
|
||||
"Notes: "
|
||||
}
|
||||
|
||||
@@ -2,12 +2,11 @@
|
||||
source: tui/src/bottom_pane/request_user_input/mod.rs
|
||||
expression: "render_snapshot(&overlay, area)"
|
||||
---
|
||||
|
||||
Question 1/1
|
||||
Goal
|
||||
Share details.
|
||||
|
||||
› Type your answer (optional)
|
||||
|
||||
Unanswered: 1 | Will submit as skipped
|
||||
↑/↓ scroll | enter next question | esc interrupt
|
||||
Question 1/1
|
||||
Goal
|
||||
Share details.
|
||||
+--------------------------------------------------------------+
|
||||
|Type your answer (optional) |
|
||||
+--------------------------------------------------------------+
|
||||
Unanswered: 1 | Will submit as skipped
|
||||
↑/↓ scroll | enter next question | esc interrupt
|
||||
|
||||
@@ -2,18 +2,19 @@
|
||||
source: tui/src/bottom_pane/request_user_input/mod.rs
|
||||
expression: "render_snapshot(&overlay, area)"
|
||||
---
|
||||
|
||||
Question 1/1
|
||||
Area
|
||||
Choose an option.
|
||||
Answer
|
||||
(x) Option 1 First choice.
|
||||
( ) Option 2 Second choice.
|
||||
( ) Option 3 Third choice.
|
||||
Notes for Option 1 (optional)
|
||||
|
||||
› Add notes (optional)
|
||||
Question 1/1
|
||||
Area
|
||||
Choose an option.
|
||||
Answer
|
||||
(x) Option 1 First choice.
|
||||
( ) Option 2 Second choice.
|
||||
( ) Option 3 Third choice.
|
||||
|
||||
|
||||
|
||||
Option 1 of 3 | ↑/↓ scroll | enter next question | esc inter
|
||||
|
||||
Notes for Option 1 (optional)
|
||||
+--------------------------------------------------------------+
|
||||
|Add notes (optional) |
|
||||
+--------------------------------------------------------------+
|
||||
Option 1 of 3 | ↑/↓ scroll | enter next question | esc interrupt
|
||||
|
||||
@@ -2,13 +2,13 @@
|
||||
source: tui/src/bottom_pane/request_user_input/mod.rs
|
||||
expression: "render_snapshot(&overlay, area)"
|
||||
---
|
||||
|
||||
Next Step
|
||||
What would you like to do next?
|
||||
( ) Discuss a code change (Recommended) Walk through a plan and
|
||||
edit code together.
|
||||
( ) Run tests Pick a crate and run
|
||||
its tests.
|
||||
( ) Review a diff Summarize or review
|
||||
current changes.
|
||||
Option 4 of 5 | ↑/↓ scroll | enter next question | esc interrupt
|
||||
Question 1/1
|
||||
Next Step
|
||||
What would you like to do next?
|
||||
( ) Discuss a code change (Recommended) Walk through a plan and
|
||||
edit code together.
|
||||
( ) Run tests Pick a crate and run its
|
||||
tests.
|
||||
( ) Review a diff Summarize or review current
|
||||
Notes: Add notes (optional)
|
||||
Option 4 of 5 | ↑/↓ scroll | enter next question | esc interrupt
|
||||
|
||||
@@ -2,11 +2,11 @@
|
||||
source: tui/src/bottom_pane/request_user_input/mod.rs
|
||||
expression: "render_snapshot(&overlay, area)"
|
||||
---
|
||||
|
||||
Area
|
||||
Choose an option.
|
||||
(x) Option 1 First choice.
|
||||
( ) Option 2 Second choice.
|
||||
( ) Option 3 Third choice.
|
||||
|
||||
Option 1 of 3 | ↑/↓ scroll | enter next question | esc i
|
||||
Question 1/1
|
||||
Area
|
||||
Choose an option.
|
||||
(x) Option 1 First choice.
|
||||
( ) Option 2 Second choice.
|
||||
( ) Option 3 Third choice.
|
||||
Notes: Add notes (optional)
|
||||
Option 1 of 3 | ↑/↓ scroll | enter next question | esc inter
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
---
|
||||
source: tui/src/bottom_pane/request_user_input/mod.rs
|
||||
expression: "render_snapshot(&overlay, area)"
|
||||
---
|
||||
|
||||
Next Step
|
||||
Choose the next step for this task.
|
||||
(x) Discuss a code change Walk through a plan,
|
||||
then implement it
|
||||
together with careful
|
||||
checks.
|
||||
( ) Run targeted tests Pick the most
|
||||
relevant crate and
|
||||
validate the current
|
||||
behavior first.
|
||||
( ) Review the diff Summarize the changes
|
||||
and highlight the
|
||||
most important risks
|
||||
and gaps.
|
||||
|
||||
Option 1 of 3 | ↑/↓ scroll | enter next question
|
||||
@@ -42,11 +42,6 @@ pub(crate) fn menu_surface_inset(area: Rect) -> Rect {
area.inset(Insets::vh(MENU_SURFACE_INSET_V, MENU_SURFACE_INSET_H))
}

/// Total vertical padding introduced by the menu surface treatment.
pub(crate) const fn menu_surface_padding_height() -> u16 {
MENU_SURFACE_INSET_V * 2
}

/// Paint the shared menu background and return the inset content area.
///
/// This keeps the surface treatment consistent across selection-style overlays

@@ -162,7 +162,6 @@ use crate::history_cell::AgentMessageCell;
use crate::history_cell::HistoryCell;
use crate::history_cell::McpToolCallCell;
use crate::history_cell::PlainHistoryCell;
use crate::history_cell::WebSearchCell;
use crate::key_hint;
use crate::key_hint::KeyBinding;
use crate::markdown::append_markdown;
@@ -1491,43 +1490,13 @@ impl ChatWidget {
self.defer_or_handle(|q| q.push_mcp_end(ev), |s| s.handle_mcp_end_now(ev2));
}

fn on_web_search_begin(&mut self, ev: WebSearchBeginEvent) {
fn on_web_search_begin(&mut self, _ev: WebSearchBeginEvent) {
self.flush_answer_stream_with_separator();
self.flush_active_cell();
self.active_cell = Some(Box::new(history_cell::new_active_web_search_call(
ev.call_id,
String::new(),
self.config.animations,
)));
self.bump_active_cell_revision();
self.request_redraw();
}

fn on_web_search_end(&mut self, ev: WebSearchEndEvent) {
self.flush_answer_stream_with_separator();
let WebSearchEndEvent {
call_id,
query,
action,
} = ev;
let mut handled = false;
if let Some(cell) = self
.active_cell
.as_mut()
.and_then(|cell| cell.as_any_mut().downcast_mut::<WebSearchCell>())
&& cell.call_id() == call_id
{
cell.update(action.clone(), query.clone());
cell.complete();
self.bump_active_cell_revision();
self.flush_active_cell();
handled = true;
}

if !handled {
self.add_to_history(history_cell::new_web_search_call(call_id, query, action));
}
self.had_work_activity = true;
self.add_to_history(history_cell::new_web_search_call(ev.query));
}

fn on_collab_event(&mut self, cell: PlainHistoryCell) {

@@ -43,8 +43,6 @@ use codex_core::protocol::FileChange;
use codex_core::protocol::McpAuthStatus;
use codex_core::protocol::McpInvocation;
use codex_core::protocol::SessionConfiguredEvent;
use codex_core::web_search::web_search_detail;
use codex_protocol::models::WebSearchAction;
use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::plan_tool::PlanItemArg;
use codex_protocol::plan_tool::StepStatus;
@@ -1344,89 +1342,9 @@ pub(crate) fn new_active_mcp_tool_call(
McpToolCallCell::new(call_id, invocation, animations_enabled)
}

fn web_search_header(completed: bool) -> &'static str {
if completed {
"Searched"
} else {
"Searching the web"
}
}

#[derive(Debug)]
pub(crate) struct WebSearchCell {
call_id: String,
query: String,
action: Option<WebSearchAction>,
start_time: Instant,
completed: bool,
animations_enabled: bool,
}

impl WebSearchCell {
pub(crate) fn new(
call_id: String,
query: String,
action: Option<WebSearchAction>,
animations_enabled: bool,
) -> Self {
Self {
call_id,
query,
action,
start_time: Instant::now(),
completed: false,
animations_enabled,
}
}

pub(crate) fn call_id(&self) -> &str {
&self.call_id
}

pub(crate) fn update(&mut self, action: WebSearchAction, query: String) {
self.action = Some(action);
self.query = query;
}

pub(crate) fn complete(&mut self) {
self.completed = true;
}
}

impl HistoryCell for WebSearchCell {
fn display_lines(&self, width: u16) -> Vec<Line<'static>> {
let bullet = if self.completed {
"•".dim()
} else {
spinner(Some(self.start_time), self.animations_enabled)
};
let header = web_search_header(self.completed);
let detail = web_search_detail(self.action.as_ref(), &self.query);
let text: Text<'static> = if detail.is_empty() {
Line::from(vec![header.bold()]).into()
} else {
Line::from(vec![header.bold(), " ".into(), detail.into()]).into()
};
PrefixedWrappedHistoryCell::new(text, vec![bullet, " ".into()], " ").display_lines(width)
}
}

pub(crate) fn new_active_web_search_call(
call_id: String,
query: String,
animations_enabled: bool,
) -> WebSearchCell {
WebSearchCell::new(call_id, query, None, animations_enabled)
}

pub(crate) fn new_web_search_call(
call_id: String,
query: String,
action: WebSearchAction,
) -> WebSearchCell {
let mut cell = WebSearchCell::new(call_id, query, Some(action), false);
cell.complete();
cell
pub(crate) fn new_web_search_call(query: String) -> PrefixedWrappedHistoryCell {
let text: Text<'static> = Line::from(vec!["Searched".bold(), " ".into(), query.into()]).into();
PrefixedWrappedHistoryCell::new(text, "• ".dim(), " ")
}

/// If the first content is an image, return a new cell with the image.
@@ -1919,7 +1837,6 @@ mod tests {
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::protocol::McpAuthStatus;
use codex_protocol::models::WebSearchAction;
use codex_protocol::parse_command::ParsedCommand;
use dirs::home_dir;
use pretty_assertions::assert_eq;
@@ -2143,12 +2060,8 @@ mod tests {

#[test]
fn web_search_history_cell_snapshot() {
let query =
"example search query with several generic words to exercise wrapping".to_string();
let cell = new_web_search_call(
"call-1".to_string(),
query.clone(),
WebSearchAction::Search { query: Some(query) },
"example search query with several generic words to exercise wrapping".to_string(),
);
let rendered = render_lines(&cell.display_lines(64)).join("\n");

@@ -2157,12 +2070,8 @@ mod tests {

#[test]
fn web_search_history_cell_wraps_with_indented_continuation() {
let query =
"example search query with several generic words to exercise wrapping".to_string();
let cell = new_web_search_call(
"call-1".to_string(),
query.clone(),
WebSearchAction::Search { query: Some(query) },
"example search query with several generic words to exercise wrapping".to_string(),
);
let rendered = render_lines(&cell.display_lines(64));

@@ -2177,12 +2086,7 @@ mod tests {

#[test]
fn web_search_history_cell_short_query_does_not_wrap() {
let query = "short query".to_string();
let cell = new_web_search_call(
"call-1".to_string(),
query.clone(),
WebSearchAction::Search { query: Some(query) },
);
let cell = new_web_search_call("short query".to_string());
let rendered = render_lines(&cell.display_lines(64));

assert_eq!(rendered, vec!["• Searched short query".to_string()]);
@@ -2190,12 +2094,8 @@ mod tests {

#[test]
fn web_search_history_cell_transcript_snapshot() {
let query =
"example search query with several generic words to exercise wrapping".to_string();
let cell = new_web_search_call(
"call-1".to_string(),
query.clone(),
WebSearchAction::Search { query: Some(query) },
"example search query with several generic words to exercise wrapping".to_string(),
);
let rendered = render_lines(&cell.transcript_lines(64)).join("\n");

@@ -15,7 +15,7 @@ impl UpdateAction {
match self {
UpdateAction::NpmGlobalLatest => ("npm", &["install", "-g", "@openai/codex"]),
UpdateAction::BunGlobalLatest => ("bun", &["install", "-g", "@openai/codex"]),
UpdateAction::BrewUpgrade => ("brew", &["upgrade", "--cask", "codex"]),
UpdateAction::BrewUpgrade => ("brew", &["upgrade", "codex"]),
}
}

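Each arm above resolves to a program name plus an argument list. A hedged sketch of how such a pair could be executed with the standard library (this runner is illustrative; the crate's real update flow may differ):

```rust
use std::process::Command;

// Illustrative only: run a (program, args) pair like the ones matched above.
fn run_update(program: &str, args: &[&str]) -> std::io::Result<std::process::ExitStatus> {
    Command::new(program).args(args).status()
}
```
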
@@ -10,7 +10,8 @@ Use /fork to branch the current chat into a new thread.
Use /init to create an AGENTS.md with project-specific guidance.
Use /mcp to list configured MCP tools.
You can run any shell command from Codex using `!` (e.g. `!ls`)
Type / to open the command popup; Tab autocompletes slash commands.
Type / to open the command popup; Tab autocompletes slash commands and saved prompts.
You can define your own `/` commands with custom prompts. More info: https://developers.openai.com/codex/guides/slash-commands#create-your-own-slash-commands-with-custom-prompts
When the composer is empty, press Esc to step back and edit your last message; Enter confirms.
Press Tab to queue a message instead of sending it immediately; Enter always sends immediately.
Paste an image with Ctrl+V to attach it to your next message.

@@ -1,47 +1,29 @@
## Contributing

**External contributions are by invitation only**
This project is under active development and the code will likely change pretty significantly.

At this time, the Codex team does not accept unsolicited code contributions.
**At the moment, we are generally accepting external contributions only for bugs fixes.**

If you would like to propose a new feature or a change in behavior, please open an issue describing the proposal or upvote an existing enhancement request. We prioritize new features based on community feedback, alignment with our roadmap, and consistency across all Codex surfaces (CLI, IDE extensions, web, etc.).
If you want to add a new feature or change the behavior of an existing one, please open an issue proposing the feature or upvote an existing enhancement request. We will generally prioritize new features based on community feedback. New features must compose well with existing and upcoming features and fit into our roadmap. They must also be implemented consistently across all Codex surfaces (CLI, IDE extension, web, etc.).

If you encounter a bug, please open a bug report or verify that an existing report already covers the issue. If you would like to help, we encourage you to contribute by sharing analysis, reproduction details, root-cause hypotheses, or a high-level outline of a potential fix directly in the issue thread.
If you want to contribute a bug fix, please open a bug report first - or verify that there is an existing bug report that discusses the issue. All bug fix PRs should include a link to a bug report.

The Codex team may invite an external contributor to submit a pull request when:

- the problem is well understood,
- the proposed approach aligns with the team’s intended solution, and
- the issue is deemed high-impact and high-priority.

Pull requests that have not been explicitly invited by a member of the Codex team will be closed without review.

**Why we do not generally accept external code contributions**

In the past, the Codex team accepted external pull requests for bug fixes. While we appreciated the effort and engagement from the community, this model did not scale well.

Many contributions were made without full visibility into the architectural context, system-level constraints, or near-term roadmap considerations that guide Codex development. Others focused on issues that were low priority or affected a very small subset of users. Reviewing and iterating on these PRs often took more time than implementing the fix directly, and diverted attention from higher-priority work.

The most valuable contributions consistently came from community members who demonstrated deep understanding of a problem domain. That expertise is most helpful when shared early -- through detailed bug reports, analysis, and design discussion in issues. Identifying the right solution is typically the hard part; implementing it is comparatively straightforward with the help of Codex itself.

For these reasons, we focus external contributions on discussion, analysis, and feedback, and reserve code changes for cases where a targeted invitation makes sense.
**New contributions that don't go through this process may be closed** if they aren't aligned with our current roadmap or conflict with other priorities/upcoming features.

### Development workflow

If you are invited by a Codex team member to contribute a PR, here is the recommended development workflow.

- Create a _topic branch_ from `main` - e.g. `feat/interactive-prompt`.
- Keep your changes focused. Multiple unrelated fixes should be opened as separate PRs.
- Ensure your change is free of lint warnings and test failures.

### Guidance for invited code contributions
### Writing high-impact code changes

1. **Start with an issue.** Open a new one or comment on an existing discussion so we can agree on the solution before code is written.
2. **Add or update tests.** A bug fix should generally come with test coverage that fails before your change and passes afterwards. 100% coverage is not required, but aim for meaningful assertions.
3. **Document behavior.** If your change affects user-facing behavior, update the README, inline help (`codex --help`), or relevant example projects.
4. **Keep commits atomic.** Each commit should compile and the tests should pass. This makes reviews and potential rollbacks easier.

### Opening a pull request (by invitation only)
### Opening a pull request

- Fill in the PR template (or include similar information) - **What? Why? How?**
- Include a link to a bug report or enhancement request in the issue tracker
@@ -52,7 +34,7 @@ If you are invited by a Codex team member to contribute a PR, here is the recomm
### Review process

1. One maintainer will be assigned as a primary reviewer.
2. If your invited PR introduces scope or behavior that was not previously discussed and approved, we may close the PR.
2. If your PR adds a new feature that was not previously discussed and approved, we may close your PR (see [Contributing](#contributing)).
3. We may ask for changes. Please do not take this personally. We value the work, but we also value consistency and long-term maintainability.
4. When there is consensus that the PR meets the bar, a maintainer will squash-and-merge.

@@ -60,7 +42,7 @@ If you are invited by a Codex team member to contribute a PR, here is the recomm

- **Be kind and inclusive.** Treat others with respect; we follow the [Contributor Covenant](https://www.contributor-covenant.org/).
- **Assume good intent.** Written communication is hard - err on the side of generosity.
- **Teach & learn.** If you spot something confusing, open an issue or discussion with suggestions or clarifications.
- **Teach & learn.** If you spot something confusing, open an issue or PR with improvements.

### Getting help