Mirror of https://github.com/openai/codex.git (synced 2026-02-01 22:47:52 +00:00)

Compare commits: 14 commits (dev/mzeng/ ... composer)
| Author | SHA1 | Date |
|---|---|---|
| | a5eaf21a13 | |
| | 1b77070923 | |
| | 868d23f878 | |
| | d27f2533a9 | |
| | 0f798173d7 | |
| | cb2bbe5cba | |
| | dd2d68e69e | |
| | 8fea8f73d6 | |
| | 73b5274443 | |
| | a748600c42 | |
| | b332482eb1 | |
| | 58450ba2a1 | |
| | 24230c066b | |
| | 18acec09df | |
.github/scripts/install-musl-build-tools.sh (vendored, 140 changed lines)

```
@@ -32,30 +32,132 @@ case "${TARGET}" in
;;
esac

if command -v clang++ >/dev/null; then
cxx="$(command -v clang++)"
echo "CXXFLAGS=--target=${TARGET} -stdlib=libc++ -pthread" >> "$GITHUB_ENV"
echo "CFLAGS=--target=${TARGET} -pthread" >> "$GITHUB_ENV"
if command -v clang >/dev/null; then
cc="$(command -v clang)"
echo "CC=${cc}" >> "$GITHUB_ENV"
echo "TARGET_CC=${cc}" >> "$GITHUB_ENV"
target_cc_var="CC_${TARGET}"
target_cc_var="${target_cc_var//-/_}"
echo "${target_cc_var}=${cc}" >> "$GITHUB_ENV"
fi
elif command -v "${arch}-linux-musl-g++" >/dev/null; then
cxx="$(command -v "${arch}-linux-musl-g++")"
elif command -v musl-g++ >/dev/null; then
cxx="$(command -v musl-g++)"
# Use the musl toolchain as the Rust linker to avoid Zig injecting its own CRT.
if command -v "${arch}-linux-musl-gcc" >/dev/null; then
musl_linker="$(command -v "${arch}-linux-musl-gcc")"
elif command -v musl-gcc >/dev/null; then
cxx="$(command -v musl-gcc)"
echo "CFLAGS=-pthread" >> "$GITHUB_ENV"
musl_linker="$(command -v musl-gcc)"
else
echo "musl g++ not found after install; arch=${arch}" >&2
echo "musl gcc not found after install; arch=${arch}" >&2
exit 1
fi

zig_target="${TARGET/-unknown-linux-musl/-linux-musl}"
runner_temp="${RUNNER_TEMP:-/tmp}"
tool_root="${runner_temp}/codex-musl-tools-${TARGET}"
mkdir -p "${tool_root}"

sysroot=""
if command -v zig >/dev/null; then
zig_bin="$(command -v zig)"
cc="${tool_root}/zigcc"
cxx="${tool_root}/zigcxx"

cat >"${cc}" <<EOF
#!/usr/bin/env bash
set -euo pipefail

args=()
skip_next=0
for arg in "\$@"; do
if [[ "\${skip_next}" -eq 1 ]]; then
skip_next=0
continue
fi
case "\${arg}" in
--target)
skip_next=1
continue
;;
--target=*|-target=*|-target)
# Drop any explicit --target/-target flags. Zig expects -target and
# rejects Rust triples like *-unknown-linux-musl.
if [[ "\${arg}" == "-target" ]]; then
skip_next=1
fi
continue
;;
esac
args+=("\${arg}")
done

exec "${zig_bin}" cc -target "${zig_target}" "\${args[@]}"
EOF
cat >"${cxx}" <<EOF
#!/usr/bin/env bash
set -euo pipefail

args=()
skip_next=0
for arg in "\$@"; do
if [[ "\${skip_next}" -eq 1 ]]; then
skip_next=0
continue
fi
case "\${arg}" in
--target)
skip_next=1
continue
;;
--target=*|-target=*|-target)
if [[ "\${arg}" == "-target" ]]; then
skip_next=1
fi
continue
;;
esac
args+=("\${arg}")
done

exec "${zig_bin}" c++ -target "${zig_target}" "\${args[@]}"
EOF
chmod +x "${cc}" "${cxx}"

sysroot="$("${zig_bin}" cc -target "${zig_target}" -print-sysroot 2>/dev/null || true)"
else
cc="${musl_linker}"

if command -v "${arch}-linux-musl-g++" >/dev/null; then
cxx="$(command -v "${arch}-linux-musl-g++")"
elif command -v musl-g++ >/dev/null; then
cxx="$(command -v musl-g++)"
else
cxx="${cc}"
fi
fi

if [[ -n "${sysroot}" && "${sysroot}" != "/" ]]; then
echo "BORING_BSSL_SYSROOT=${sysroot}" >> "$GITHUB_ENV"
boring_sysroot_var="BORING_BSSL_SYSROOT_${TARGET}"
boring_sysroot_var="${boring_sysroot_var//-/_}"
echo "${boring_sysroot_var}=${sysroot}" >> "$GITHUB_ENV"
fi

cflags="-pthread"
cxxflags="-pthread"
if [[ "${TARGET}" == "aarch64-unknown-linux-musl" ]]; then
# BoringSSL enables -Wframe-larger-than=25344 under clang and treats warnings as errors.
cflags="${cflags} -Wno-error=frame-larger-than"
cxxflags="${cxxflags} -Wno-error=frame-larger-than"
fi

echo "CFLAGS=${cflags}" >> "$GITHUB_ENV"
echo "CXXFLAGS=${cxxflags}" >> "$GITHUB_ENV"
echo "CC=${cc}" >> "$GITHUB_ENV"
echo "TARGET_CC=${cc}" >> "$GITHUB_ENV"
target_cc_var="CC_${TARGET}"
target_cc_var="${target_cc_var//-/_}"
echo "${target_cc_var}=${cc}" >> "$GITHUB_ENV"
echo "CXX=${cxx}" >> "$GITHUB_ENV"
echo "TARGET_CXX=${cxx}" >> "$GITHUB_ENV"
target_cxx_var="CXX_${TARGET}"
target_cxx_var="${target_cxx_var//-/_}"
echo "${target_cxx_var}=${cxx}" >> "$GITHUB_ENV"

cargo_linker_var="CARGO_TARGET_${TARGET^^}_LINKER"
cargo_linker_var="${cargo_linker_var//-/_}"
echo "${cargo_linker_var}=${musl_linker}" >> "$GITHUB_ENV"

echo "CMAKE_C_COMPILER=${cc}" >> "$GITHUB_ENV"
echo "CMAKE_CXX_COMPILER=${cxx}" >> "$GITHUB_ENV"
echo "CMAKE_ARGS=-DCMAKE_HAVE_THREADS_LIBRARY=1 -DCMAKE_USE_PTHREADS_INIT=1 -DCMAKE_THREAD_LIBS_INIT=-pthread -DTHREADS_PREFER_PTHREAD_FLAG=ON" >> "$GITHUB_ENV"
```
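The script above derives per-target variable names (`CC_${TARGET}` with dashes replaced by underscores, and `CARGO_TARGET_${TARGET^^}_LINKER` additionally uppercased). As a quick illustration of that naming convention only, here is a small Rust sketch of the same string transformation; the triple used below is just an example input, not something read from the workflow:

```rust
// Sketch of the env-var name derivation the script performs with
// "${TARGET^^}" (uppercase) and "${var//-/_}" (dashes to underscores).
fn cargo_linker_var(target: &str) -> String {
    format!(
        "CARGO_TARGET_{}_LINKER",
        target.to_uppercase().replace('-', "_")
    )
}

fn cc_var(target: &str) -> String {
    // The CC_<target> variable keeps the original case; only dashes change.
    format!("CC_{}", target.replace('-', "_"))
}

fn main() {
    let target = "x86_64-unknown-linux-musl";
    assert_eq!(
        cargo_linker_var(target),
        "CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER"
    );
    assert_eq!(cc_var(target), "CC_x86_64_unknown_linux_musl");
    println!("{}", cargo_linker_var(target));
}
```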
.github/workflows/rust-ci.yml (vendored, 6 changed lines)

```
@@ -261,6 +261,12 @@ jobs:
/var/cache/apt
key: apt-${{ matrix.runner }}-${{ matrix.target }}-v1

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install musl build tools
env:
```
.github/workflows/rust-release.yml (vendored, 6 changed lines)

```
@@ -104,6 +104,12 @@ jobs:
${{ github.workspace }}/codex-rs/target/
key: cargo-${{ matrix.runner }}-${{ matrix.target }}-release-${{ hashFiles('**/Cargo.lock') }}

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install musl build tools
env:
```
.github/workflows/shell-tool-mcp.yml (vendored, 6 changed lines)

```
@@ -97,6 +97,12 @@ jobs:
with:
targets: ${{ matrix.target }}

- if: ${{ matrix.install_musl }}
name: Install Zig
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

- if: ${{ matrix.install_musl }}
name: Install musl build dependencies
env:
```
```
@@ -181,7 +181,7 @@ ratatui = "0.29.0"
ratatui-macros = "0.6.0"
regex = "1.12.2"
regex-lite = "0.1.8"
reqwest = { version = "0.12", default-features = false, features = ["rustls-tls"] }
reqwest = "0.12"
rmcp = { version = "0.12.0", default-features = false }
schemars = "0.8.22"
seccompiler = "0.5.0"
```
```
@@ -5,6 +5,7 @@ use crate::protocol::common::AuthMode;
use codex_protocol::account::PlanType;
use codex_protocol::approvals::ExecPolicyAmendment as CoreExecPolicyAmendment;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::Personality;
use codex_protocol::config_types::ReasoningSummary;

@@ -937,7 +938,7 @@ pub struct CollaborationModeListParams {}
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct CollaborationModeListResponse {
pub data: Vec<CollaborationMode>,
pub data: Vec<CollaborationModeMask>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
```
```
@@ -16,7 +16,7 @@ use codex_app_server_protocol::CollaborationModeListResponse;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_core::models_manager::test_builtin_collaboration_mode_presets;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use pretty_assertions::assert_eq;
use tempfile::TempDir;

@@ -45,13 +45,23 @@ async fn list_collaboration_modes_returns_presets() -> Result<()> {
let CollaborationModeListResponse { data: items } =
to_response::<CollaborationModeListResponse>(response)?;

let expected = vec![
let expected = [
plan_preset(),
code_preset(),
pair_programming_preset(),
execute_preset(),
];
assert_eq!(expected, items);
assert_eq!(expected.len(), items.len());
for (expected_mask, actual_mask) in expected.iter().zip(items.iter()) {
assert_eq!(expected_mask.name, actual_mask.name);
assert_eq!(expected_mask.mode, actual_mask.mode);
assert_eq!(expected_mask.model, actual_mask.model);
assert_eq!(expected_mask.reasoning_effort, actual_mask.reasoning_effort);
assert_eq!(
expected_mask.developer_instructions,
actual_mask.developer_instructions
);
}
Ok(())
}

@@ -59,11 +69,11 @@ async fn list_collaboration_modes_returns_presets() -> Result<()> {
///
/// If the defaults change in the app server, this helper should be updated alongside the
/// contract, or the test will fail in ways that imply a regression in the API.
fn plan_preset() -> CollaborationMode {
fn plan_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == ModeKind::Plan)
.find(|p| p.mode == Some(ModeKind::Plan))
.unwrap()
}

@@ -71,20 +81,20 @@ fn plan_preset() -> CollaborationMode {
///
/// The helper keeps the expected model and reasoning defaults co-located with the test
/// so that mismatches point directly at the API contract being exercised.
fn pair_programming_preset() -> CollaborationMode {
fn pair_programming_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == ModeKind::PairProgramming)
.find(|p| p.mode == Some(ModeKind::PairProgramming))
.unwrap()
}

/// Builds the code preset that the list response is expected to return.
fn code_preset() -> CollaborationMode {
fn code_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == ModeKind::Code)
.find(|p| p.mode == Some(ModeKind::Code))
.unwrap()
}

@@ -92,10 +102,10 @@ fn code_preset() -> CollaborationMode {
///
/// The execute preset uses a different reasoning effort to capture the higher-effort
/// execution contract the server currently exposes.
fn execute_preset() -> CollaborationMode {
fn execute_preset() -> CollaborationModeMask {
let presets = test_builtin_collaboration_mode_presets();
presets
.into_iter()
.find(|p| p.mode == ModeKind::Execute)
.find(|p| p.mode == Some(ModeKind::Execute))
.unwrap()
}
```
```
@@ -396,8 +396,7 @@ fn run_update_action(action: UpdateAction) -> anyhow::Result<()> {
if !status.success() {
anyhow::bail!("`{cmd_str}` failed with status {status}");
}
println!();
println!("🎉 Update ran successfully! Please restart Codex.");
println!("\n🎉 Update ran successfully! Please restart Codex.");
Ok(())
}
```
```
@@ -1,6 +1,8 @@
use crate::error::CodexErr;
use crate::error::Result;
use codex_protocol::ThreadId;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SubAgentSource;
use std::collections::HashSet;
use std::sync::Arc;
use std::sync::Mutex;

@@ -19,6 +21,25 @@ pub(crate) struct Guards {
total_count: AtomicUsize,
}

/// Initial agent is depth 0.
pub(crate) const MAX_THREAD_SPAWN_DEPTH: i32 = 1;

fn session_depth(session_source: &SessionSource) -> i32 {
match session_source {
SessionSource::SubAgent(SubAgentSource::ThreadSpawn { depth, .. }) => *depth,
SessionSource::SubAgent(_) => 0,
_ => 0,
}
}

pub(crate) fn next_thread_spawn_depth(session_source: &SessionSource) -> i32 {
session_depth(session_source).saturating_add(1)
}

pub(crate) fn exceeds_thread_spawn_depth_limit(depth: i32) -> bool {
depth > MAX_THREAD_SPAWN_DEPTH
}

impl Guards {
pub(crate) fn reserve_spawn_slot(
self: &Arc<Self>,
```
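For orientation, the limit arithmetic above works out to: the root agent is depth 0, a spawned sub-agent is depth 1, and with `MAX_THREAD_SPAWN_DEPTH = 1` that sub-agent may not spawn further (the tests in the next hunk exercise the same cases). A minimal standalone sketch of the two helpers, using a simplified stand-in for `SessionSource` rather than the real protocol enum:

```rust
// Simplified stand-in for codex_protocol::protocol::SessionSource; only the
// depth bookkeeping from the hunk above is reproduced here.
enum SessionSource {
    Cli,
    ThreadSpawn { depth: i32 },
}

const MAX_THREAD_SPAWN_DEPTH: i32 = 1; // the initial agent is depth 0

fn session_depth(source: &SessionSource) -> i32 {
    match source {
        SessionSource::ThreadSpawn { depth } => *depth,
        _ => 0,
    }
}

fn next_thread_spawn_depth(source: &SessionSource) -> i32 {
    session_depth(source).saturating_add(1)
}

fn exceeds_thread_spawn_depth_limit(depth: i32) -> bool {
    depth > MAX_THREAD_SPAWN_DEPTH
}

fn main() {
    // Root agent (depth 0) spawning a child: child depth 1, allowed.
    let root = SessionSource::Cli;
    assert!(!exceeds_thread_spawn_depth_limit(next_thread_spawn_depth(&root)));

    // That child (depth 1) trying to spawn again: depth 2 > 1, rejected.
    let child = SessionSource::ThreadSpawn { depth: 1 };
    assert!(exceeds_thread_spawn_depth_limit(next_thread_spawn_depth(&child)));

    println!("depth limit checks passed");
}
```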
```
@@ -102,6 +123,30 @@ mod tests {
use super::*;
use pretty_assertions::assert_eq;

#[test]
fn session_depth_defaults_to_zero_for_root_sources() {
assert_eq!(session_depth(&SessionSource::Cli), 0);
}

#[test]
fn thread_spawn_depth_increments_and_enforces_limit() {
let session_source = SessionSource::SubAgent(SubAgentSource::ThreadSpawn {
parent_thread_id: ThreadId::new(),
depth: 1,
});
let child_depth = next_thread_spawn_depth(&session_source);
assert_eq!(child_depth, 2);
assert!(exceeds_thread_spawn_depth_limit(child_depth));
}

#[test]
fn non_thread_spawn_subagents_default_to_depth_zero() {
let session_source = SessionSource::SubAgent(SubAgentSource::Review);
assert_eq!(session_depth(&session_source), 0);
assert_eq!(next_thread_spawn_depth(&session_source), 1);
assert!(!exceeds_thread_spawn_depth_limit(1));
}

#[test]
fn reservation_drop_releases_slot() {
let guards = Arc::new(Guards::default());
```
```
@@ -1,10 +1,12 @@
pub(crate) mod control;
// Do not put in `pub` or `pub(crate)`. This code should not be used somewhere else.
mod guards;
pub(crate) mod role;
pub(crate) mod status;

pub(crate) use codex_protocol::protocol::AgentStatus;
pub(crate) use control::AgentControl;
pub(crate) use guards::MAX_THREAD_SPAWN_DEPTH;
pub(crate) use guards::exceeds_thread_spawn_depth_limit;
pub(crate) use guards::next_thread_spawn_depth;
pub(crate) use role::AgentRole;
pub(crate) use status::agent_status_from_event;
```
```
@@ -89,7 +89,7 @@ pub use codex_git::GhostSnapshotConfig;
/// files are *silently truncated* to this size so we do not take up too much of
/// the context window.
pub(crate) const PROJECT_DOC_MAX_BYTES: usize = 32 * 1024; // 32 KiB
pub(crate) const DEFAULT_AGENT_MAX_THREADS: Option<usize> = None;
pub(crate) const DEFAULT_AGENT_MAX_THREADS: Option<usize> = Some(6);

pub const CONFIG_TOML_FILE: &str = "config.toml";
```
```
@@ -3693,7 +3693,7 @@ model_verbosity = "high"
project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
project_doc_fallback_filenames: Vec::new(),
tool_output_token_limit: None,
agent_max_threads: None,
agent_max_threads: DEFAULT_AGENT_MAX_THREADS,
codex_home: fixture.codex_home(),
config_layer_stack: Default::default(),
history: History::default(),

@@ -3775,7 +3775,7 @@ model_verbosity = "high"
project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
project_doc_fallback_filenames: Vec::new(),
tool_output_token_limit: None,
agent_max_threads: None,
agent_max_threads: DEFAULT_AGENT_MAX_THREADS,
codex_home: fixture.codex_home(),
config_layer_stack: Default::default(),
history: History::default(),

@@ -3872,7 +3872,7 @@ model_verbosity = "high"
project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
project_doc_fallback_filenames: Vec::new(),
tool_output_token_limit: None,
agent_max_threads: None,
agent_max_threads: DEFAULT_AGENT_MAX_THREADS,
codex_home: fixture.codex_home(),
config_layer_stack: Default::default(),
history: History::default(),

@@ -3955,7 +3955,7 @@ model_verbosity = "high"
project_doc_max_bytes: PROJECT_DOC_MAX_BYTES,
project_doc_fallback_filenames: Vec::new(),
tool_output_token_limit: None,
agent_max_threads: None,
agent_max_threads: DEFAULT_AGENT_MAX_THREADS,
codex_home: fixture.codex_home(),
config_layer_stack: Default::default(),
history: History::default(),
```
```
@@ -434,11 +434,7 @@ pub const FEATURES: &[FeatureSpec] = &[
FeatureSpec {
id: Feature::Collab,
key: "collab",
stage: Stage::Experimental {
name: "Multi-agents",
menu_description: "Allow Codex to spawn and collaborate with other agents on request (formerly named `collab`).",
announcement: "NEW! Codex can now spawn other agents and work with them to solve your problems. Enable in /experimental!",
},
stage: Stage::Beta,
default_enabled: false,
},
FeatureSpec {
```
```
@@ -1,6 +1,5 @@
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::openai_models::ReasoningEffort;

const COLLABORATION_MODE_PLAN: &str = include_str!("../../templates/collaboration_mode/plan.md");

@@ -10,7 +9,7 @@ const COLLABORATION_MODE_PAIR_PROGRAMMING: &str =
const COLLABORATION_MODE_EXECUTE: &str =
include_str!("../../templates/collaboration_mode/execute.md");

pub(super) fn builtin_collaboration_mode_presets() -> Vec<CollaborationMode> {
pub(super) fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
vec![
plan_preset(),
code_preset(),

@@ -20,50 +19,46 @@ pub(super) fn builtin_collaboration_mode_presets() -> Vec<CollaborationMode> {
}

#[cfg(any(test, feature = "test-support"))]
pub fn test_builtin_collaboration_mode_presets() -> Vec<CollaborationMode> {
pub fn test_builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets()
}

fn plan_preset() -> CollaborationMode {
CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: Some(COLLABORATION_MODE_PLAN.to_string()),
},
fn plan_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: "Plan".to_string(),
mode: Some(ModeKind::Plan),
model: None,
reasoning_effort: Some(Some(ReasoningEffort::High)),
developer_instructions: Some(Some(COLLABORATION_MODE_PLAN.to_string())),
}
}

fn code_preset() -> CollaborationMode {
CollaborationMode {
mode: ModeKind::Code,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::Medium),
developer_instructions: Some(COLLABORATION_MODE_CODE.to_string()),
},
fn code_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: "Code".to_string(),
mode: Some(ModeKind::Code),
model: None,
reasoning_effort: None,
developer_instructions: Some(Some(COLLABORATION_MODE_CODE.to_string())),
}
}

fn pair_programming_preset() -> CollaborationMode {
CollaborationMode {
mode: ModeKind::PairProgramming,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::Medium),
developer_instructions: Some(COLLABORATION_MODE_PAIR_PROGRAMMING.to_string()),
},
fn pair_programming_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: "Pair Programming".to_string(),
mode: Some(ModeKind::PairProgramming),
model: None,
reasoning_effort: Some(Some(ReasoningEffort::Medium)),
developer_instructions: Some(Some(COLLABORATION_MODE_PAIR_PROGRAMMING.to_string())),
}
}

fn execute_preset() -> CollaborationMode {
CollaborationMode {
mode: ModeKind::Execute,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: Some(COLLABORATION_MODE_EXECUTE.to_string()),
},
fn execute_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: "Execute".to_string(),
mode: Some(ModeKind::Execute),
model: None,
reasoning_effort: Some(Some(ReasoningEffort::High)),
developer_instructions: Some(Some(COLLABORATION_MODE_EXECUTE.to_string())),
}
}
```
```
@@ -14,7 +14,7 @@ use crate::models_manager::model_presets::builtin_model_presets;
use codex_api::ModelsClient;
use codex_api::ReqwestTransport;
use codex_app_server_protocol::AuthMode;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelsResponse;

@@ -91,7 +91,7 @@ impl ModelsManager {
/// List collaboration mode presets.
///
/// Returns a static set of presets seeded with the configured model.
pub fn list_collaboration_modes(&self) -> Vec<CollaborationMode> {
pub fn list_collaboration_modes(&self) -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets()
}
```
```
@@ -19,7 +19,7 @@ use crate::rollout::RolloutRecorder;
use crate::rollout::truncation;
use crate::skills::SkillsManager;
use codex_protocol::ThreadId;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::protocol::McpServerRefreshConfig;

@@ -158,7 +158,7 @@ impl ThreadManager {
.await
}

pub fn list_collaboration_modes(&self) -> Vec<CollaborationMode> {
pub fn list_collaboration_modes(&self) -> Vec<CollaborationModeMask> {
self.state.models_manager.list_collaboration_modes()
}
```
```
@@ -78,6 +78,9 @@ impl ToolHandler for CollabHandler {
mod spawn {
use super::*;
use crate::agent::AgentRole;
use crate::agent::MAX_THREAD_SPAWN_DEPTH;
use crate::agent::exceeds_thread_spawn_depth_limit;
use crate::agent::next_thread_spawn_depth;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SubAgentSource;
use std::sync::Arc;

@@ -107,6 +110,13 @@ mod spawn {
"Empty message can't be sent to an agent".to_string(),
));
}
let session_source = turn.client.get_session_source();
let child_depth = next_thread_spawn_depth(&session_source);
if exceeds_thread_spawn_depth_limit(child_depth) {
return Err(FunctionCallError::RespondToModel(format!(
"agent depth limit reached: max depth is {MAX_THREAD_SPAWN_DEPTH}"
)));
}
session
.send_event(
&turn,

@@ -132,6 +142,7 @@ mod spawn {
prompt.clone(),
Some(SessionSource::SubAgent(SubAgentSource::ThreadSpawn {
parent_thread_id: session.conversation_id,
depth: child_depth,
})),
)
.await

@@ -581,7 +592,6 @@ fn build_agent_spawn_config(
config.model_reasoning_summary = turn.client.get_reasoning_summary();
config.developer_instructions = turn.developer_instructions.clone();
config.compact_prompt = turn.compact_prompt.clone();
config.user_instructions = turn.user_instructions.clone();
config.shell_environment_policy = turn.shell_environment_policy.clone();
config.codex_linux_sandbox_exe = turn.codex_linux_sandbox_exe.clone();
config.cwd = turn.cwd.clone();

@@ -605,13 +615,17 @@ mod tests {
use super::*;
use crate::CodexAuth;
use crate::ThreadManager;
use crate::agent::MAX_THREAD_SPAWN_DEPTH;
use crate::built_in_model_providers;
use crate::client::ModelClient;
use crate::codex::make_session_and_context;
use crate::config::types::ShellEnvironmentPolicy;
use crate::function_tool::FunctionCallError;
use crate::protocol::AskForApproval;
use crate::protocol::Op;
use crate::protocol::SandboxPolicy;
use crate::protocol::SessionSource;
use crate::protocol::SubAgentSource;
use crate::turn_diff_tracker::TurnDiffTracker;
use codex_protocol::ThreadId;
use pretty_assertions::assert_eq;

@@ -731,6 +745,45 @@ mod tests {
);
}

#[tokio::test]
async fn spawn_agent_rejects_when_depth_limit_exceeded() {
let (mut session, mut turn) = make_session_and_context().await;
let manager = thread_manager();
session.services.agent_control = manager.agent_control();

let session_source = SessionSource::SubAgent(SubAgentSource::ThreadSpawn {
parent_thread_id: session.conversation_id,
depth: MAX_THREAD_SPAWN_DEPTH,
});
turn.client = ModelClient::new(
turn.client.config(),
Some(session.services.auth_manager.clone()),
turn.client.get_model_info(),
turn.client.get_otel_manager(),
turn.client.get_provider(),
turn.client.get_reasoning_effort(),
turn.client.get_reasoning_summary(),
session.conversation_id,
session_source,
);

let invocation = invocation(
Arc::new(session),
Arc::new(turn),
"spawn_agent",
function_payload(json!({"message": "hello"})),
);
let Err(err) = CollabHandler.handle(invocation).await else {
panic!("spawn should fail when depth limit exceeded");
};
assert_eq!(
err,
FunctionCallError::RespondToModel(format!(
"agent depth limit reached: max depth is {MAX_THREAD_SPAWN_DEPTH}"
))
);
}

#[tokio::test]
async fn send_input_rejects_empty_message() {
let (session, turn) = make_session_and_context().await;

@@ -1081,7 +1134,6 @@ mod tests {
};
turn.developer_instructions = Some("dev".to_string());
turn.compact_prompt = Some("compact".to_string());
turn.user_instructions = Some("user".to_string());
turn.shell_environment_policy = ShellEnvironmentPolicy {
use_profile: true,
..ShellEnvironmentPolicy::default()

@@ -1101,7 +1153,6 @@ mod tests {
expected.model_reasoning_summary = turn.client.get_reasoning_summary();
expected.developer_instructions = turn.developer_instructions.clone();
expected.compact_prompt = turn.compact_prompt.clone();
expected.user_instructions = turn.user_instructions.clone();
expected.shell_environment_policy = turn.shell_environment_policy.clone();
expected.codex_linux_sandbox_exe = turn.codex_linux_sandbox_exe.clone();
expected.cwd = turn.cwd.clone();

@@ -1115,4 +1166,31 @@ mod tests {
.expect("sandbox policy set");
assert_eq!(config, expected);
}

#[tokio::test]
async fn build_agent_spawn_config_preserves_base_user_instructions() {
let (session, mut turn) = make_session_and_context().await;
let session_source = turn.client.get_session_source();
let mut base_config = (*turn.client.config()).clone();
base_config.user_instructions = Some("base-user".to_string());
turn.user_instructions = Some("resolved-user".to_string());
turn.client = ModelClient::new(
Arc::new(base_config.clone()),
Some(session.services.auth_manager.clone()),
turn.client.get_model_info(),
turn.client.get_otel_manager(),
turn.client.get_provider(),
turn.client.get_reasoning_effort(),
turn.client.get_reasoning_summary(),
session.conversation_id,
session_source,
);
let base_instructions = BaseInstructions {
text: "base".to_string(),
};

let config = build_agent_spawn_config(&base_instructions, &turn).expect("spawn config");

assert_eq!(config.user_instructions, base_config.user_instructions);
}
}
```
@@ -1,283 +1,41 @@

# Collaboration Style: Plan
# Plan Mode (Conversational)

You work in **two phases**:
You work in 2 phases and you should *chat your way* to a great plan before finalizing it.

- **PHASE 1 — Understand user intent**: Align on what the user is trying to accomplish and what “success” means. Focus on intent, scope, constraints, and preference tradeoffs.
- **PHASE 2 — Technical spec & implementation plan**: Convert the intent into a decision‑complete technical spec and an implementation plan detailed enough that another agent could execute with minimal follow‑ups.

PHASE 1 — Intent chat (what they actually want)
- Keep asking until you can clearly state: goal + success criteria, audience, in/out of scope, constraints, current state, and the key preferences/tradeoffs.
- Bias toward questions over guessing: if any high‑impact ambiguity remains, do NOT plan yet—ask.
- Include a “Confirm my understanding” question in each round (so the user can correct you early).

---

PHASE 2 — Implementation chat (what/how we’ll build)
- Once intent is stable, keep asking until the spec is decision‑complete: approach, interfaces (APIs/schemas/I/O), data flow, edge cases/failure modes, testing + acceptance criteria, rollout/monitoring, and any migrations/compat constraints.

## Hard interaction rule (critical)

Every assistant turn MUST be **exactly one** of:

**A) A `request_user_input` tool call** (to gather requirements and iterate), OR
**B) The final plan output** (**plan‑only**, with a good title).

Constraints:
- **Do NOT ask questions in free text.** All questions MUST be asked via `request_user_input`.
- **Do NOT mix** a `request_user_input` call with plan content in the same turn.
- You may use internal tools to explore (repo search, file reading, environment inspection) **before** emitting either A or B, but the user‑visible output must still be exactly A or B.

---

## Two types of uncertainty (treat differently)

### Type 1 — Discoverable facts (repo/system truth)
Examples: “Where is app‑server 2 defined?”, “Which config sets turn duration?”, “Which service emits this metric?”

Rule: **Evidence-first exploration applies.** Don’t ask the user until you’ve searched.

### Type 2 — Preferences & tradeoffs (product and engineering intent)

Rule: **Ask early** These are often *not discoverable* and should not be silently assumed when multiple approaches are viable.

---

## Evidence‑first exploration (precondition to asking discoverable questions)

When a repo / codebase / workspace is available (or implied), you MUST attempt to resolve discoverable questions by **exploring first**.

Before calling `request_user_input` for a discoverable fact, do a quick investigation pass:
- Run at least **2 targeted searches** (exact match + a likely variant/synonym).
- Check the most likely “source of truth” surfaces (service manifests, infra configs, env/config files, entrypoints, schemas/types/constants).

You may ask the user ONLY if, after exploration:
- There are **multiple plausible candidates** and picking wrong would materially change the implementation, OR
- Nothing is found and you need a **missing identifier**, environment name, external dependency, or non-repo context, OR
- The repo reveals ambiguity that must be resolved by product intent (not code).

If you found a **single best match**, DO NOT ask the user — proceed and record it as an assumption in the final plan.

If you must ask, incorporate what you already found:
- Provide **options listing the candidates** you discovered (paths/service names), with a **recommended** option.
- Do NOT ask the user to “point to the path” unless you have **zero candidates** after searching.

---

## Preference capture (you SHOULD ask when it changes the plan)

If there are **multiple reasonable implementation approaches** with meaningful tradeoffs, you SHOULD ask the user to choose their preference even if you could assume a default.

Treat tradeoff choice as **high-impact** unless the user explicitly said:
- “Use your best judgement,” or
- “Pick whatever is simplest,” or
- “I don’t care—ship fast.”

When asking a preference question:
- Provide **2–4 mutually exclusive options**.
- Include a **recommended default** that matches the user’s apparent goals.
- If the user doesn’t answer, proceed with the recommended option and record it as an assumption.

---

## No‑trivia rule for questions (guardrail)

You MUST NOT ask questions whose answers are likely to be found by:
- repo text search,
- reading config/infra manifests,
- following imports/types/constants,
unless you already attempted those and can summarize what you found.

Every `request_user_input` question must:
- materially change an implementation decision, OR
- disambiguate between **concrete candidates** you already found, OR
- capture a **preference/tradeoff** that is not discoverable from the repo.

---

## PHASE 1 — Understand user intent

### Purpose
Identify what the user actually wants, what matters most, and what constraints + preferences shape the solution.

### Phase 1 principles
- State what you think the user cares about (speed vs quality, prototype vs production, etc.).
- Think out loud briefly when it helps weigh tradeoffs.
- Use reasonable suggestions with explicit assumptions; make it easy to accept/override.
- Ask fewer, better questions. Ask only what materially changes the spec/plan OR captures a real tradeoff.
- Think ahead: propose helpful suggestions the user may need (testing, debug mode, observability, migration path).

### Phase 1 exit criteria (Intent gate)
Before moving to Phase 2, ensure you have either a **user answer** OR an **explicit assumption** for:

**Intent basics**
- Primary goal + success criteria (how we know it worked)
- Primary user / audience
- In-scope and out-of-scope
- Constraints (time, budget, platform, security/compliance)
- Current context (what exists today: code/system/data)

**Preference profile (don’t silently assume if unclear and high-impact)**
- Risk posture: prototype vs production quality bar
- Tradeoff priority: ship fast vs robust/maintainable
- Compatibility expectations: backward compatibility / migrations / downtime tolerance (if relevant)

Use `request_user_input` to deeply understand the user's intent after exploring your environment.

---

## PHASE 2 — Technical spec & implementation plan

### Purpose
Turn the intent into a buildable, decision-complete technical spec.

### Phase 2 exit criteria (Spec gate)
Before finalizing the plan, ensure you’ve pinned down (answer or assumption):
- Chosen approach + 1–2 alternatives with tradeoffs
- Interfaces (APIs, schemas, inputs/outputs)
- Data flow + key edge cases / failure modes
- Testing + acceptance criteria
- Rollout/monitoring expectations
- Any key preference/tradeoff decisions (and rationale)

If something is high-impact and unknown, ask via `request_user_input`. Otherwise assume defaults and proceed.

---

## Using `request_user_input` in Plan Mode

Use `request_user_input` when either:
1) You are genuinely blocked on a decision that materially changes the plan and cannot be resolved via evidence-first exploration, OR
2) There is a meaningful **preference/tradeoff** the user should choose among.
3) When an answer is skipped, assume the recommended path.

Every assistant turn MUST be exactly one of:
A) a `request_user_input` tool call (questions/options only), OR
B) the final output: a titled, plan‑only document.
Rules:
- **Default to options** when there are ≤ 4 common outcomes; include a **recommended** option.
- Use **free-form only** when truly unbounded (e.g., “paste schema”, “share constraints”, “provide examples”).
- Every question must be tied to a decision that changes the spec (A→X, B→Y).
- If you found candidates in the repo, options MUST reference them (paths/service names) so the user chooses among concrete items.
- No questions in free text (only via `request_user_input`).
- Never mix a `request_user_input` call with plan content.
- Internal tool/repo exploration is allowed privately before A or B.

Do **not** use `request_user_input` to ask:
- “is my plan ready?” / “should I proceed?”
- “where is X?” when repo search can answer it.
## Ask a lot, but never ask trivia
You SHOULD ask many questions, but each question must:
- materially change the spec/plan, OR
- confirm/lock an assumption, OR
- choose between meaningful tradeoffs.
Batch questions (e.g., 4–10) per `request_user_input` call to keep momentum.

(If your environment enforces a limit, aim to resolve within ~5 `request_user_input` calls; if still blocked, ask only the most decision-critical remaining question(s) and proceed with explicit assumptions.)
## Two kinds of unknowns (treat differently)
1) Discoverable facts (repo/system truth): explore first.
- Before asking, run ≥2 targeted searches (exact + variant) and check likely sources of truth (configs/manifests/entrypoints/schemas/types/constants).
- Ask only if: multiple plausible candidates; nothing found but you need a missing identifier/context; or ambiguity is actually product intent.
- If asking, present concrete candidates (paths/service names) + recommend one.

### Examples (technical, schema-populated)
2) Preferences/tradeoffs (not discoverable): ask early.
- Provide 2–4 mutually exclusive options + a recommended default.
- If unanswered, proceed with the recommended option and record it as an assumption in the final plan.

**1) Boolean (yes/no), no free-form**
```json
{
"questions": [
{
"id": "enable_migration",
"header": "Migrate",
"question": "Enable the database migration in this release?",
"options": [
{ "label": "Yes (Recommended)", "description": "Ship the migration with this rollout." },
{ "label": "No", "description": "Defer the migration to a later release." }
]
}
]
}
```

**2) Preference/tradeoff question (recommended + options)**

```json
{
"questions": [
{
"id": "tradeoff_priority",
"header": "Tradeoff",
"question": "Which priority should guide the implementation?",
"options": [
{ "label": "Ship fast (Recommended)", "description": "Minimal changes, pragmatic shortcuts, faster delivery." },
{ "label": "Robust & maintainable", "description": "Cleaner abstractions, more refactor, better long-term stability." },
{ "label": "Performance-first", "description": "Optimize latency/throughput even if complexity rises." },
{ "label": "Other", "description": "Specify a different priority or constraint." }
]
}
]
}
```

**3) Free-form only (no options)**

```json
{
"questions": [
{
"id": "acceptance_criteria",
"header": "Success",
"question": "What are the acceptance criteria or success metrics we should optimize for?"
}
]
}
```

---

## Iterating and final output

Only AFTER you have all the information (or explicit assumptions for remaining low-impact unknowns), write the full plan.

A good plan here is **decision-complete**: it contains the concrete choices, interfaces, acceptance criteria, and rollout details needed for another agent to execute with minimal back-and-forth.

### Plan output (what to include)

Your plan MUST include the sections below. Keep them concise but specific; include only what’s relevant to the task.

1. **Title**

   * A clear, specific title describing what will be built/delivered.

2. **Goal & Success Criteria**

   * What outcome we’re driving.
   * Concrete acceptance criteria (tests, metrics, or observable behavior). Prefer “done when …”.

3. **Non-goals / Out of Scope**

   * Explicit boundaries to prevent scope creep.

4. **Assumptions**

   * Any defaults you assumed due to missing info, labeled clearly.

5. **Proposed Solution**

   * The chosen approach (with rationale).
   * 1–2 alternatives considered and why they were not chosen (brief tradeoffs).

6. **System Design**

   * Architecture / components / data flow (only as deep as needed).
   * Key invariants, edge cases, and failure modes (and how they’re handled).

7. **Interfaces & Data Contracts**

   * APIs, schemas, inputs/outputs, event formats, config flags, etc.
   * Validation rules and backward/forward compatibility expectations if applicable.

8. **Execution Details**

   * Concrete implementation steps and ordering.
   * **Codebase specifics are conditional**: include file/module/function names, directories, migrations, and dependencies **only when relevant and known** (or when you can reasonably infer them).
   * If unknown, specify what to discover and how (e.g., “search for X symbol”, “locate Y service entrypoint”).

9. **Testing & Quality**

   * Test strategy (unit/integration/e2e) proportional to risk.
   * How to verify locally and in staging; include any test data or harness needs.

10. **Rollout, Observability, and Ops**

    * Release strategy (flags, gradual rollout, migration plan).
    * Monitoring/alerts/logging and dashboards to add or update.
    * Rollback strategy and operational playbook notes (brief).

11. **Risks & Mitigations**

    * Top risks (technical, product, security, privacy, performance).
    * Specific mitigations and “watch-outs”.

12. **Open Questions**

    * Only if something truly must be resolved later; include how to resolve and what decision it affects.

### Plan output (strict)

**The final output should contain the plan and plan only with a good title.**
PLEASE DO NOT confirm the plan with the user before ending. The user will be responsible for telling us to update, iterate or execute the plan.
## Finalization rule
Only output the final plan when remaining unknowns are low‑impact and explicitly listed as assumptions.
Final output must be plan‑only with a good title (no “should I proceed?”).
@@ -101,7 +101,8 @@ Each response yields:

Fetch the built-in collaboration mode presets with `collaborationMode/list`. This endpoint does not accept pagination and returns the full list in one response:

- `data` – ordered list of collaboration mode presets
- `data` – ordered list of collaboration mode masks (partial settings to apply on top of the base mode)
- For tri-state fields like `reasoning_effort` and `developer_instructions`, omit the field to keep the current value, set it to `null` to clear it, or set a concrete value to update it.

## Event stream
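The tri-state convention described above maps onto `Option<Option<T>>` in the Rust protocol types (see the `CollaborationModeMask` hunks below): the outer `Option` records whether the field was present in the mask, the inner `Option` carries the value or the clear. A minimal standalone sketch of the keep / clear / set semantics, using simplified stand-in types rather than the real `codex_protocol` definitions:

```rust
// Simplified stand-ins; the real types live in codex_protocol::config_types.
#[derive(Clone, Debug, PartialEq)]
struct Settings {
    model: String,
    reasoning_effort: Option<String>, // effort simplified to a string here
    developer_instructions: Option<String>,
}

#[derive(Clone, Debug, Default)]
struct SettingsMask {
    model: Option<String>,                    // Some(v): set, None: keep
    reasoning_effort: Option<Option<String>>, // Some(Some(v)): set, Some(None): clear, None: keep
    developer_instructions: Option<Option<String>>,
}

fn apply(base: &Settings, mask: &SettingsMask) -> Settings {
    Settings {
        model: mask.model.clone().unwrap_or_else(|| base.model.clone()),
        reasoning_effort: mask
            .reasoning_effort
            .clone()
            .unwrap_or_else(|| base.reasoning_effort.clone()),
        developer_instructions: mask
            .developer_instructions
            .clone()
            .unwrap_or_else(|| base.developer_instructions.clone()),
    }
}

fn main() {
    let base = Settings {
        model: "gpt-5.2-codex".to_string(),
        reasoning_effort: Some("medium".to_string()),
        developer_instructions: Some("stay focused".to_string()),
    };
    let mask = SettingsMask {
        model: None,                                      // omitted -> keep
        reasoning_effort: Some(Some("high".to_string())), // concrete value -> set
        developer_instructions: Some(None),               // null -> clear
    };
    let updated = apply(&base, &mask);
    assert_eq!(updated.model, "gpt-5.2-codex");
    assert_eq!(updated.reasoning_effort.as_deref(), Some("high"));
    assert_eq!(updated.developer_instructions, None);
    println!("{updated:?}");
}
```

This is also why the preset masks earlier in the diff leave `model: None`: applying such a preset keeps whatever model the session already has configured.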
```
@@ -188,21 +188,21 @@ impl CollaborationMode {
///
/// - `model`: `Some(s)` to update the model, `None` to keep the current model
/// - `effort`: `Some(Some(e))` to set effort to `e`, `Some(None)` to clear effort, `None` to keep current effort
/// - `developer_instructions`: `Some(s)` to update developer instructions, `None` to keep current
/// - `developer_instructions`: `Some(Some(s))` to set instructions, `Some(None)` to clear them, `None` to keep current
///
/// Returns a new `CollaborationMode` with updated values, preserving the mode.
pub fn with_updates(
&self,
model: Option<String>,
effort: Option<Option<ReasoningEffort>>,
developer_instructions: Option<String>,
developer_instructions: Option<Option<String>>,
) -> Self {
let settings = self.settings_ref();
let updated_settings = Settings {
model: model.unwrap_or_else(|| settings.model.clone()),
reasoning_effort: effort.unwrap_or(settings.reasoning_effort),
developer_instructions: developer_instructions
.or_else(|| settings.developer_instructions.clone()),
.unwrap_or_else(|| settings.developer_instructions.clone()),
};

CollaborationMode {

@@ -210,6 +210,26 @@ impl CollaborationMode {
settings: updated_settings,
}
}

/// Applies a mask to this collaboration mode, returning a new collaboration mode
/// with the mask values applied. Fields in the mask that are `Some` will override
/// the corresponding fields, while `None` values will preserve the original values.
///
/// The `name` field in the mask is ignored as it's metadata for the mask itself.
pub fn apply_mask(&self, mask: &CollaborationModeMask) -> Self {
let settings = self.settings_ref();
CollaborationMode {
mode: mask.mode.unwrap_or(self.mode),
settings: Settings {
model: mask.model.clone().unwrap_or_else(|| settings.model.clone()),
reasoning_effort: mask.reasoning_effort.unwrap_or(settings.reasoning_effort),
developer_instructions: mask
.developer_instructions
.clone()
.unwrap_or_else(|| settings.developer_instructions.clone()),
},
}
}
}

/// Settings for a collaboration mode.

@@ -219,3 +239,49 @@ pub struct Settings {
pub reasoning_effort: Option<ReasoningEffort>,
pub developer_instructions: Option<String>,
}

/// A mask for collaboration mode settings, allowing partial updates.
/// All fields except `name` are optional, enabling selective updates.
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, JsonSchema, TS)]
pub struct CollaborationModeMask {
pub name: String,
pub mode: Option<ModeKind>,
pub model: Option<String>,
pub reasoning_effort: Option<Option<ReasoningEffort>>,
pub developer_instructions: Option<Option<String>>,
}

#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;

#[test]
fn apply_mask_can_clear_optional_fields() {
let mode = CollaborationMode {
mode: ModeKind::Code,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: Some("stay focused".to_string()),
},
};
let mask = CollaborationModeMask {
name: "Clear".to_string(),
mode: None,
model: None,
reasoning_effort: Some(None),
developer_instructions: Some(None),
};

let expected = CollaborationMode {
mode: ModeKind::Code,
settings: Settings {
model: "gpt-5.2-codex".to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
assert_eq!(expected, mode.apply_mask(&mask));
}
}
```
```
@@ -1518,7 +1518,10 @@ pub enum SessionSource {
pub enum SubAgentSource {
Review,
Compact,
ThreadSpawn { parent_thread_id: ThreadId },
ThreadSpawn {
parent_thread_id: ThreadId,
depth: i32,
},
Other(String),
}

@@ -1540,8 +1543,11 @@ impl fmt::Display for SubAgentSource {
match self {
SubAgentSource::Review => f.write_str("review"),
SubAgentSource::Compact => f.write_str("compact"),
SubAgentSource::ThreadSpawn { parent_thread_id } => {
write!(f, "thread_spawn_{parent_thread_id}")
SubAgentSource::ThreadSpawn {
parent_thread_id,
depth,
} => {
write!(f, "thread_spawn_{parent_thread_id}_d{depth}")
}
SubAgentSource::Other(other) => f.write_str(other),
}
```
```
@@ -21,11 +21,7 @@ clap = { workspace = true, features = ["derive"] }
codex-process-hardening = { workspace = true }
ctor = { workspace = true }
libc = { workspace = true }
reqwest = { workspace = true, features = [
"blocking",
"json",
"rustls-tls",
] }
reqwest = { workspace = true, features = ["blocking", "json", "rustls-tls"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tiny_http = { workspace = true }
```
```
@@ -12,6 +12,7 @@ use crate::bottom_pane::SelectionViewParams;
use crate::bottom_pane::popup_consts::standard_popup_hint_line;
use crate::chatwidget::ChatWidget;
use crate::chatwidget::ExternalEditorState;
use crate::cwd_prompt::CwdPromptAction;
use crate::diff_render::DiffSummary;
use crate::exec_command::strip_bash_lc_and_escape;
use crate::external_editor;

@@ -36,6 +37,8 @@ use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::ThreadManager;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
use codex_core::config::edit::ConfigEdit;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config_loader::ConfigLayerStackOrdering;

@@ -44,12 +47,14 @@ use codex_core::features::Feature;
use codex_core::models_manager::manager::RefreshStrategy;
use codex_core::models_manager::model_presets::HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG;
use codex_core::models_manager::model_presets::HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::DeprecationNoticeEvent;
use codex_core::protocol::Event;
use codex_core::protocol::EventMsg;
use codex_core::protocol::FinalOutput;
use codex_core::protocol::ListSkillsResponseEvent;
use codex_core::protocol::Op;
use codex_core::protocol::SandboxPolicy;
use codex_core::protocol::SessionSource;
use codex_core::protocol::SkillErrorInfo;
use codex_core::protocol::TokenUsage;

@@ -60,6 +65,7 @@ use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelUpgrade;
use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::protocol::SessionConfiguredEvent;
use codex_utils_absolute_path::AbsolutePathBuf;
use color_eyre::eyre::Result;
use color_eyre::eyre::WrapErr;
use crossterm::event::KeyCode;

@@ -87,6 +93,7 @@ use tokio::sync::broadcast;
use tokio::sync::mpsc;
use tokio::sync::mpsc::error::TryRecvError;
use tokio::sync::mpsc::unbounded_channel;
use toml::Value as TomlValue;

const EXTERNAL_EDITOR_HINT: &str = "Save and close external editor to continue.";
const THREAD_EVENT_CHANNEL_CAPACITY: usize = 1024;

@@ -498,6 +505,10 @@ pub(crate) struct App {
/// Config is stored here so we can recreate ChatWidgets as needed.
pub(crate) config: Config,
pub(crate) active_profile: Option<String>,
cli_kv_overrides: Vec<(String, TomlValue)>,
harness_overrides: ConfigOverrides,
runtime_approval_policy_override: Option<AskForApproval>,
runtime_sandbox_policy_override: Option<SandboxPolicy>,

pub(crate) file_search: FileSearchManager,

@@ -545,6 +556,23 @@ struct WindowsSandboxState {
skip_world_writable_scan_once: bool,
}

fn normalize_harness_overrides_for_cwd(
mut overrides: ConfigOverrides,
base_cwd: &Path,
) -> Result<ConfigOverrides> {
if overrides.additional_writable_roots.is_empty() {
return Ok(overrides);
}

let mut normalized = Vec::with_capacity(overrides.additional_writable_roots.len());
for root in overrides.additional_writable_roots.drain(..) {
let absolute = AbsolutePathBuf::resolve_path_against_base(root, base_cwd)?;
normalized.push(absolute.into_path_buf());
}
overrides.additional_writable_roots = normalized;
Ok(overrides)
}

impl App {
pub fn chatwidget_init_for_forked_or_resumed_thread(
&self,

@@ -567,6 +595,38 @@ impl App {
}
}

async fn rebuild_config_for_cwd(&self, cwd: PathBuf) -> Result<Config> {
let mut overrides = self.harness_overrides.clone();
overrides.cwd = Some(cwd.clone());
let cwd_display = cwd.display().to_string();
ConfigBuilder::default()
.codex_home(self.config.codex_home.clone())
.cli_overrides(self.cli_kv_overrides.clone())
.harness_overrides(overrides)
.build()
.await
.wrap_err_with(|| format!("Failed to rebuild config for cwd {cwd_display}"))
}

fn apply_runtime_policy_overrides(&mut self, config: &mut Config) {
if let Some(policy) = self.runtime_approval_policy_override.as_ref()
&& let Err(err) = config.approval_policy.set(*policy)
{
tracing::warn!(%err, "failed to carry forward approval policy override");
self.chat_widget.add_error_message(format!(
"Failed to carry forward approval policy override: {err}"
));
}
if let Some(policy) = self.runtime_sandbox_policy_override.as_ref()
&& let Err(err) = config.sandbox_policy.set(policy.clone())
{
tracing::warn!(%err, "failed to carry forward sandbox policy override");
self.chat_widget.add_error_message(format!(
"Failed to carry forward sandbox policy override: {err}"
));
}
}

async fn shutdown_current_thread(&mut self) {
if let Some(thread_id) = self.chat_widget.thread_id() {
// Clear any in-flight rollback guard when switching threads.

@@ -824,6 +884,8 @@ impl App {
tui: &mut tui::Tui,
auth_manager: Arc<AuthManager>,
mut config: Config,
cli_kv_overrides: Vec<(String, TomlValue)>,
harness_overrides: ConfigOverrides,
active_profile: Option<String>,
initial_prompt: Option<String>,
initial_images: Vec<PathBuf>,

@@ -838,6 +900,8 @@ impl App {
emit_deprecation_notice(&app_event_tx, ollama_chat_support_notice);
emit_project_config_warnings(&app_event_tx, &config);

let harness_overrides =
normalize_harness_overrides_for_cwd(harness_overrides, &config.cwd)?;
let thread_manager = Arc::new(ThreadManager::new(
config.codex_home.clone(),
auth_manager.clone(),

@@ -979,6 +1043,10 @@ impl App {
auth_manager: auth_manager.clone(),
config,
active_profile,
cli_kv_overrides,
harness_overrides,
runtime_approval_policy_override: None,
runtime_sandbox_policy_override: None,
file_search,
enhanced_keys_supported,
transcript_cells: Vec::new(),

@@ -1203,6 +1271,34 @@ impl App {
.await?
{
SessionSelection::Resume(path) => {
let current_cwd = self.config.cwd.clone();
let resume_cwd = match crate::resolve_cwd_for_resume_or_fork(
tui,
&current_cwd,
&path,
CwdPromptAction::Resume,
true,
)
.await?
{
Some(cwd) => cwd,
None => current_cwd.clone(),
};
let mut resume_config = if crate::cwds_differ(&current_cwd, &resume_cwd) {
match self.rebuild_config_for_cwd(resume_cwd).await {
Ok(cfg) => cfg,
Err(err) => {
self.chat_widget.add_error_message(format!(
"Failed to rebuild configuration for resume: {err}"
));
return Ok(AppRunControl::Continue);
}
}
} else {
// No rebuild needed: current_cwd comes from self.config.cwd.
self.config.clone()
};
self.apply_runtime_policy_overrides(&mut resume_config);
let summary = session_summary(
self.chat_widget.token_usage(),
self.chat_widget.thread_id(),

@@ -1210,7 +1306,7 @@ impl App {
match self
.server
.resume_thread_from_rollout(
self.config.clone(),
resume_config.clone(),
path.clone(),
self.auth_manager.clone(),
)

@@ -1218,6 +1314,11 @@ impl App {
{
Ok(resumed) => {
self.shutdown_current_thread().await;
self.config = resume_config;
self.file_search = FileSearchManager::new(
self.config.cwd.clone(),
self.app_event_tx.clone(),
);
let init = self.chatwidget_init_for_forked_or_resumed_thread(
tui,
self.config.clone(),

@@ -1401,10 +1502,8 @@ impl App {
AppEvent::UpdateModel(model) => {
self.chat_widget.set_model(&model);
}
AppEvent::UpdateCollaborationMode(mode) => {
let model = mode.model().to_string();
self.chat_widget.set_collaboration_mode(mode);
self.chat_widget.set_model(&model);
```
|
||||
AppEvent::UpdateCollaborationMode(mask) => {
|
||||
self.chat_widget.set_collaboration_mask(mask);
|
||||
}
|
||||
AppEvent::OpenReasoningPopup { model } => {
|
||||
self.chat_widget.open_reasoning_popup(model);
|
||||
@@ -1660,6 +1759,13 @@ impl App {
|
||||
}
|
||||
}
|
||||
AppEvent::UpdateAskForApprovalPolicy(policy) => {
|
||||
self.runtime_approval_policy_override = Some(policy);
|
||||
if let Err(err) = self.config.approval_policy.set(policy) {
|
||||
tracing::warn!(%err, "failed to set approval policy on app config");
|
||||
self.chat_widget
|
||||
.add_error_message(format!("Failed to set approval policy: {err}"));
|
||||
return Ok(AppRunControl::Continue);
|
||||
}
|
||||
self.chat_widget.set_approval_policy(policy);
|
||||
}
|
||||
AppEvent::UpdateSandboxPolicy(policy) => {
|
||||
@@ -1688,6 +1794,8 @@ impl App {
|
||||
.add_error_message(format!("Failed to set sandbox policy: {err}"));
|
||||
return Ok(AppRunControl::Continue);
|
||||
}
|
||||
self.runtime_sandbox_policy_override =
|
||||
Some(self.config.sandbox_policy.get().clone());
|
||||
|
||||
// If sandbox policy becomes workspace-write or read-only, run the Windows world-writable scan.
|
||||
#[cfg(target_os = "windows")]
|
||||
@@ -2236,6 +2344,7 @@ mod tests {
|
||||
use codex_core::CodexAuth;
|
||||
use codex_core::ThreadManager;
|
||||
use codex_core::config::ConfigBuilder;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::models_manager::manager::ModelsManager;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
use codex_core::protocol::Event;
|
||||
@@ -2254,6 +2363,25 @@ mod tests {
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[test]
|
||||
fn normalize_harness_overrides_resolves_relative_add_dirs() -> Result<()> {
|
||||
let temp_dir = tempdir()?;
|
||||
let base_cwd = temp_dir.path().join("base");
|
||||
std::fs::create_dir_all(&base_cwd)?;
|
||||
|
||||
let overrides = ConfigOverrides {
|
||||
additional_writable_roots: vec![PathBuf::from("rel")],
|
||||
..Default::default()
|
||||
};
|
||||
let normalized = normalize_harness_overrides_for_cwd(overrides, &base_cwd)?;
|
||||
|
||||
assert_eq!(
|
||||
normalized.additional_writable_roots,
|
||||
vec![base_cwd.join("rel")]
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn make_test_app() -> App {
|
||||
let (chat_widget, app_event_tx, _rx, _op_rx) = make_chatwidget_manual_with_sender().await;
|
||||
let config = chat_widget.config_ref().clone();
|
||||
@@ -2275,6 +2403,10 @@ mod tests {
|
||||
auth_manager,
|
||||
config,
|
||||
active_profile: None,
|
||||
cli_kv_overrides: Vec::new(),
|
||||
harness_overrides: ConfigOverrides::default(),
|
||||
runtime_approval_policy_override: None,
|
||||
runtime_sandbox_policy_override: None,
|
||||
file_search,
|
||||
transcript_cells: Vec::new(),
|
||||
overlay: None,
|
||||
@@ -2323,6 +2455,10 @@ mod tests {
|
||||
auth_manager,
|
||||
config,
|
||||
active_profile: None,
|
||||
cli_kv_overrides: Vec::new(),
|
||||
harness_overrides: ConfigOverrides::default(),
|
||||
runtime_approval_policy_override: None,
|
||||
runtime_sandbox_policy_override: None,
|
||||
file_search,
|
||||
transcript_cells: Vec::new(),
|
||||
overlay: None,
|
||||
|
||||
@@ -23,7 +23,7 @@ use crate::history_cell::HistoryCell;
|
||||
use codex_core::features::Feature;
|
||||
use codex_core::protocol::AskForApproval;
|
||||
use codex_core::protocol::SandboxPolicy;
|
||||
use codex_protocol::config_types::CollaborationMode;
|
||||
use codex_protocol::config_types::CollaborationModeMask;
|
||||
use codex_protocol::openai_models::ReasoningEffort;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
@@ -103,8 +103,8 @@ pub(crate) enum AppEvent {
|
||||
/// Update the current model slug in the running app and widget.
|
||||
UpdateModel(String),
|
||||
|
||||
/// Update the current collaboration mode in the running app and widget.
|
||||
UpdateCollaborationMode(CollaborationMode),
|
||||
/// Update the active collaboration mask in the running app and widget.
|
||||
UpdateCollaborationMode(CollaborationModeMask),
|
||||
|
||||
/// Persist the selected model and reasoning effort to the appropriate config.
|
||||
PersistModelSelection {
|
||||
@@ -240,10 +240,10 @@ pub(crate) enum AppEvent {
|
||||
/// Open the custom prompt option from the review popup.
|
||||
OpenReviewCustomPrompt,
|
||||
|
||||
/// Submit a user message with an explicit collaboration mode.
|
||||
/// Submit a user message with an explicit collaboration mask.
|
||||
SubmitUserMessageWithMode {
|
||||
text: String,
|
||||
collaboration_mode: CollaborationMode,
|
||||
collaboration_mode: CollaborationModeMask,
|
||||
},
|
||||
|
||||
/// Open the approval popup.
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::bottom_pane::ApprovalRequest;
|
||||
use crate::render::renderable::Renderable;
|
||||
use codex_protocol::request_user_input::RequestUserInputEvent;
|
||||
@@ -27,6 +29,22 @@ pub(crate) trait BottomPaneView: Renderable {
|
||||
false
|
||||
}
|
||||
|
||||
/// Flush a pending paste-burst when due. Return true if the view modified
|
||||
/// its state and needs a redraw.
|
||||
fn flush_paste_burst_if_due(&mut self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
/// Return true if the view is currently capturing a paste-burst.
|
||||
fn is_in_paste_burst(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
/// Recommended delay to schedule the next redraw while capturing a burst.
|
||||
fn recommended_redraw_delay(&self) -> Option<Duration> {
|
||||
None
|
||||
}
|
||||
|
||||
/// Try to handle approval request; return the original value if not
|
||||
/// consumed.
|
||||
fn try_consume_approval_request(
|
||||
|
||||
@@ -253,6 +253,22 @@ enum ActivePopup {
|
||||
|
||||
const FOOTER_SPACING_HEIGHT: u16 = 0;
|
||||
|
||||
pub(crate) fn default_chat_composer(
|
||||
has_input_focus: bool,
|
||||
app_event_tx: AppEventSender,
|
||||
enhanced_keys_supported: bool,
|
||||
placeholder_text: String,
|
||||
disable_paste_burst: bool,
|
||||
) -> ChatComposer {
|
||||
ChatComposer::new(
|
||||
has_input_focus,
|
||||
app_event_tx,
|
||||
enhanced_keys_supported,
|
||||
placeholder_text,
|
||||
disable_paste_burst,
|
||||
)
|
||||
}
|
||||
|
||||
impl ChatComposer {
|
||||
pub fn new(
|
||||
has_input_focus: bool,
|
||||
@@ -320,6 +336,47 @@ impl ChatComposer {
|
||||
self.collaboration_modes_enabled = enabled;
|
||||
}
|
||||
|
||||
pub(crate) fn desired_textarea_height(&self, width: u16) -> u16 {
|
||||
const COLS_WITH_MARGIN: u16 = LIVE_PREFIX_COLS + 1;
|
||||
self.textarea
|
||||
.desired_height(width.saturating_sub(COLS_WITH_MARGIN))
|
||||
+ 2
|
||||
}
|
||||
|
||||
pub(crate) fn cursor_pos_textarea_only(&self, area: Rect) -> Option<(u16, u16)> {
|
||||
if !self.input_enabled {
|
||||
return None;
|
||||
}
|
||||
let textarea_rect = if area.height > 2 && area.width > 2 {
|
||||
area.inset(Insets::tlbr(1, 1, 1, 1))
|
||||
} else {
|
||||
area
|
||||
};
|
||||
if textarea_rect.is_empty() {
|
||||
return None;
|
||||
}
|
||||
let state = *self.textarea_state.borrow();
|
||||
let render_rect = self.textarea_render_rect(textarea_rect);
|
||||
if render_rect.is_empty() {
|
||||
return None;
|
||||
}
|
||||
self.textarea.cursor_pos_with_state(render_rect, state)
|
||||
}
|
||||
|
||||
pub(crate) fn render_textarea_only(&self, area: Rect, buf: &mut Buffer) {
|
||||
let style = user_message_style();
|
||||
Block::default().style(style).render_ref(area, buf);
|
||||
let textarea_rect = if area.height > 2 && area.width > 2 {
|
||||
area.inset(Insets::tlbr(1, 1, 1, 1))
|
||||
} else {
|
||||
area
|
||||
};
|
||||
if textarea_rect.is_empty() {
|
||||
return;
|
||||
}
|
||||
self.render_textarea(textarea_rect, buf);
|
||||
}
|
||||
|
||||
pub fn set_collaboration_mode_indicator(
|
||||
&mut self,
|
||||
indicator: Option<CollaborationModeIndicator>,
|
||||
@@ -350,6 +407,50 @@ impl ChatComposer {
|
||||
[composer_rect, textarea_rect, popup_rect]
|
||||
}
|
||||
|
||||
fn textarea_render_rect(&self, textarea_rect: Rect) -> Rect {
|
||||
if textarea_rect.x < LIVE_PREFIX_COLS {
|
||||
Rect {
|
||||
x: textarea_rect.x.saturating_add(LIVE_PREFIX_COLS),
|
||||
width: textarea_rect.width.saturating_sub(LIVE_PREFIX_COLS),
|
||||
..textarea_rect
|
||||
}
|
||||
} else {
|
||||
textarea_rect
|
||||
}
|
||||
}
|
||||
|
||||
fn render_textarea(&self, textarea_rect: Rect, buf: &mut Buffer) {
|
||||
let render_rect = self.textarea_render_rect(textarea_rect);
|
||||
if render_rect.is_empty() {
|
||||
return;
|
||||
}
|
||||
let prompt_x = textarea_rect
|
||||
.x
|
||||
.checked_sub(LIVE_PREFIX_COLS)
|
||||
.unwrap_or(textarea_rect.x);
|
||||
let prompt = if self.input_enabled {
|
||||
"›".bold()
|
||||
} else {
|
||||
"›".dim()
|
||||
};
|
||||
buf.set_span(prompt_x, textarea_rect.y, &prompt, render_rect.width);
|
||||
|
||||
let mut state = self.textarea_state.borrow_mut();
|
||||
StatefulWidgetRef::render_ref(&(&self.textarea), render_rect, buf, &mut state);
|
||||
if self.textarea.text().is_empty() {
|
||||
let text = if self.input_enabled {
|
||||
self.placeholder_text.as_str().to_string()
|
||||
} else {
|
||||
self.input_disabled_placeholder
|
||||
.as_deref()
|
||||
.unwrap_or("Input disabled.")
|
||||
.to_string()
|
||||
};
|
||||
let placeholder = Span::from(text).dim();
|
||||
Line::from(vec![placeholder]).render_ref(render_rect.inner(Margin::new(0, 0)), buf);
|
||||
}
|
||||
}
|
||||
|
||||
fn footer_spacing(footer_hint_height: u16) -> u16 {
|
||||
if footer_hint_height == 0 {
|
||||
0
|
||||
@@ -2446,7 +2547,11 @@ impl Renderable for ChatComposer {
|
||||
|
||||
let [_, textarea_rect, _] = self.layout_areas(area);
|
||||
let state = *self.textarea_state.borrow();
|
||||
self.textarea.cursor_pos_with_state(textarea_rect, state)
|
||||
let render_rect = self.textarea_render_rect(textarea_rect);
|
||||
if render_rect.is_empty() {
|
||||
return None;
|
||||
}
|
||||
self.textarea.cursor_pos_with_state(render_rect, state)
|
||||
}
|
||||
|
||||
fn desired_height(&self, width: u16) -> u16 {
|
||||
@@ -2457,15 +2562,16 @@ impl Renderable for ChatComposer {
|
||||
let footer_spacing = Self::footer_spacing(footer_hint_height);
|
||||
let footer_total_height = footer_hint_height + footer_spacing;
|
||||
const COLS_WITH_MARGIN: u16 = LIVE_PREFIX_COLS + 1;
|
||||
let popup_height = match &self.active_popup {
|
||||
ActivePopup::None => footer_total_height,
|
||||
ActivePopup::Command(c) => c.calculate_required_height(width),
|
||||
ActivePopup::File(c) => c.calculate_required_height(),
|
||||
ActivePopup::Skill(c) => c.calculate_required_height(width),
|
||||
};
|
||||
self.textarea
|
||||
.desired_height(width.saturating_sub(COLS_WITH_MARGIN))
|
||||
+ 2
|
||||
+ match &self.active_popup {
|
||||
ActivePopup::None => footer_total_height,
|
||||
ActivePopup::Command(c) => c.calculate_required_height(width),
|
||||
ActivePopup::File(c) => c.calculate_required_height(),
|
||||
ActivePopup::Skill(c) => c.calculate_required_height(width),
|
||||
}
|
||||
+ popup_height
|
||||
}
|
||||
|
||||
fn render(&self, area: Rect, buf: &mut Buffer) {
|
||||
@@ -2496,19 +2602,18 @@ impl Renderable for ChatComposer {
|
||||
} else {
|
||||
popup_rect
|
||||
};
|
||||
let mut left_content_width = None;
|
||||
if self.footer_flash_visible() {
|
||||
if let Some(flash) = self.footer_flash.as_ref() {
|
||||
flash.line.render(inset_footer_hint_area(hint_rect), buf);
|
||||
left_content_width = Some(flash.line.width() as u16);
|
||||
}
|
||||
let left_content_width = if self.footer_flash_visible()
|
||||
&& let Some(flash) = self.footer_flash.as_ref()
|
||||
{
|
||||
flash.line.render(inset_footer_hint_area(hint_rect), buf);
|
||||
Some(flash.line.width() as u16)
|
||||
} else if let Some(items) = self.footer_hint_override.as_ref() {
|
||||
render_footer_hint_items(hint_rect, buf, items);
|
||||
left_content_width = Some(footer_hint_items_width(items));
|
||||
Some(footer_hint_items_width(items))
|
||||
} else {
|
||||
render_footer(hint_rect, buf, footer_props);
|
||||
left_content_width = Some(footer_line_width(footer_props));
|
||||
}
|
||||
Some(footer_line_width(footer_props))
|
||||
};
|
||||
render_mode_indicator(
|
||||
hint_rect,
|
||||
buf,
|
||||
@@ -2520,34 +2625,7 @@ impl Renderable for ChatComposer {
|
||||
}
|
||||
let style = user_message_style();
|
||||
Block::default().style(style).render_ref(composer_rect, buf);
|
||||
if !textarea_rect.is_empty() {
|
||||
let prompt = if self.input_enabled {
|
||||
"›".bold()
|
||||
} else {
|
||||
"›".dim()
|
||||
};
|
||||
buf.set_span(
|
||||
textarea_rect.x - LIVE_PREFIX_COLS,
|
||||
textarea_rect.y,
|
||||
&prompt,
|
||||
textarea_rect.width,
|
||||
);
|
||||
}
|
||||
|
||||
let mut state = self.textarea_state.borrow_mut();
|
||||
StatefulWidgetRef::render_ref(&(&self.textarea), textarea_rect, buf, &mut state);
|
||||
if self.textarea.text().is_empty() {
|
||||
let text = if self.input_enabled {
|
||||
self.placeholder_text.as_str().to_string()
|
||||
} else {
|
||||
self.input_disabled_placeholder
|
||||
.as_deref()
|
||||
.unwrap_or("Input disabled.")
|
||||
.to_string()
|
||||
};
|
||||
let placeholder = Span::from(text).dim();
|
||||
Line::from(vec![placeholder]).render_ref(textarea_rect.inner(Margin::new(0, 0)), buf);
|
||||
}
|
||||
self.render_textarea(textarea_rect, buf);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -105,6 +105,7 @@ pub(crate) enum CancellationEvent {
|
||||
|
||||
pub(crate) use chat_composer::ChatComposer;
|
||||
pub(crate) use chat_composer::InputResult;
|
||||
pub(crate) use chat_composer::default_chat_composer;
|
||||
use codex_protocol::custom_prompts::CustomPrompt;
|
||||
|
||||
use crate::status_indicator_widget::StatusIndicatorWidget;
|
||||
@@ -130,6 +131,7 @@ pub(crate) struct BottomPane {
|
||||
frame_requester: FrameRequester,
|
||||
|
||||
has_input_focus: bool,
|
||||
enhanced_keys_supported: bool,
|
||||
is_task_running: bool,
|
||||
esc_backtrack_hint: bool,
|
||||
animations_enabled: bool,
|
||||
@@ -167,7 +169,7 @@ impl BottomPane {
|
||||
animations_enabled,
|
||||
skills,
|
||||
} = params;
|
||||
let mut composer = ChatComposer::new(
|
||||
let mut composer = default_chat_composer(
|
||||
has_input_focus,
|
||||
app_event_tx.clone(),
|
||||
enhanced_keys_supported,
|
||||
@@ -182,6 +184,7 @@ impl BottomPane {
|
||||
app_event_tx,
|
||||
frame_requester,
|
||||
has_input_focus,
|
||||
enhanced_keys_supported,
|
||||
is_task_running: false,
|
||||
status: None,
|
||||
unified_exec_footer: UnifiedExecFooter::new(),
|
||||
@@ -246,6 +249,7 @@ impl BottomPane {
|
||||
pub fn handle_key_event(&mut self, key_event: KeyEvent) -> InputResult {
|
||||
// If a modal/view is active, handle it here; otherwise forward to composer.
|
||||
if let Some(view) = self.view_stack.last_mut() {
|
||||
let mut paste_burst_delay = None;
|
||||
if key_event.code == KeyCode::Esc
|
||||
&& matches!(view.on_ctrl_c(), CancellationEvent::Handled)
|
||||
&& view.is_complete()
|
||||
@@ -257,9 +261,14 @@ impl BottomPane {
|
||||
if view.is_complete() {
|
||||
self.view_stack.clear();
|
||||
self.on_active_view_complete();
|
||||
} else if view.is_in_paste_burst() {
|
||||
paste_burst_delay = view.recommended_redraw_delay();
|
||||
}
|
||||
}
|
||||
self.request_redraw();
|
||||
if let Some(delay) = paste_burst_delay {
|
||||
self.request_redraw_in(delay);
|
||||
}
|
||||
InputResult::None
|
||||
} else {
|
||||
// If a task is running and a status line is visible, allow Esc to
|
||||
@@ -320,6 +329,10 @@ impl BottomPane {
|
||||
let needs_redraw = view.handle_paste(pasted);
|
||||
if view.is_complete() {
|
||||
self.on_active_view_complete();
|
||||
} else if view.is_in_paste_burst()
|
||||
&& let Some(delay) = view.recommended_redraw_delay()
|
||||
{
|
||||
self.request_redraw_in(delay);
|
||||
}
|
||||
if needs_redraw {
|
||||
self.request_redraw();
|
||||
@@ -329,6 +342,9 @@ impl BottomPane {
|
||||
if needs_redraw {
|
||||
self.request_redraw();
|
||||
}
|
||||
if self.composer.is_in_paste_burst() {
|
||||
self.request_redraw_in(ChatComposer::recommended_paste_flush_delay());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -623,7 +639,11 @@ impl BottomPane {
|
||||
request
|
||||
};
|
||||
|
||||
let modal = RequestUserInputOverlay::new(request, self.app_event_tx.clone());
|
||||
let modal = RequestUserInputOverlay::new(
|
||||
request,
|
||||
self.app_event_tx.clone(),
|
||||
self.enhanced_keys_supported,
|
||||
);
|
||||
self.pause_status_timer_for_modal();
|
||||
self.set_composer_input_enabled(
|
||||
false,
|
||||
@@ -665,11 +685,28 @@ impl BottomPane {
|
||||
}
|
||||
|
||||
pub(crate) fn flush_paste_burst_if_due(&mut self) -> bool {
|
||||
self.composer.flush_paste_burst_if_due()
|
||||
if let Some(view) = self.view_stack.last_mut() {
|
||||
view.flush_paste_burst_if_due()
|
||||
} else {
|
||||
self.composer.flush_paste_burst_if_due()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_in_paste_burst(&self) -> bool {
|
||||
self.composer.is_in_paste_burst()
|
||||
if let Some(view) = self.view_stack.last() {
|
||||
view.is_in_paste_burst()
|
||||
} else {
|
||||
self.composer.is_in_paste_burst()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn recommended_paste_burst_delay(&self) -> Duration {
|
||||
if let Some(view) = self.view_stack.last() {
|
||||
view.recommended_redraw_delay()
|
||||
.unwrap_or_else(ChatComposer::recommended_paste_flush_delay)
|
||||
} else {
|
||||
ChatComposer::recommended_paste_flush_delay()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn on_history_entry_response(
|
||||
|
||||
@@ -6,23 +6,24 @@
|
||||
//! - Typing while focused on options jumps into notes to keep freeform input fast.
|
||||
//! - Enter advances to the next question; the last question submits all answers.
|
||||
//! - Freeform-only questions submit an empty answer list when empty.
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::VecDeque;
|
||||
use std::time::Duration;
|
||||
|
||||
use crossterm::event::KeyCode;
|
||||
use crossterm::event::KeyEvent;
|
||||
use crossterm::event::KeyEventKind;
|
||||
use crossterm::event::KeyModifiers;
|
||||
mod layout;
|
||||
mod render;
|
||||
|
||||
use crate::app_event::AppEvent;
|
||||
use crate::app_event_sender::AppEventSender;
|
||||
use crate::bottom_pane::CancellationEvent;
|
||||
use crate::bottom_pane::ChatComposer;
|
||||
use crate::bottom_pane::bottom_pane_view::BottomPaneView;
|
||||
use crate::bottom_pane::default_chat_composer;
|
||||
use crate::bottom_pane::scroll_state::ScrollState;
|
||||
use crate::bottom_pane::textarea::TextArea;
|
||||
use crate::bottom_pane::textarea::TextAreaState;
|
||||
|
||||
use codex_core::protocol::Op;
|
||||
use codex_protocol::request_user_input::RequestUserInputAnswer;
|
||||
@@ -31,7 +32,6 @@ use codex_protocol::request_user_input::RequestUserInputResponse;
|
||||
|
||||
const NOTES_PLACEHOLDER: &str = "Add notes (optional)";
|
||||
const ANSWER_PLACEHOLDER: &str = "Type your answer (optional)";
|
||||
const SELECT_OPTION_PLACEHOLDER: &str = "Select an option to add notes (optional)";
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
enum Focus {
|
||||
@@ -40,16 +40,24 @@ enum Focus {
|
||||
}
|
||||
|
||||
struct NotesEntry {
|
||||
text: TextArea,
|
||||
state: RefCell<TextAreaState>,
|
||||
composer: ChatComposer,
|
||||
}
|
||||
|
||||
impl NotesEntry {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
text: TextArea::new(),
|
||||
state: RefCell::new(TextAreaState::default()),
|
||||
}
|
||||
fn new(
|
||||
app_event_tx: AppEventSender,
|
||||
enhanced_keys_supported: bool,
|
||||
placeholder_text: &'static str,
|
||||
) -> Self {
|
||||
let mut composer = default_chat_composer(
|
||||
true,
|
||||
app_event_tx,
|
||||
enhanced_keys_supported,
|
||||
placeholder_text.to_string(),
|
||||
false,
|
||||
);
|
||||
composer.set_task_running(false);
|
||||
Self { composer }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -73,10 +81,15 @@ pub(crate) struct RequestUserInputOverlay {
|
||||
current_idx: usize,
|
||||
focus: Focus,
|
||||
done: bool,
|
||||
enhanced_keys_supported: bool,
|
||||
}
|
||||
|
||||
impl RequestUserInputOverlay {
|
||||
pub(crate) fn new(request: RequestUserInputEvent, app_event_tx: AppEventSender) -> Self {
|
||||
pub(crate) fn new(
|
||||
request: RequestUserInputEvent,
|
||||
app_event_tx: AppEventSender,
|
||||
enhanced_keys_supported: bool,
|
||||
) -> Self {
|
||||
let mut overlay = Self {
|
||||
app_event_tx,
|
||||
request,
|
||||
@@ -85,6 +98,7 @@ impl RequestUserInputOverlay {
|
||||
current_idx: 0,
|
||||
focus: Focus::Options,
|
||||
done: false,
|
||||
enhanced_keys_supported,
|
||||
};
|
||||
overlay.reset_for_request();
|
||||
overlay.ensure_focus_available();
|
||||
@@ -168,20 +182,6 @@ impl RequestUserInputOverlay {
|
||||
answer.option_notes.get_mut(idx)
|
||||
}
|
||||
|
||||
fn notes_placeholder(&self) -> &'static str {
|
||||
if self.has_options()
|
||||
&& self
|
||||
.current_answer()
|
||||
.is_some_and(|answer| answer.selected.is_none())
|
||||
{
|
||||
SELECT_OPTION_PLACEHOLDER
|
||||
} else if self.has_options() {
|
||||
NOTES_PLACEHOLDER
|
||||
} else {
|
||||
ANSWER_PLACEHOLDER
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensure the focus mode is valid for the current question.
|
||||
fn ensure_focus_available(&mut self) {
|
||||
if self.question_count() == 0 {
|
||||
@@ -194,23 +194,44 @@ impl RequestUserInputOverlay {
|
||||
|
||||
/// Rebuild local answer state from the current request.
|
||||
fn reset_for_request(&mut self) {
|
||||
let app_event_tx = self.app_event_tx.clone();
|
||||
let enhanced_keys_supported = self.enhanced_keys_supported;
|
||||
self.answers = self
|
||||
.request
|
||||
.questions
|
||||
.iter()
|
||||
.map(|question| {
|
||||
let mut option_state = ScrollState::new();
|
||||
let mut has_options = false;
|
||||
let mut option_notes = Vec::new();
|
||||
if let Some(options) = question.options.as_ref()
|
||||
&& !options.is_empty()
|
||||
{
|
||||
has_options = true;
|
||||
option_state.selected_idx = Some(0);
|
||||
option_notes = (0..options.len()).map(|_| NotesEntry::new()).collect();
|
||||
option_notes = (0..options.len())
|
||||
.map(|_| {
|
||||
NotesEntry::new(
|
||||
app_event_tx.clone(),
|
||||
enhanced_keys_supported,
|
||||
NOTES_PLACEHOLDER,
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
let placeholder_text = if has_options {
|
||||
NOTES_PLACEHOLDER
|
||||
} else {
|
||||
ANSWER_PLACEHOLDER
|
||||
};
|
||||
AnswerState {
|
||||
selected: option_state.selected_idx,
|
||||
option_state,
|
||||
notes: NotesEntry::new(),
|
||||
notes: NotesEntry::new(
|
||||
app_event_tx.clone(),
|
||||
enhanced_keys_supported,
|
||||
placeholder_text,
|
||||
),
|
||||
option_notes,
|
||||
}
|
||||
})
|
||||
@@ -282,10 +303,15 @@ impl RequestUserInputOverlay {
|
||||
let notes = if options.is_some_and(|opts| !opts.is_empty()) {
|
||||
selected_idx
|
||||
.and_then(|selected| answer_state.option_notes.get(selected))
|
||||
.map(|entry| entry.text.text().trim().to_string())
|
||||
.map(|entry| entry.composer.current_text().trim().to_string())
|
||||
.unwrap_or_default()
|
||||
} else {
|
||||
answer_state.notes.text.text().trim().to_string()
|
||||
answer_state
|
||||
.notes
|
||||
.composer
|
||||
.current_text()
|
||||
.trim()
|
||||
.to_string()
|
||||
};
|
||||
let selected_label = selected_idx.and_then(|selected_idx| {
|
||||
question
|
||||
@@ -331,7 +357,7 @@ impl RequestUserInputOverlay {
|
||||
if options.is_some_and(|opts| !opts.is_empty()) {
|
||||
false
|
||||
} else {
|
||||
answer.notes.text.text().trim().is_empty()
|
||||
answer.notes.composer.current_text().trim().is_empty()
|
||||
}
|
||||
})
|
||||
.count()
|
||||
@@ -342,8 +368,8 @@ impl RequestUserInputOverlay {
|
||||
let Some(entry) = self.current_notes_entry() else {
|
||||
return 3;
|
||||
};
|
||||
let usable_width = width.saturating_sub(2);
|
||||
let text_height = entry.text.desired_height(usable_width).clamp(1, 6);
|
||||
let composer_height = entry.composer.desired_textarea_height(width);
|
||||
let text_height = composer_height.saturating_sub(2).clamp(1, 6);
|
||||
text_height.saturating_add(2).clamp(3, 8)
|
||||
}
|
||||
}
|
||||
@@ -401,14 +427,16 @@ impl BottomPaneView for RequestUserInputOverlay {
|
||||
self.focus = Focus::Notes;
|
||||
self.ensure_selected_for_notes();
|
||||
if let Some(entry) = self.current_notes_entry_mut() {
|
||||
entry.text.input(key_event);
|
||||
entry.composer.handle_key_event(key_event);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Focus::Notes => {
|
||||
if matches!(key_event.code, KeyCode::Enter) {
|
||||
if matches!(key_event.code, KeyCode::Enter)
|
||||
&& key_event.modifiers == KeyModifiers::NONE
|
||||
{
|
||||
self.go_next_or_submit();
|
||||
return;
|
||||
}
|
||||
@@ -433,12 +461,30 @@ impl BottomPaneView for RequestUserInputOverlay {
|
||||
// Notes are per option when options exist.
|
||||
self.ensure_selected_for_notes();
|
||||
if let Some(entry) = self.current_notes_entry_mut() {
|
||||
entry.text.input(key_event);
|
||||
entry.composer.handle_key_event(key_event);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn flush_paste_burst_if_due(&mut self) -> bool {
|
||||
self.ensure_selected_for_notes();
|
||||
if let Some(entry) = self.current_notes_entry_mut() {
|
||||
entry.composer.flush_paste_burst_if_due()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn is_in_paste_burst(&self) -> bool {
|
||||
self.current_notes_entry()
|
||||
.is_some_and(|entry| entry.composer.is_in_paste_burst())
|
||||
}
|
||||
|
||||
fn recommended_redraw_delay(&self) -> Option<Duration> {
|
||||
Some(ChatComposer::recommended_paste_flush_delay())
|
||||
}
|
||||
|
||||
fn on_ctrl_c(&mut self) -> CancellationEvent {
|
||||
self.app_event_tx.send(AppEvent::CodexOp(Op::Interrupt));
|
||||
self.done = true;
|
||||
@@ -453,21 +499,14 @@ impl BottomPaneView for RequestUserInputOverlay {
|
||||
if pasted.is_empty() {
|
||||
return false;
|
||||
}
|
||||
if matches!(self.focus, Focus::Notes) {
|
||||
self.ensure_selected_for_notes();
|
||||
if let Some(entry) = self.current_notes_entry_mut() {
|
||||
entry.text.insert_str(&pasted);
|
||||
return true;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if matches!(self.focus, Focus::Options) {
|
||||
// Treat pastes the same as typing: switch into notes.
|
||||
self.focus = Focus::Notes;
|
||||
}
|
||||
if matches!(self.focus, Focus::Notes) {
|
||||
self.ensure_selected_for_notes();
|
||||
if let Some(entry) = self.current_notes_entry_mut() {
|
||||
entry.text.insert_str(&pasted);
|
||||
return true;
|
||||
return entry.composer.handle_paste(pasted);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@@ -569,6 +608,7 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "First")]),
|
||||
tx,
|
||||
true,
|
||||
);
|
||||
overlay.try_consume_user_input_request(request_event(
|
||||
"turn-2",
|
||||
@@ -592,6 +632,7 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Pick one")]),
|
||||
tx,
|
||||
true,
|
||||
);
|
||||
|
||||
overlay.submit_answers();
|
||||
@@ -611,6 +652,7 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_without_options("q1", "Notes")]),
|
||||
tx,
|
||||
true,
|
||||
);
|
||||
|
||||
overlay.submit_answers();
|
||||
@@ -629,6 +671,7 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Pick one")]),
|
||||
tx,
|
||||
true,
|
||||
);
|
||||
|
||||
{
|
||||
@@ -639,7 +682,7 @@ mod tests {
|
||||
overlay
|
||||
.current_notes_entry_mut()
|
||||
.expect("notes entry missing")
|
||||
.text
|
||||
.composer
|
||||
.insert_str("Notes for option 2");
|
||||
|
||||
overlay.submit_answers();
|
||||
@@ -664,6 +707,7 @@ mod tests {
|
||||
let overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Area")]),
|
||||
tx,
|
||||
true,
|
||||
);
|
||||
let area = Rect::new(0, 0, 64, 16);
|
||||
insta::assert_snapshot!(
|
||||
@@ -678,6 +722,7 @@ mod tests {
|
||||
let overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Area")]),
|
||||
tx,
|
||||
true,
|
||||
);
|
||||
let area = Rect::new(0, 0, 60, 8);
|
||||
insta::assert_snapshot!(
|
||||
@@ -721,6 +766,7 @@ mod tests {
|
||||
}],
|
||||
),
|
||||
tx,
|
||||
true,
|
||||
);
|
||||
{
|
||||
let answer = overlay.current_answer_mut().expect("answer missing");
|
||||
@@ -740,6 +786,7 @@ mod tests {
|
||||
let overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_without_options("q1", "Goal")]),
|
||||
tx,
|
||||
true,
|
||||
);
|
||||
let area = Rect::new(0, 0, 64, 10);
|
||||
insta::assert_snapshot!(
|
||||
@@ -754,12 +801,13 @@ mod tests {
|
||||
let mut overlay = RequestUserInputOverlay::new(
|
||||
request_event("turn-1", vec![question_with_options("q1", "Pick one")]),
|
||||
tx,
|
||||
true,
|
||||
);
|
||||
overlay.focus = Focus::Notes;
|
||||
overlay
|
||||
.current_notes_entry_mut()
|
||||
.expect("notes entry missing")
|
||||
.text
|
||||
.composer
|
||||
.insert_str("Notes");
|
||||
|
||||
overlay.handle_key_event(KeyEvent::from(KeyCode::Down));
|
||||
|
||||
@@ -5,13 +5,13 @@ use ratatui::style::Stylize;
|
||||
use ratatui::text::Line;
|
||||
use ratatui::widgets::Clear;
|
||||
use ratatui::widgets::Paragraph;
|
||||
use ratatui::widgets::StatefulWidgetRef;
|
||||
use ratatui::widgets::Widget;
|
||||
|
||||
use crate::bottom_pane::selection_popup_common::GenericDisplayRow;
|
||||
use crate::bottom_pane::selection_popup_common::render_rows;
|
||||
use crate::key_hint;
|
||||
use crate::render::renderable::Renderable;
|
||||
use crate::ui_consts::LIVE_PREFIX_COLS;
|
||||
|
||||
use super::RequestUserInputOverlay;
|
||||
|
||||
@@ -242,27 +242,19 @@ impl RequestUserInputOverlay {
|
||||
// Inline notes layout uses a prefix and a single-line text area.
|
||||
let prefix = notes_prefix();
|
||||
let prefix_width = prefix.len() as u16;
|
||||
if input_area.width <= prefix_width {
|
||||
let min_width = prefix_width.saturating_add(LIVE_PREFIX_COLS);
|
||||
if input_area.width <= min_width {
|
||||
return None;
|
||||
}
|
||||
let textarea_rect = Rect {
|
||||
x: input_area.x.saturating_add(prefix_width),
|
||||
x: input_area.x.saturating_add(min_width),
|
||||
y: input_area.y,
|
||||
width: input_area.width.saturating_sub(prefix_width),
|
||||
width: input_area.width.saturating_sub(min_width),
|
||||
height: 1,
|
||||
};
|
||||
let state = *entry.state.borrow();
|
||||
return entry.text.cursor_pos_with_state(textarea_rect, state);
|
||||
return entry.composer.cursor_pos_textarea_only(textarea_rect);
|
||||
}
|
||||
let text_area_height = input_area.height.saturating_sub(2);
|
||||
let textarea_rect = Rect {
|
||||
x: input_area.x.saturating_add(1),
|
||||
y: input_area.y.saturating_add(1),
|
||||
width: input_area.width.saturating_sub(2),
|
||||
height: text_area_height,
|
||||
};
|
||||
let state = *entry.state.borrow();
|
||||
entry.text.cursor_pos_with_state(textarea_rect, state)
|
||||
entry.composer.cursor_pos_textarea_only(input_area)
|
||||
}
|
||||
|
||||
/// Render the notes input box or inline notes field.
|
||||
@@ -277,7 +269,8 @@ impl RequestUserInputOverlay {
|
||||
// Inline notes field for tight layouts.
|
||||
let prefix = notes_prefix();
|
||||
let prefix_width = prefix.len() as u16;
|
||||
if area.width <= prefix_width {
|
||||
let min_width = prefix_width.saturating_add(LIVE_PREFIX_COLS);
|
||||
if area.width <= min_width {
|
||||
Paragraph::new(Line::from(prefix.dim())).render(area, buf);
|
||||
return;
|
||||
}
|
||||
@@ -291,70 +284,17 @@ impl RequestUserInputOverlay {
|
||||
buf,
|
||||
);
|
||||
let textarea_rect = Rect {
|
||||
x: area.x.saturating_add(prefix_width),
|
||||
x: area.x.saturating_add(min_width),
|
||||
y: area.y,
|
||||
width: area.width.saturating_sub(prefix_width),
|
||||
width: area.width.saturating_sub(min_width),
|
||||
height: 1,
|
||||
};
|
||||
let mut state = entry.state.borrow_mut();
|
||||
Clear.render(textarea_rect, buf);
|
||||
StatefulWidgetRef::render_ref(&(&entry.text), textarea_rect, buf, &mut state);
|
||||
if entry.text.text().is_empty() {
|
||||
Paragraph::new(Line::from(self.notes_placeholder().dim()))
|
||||
.render(textarea_rect, buf);
|
||||
}
|
||||
entry.composer.render_textarea_only(textarea_rect, buf);
|
||||
return;
|
||||
}
|
||||
// Draw a light ASCII frame around the notes area.
|
||||
let top_border = format!("+{}+", "-".repeat(area.width.saturating_sub(2) as usize));
|
||||
let bottom_border = top_border.clone();
|
||||
Paragraph::new(Line::from(top_border)).render(
|
||||
Rect {
|
||||
x: area.x,
|
||||
y: area.y,
|
||||
width: area.width,
|
||||
height: 1,
|
||||
},
|
||||
buf,
|
||||
);
|
||||
Paragraph::new(Line::from(bottom_border)).render(
|
||||
Rect {
|
||||
x: area.x,
|
||||
y: area.y.saturating_add(area.height.saturating_sub(1)),
|
||||
width: area.width,
|
||||
height: 1,
|
||||
},
|
||||
buf,
|
||||
);
|
||||
for row in 1..area.height.saturating_sub(1) {
|
||||
Line::from(vec![
|
||||
"|".into(),
|
||||
" ".repeat(area.width.saturating_sub(2) as usize).into(),
|
||||
"|".into(),
|
||||
])
|
||||
.render(
|
||||
Rect {
|
||||
x: area.x,
|
||||
y: area.y.saturating_add(row),
|
||||
width: area.width,
|
||||
height: 1,
|
||||
},
|
||||
buf,
|
||||
);
|
||||
}
|
||||
let text_area_height = area.height.saturating_sub(2);
|
||||
let textarea_rect = Rect {
|
||||
x: area.x.saturating_add(1),
|
||||
y: area.y.saturating_add(1),
|
||||
width: area.width.saturating_sub(2),
|
||||
height: text_area_height,
|
||||
};
|
||||
let mut state = entry.state.borrow_mut();
|
||||
Clear.render(textarea_rect, buf);
|
||||
StatefulWidgetRef::render_ref(&(&entry.text), textarea_rect, buf, &mut state);
|
||||
if entry.text.text().is_empty() {
|
||||
Paragraph::new(Line::from(self.notes_placeholder().dim())).render(textarea_rect, buf);
|
||||
}
|
||||
Clear.render(area, buf);
|
||||
entry.composer.render_textarea_only(area, buf);
|
||||
}
|
||||
|
||||
fn focus_is_options(&self) -> bool {
|
||||
@@ -371,5 +311,5 @@ impl RequestUserInputOverlay {
|
||||
}
|
||||
|
||||
fn notes_prefix() -> &'static str {
|
||||
"Notes: "
|
||||
"Notes"
|
||||
}
|
||||
|
||||
@@ -5,8 +5,8 @@ expression: "render_snapshot(&overlay, area)"
|
||||
Question 1/1
|
||||
Goal
|
||||
Share details.
|
||||
+--------------------------------------------------------------+
|
||||
|Type your answer (optional) |
|
||||
+--------------------------------------------------------------+
|
||||
|
||||
› Type your answer (optional)
|
||||
|
||||
Unanswered: 1 | Will submit as skipped
|
||||
↑/↓ scroll | enter next question | esc interrupt
|
||||
|
||||
@@ -14,7 +14,7 @@ Answer
|
||||
|
||||
|
||||
Notes for Option 1 (optional)
|
||||
+--------------------------------------------------------------+
|
||||
|Add notes (optional) |
|
||||
+--------------------------------------------------------------+
|
||||
|
||||
› Add notes (optional)
|
||||
|
||||
Option 1 of 3 | ↑/↓ scroll | enter next question | esc interrupt
|
||||
|
||||
@@ -10,5 +10,5 @@ What would you like to do next?
|
||||
( ) Run tests Pick a crate and run its
|
||||
tests.
|
||||
( ) Review a diff Summarize or review current
|
||||
Notes: Add notes (optional)
|
||||
Notes› Add notes (optional)
|
||||
Option 4 of 5 | ↑/↓ scroll | enter next question | esc interrupt
|
||||
|
||||
@@ -8,5 +8,5 @@ Choose an option.
|
||||
(x) Option 1 First choice.
|
||||
( ) Option 2 Second choice.
|
||||
( ) Option 3 Third choice.
|
||||
Notes: Add notes (optional)
|
||||
Notes› Add notes (optional)
|
||||
Option 1 of 3 | ↑/↓ scroll | enter next question | esc inter
|
||||
|
||||
@@ -93,6 +93,7 @@ use codex_protocol::ThreadId;
|
||||
use codex_protocol::account::PlanType;
|
||||
use codex_protocol::approvals::ElicitationRequestEvent;
|
||||
use codex_protocol::config_types::CollaborationMode;
|
||||
use codex_protocol::config_types::CollaborationModeMask;
|
||||
use codex_protocol::config_types::ModeKind;
|
||||
use codex_protocol::config_types::Settings;
|
||||
use codex_protocol::models::local_image_label_text;
|
||||
@@ -415,12 +416,12 @@ pub(crate) struct ChatWidget {
|
||||
/// where the overlay may briefly treat new tail content as already cached.
|
||||
active_cell_revision: u64,
|
||||
config: Config,
|
||||
/// Stored collaboration mode with model and reasoning effort.
|
||||
/// The unmasked collaboration mode settings (always Custom mode).
|
||||
///
|
||||
/// When collaboration modes feature is enabled, this is initialized to the first preset.
|
||||
/// When disabled, this is Custom. The model and reasoning effort are stored here instead of
|
||||
/// being read from config or current_model.
|
||||
stored_collaboration_mode: CollaborationMode,
|
||||
/// Masks are applied on top of this base mode to derive the effective mode.
|
||||
current_collaboration_mode: CollaborationMode,
|
||||
/// The currently active collaboration mask, if any.
|
||||
active_collaboration_mask: Option<CollaborationModeMask>,
|
||||
auth_manager: Arc<AuthManager>,
|
||||
models_manager: Arc<ModelsManager>,
|
||||
otel_manager: OtelManager,
|
||||
@@ -731,16 +732,12 @@ impl ChatWidget {
|
||||
let initial_messages = event.initial_messages.clone();
|
||||
let model_for_header = event.model.clone();
|
||||
self.session_header.set_model(&model_for_header);
|
||||
// Only update stored collaboration settings when collaboration modes are disabled.
|
||||
// When enabled, we preserve the selected variant (Plan/Pair/Execute/Custom) and its
|
||||
// instructions as-is; the session configured event should not override it.
|
||||
if !self.collaboration_modes_enabled() {
|
||||
self.stored_collaboration_mode = self.stored_collaboration_mode.with_updates(
|
||||
Some(model_for_header.clone()),
|
||||
Some(event.reasoning_effort),
|
||||
None,
|
||||
);
|
||||
}
|
||||
self.current_collaboration_mode = self.current_collaboration_mode.with_updates(
|
||||
Some(model_for_header.clone()),
|
||||
Some(event.reasoning_effort),
|
||||
None,
|
||||
);
|
||||
self.refresh_model_display();
|
||||
let session_info_cell = history_cell::new_session_info(
|
||||
&self.config,
|
||||
&model_for_header,
|
||||
@@ -910,7 +907,7 @@ impl ChatWidget {
|
||||
if !self.queued_user_messages.is_empty() {
|
||||
return;
|
||||
}
|
||||
if self.stored_collaboration_mode.mode != ModeKind::Plan {
|
||||
if self.active_mode_kind() != ModeKind::Plan {
|
||||
return;
|
||||
}
|
||||
let has_message = last_agent_message.is_some_and(|message| !message.trim().is_empty());
|
||||
@@ -932,14 +929,14 @@ impl ChatWidget {
|
||||
}
|
||||
|
||||
fn open_plan_implementation_prompt(&mut self) {
|
||||
let code_mode = collaboration_modes::code_mode(self.models_manager.as_ref());
|
||||
let (implement_actions, implement_disabled_reason) = match code_mode {
|
||||
Some(collaboration_mode) => {
|
||||
let code_mask = collaboration_modes::code_mask(self.models_manager.as_ref());
|
||||
let (implement_actions, implement_disabled_reason) = match code_mask {
|
||||
Some(mask) => {
|
||||
let user_text = PLAN_IMPLEMENTATION_CODING_MESSAGE.to_string();
|
||||
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
|
||||
tx.send(AppEvent::SubmitUserMessageWithMode {
|
||||
text: user_text.clone(),
|
||||
collaboration_mode: collaboration_mode.clone(),
|
||||
collaboration_mode: mask.clone(),
|
||||
});
|
||||
})];
|
||||
(actions, None)
|
||||
@@ -1930,23 +1927,25 @@ impl ChatWidget {
|
||||
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
|
||||
let codex_op_tx = spawn_agent(config.clone(), app_event_tx.clone(), thread_manager);
|
||||
|
||||
let model_for_header = model.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
|
||||
let model_override = model.as_deref();
|
||||
let model_for_header = model
|
||||
.clone()
|
||||
.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
|
||||
let active_collaboration_mask =
|
||||
Self::initial_collaboration_mask(&config, models_manager.as_ref(), model_override);
|
||||
let header_model = active_collaboration_mask
|
||||
.as_ref()
|
||||
.and_then(|mask| mask.model.clone())
|
||||
.unwrap_or_else(|| model_for_header.clone());
|
||||
let fallback_custom = Settings {
|
||||
model: model_for_header.clone(),
|
||||
model: header_model.clone(),
|
||||
reasoning_effort: None,
|
||||
developer_instructions: None,
|
||||
};
|
||||
let stored_collaboration_mode = if config.features.enabled(Feature::CollaborationModes) {
|
||||
initial_collaboration_mode(
|
||||
models_manager.as_ref(),
|
||||
fallback_custom,
|
||||
config.experimental_mode,
|
||||
)
|
||||
} else {
|
||||
CollaborationMode {
|
||||
mode: ModeKind::Custom,
|
||||
settings: fallback_custom,
|
||||
}
|
||||
// Collaboration modes start in Custom mode (not activated).
|
||||
let current_collaboration_mode = CollaborationMode {
|
||||
mode: ModeKind::Custom,
|
||||
settings: fallback_custom,
|
||||
};
|
||||
|
||||
let active_cell = Some(Self::placeholder_session_header_cell(&config));
|
||||
@@ -1970,11 +1969,12 @@ impl ChatWidget {
|
||||
config,
|
||||
skills_all: Vec::new(),
|
||||
skills_initial_state: None,
|
||||
stored_collaboration_mode,
|
||||
current_collaboration_mode,
|
||||
active_collaboration_mask,
|
||||
auth_manager,
|
||||
models_manager,
|
||||
otel_manager,
|
||||
session_header: SessionHeader::new(model_for_header),
|
||||
session_header: SessionHeader::new(header_model),
|
||||
initial_user_message,
|
||||
token_info: None,
|
||||
rate_limit_snapshot: None,
|
||||
@@ -2051,23 +2051,25 @@ impl ChatWidget {
|
||||
let mut rng = rand::rng();
|
||||
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
|
||||
|
||||
let model_for_header = model.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
|
||||
let model_override = model.as_deref();
|
||||
let model_for_header = model
|
||||
.clone()
|
||||
.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
|
||||
let active_collaboration_mask =
|
||||
Self::initial_collaboration_mask(&config, models_manager.as_ref(), model_override);
|
||||
let header_model = active_collaboration_mask
|
||||
.as_ref()
|
||||
.and_then(|mask| mask.model.clone())
|
||||
.unwrap_or_else(|| model_for_header.clone());
|
||||
let fallback_custom = Settings {
|
||||
model: model_for_header.clone(),
|
||||
model: header_model.clone(),
|
||||
reasoning_effort: None,
|
||||
developer_instructions: None,
|
||||
};
|
||||
let stored_collaboration_mode = if config.features.enabled(Feature::CollaborationModes) {
|
||||
initial_collaboration_mode(
|
||||
models_manager.as_ref(),
|
||||
fallback_custom,
|
||||
config.experimental_mode,
|
||||
)
|
||||
} else {
|
||||
CollaborationMode {
|
||||
mode: ModeKind::Custom,
|
||||
settings: fallback_custom,
|
||||
}
|
||||
// Collaboration modes start in Custom mode (not activated).
|
||||
let current_collaboration_mode = CollaborationMode {
|
||||
mode: ModeKind::Custom,
|
||||
settings: fallback_custom,
|
||||
};
|
||||
|
||||
let active_cell = Some(Self::placeholder_session_header_cell(&config));
|
||||
@@ -2091,11 +2093,12 @@ impl ChatWidget {
|
||||
config,
|
||||
skills_all: Vec::new(),
|
||||
skills_initial_state: None,
|
||||
stored_collaboration_mode,
|
||||
current_collaboration_mode,
|
||||
active_collaboration_mask,
|
||||
auth_manager,
|
||||
models_manager,
|
||||
otel_manager,
|
||||
session_header: SessionHeader::new(model_for_header),
|
||||
session_header: SessionHeader::new(header_model),
|
||||
initial_user_message,
|
||||
token_info: None,
|
||||
rate_limit_snapshot: None,
|
||||
@@ -2171,7 +2174,16 @@ impl ChatWidget {
|
||||
let mut rng = rand::rng();
|
||||
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
|
||||
|
||||
let header_model = model.unwrap_or_else(|| session_configured.model.clone());
|
||||
let model_override = model.as_deref();
|
||||
let header_model = model
|
||||
.clone()
|
||||
.unwrap_or_else(|| session_configured.model.clone());
|
||||
let active_collaboration_mask =
|
||||
Self::initial_collaboration_mask(&config, models_manager.as_ref(), model_override);
|
||||
let header_model = active_collaboration_mask
|
||||
.as_ref()
|
||||
.and_then(|mask| mask.model.clone())
|
||||
.unwrap_or(header_model);
|
||||
|
||||
let codex_op_tx =
|
||||
spawn_agent_from_existing(conversation, session_configured, app_event_tx.clone());
|
||||
@@ -2181,17 +2193,10 @@ impl ChatWidget {
|
||||
reasoning_effort: None,
|
||||
developer_instructions: None,
|
||||
};
|
||||
let stored_collaboration_mode = if config.features.enabled(Feature::CollaborationModes) {
|
||||
initial_collaboration_mode(
|
||||
models_manager.as_ref(),
|
||||
fallback_custom,
|
||||
config.experimental_mode,
|
||||
)
|
||||
} else {
|
||||
CollaborationMode {
|
||||
mode: ModeKind::Custom,
|
||||
settings: fallback_custom,
|
||||
}
|
||||
// Collaboration modes start in Custom mode (not activated).
|
||||
let current_collaboration_mode = CollaborationMode {
|
||||
mode: ModeKind::Custom,
|
||||
settings: fallback_custom,
|
||||
};
|
||||
|
||||
let mut widget = Self {
|
||||
@@ -2213,7 +2218,8 @@ impl ChatWidget {
|
||||
config,
|
||||
skills_all: Vec::new(),
|
||||
skills_initial_state: None,
|
||||
stored_collaboration_mode,
|
||||
current_collaboration_mode,
|
||||
active_collaboration_mask,
|
||||
auth_manager,
|
||||
models_manager,
|
||||
otel_manager,
|
||||
@@ -2707,9 +2713,7 @@ impl ChatWidget {
|
||||
} else if self.bottom_pane.is_in_paste_burst() {
|
||||
// While capturing a burst, schedule a follow-up tick and skip this frame
|
||||
// to avoid redundant renders between ticks.
|
||||
frame_requester.schedule_frame_in(
|
||||
crate::bottom_pane::ChatComposer::recommended_paste_flush_delay(),
|
||||
);
|
||||
frame_requester.schedule_frame_in(self.bottom_pane.recommended_paste_burst_delay());
|
||||
true
|
||||
} else {
|
||||
false
|
||||
@@ -2816,18 +2820,24 @@ impl ChatWidget {
|
||||
}
|
||||
}
|
||||
|
||||
let effective_mode = self.effective_collaboration_mode();
|
||||
let collaboration_mode = if self.collaboration_modes_enabled() {
|
||||
self.active_collaboration_mask
|
||||
.as_ref()
|
||||
.map(|_| effective_mode.clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let op = Op::UserTurn {
|
||||
items,
|
||||
cwd: self.config.cwd.clone(),
|
||||
approval_policy: self.config.approval_policy.value(),
|
||||
sandbox_policy: self.config.sandbox_policy.get().clone(),
|
||||
model: self.stored_collaboration_mode.model().to_string(),
|
||||
effort: self.stored_collaboration_mode.reasoning_effort(),
|
||||
model: effective_mode.model().to_string(),
|
||||
effort: effective_mode.reasoning_effort(),
|
||||
summary: self.config.model_reasoning_summary,
|
||||
final_output_json_schema: None,
|
||||
collaboration_mode: self
|
||||
.collaboration_modes_enabled()
|
||||
.then(|| self.stored_collaboration_mode.clone()),
|
||||
collaboration_mode,
|
||||
personality: None,
|
||||
};
|
||||
|
||||
@@ -3180,7 +3190,7 @@ impl ChatWidget {
|
||||
.map(|ti| &ti.total_token_usage)
|
||||
.unwrap_or(&default_usage);
|
||||
let collaboration_mode = self.collaboration_mode_label();
|
||||
let reasoning_effort_override = Some(self.stored_collaboration_mode.reasoning_effort());
|
||||
let reasoning_effort_override = Some(self.effective_reasoning_effort());
|
||||
self.add_to_history(crate::status::new_status_output(
|
||||
&self.config,
|
||||
self.auth_manager.as_ref(),
|
||||
@@ -3558,23 +3568,24 @@ impl ChatWidget {
|
||||
return;
|
||||
}
|
||||
|
||||
let current_kind = self
|
||||
.active_collaboration_mask
|
||||
.as_ref()
|
||||
.and_then(|mask| mask.mode)
|
||||
.or_else(|| {
|
||||
collaboration_modes::default_mask(self.models_manager.as_ref())
|
||||
.and_then(|mask| mask.mode)
|
||||
});
|
||||
let items: Vec<SelectionItem> = presets
|
||||
.into_iter()
|
||||
.map(|preset| {
|
||||
let name = match preset.mode {
|
||||
ModeKind::Plan => "Plan",
|
||||
ModeKind::Code => "Code",
|
||||
ModeKind::PairProgramming => "Pair Programming",
|
||||
ModeKind::Execute => "Execute",
|
||||
ModeKind::Custom => "Custom",
|
||||
};
|
||||
let is_current =
|
||||
collaboration_modes::same_variant(&self.stored_collaboration_mode, &preset);
|
||||
.map(|mask| {
|
||||
let name = mask.name.clone();
|
||||
let is_current = current_kind == mask.mode;
|
||||
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
|
||||
tx.send(AppEvent::UpdateCollaborationMode(preset.clone()));
|
||||
tx.send(AppEvent::UpdateCollaborationMode(mask.clone()));
|
||||
})];
|
||||
SelectionItem {
|
||||
name: name.to_string(),
|
||||
name,
|
||||
is_current,
|
||||
actions,
|
||||
dismiss_on_select: true,
|
||||
@@ -3690,7 +3701,7 @@ impl ChatWidget {
|
||||
let model_slug = preset.model.to_string();
|
||||
let is_current_model = self.current_model() == preset.model.as_str();
|
||||
let highlight_choice = if is_current_model {
|
||||
self.stored_collaboration_mode.reasoning_effort()
|
||||
self.effective_reasoning_effort()
|
||||
} else {
|
||||
default_choice
|
||||
};
|
||||
@@ -4548,21 +4559,15 @@ impl ChatWidget {
|
||||
}
|
||||
if feature == Feature::CollaborationModes {
|
||||
self.bottom_pane.set_collaboration_modes_enabled(enabled);
|
||||
let settings = self.stored_collaboration_mode.settings.clone();
|
||||
let fallback_custom = settings.clone();
|
||||
self.stored_collaboration_mode = if enabled {
|
||||
initial_collaboration_mode(
|
||||
self.models_manager.as_ref(),
|
||||
fallback_custom,
|
||||
self.config.experimental_mode,
|
||||
)
|
||||
} else {
|
||||
CollaborationMode {
|
||||
mode: ModeKind::Custom,
|
||||
settings,
|
||||
}
|
||||
let settings = self.current_collaboration_mode.settings.clone();
|
||||
self.current_collaboration_mode = CollaborationMode {
|
||||
mode: ModeKind::Custom,
|
||||
settings,
|
||||
};
|
||||
self.active_collaboration_mask = None;
|
||||
self.update_collaboration_mode_indicator();
|
||||
self.refresh_model_display();
|
||||
self.request_redraw();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4591,31 +4596,52 @@ impl ChatWidget {
|
||||
|
||||
/// Set the reasoning effort in the stored collaboration mode.
|
||||
pub(crate) fn set_reasoning_effort(&mut self, effort: Option<ReasoningEffortConfig>) {
|
||||
self.stored_collaboration_mode =
|
||||
self.stored_collaboration_mode
|
||||
self.current_collaboration_mode =
|
||||
self.current_collaboration_mode
|
||||
.with_updates(None, Some(effort), None);
|
||||
if self.collaboration_modes_enabled()
|
||||
&& let Some(mask) = self.active_collaboration_mask.as_mut()
|
||||
{
|
||||
mask.reasoning_effort = Some(effort);
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the model in the widget's config copy and stored collaboration mode.
|
||||
pub(crate) fn set_model(&mut self, model: &str) {
|
||||
self.session_header.set_model(model);
|
||||
self.stored_collaboration_mode =
|
||||
self.stored_collaboration_mode
|
||||
self.current_collaboration_mode =
|
||||
self.current_collaboration_mode
|
||||
.with_updates(Some(model.to_string()), None, None);
|
||||
if self.collaboration_modes_enabled()
|
||||
&& let Some(mask) = self.active_collaboration_mask.as_mut()
|
||||
{
|
||||
mask.model = Some(model.to_string());
|
||||
}
|
||||
self.refresh_model_display();
|
||||
}
|
||||
|
||||
pub(crate) fn current_model(&self) -> &str {
|
||||
self.stored_collaboration_mode.model()
|
||||
if !self.collaboration_modes_enabled() {
|
||||
return self.current_collaboration_mode.model();
|
||||
}
|
||||
self.active_collaboration_mask
|
||||
.as_ref()
|
||||
.and_then(|mask| mask.model.as_deref())
|
||||
.unwrap_or_else(|| self.current_collaboration_mode.model())
|
||||
}
|
||||
|
||||
#[allow(dead_code)] // Used in tests
|
||||
pub(crate) fn stored_collaboration_mode(&self) -> &CollaborationMode {
|
||||
&self.stored_collaboration_mode
|
||||
pub(crate) fn current_collaboration_mode(&self) -> &CollaborationMode {
|
||||
&self.current_collaboration_mode
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn current_reasoning_effort(&self) -> Option<ReasoningEffortConfig> {
|
||||
self.stored_collaboration_mode.reasoning_effort()
|
||||
self.effective_reasoning_effort()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn active_collaboration_mode_kind(&self) -> ModeKind {
|
||||
self.active_mode_kind()
|
||||
}
|
||||
|
||||
fn is_session_configured(&self) -> bool {
|
||||
@@ -4626,6 +4652,57 @@ impl ChatWidget {
|
||||
self.config.features.enabled(Feature::CollaborationModes)
|
||||
}
|
||||
|
||||
fn initial_collaboration_mask(
config: &Config,
models_manager: &ModelsManager,
model_override: Option<&str>,
) -> Option<CollaborationModeMask> {
if !config.features.enabled(Feature::CollaborationModes) {
return None;
}
let mut mask = match config.experimental_mode {
Some(kind) => collaboration_modes::mask_for_kind(models_manager, kind)?,
None => collaboration_modes::default_mask(models_manager)?,
};
if let Some(model_override) = model_override {
mask.model = Some(model_override.to_string());
}
Some(mask)
}

fn active_mode_kind(&self) -> ModeKind {
self.active_collaboration_mask
.as_ref()
.and_then(|mask| mask.mode)
.unwrap_or(ModeKind::Custom)
}

fn effective_reasoning_effort(&self) -> Option<ReasoningEffortConfig> {
if !self.collaboration_modes_enabled() {
return self.current_collaboration_mode.reasoning_effort();
}
let current_effort = self.current_collaboration_mode.reasoning_effort();
self.active_collaboration_mask
.as_ref()
.and_then(|mask| mask.reasoning_effort)
.unwrap_or(current_effort)
}

fn effective_collaboration_mode(&self) -> CollaborationMode {
if !self.collaboration_modes_enabled() {
return self.current_collaboration_mode.clone();
}
self.active_collaboration_mask.as_ref().map_or_else(
|| self.current_collaboration_mode.clone(),
|mask| self.current_collaboration_mode.apply_mask(mask),
)
}

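// Editor's sketch (not part of this diff): the overlay rule the effective_*
// helpers above rely on, assuming `CollaborationModeMask` exposes optional
// `mode`, `model`, and `reasoning_effort` fields that shadow the stored custom
// settings. The real merge is `CollaborationMode::apply_mask` in codex-protocol;
// this only illustrates the Option-fallback semantics.
fn overlay_sketch(base: &CollaborationMode, mask: &CollaborationModeMask) -> CollaborationMode {
    CollaborationMode {
        // A mask without an explicit kind behaves like Custom.
        mode: mask.mode.unwrap_or(ModeKind::Custom),
        settings: Settings {
            // Mask fields win when present; otherwise keep the stored value.
            model: mask.model.clone().unwrap_or_else(|| base.model().to_string()),
            reasoning_effort: mask.reasoning_effort.unwrap_or(base.reasoning_effort()),
            developer_instructions: base.settings.developer_instructions.clone(),
        },
    }
}
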
fn refresh_model_display(&mut self) {
let effective = self.effective_collaboration_mode();
self.session_header.set_model(effective.model());
}

fn model_display_name(&self) -> &str {
let model = self.current_model();
if model.is_empty() {
@@ -4640,7 +4717,7 @@ impl ChatWidget {
if !self.collaboration_modes_enabled() {
return None;
}
match self.stored_collaboration_mode.mode {
match self.active_mode_kind() {
ModeKind::Plan => Some("Plan"),
ModeKind::Code => Some("Code"),
ModeKind::PairProgramming => Some("Pair Programming"),
@@ -4653,7 +4730,7 @@ impl ChatWidget {
if !self.collaboration_modes_enabled() {
return None;
}
match self.stored_collaboration_mode.mode {
match self.active_mode_kind() {
ModeKind::Plan => Some(CollaborationModeIndicator::Plan),
ModeKind::Code => Some(CollaborationModeIndicator::Code),
ModeKind::PairProgramming => Some(CollaborationModeIndicator::PairProgramming),
@@ -4673,26 +4750,25 @@ impl ChatWidget {
return;
}

if let Some(next_mode) = collaboration_modes::next_mode(
if let Some(next_mask) = collaboration_modes::next_mask(
self.models_manager.as_ref(),
&self.stored_collaboration_mode,
self.active_collaboration_mask.as_ref(),
) {
self.set_collaboration_mode(next_mode);
self.set_collaboration_mask(next_mask);
}
}

/// Update the stored collaboration mode.
/// Update the active collaboration mask.
///
/// When collaboration modes are enabled, the current mode is attached to *every*
/// submission as `Op::UserTurn { collaboration_mode: Some(...) }`.
pub(crate) fn set_collaboration_mode(&mut self, mode: CollaborationMode) {
/// When collaboration modes are enabled and a preset is selected (not Custom),
/// the current mode is attached to submissions as `Op::UserTurn { collaboration_mode: Some(...) }`.
pub(crate) fn set_collaboration_mask(&mut self, mask: CollaborationModeMask) {
if !self.collaboration_modes_enabled() {
return;
}
let old_model = self.stored_collaboration_mode.model().to_string();
let mode = mode.with_updates(Some(old_model), None, None);
self.stored_collaboration_mode = mode;
self.active_collaboration_mask = Some(mask);
self.update_collaboration_mode_indicator();
self.refresh_model_display();
self.request_redraw();
}

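// Editor's sketch (not part of this diff): how a submission could decide which
// collaboration mode, if any, to attach, matching the doc comment above and the
// `collab_mode_is_not_sent_until_selected` test below. The helper name is
// hypothetical; the real wiring lives in ChatWidget's submission path.
fn collaboration_mode_for_submission_sketch(widget: &ChatWidget) -> Option<CollaborationMode> {
    if !widget.collaboration_modes_enabled() {
        return None;
    }
    // No active mask means the user has not picked a preset yet (Custom), so
    // nothing is attached to `Op::UserTurn { collaboration_mode, .. }`.
    widget
        .active_collaboration_mask
        .as_ref()
        .map(|mask| widget.current_collaboration_mode.apply_mask(mask))
}
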
@@ -4875,11 +4951,9 @@ impl ChatWidget {
pub(crate) fn submit_user_message_with_mode(
&mut self,
text: String,
collaboration_mode: CollaborationMode,
collaboration_mode: CollaborationModeMask,
) {
let model = collaboration_mode.model().to_string();
self.set_collaboration_mode(collaboration_mode);
self.set_model(&model);
self.set_collaboration_mask(collaboration_mode);
self.submit_user_message(text.into());
}

@@ -5306,29 +5380,6 @@ fn extract_first_bold(s: &str) -> Option<String> {
None
}

fn initial_collaboration_mode(
models_manager: &ModelsManager,
fallback_custom: Settings,
desired_mode: Option<ModeKind>,
) -> CollaborationMode {
if let Some(kind) = desired_mode {
if kind == ModeKind::Custom {
return CollaborationMode {
mode: ModeKind::Custom,
settings: fallback_custom,
};
}
if let Some(mode) = collaboration_modes::mode_for_kind(models_manager, kind) {
return mode;
}
}

collaboration_modes::default_mode(models_manager).unwrap_or(CollaborationMode {
mode: ModeKind::Custom,
settings: fallback_custom,
})
}

async fn fetch_rate_limits(base_url: String, auth: CodexAuth) -> Option<RateLimitSnapshot> {
match BackendClient::from_auth(base_url, &auth) {
Ok(client) => match client.get_rate_limits().await {

@@ -89,6 +89,7 @@ use tempfile::NamedTempFile;
use tempfile::tempdir;
use tokio::sync::mpsc::error::TryRecvError;
use tokio::sync::mpsc::unbounded_channel;
use toml::Value as TomlValue;

#[cfg(target_os = "windows")]
fn set_windows_sandbox_enabled(enabled: bool) {
@@ -777,29 +778,16 @@ async fn make_chatwidget_manual(
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let codex_home = cfg.codex_home.clone();
let models_manager = Arc::new(ModelsManager::new(codex_home, auth_manager.clone()));
let collaboration_modes_enabled = cfg.features.enabled(Feature::CollaborationModes);
let reasoning_effort = None;
let stored_collaboration_mode = if collaboration_modes_enabled {
collaboration_modes::default_mode(models_manager.as_ref()).unwrap_or_else(|| {
CollaborationMode {
mode: ModeKind::Custom,
settings: Settings {
model: resolved_model.clone(),
reasoning_effort,
developer_instructions: None,
},
}
})
} else {
CollaborationMode {
mode: ModeKind::Custom,
settings: Settings {
model: resolved_model.clone(),
reasoning_effort,
developer_instructions: None,
},
}
let base_mode = CollaborationMode {
mode: ModeKind::Custom,
settings: Settings {
model: resolved_model.clone(),
reasoning_effort,
developer_instructions: None,
},
};
let current_collaboration_mode = base_mode;
let widget = ChatWidget {
app_event_tx,
codex_op_tx: op_tx,
@@ -807,7 +795,8 @@ async fn make_chatwidget_manual(
active_cell: None,
active_cell_revision: 0,
config: cfg,
stored_collaboration_mode,
current_collaboration_mode,
active_collaboration_mask: None,
auth_manager,
models_manager,
otel_manager,
@@ -1214,7 +1203,7 @@ async fn plan_implementation_popup_yes_emits_submit_message_event() {
panic!("expected SubmitUserMessageWithMode, got {event:?}");
};
assert_eq!(text, PLAN_IMPLEMENTATION_CODING_MESSAGE);
assert_eq!(collaboration_mode.mode, ModeKind::Code);
assert_eq!(collaboration_mode.mode, Some(ModeKind::Code));
}

#[tokio::test]
@@ -1223,7 +1212,7 @@ async fn submit_user_message_with_mode_sets_coding_collaboration_mode() {
chat.thread_id = Some(ThreadId::new());
chat.set_feature_enabled(Feature::CollaborationModes, true);

let code_mode = collaboration_modes::code_mode(chat.models_manager.as_ref())
let code_mode = collaboration_modes::code_mask(chat.models_manager.as_ref())
.expect("expected code collaboration mode");
chat.submit_user_message_with_mode("Implement the plan.".to_string(), code_mode);

@@ -1247,14 +1236,10 @@ async fn submit_user_message_with_mode_sets_coding_collaboration_mode() {
async fn plan_implementation_popup_skips_replayed_turn_complete() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.stored_collaboration_mode = CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: chat.current_model().to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);

chat.replay_initial_messages(vec![EventMsg::TurnComplete(TurnCompleteEvent {
last_agent_message: Some("Plan details".to_string()),
@@ -1271,14 +1256,10 @@ async fn plan_implementation_popup_skips_replayed_turn_complete() {
async fn plan_implementation_popup_skips_when_messages_queued() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.stored_collaboration_mode = CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: chat.current_model().to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);
chat.bottom_pane.set_task_running(true);
chat.queue_user_message("Queued message".into());

@@ -1295,14 +1276,10 @@ async fn plan_implementation_popup_skips_when_messages_queued() {
async fn plan_implementation_popup_shows_on_plan_update_without_message() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5")).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.stored_collaboration_mode = CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: chat.current_model().to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);

chat.on_task_started();
chat.on_plan_update(UpdatePlanArgs {
@@ -1327,14 +1304,10 @@ async fn plan_implementation_popup_skips_when_rate_limit_prompt_pending() {
chat.auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.stored_collaboration_mode = CollaborationMode {
mode: ModeKind::Plan,
settings: Settings {
model: chat.current_model().to_string(),
reasoning_effort: None,
developer_instructions: None,
},
};
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);

chat.on_task_started();
chat.on_plan_update(UpdatePlanArgs {
@@ -2222,22 +2195,25 @@ async fn collab_mode_shift_tab_cycles_only_when_enabled_and_idle() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
chat.set_feature_enabled(Feature::CollaborationModes, false);

let initial = chat.stored_collaboration_mode.clone();
let initial = chat.current_collaboration_mode().clone();
chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
assert_eq!(chat.stored_collaboration_mode, initial);
assert_eq!(chat.current_collaboration_mode(), &initial);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Custom);

chat.set_feature_enabled(Feature::CollaborationModes, true);

chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
assert_eq!(chat.stored_collaboration_mode.mode, ModeKind::Plan);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
assert_eq!(chat.current_collaboration_mode(), &initial);

chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
assert_eq!(chat.stored_collaboration_mode.mode, ModeKind::Code);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Code);
assert_eq!(chat.current_collaboration_mode(), &initial);

chat.on_task_started();
let before = chat.stored_collaboration_mode.clone();
let before = chat.active_collaboration_mode_kind();
chat.handle_key_event(KeyEvent::from(KeyCode::BackTab));
assert_eq!(chat.stored_collaboration_mode, before);
assert_eq!(chat.active_collaboration_mode_kind(), before);
}

#[tokio::test]
@@ -2254,11 +2230,11 @@ async fn collab_slash_command_opens_picker_and_updates_mode() {
);

chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
let selected_mode = match rx.try_recv() {
Ok(AppEvent::UpdateCollaborationMode(mode)) => mode,
let selected_mask = match rx.try_recv() {
Ok(AppEvent::UpdateCollaborationMode(mask)) => mask,
other => panic!("expected UpdateCollaborationMode event, got {other:?}"),
};
chat.set_collaboration_mode(selected_mode);
chat.set_collaboration_mask(selected_mask);

chat.bottom_pane
.set_composer_text("hello".to_string(), Vec::new(), Vec::new());
@@ -2298,7 +2274,119 @@ async fn collab_slash_command_opens_picker_and_updates_mode() {
}

#[tokio::test]
async fn collab_mode_defaults_to_coding_when_enabled() {
async fn collaboration_modes_defaults_to_code_on_startup() {
let codex_home = tempdir().expect("tempdir");
let cfg = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.cli_overrides(vec![(
"features.collaboration_modes".to_string(),
TomlValue::Boolean(true),
)])
.build()
.await
.expect("config");
let resolved_model = ModelsManager::get_model_offline(cfg.model.as_deref());
let otel_manager = test_otel_manager(&cfg, resolved_model.as_str());
let thread_manager = Arc::new(ThreadManager::with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
));
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let init = ChatWidgetInit {
config: cfg,
frame_requester: FrameRequester::test_dummy(),
app_event_tx: AppEventSender::new(unbounded_channel::<AppEvent>().0),
initial_user_message: None,
enhanced_keys_supported: false,
auth_manager,
models_manager: thread_manager.get_models_manager(),
feedback: codex_feedback::CodexFeedback::new(),
is_first_run: true,
model: Some(resolved_model.clone()),
otel_manager,
};

let chat = ChatWidget::new(init, thread_manager);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Code);
assert_eq!(chat.current_model(), resolved_model);
}

#[tokio::test]
async fn experimental_mode_plan_applies_on_startup() {
let codex_home = tempdir().expect("tempdir");
let cfg = ConfigBuilder::default()
.codex_home(codex_home.path().to_path_buf())
.cli_overrides(vec![
(
"features.collaboration_modes".to_string(),
TomlValue::Boolean(true),
),
(
"tui.experimental_mode".to_string(),
TomlValue::String("plan".to_string()),
),
])
.build()
.await
.expect("config");
let resolved_model = ModelsManager::get_model_offline(cfg.model.as_deref());
let otel_manager = test_otel_manager(&cfg, resolved_model.as_str());
let thread_manager = Arc::new(ThreadManager::with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
));
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let init = ChatWidgetInit {
config: cfg,
frame_requester: FrameRequester::test_dummy(),
app_event_tx: AppEventSender::new(unbounded_channel::<AppEvent>().0),
initial_user_message: None,
enhanced_keys_supported: false,
auth_manager,
models_manager: thread_manager.get_models_manager(),
feedback: codex_feedback::CodexFeedback::new(),
is_first_run: true,
model: Some(resolved_model.clone()),
otel_manager,
};

let chat = ChatWidget::new(init, thread_manager);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
assert_eq!(chat.current_model(), resolved_model);
}

#[tokio::test]
async fn set_model_updates_active_collaboration_mask() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1")).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);

chat.set_model("gpt-5.1-codex-mini");

assert_eq!(chat.current_model(), "gpt-5.1-codex-mini");
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
}

#[tokio::test]
async fn set_reasoning_effort_updates_active_collaboration_mask() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1")).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
let plan_mask =
collaboration_modes::mask_for_kind(chat.models_manager.as_ref(), ModeKind::Plan)
.expect("expected plan collaboration mask");
chat.set_collaboration_mask(plan_mask);

chat.set_reasoning_effort(None);

assert_eq!(chat.current_reasoning_effort(), None);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Plan);
}

#[tokio::test]
async fn collab_mode_is_not_sent_until_selected() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
chat.thread_id = Some(ThreadId::new());
chat.set_feature_enabled(Feature::CollaborationModes, true);
@@ -2308,25 +2396,24 @@ async fn collab_mode_defaults_to_coding_when_enabled() {
chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
match next_submit_op(&mut op_rx) {
Op::UserTurn {
collaboration_mode:
Some(CollaborationMode {
mode: ModeKind::Code,
..
}),
collaboration_mode,
personality: None,
..
} => {}
} => {
assert_eq!(collaboration_mode, None);
}
other => {
panic!("expected Op::UserTurn with code collab mode, got {other:?}")
panic!("expected Op::UserTurn, got {other:?}")
}
}
}

#[tokio::test]
async fn collab_mode_enabling_sets_coding_default() {
async fn collab_mode_enabling_keeps_custom_until_selected() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
chat.set_feature_enabled(Feature::CollaborationModes, true);
assert_eq!(chat.stored_collaboration_mode.mode, ModeKind::Code);
assert_eq!(chat.active_collaboration_mode_kind(), ModeKind::Custom);
assert_eq!(chat.current_collaboration_mode().mode, ModeKind::Custom);
}

#[tokio::test]

@@ -1,70 +1,61 @@
use codex_core::models_manager::manager::ModelsManager;
use codex_protocol::config_types::CollaborationMode;
use codex_protocol::config_types::CollaborationModeMask;
use codex_protocol::config_types::ModeKind;

fn mode_kind(mode: &CollaborationMode) -> ModeKind {
mode.mode
}

fn is_tui_mode(kind: ModeKind) -> bool {
matches!(kind, ModeKind::Plan | ModeKind::Code)
}

fn filtered_presets(models_manager: &ModelsManager) -> Vec<CollaborationMode> {
fn filtered_presets(models_manager: &ModelsManager) -> Vec<CollaborationModeMask> {
models_manager
.list_collaboration_modes()
.into_iter()
.filter(|preset| is_tui_mode(mode_kind(preset)))
.filter(|mask| mask.mode.is_some_and(is_tui_mode))
.collect()
}

pub(crate) fn presets_for_tui(models_manager: &ModelsManager) -> Vec<CollaborationMode> {
pub(crate) fn presets_for_tui(models_manager: &ModelsManager) -> Vec<CollaborationModeMask> {
filtered_presets(models_manager)
}

pub(crate) fn default_mode(models_manager: &ModelsManager) -> Option<CollaborationMode> {
pub(crate) fn default_mask(models_manager: &ModelsManager) -> Option<CollaborationModeMask> {
let presets = filtered_presets(models_manager);
presets
.iter()
.find(|preset| preset.mode == ModeKind::Code)
.find(|mask| mask.mode == Some(ModeKind::Code))
.cloned()
.or_else(|| presets.into_iter().next())
}

pub(crate) fn mode_for_kind(
pub(crate) fn mask_for_kind(
models_manager: &ModelsManager,
kind: ModeKind,
) -> Option<CollaborationMode> {
) -> Option<CollaborationModeMask> {
if !is_tui_mode(kind) {
return None;
}
let presets = filtered_presets(models_manager);
presets.into_iter().find(|preset| mode_kind(preset) == kind)
}

pub(crate) fn same_variant(a: &CollaborationMode, b: &CollaborationMode) -> bool {
mode_kind(a) == mode_kind(b)
filtered_presets(models_manager)
.into_iter()
.find(|mask| mask.mode == Some(kind))
}

/// Cycle to the next collaboration mode preset in list order.
pub(crate) fn next_mode(
pub(crate) fn next_mask(
models_manager: &ModelsManager,
current: &CollaborationMode,
) -> Option<CollaborationMode> {
current: Option<&CollaborationModeMask>,
) -> Option<CollaborationModeMask> {
let presets = filtered_presets(models_manager);
if presets.is_empty() {
return None;
}
let current_kind = mode_kind(current);
let current_kind = current.and_then(|mask| mask.mode);
let next_index = presets
.iter()
.position(|preset| mode_kind(preset) == current_kind)
.position(|mask| mask.mode == current_kind)
.map_or(0, |idx| (idx + 1) % presets.len());
presets.get(next_index).cloned()
}

pub(crate) fn code_mode(models_manager: &ModelsManager) -> Option<CollaborationMode> {
filtered_presets(models_manager)
.into_iter()
.find(|preset| mode_kind(preset) == ModeKind::Code)
pub(crate) fn code_mask(models_manager: &ModelsManager) -> Option<CollaborationModeMask> {
mask_for_kind(models_manager, ModeKind::Code)
}

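// Editor's usage sketch (not part of this diff) for the helpers above: cycling
// starts from the first filtered preset when no mask is active, then walks the
// list in order and wraps around, which is what the Shift+Tab handler in
// chatwidget.rs relies on.
fn cycle_twice_sketch(models_manager: &ModelsManager) -> Option<CollaborationModeMask> {
    // With no active mask (Custom), `next_mask` returns the first preset.
    let first = next_mask(models_manager, None)?;
    // Passing the current mask back in advances to the following preset,
    // wrapping to the start once the end of the list is reached.
    next_mask(models_manager, Some(&first))
}
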
286 codex-rs/tui/src/cwd_prompt.rs Normal file
@@ -0,0 +1,286 @@
use std::path::Path;

use crate::key_hint;
use crate::render::Insets;
use crate::render::renderable::ColumnRenderable;
use crate::render::renderable::Renderable;
use crate::render::renderable::RenderableExt as _;
use crate::selection_list::selection_option_row;
use crate::tui::FrameRequester;
use crate::tui::Tui;
use crate::tui::TuiEvent;
use color_eyre::Result;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyEventKind;
use crossterm::event::KeyModifiers;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::prelude::Widget;
use ratatui::style::Stylize as _;
use ratatui::text::Line;
use ratatui::widgets::Clear;
use ratatui::widgets::WidgetRef;
use tokio_stream::StreamExt;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum CwdPromptAction {
Resume,
Fork,
}

impl CwdPromptAction {
fn verb(self) -> &'static str {
match self {
CwdPromptAction::Resume => "resume",
CwdPromptAction::Fork => "fork",
}
}

fn past_participle(self) -> &'static str {
match self {
CwdPromptAction::Resume => "resumed",
CwdPromptAction::Fork => "forked",
}
}
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum CwdSelection {
Current,
Session,
}

impl CwdSelection {
fn next(self) -> Self {
match self {
CwdSelection::Current => CwdSelection::Session,
CwdSelection::Session => CwdSelection::Current,
}
}

fn prev(self) -> Self {
match self {
CwdSelection::Current => CwdSelection::Session,
CwdSelection::Session => CwdSelection::Current,
}
}
}

pub(crate) async fn run_cwd_selection_prompt(
tui: &mut Tui,
action: CwdPromptAction,
current_cwd: &Path,
session_cwd: &Path,
) -> Result<CwdSelection> {
let mut screen = CwdPromptScreen::new(
tui.frame_requester(),
action,
current_cwd.display().to_string(),
session_cwd.display().to_string(),
);
tui.draw(u16::MAX, |frame| {
frame.render_widget_ref(&screen, frame.area());
})?;

let events = tui.event_stream();
tokio::pin!(events);

while !screen.is_done() {
if let Some(event) = events.next().await {
match event {
TuiEvent::Key(key_event) => screen.handle_key(key_event),
TuiEvent::Paste(_) => {}
TuiEvent::Draw => {
tui.draw(u16::MAX, |frame| {
frame.render_widget_ref(&screen, frame.area());
})?;
}
}
} else {
break;
}
}

Ok(screen.selection().unwrap_or(CwdSelection::Session))
}

struct CwdPromptScreen {
request_frame: FrameRequester,
action: CwdPromptAction,
current_cwd: String,
session_cwd: String,
highlighted: CwdSelection,
selection: Option<CwdSelection>,
}

impl CwdPromptScreen {
fn new(
request_frame: FrameRequester,
action: CwdPromptAction,
current_cwd: String,
session_cwd: String,
) -> Self {
Self {
request_frame,
action,
current_cwd,
session_cwd,
highlighted: CwdSelection::Session,
selection: None,
}
}

fn handle_key(&mut self, key_event: KeyEvent) {
if key_event.kind == KeyEventKind::Release {
return;
}
if key_event.modifiers.contains(KeyModifiers::CONTROL)
&& matches!(key_event.code, KeyCode::Char('c') | KeyCode::Char('d'))
{
self.select(CwdSelection::Session);
return;
}
match key_event.code {
KeyCode::Up | KeyCode::Char('k') => self.set_highlight(self.highlighted.prev()),
KeyCode::Down | KeyCode::Char('j') => self.set_highlight(self.highlighted.next()),
KeyCode::Char('1') => self.select(CwdSelection::Session),
KeyCode::Char('2') => self.select(CwdSelection::Current),
KeyCode::Enter => self.select(self.highlighted),
KeyCode::Esc => self.select(CwdSelection::Session),
_ => {}
}
}

fn set_highlight(&mut self, highlight: CwdSelection) {
if self.highlighted != highlight {
self.highlighted = highlight;
self.request_frame.schedule_frame();
}
}

fn select(&mut self, selection: CwdSelection) {
self.highlighted = selection;
self.selection = Some(selection);
self.request_frame.schedule_frame();
}

fn is_done(&self) -> bool {
self.selection.is_some()
}

fn selection(&self) -> Option<CwdSelection> {
self.selection
}
}

impl WidgetRef for &CwdPromptScreen {
fn render_ref(&self, area: Rect, buf: &mut Buffer) {
Clear.render(area, buf);
let mut column = ColumnRenderable::new();

let action_verb = self.action.verb();
let action_past = self.action.past_participle();
let current_cwd = self.current_cwd.as_str();
let session_cwd = self.session_cwd.as_str();

column.push("");
column.push(Line::from(vec![
"Choose working directory to ".into(),
action_verb.bold(),
" this session".into(),
]));
column.push("");
column.push(
Line::from(format!(
"Session = latest cwd recorded in the {action_past} session"
))
.dim()
.inset(Insets::tlbr(0, 2, 0, 0)),
);
column.push(
Line::from("Current = your current working directory".dim())
.inset(Insets::tlbr(0, 2, 0, 0)),
);
column.push("");
column.push(selection_option_row(
0,
format!("Use session directory ({session_cwd})"),
self.highlighted == CwdSelection::Session,
));
column.push(selection_option_row(
1,
format!("Use current directory ({current_cwd})"),
self.highlighted == CwdSelection::Current,
));
column.push("");
column.push(
Line::from(vec![
"Press ".dim(),
key_hint::plain(KeyCode::Enter).into(),
" to continue".dim(),
])
.inset(Insets::tlbr(0, 2, 0, 0)),
);
column.render(area, buf);
}
}

#[cfg(test)]
mod tests {
use super::*;
use crate::test_backend::VT100Backend;
use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;
use pretty_assertions::assert_eq;
use ratatui::Terminal;

fn new_prompt() -> CwdPromptScreen {
CwdPromptScreen::new(
FrameRequester::test_dummy(),
CwdPromptAction::Resume,
"/Users/example/current".to_string(),
"/Users/example/session".to_string(),
)
}

#[test]
fn cwd_prompt_snapshot() {
let screen = new_prompt();
let mut terminal = Terminal::new(VT100Backend::new(80, 14)).expect("terminal");
terminal
.draw(|frame| frame.render_widget_ref(&screen, frame.area()))
.expect("render cwd prompt");
insta::assert_snapshot!("cwd_prompt_modal", terminal.backend());
}

#[test]
fn cwd_prompt_fork_snapshot() {
let screen = CwdPromptScreen::new(
FrameRequester::test_dummy(),
CwdPromptAction::Fork,
"/Users/example/current".to_string(),
"/Users/example/session".to_string(),
);
let mut terminal = Terminal::new(VT100Backend::new(80, 14)).expect("terminal");
terminal
.draw(|frame| frame.render_widget_ref(&screen, frame.area()))
.expect("render cwd prompt");
insta::assert_snapshot!("cwd_prompt_fork_modal", terminal.backend());
}

#[test]
fn cwd_prompt_selects_session_by_default() {
let mut screen = new_prompt();
screen.handle_key(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
assert_eq!(screen.selection(), Some(CwdSelection::Session));
}

#[test]
fn cwd_prompt_can_select_current() {
let mut screen = new_prompt();
screen.handle_key(KeyEvent::new(KeyCode::Down, KeyModifiers::NONE));
screen.handle_key(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
assert_eq!(screen.selection(), Some(CwdSelection::Current));
}
}
@@ -27,13 +27,19 @@ use codex_core::config_loader::ConfigLoadError;
use codex_core::config_loader::format_config_error_with_source;
use codex_core::find_thread_path_by_id_str;
use codex_core::get_platform_sandbox;
use codex_core::path_utils;
use codex_core::protocol::AskForApproval;
use codex_core::read_session_meta_line;
use codex_core::terminal::Multiplexer;
use codex_protocol::config_types::AltScreenMode;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::RolloutLine;
use codex_utils_absolute_path::AbsolutePathBuf;
use cwd_prompt::CwdPromptAction;
use cwd_prompt::CwdSelection;
use std::fs::OpenOptions;
use std::path::Path;
use std::path::PathBuf;
use tracing::error;
use tracing_appender::non_blocking;
@@ -54,6 +60,7 @@ mod collab;
mod collaboration_modes;
mod color;
pub mod custom_terminal;
mod cwd_prompt;
mod diff_render;
mod exec_cell;
mod exec_command;
@@ -577,25 +584,27 @@ async fn run_ratatui_app(
resume_picker::SessionSelection::StartFresh
};

let current_cwd = config.cwd.clone();
let allow_prompt = cli.cwd.is_none();
let action_and_path_if_resume_or_fork = match &session_selection {
resume_picker::SessionSelection::Resume(path) => Some((CwdPromptAction::Resume, path)),
resume_picker::SessionSelection::Fork(path) => Some((CwdPromptAction::Fork, path)),
_ => None,
};
let fallback_cwd = match action_and_path_if_resume_or_fork {
Some((action, path)) => {
resolve_cwd_for_resume_or_fork(&mut tui, &current_cwd, path, action, allow_prompt)
.await?
}
None => None,
};

let config = match &session_selection {
resume_picker::SessionSelection::Resume(path)
| resume_picker::SessionSelection::Fork(path) => {
let history_cwd = match read_session_meta_line(path).await {
Ok(meta_line) => Some(meta_line.meta.cwd),
Err(err) => {
let rollout_path = path.display().to_string();
tracing::warn!(
%rollout_path,
%err,
"Failed to read session metadata from rollout"
);
None
}
};
resume_picker::SessionSelection::Resume(_) | resume_picker::SessionSelection::Fork(_) => {
load_config_or_exit_with_fallback_cwd(
cli_kv_overrides.clone(),
overrides.clone(),
history_cwd,
fallback_cwd,
)
.await
}
@@ -618,6 +627,8 @@ async fn run_ratatui_app(
&mut tui,
auth_manager,
config,
cli_kv_overrides.clone(),
overrides.clone(),
active_profile,
prompt,
images,
@@ -635,6 +646,77 @@ async fn run_ratatui_app(
app_result
}

pub(crate) async fn read_session_cwd(path: &Path) -> Option<PathBuf> {
// Prefer the latest TurnContext cwd so resume/fork reflects the most recent
// session directory (for the changed-cwd prompt). The alternative would be
// mutating the SessionMeta line when the session cwd changes, but the rollout
// is an append-only JSONL log and rewriting the head would be error-prone.
// When rollouts move to SQLite, we can drop this scan.
if let Some(cwd) = parse_latest_turn_context_cwd(path).await {
return Some(cwd);
}
match read_session_meta_line(path).await {
Ok(meta_line) => Some(meta_line.meta.cwd),
Err(err) => {
let rollout_path = path.display().to_string();
tracing::warn!(
%rollout_path,
%err,
"Failed to read session metadata from rollout"
);
None
}
}
}

async fn parse_latest_turn_context_cwd(path: &Path) -> Option<PathBuf> {
let text = tokio::fs::read_to_string(path).await.ok()?;
for line in text.lines().rev() {
let trimmed = line.trim();
if trimmed.is_empty() {
continue;
}
let Ok(rollout_line) = serde_json::from_str::<RolloutLine>(trimmed) else {
continue;
};
if let RolloutItem::TurnContext(item) = rollout_line.item {
return Some(item.cwd);
}
}
None
}

pub(crate) fn cwds_differ(current_cwd: &Path, session_cwd: &Path) -> bool {
match (
path_utils::normalize_for_path_comparison(current_cwd),
path_utils::normalize_for_path_comparison(session_cwd),
) {
(Ok(current), Ok(session)) => current != session,
_ => current_cwd != session_cwd,
}
}

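// Editor's usage sketch (not part of this diff): combining the helpers above to
// decide whether the changed-cwd prompt is needed for a rollout file. The
// function name is hypothetical; `resolve_cwd_for_resume_or_fork` below is the
// real call site.
async fn needs_cwd_prompt_sketch(rollout: &Path, current_cwd: &Path) -> bool {
    match read_session_cwd(rollout).await {
        // Prompt only when the recorded session cwd differs from where the
        // user launched the TUI.
        Some(session_cwd) => cwds_differ(current_cwd, &session_cwd),
        // No recorded cwd: nothing to compare, so no prompt.
        None => false,
    }
}
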
pub(crate) async fn resolve_cwd_for_resume_or_fork(
tui: &mut Tui,
current_cwd: &Path,
path: &Path,
action: CwdPromptAction,
allow_prompt: bool,
) -> color_eyre::Result<Option<PathBuf>> {
let Some(history_cwd) = read_session_cwd(path).await else {
return Ok(None);
};
if allow_prompt && cwds_differ(current_cwd, &history_cwd) {
let selection =
cwd_prompt::run_cwd_selection_prompt(tui, action, current_cwd, &history_cwd).await?;
return Ok(Some(match selection {
CwdSelection::Current => current_cwd.to_path_buf(),
CwdSelection::Session => history_cwd,
}));
}
Ok(Some(history_cwd))
}

#[expect(
clippy::print_stderr,
reason = "TUI should no longer be displayed, so we can write to stderr."
@@ -772,7 +854,14 @@ fn should_show_login_screen(login_status: LoginStatus, config: &Config) -> bool
mod tests {
use super::*;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
use codex_core::config::ProjectConfig;
use codex_core::protocol::AskForApproval;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionMeta;
use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::TurnContextItem;
use serial_test::serial;
use tempfile::TempDir;

@@ -846,4 +935,180 @@ mod tests {
);
Ok(())
}

fn build_turn_context(config: &Config, cwd: PathBuf) -> TurnContextItem {
let model = config
.model
.clone()
.unwrap_or_else(|| "gpt-5.1".to_string());
TurnContextItem {
cwd,
approval_policy: config.approval_policy.value(),
sandbox_policy: config.sandbox_policy.get().clone(),
model,
personality: None,
collaboration_mode: None,
effort: config.model_reasoning_effort,
summary: config.model_reasoning_summary,
user_instructions: None,
developer_instructions: None,
final_output_json_schema: None,
truncation_policy: None,
}
}

#[tokio::test]
async fn read_session_cwd_prefers_latest_turn_context() -> std::io::Result<()> {
let temp_dir = TempDir::new()?;
let config = build_config(&temp_dir).await?;
let first = temp_dir.path().join("first");
let second = temp_dir.path().join("second");
std::fs::create_dir_all(&first)?;
std::fs::create_dir_all(&second)?;

let rollout_path = temp_dir.path().join("rollout.jsonl");
let lines = vec![
RolloutLine {
timestamp: "t0".to_string(),
item: RolloutItem::TurnContext(build_turn_context(&config, first)),
},
RolloutLine {
timestamp: "t1".to_string(),
item: RolloutItem::TurnContext(build_turn_context(&config, second.clone())),
},
];
let mut text = String::new();
for line in lines {
text.push_str(&serde_json::to_string(&line).expect("serialize rollout"));
text.push('\n');
}
std::fs::write(&rollout_path, text)?;

let cwd = read_session_cwd(&rollout_path).await.expect("expected cwd");
assert_eq!(cwd, second);
Ok(())
}

#[tokio::test]
async fn should_prompt_when_meta_matches_current_but_latest_turn_differs() -> std::io::Result<()>
{
let temp_dir = TempDir::new()?;
let config = build_config(&temp_dir).await?;
let current = temp_dir.path().join("current");
let latest = temp_dir.path().join("latest");
std::fs::create_dir_all(&current)?;
std::fs::create_dir_all(&latest)?;

let rollout_path = temp_dir.path().join("rollout.jsonl");
let session_meta = SessionMeta {
cwd: current.clone(),
..SessionMeta::default()
};
let lines = vec![
RolloutLine {
timestamp: "t0".to_string(),
item: RolloutItem::SessionMeta(SessionMetaLine {
meta: session_meta,
git: None,
}),
},
RolloutLine {
timestamp: "t1".to_string(),
item: RolloutItem::TurnContext(build_turn_context(&config, latest.clone())),
},
];
let mut text = String::new();
for line in lines {
text.push_str(&serde_json::to_string(&line).expect("serialize rollout"));
text.push('\n');
}
std::fs::write(&rollout_path, text)?;

let session_cwd = read_session_cwd(&rollout_path).await.expect("expected cwd");
assert_eq!(session_cwd, latest);
assert!(cwds_differ(&current, &session_cwd));
Ok(())
}

#[tokio::test]
async fn config_rebuild_changes_trust_defaults_with_cwd() -> std::io::Result<()> {
let temp_dir = TempDir::new()?;
let codex_home = temp_dir.path().to_path_buf();
let trusted = temp_dir.path().join("trusted");
let untrusted = temp_dir.path().join("untrusted");
std::fs::create_dir_all(&trusted)?;
std::fs::create_dir_all(&untrusted)?;

// TOML keys need escaped backslashes on Windows paths.
let trusted_display = trusted.display().to_string().replace('\\', "\\\\");
let untrusted_display = untrusted.display().to_string().replace('\\', "\\\\");
let config_toml = format!(
r#"[projects."{trusted_display}"]
trust_level = "trusted"

[projects."{untrusted_display}"]
trust_level = "untrusted"
"#
);
std::fs::write(temp_dir.path().join("config.toml"), config_toml)?;

let trusted_overrides = ConfigOverrides {
cwd: Some(trusted.clone()),
..Default::default()
};
let trusted_config = ConfigBuilder::default()
.codex_home(codex_home.clone())
.harness_overrides(trusted_overrides.clone())
.build()
.await?;
assert_eq!(
trusted_config.approval_policy.value(),
AskForApproval::OnRequest
);

let untrusted_overrides = ConfigOverrides {
cwd: Some(untrusted),
..trusted_overrides
};
let untrusted_config = ConfigBuilder::default()
.codex_home(codex_home)
.harness_overrides(untrusted_overrides)
.build()
.await?;
assert_eq!(
untrusted_config.approval_policy.value(),
AskForApproval::UnlessTrusted
);
Ok(())
}

#[tokio::test]
async fn read_session_cwd_falls_back_to_session_meta() -> std::io::Result<()> {
let temp_dir = TempDir::new()?;
let _config = build_config(&temp_dir).await?;
let session_cwd = temp_dir.path().join("session");
std::fs::create_dir_all(&session_cwd)?;

let rollout_path = temp_dir.path().join("rollout.jsonl");
let session_meta = SessionMeta {
cwd: session_cwd.clone(),
..SessionMeta::default()
};
let meta_line = RolloutLine {
timestamp: "t0".to_string(),
item: RolloutItem::SessionMeta(SessionMetaLine {
meta: session_meta,
git: None,
}),
};
let text = format!(
"{}\n",
serde_json::to_string(&meta_line).expect("serialize meta")
);
std::fs::write(&rollout_path, text)?;

let cwd = read_session_cwd(&rollout_path).await.expect("expected cwd");
assert_eq!(cwd, session_cwd);
Ok(())
}
}

@@ -13,6 +13,7 @@ use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::bottom_pane::ChatComposer;
use crate::bottom_pane::InputResult;
use crate::bottom_pane::default_chat_composer;
use crate::render::renderable::Renderable;

/// Action returned from feeding a key event into the ComposerInput.
@@ -37,7 +38,8 @@ impl ComposerInput {
let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
let sender = AppEventSender::new(tx.clone());
// `enhanced_keys_supported=true` enables Shift+Enter newline hint/behavior.
let inner = ChatComposer::new(true, sender, true, "Compose new task".to_string(), false);
let inner =
default_chat_composer(true, sender, true, "Compose new task".to_string(), false);
Self { inner, _tx: tx, rx }
}

@@ -0,0 +1,14 @@
---
source: tui/src/cwd_prompt.rs
expression: terminal.backend()
---

Choose working directory to fork this session

Session = latest cwd recorded in the forked session
Current = your current working directory

› 1. Use session directory (/Users/example/session)
2. Use current directory (/Users/example/current)

Press enter to continue
@@ -0,0 +1,14 @@
---
source: tui/src/cwd_prompt.rs
expression: terminal.backend()
---

Choose working directory to resume this session

Session = latest cwd recorded in the resumed session

› 1. Use session directory (/Users/example/session)
2. Use current directory (/Users/example/current)

Press enter to continue
||||
Reference in New Issue
Block a user