Compare commits

...

1 Commit

Author SHA1 Message Date
Ahmed Ibrahim
ee97dcb23d Feature-flag request_user_input outside plan mode 2026-02-24 16:18:26 -08:00
19 changed files with 276 additions and 39 deletions

View File

@@ -177,6 +177,9 @@ impl MessageProcessor {
auth_manager.clone(),
SessionSource::VSCode,
config.model_catalog.clone(),
config
.features
.enabled(codex_core::features::Feature::RequestUserInputOutsidePlanMode),
));
let cloud_requirements = Arc::new(RwLock::new(cloud_requirements));
let codex_message_processor = CodexMessageProcessor::new(CodexMessageProcessorArgs {

View File

@@ -493,6 +493,86 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
Ok(())
}
#[tokio::test]
async fn turn_start_uses_request_user_input_flag_in_collaboration_mode_override_v2() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = responses::start_mock_server().await;
let body = responses::sse(vec![
responses::ev_response_created("resp-1"),
responses::ev_assistant_message("msg-1", "Done"),
responses::ev_completed("resp-1"),
]);
let response_mock = responses::mount_sse_once(&server, body).await;
let codex_home = TempDir::new()?;
create_config_toml(
codex_home.path(),
&server.uri(),
"never",
&BTreeMap::from([(Feature::RequestUserInputOutsidePlanMode, true)]),
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let thread_req = mcp
.send_thread_start_request(ThreadStartParams {
model: Some("gpt-5.2-codex".to_string()),
..Default::default()
})
.await?;
let thread_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
)
.await??;
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
let collaboration_mode = CollaborationMode {
mode: ModeKind::Default,
settings: Settings {
model: "mock-model-collab".to_string(),
reasoning_effort: Some(ReasoningEffort::High),
developer_instructions: None,
},
};
let turn_req = mcp
.send_turn_start_request(TurnStartParams {
thread_id: thread.id.clone(),
input: vec![V2UserInput::Text {
text: "Hello".to_string(),
text_elements: Vec::new(),
}],
model: Some("mock-model-override".to_string()),
effort: Some(ReasoningEffort::Low),
summary: Some(ReasoningSummary::Auto),
output_schema: None,
collaboration_mode: Some(collaboration_mode),
..Default::default()
})
.await?;
let turn_resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
)
.await??;
let _turn: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp)?;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("turn/completed"),
)
.await??;
let request = response_mock.single_request();
let payload_text = request.body_json().to_string();
assert!(payload_text.contains("The `request_user_input` tool is available in Default mode."));
Ok(())
}
#[tokio::test]
async fn turn_start_change_personality_mid_thread_v2() -> Result<()> {
skip_if_no_network!(Ok(()));

View File

@@ -373,6 +373,9 @@
"request_rule": {
"type": "boolean"
},
"request_user_input_outside_plan_mode": {
"type": "boolean"
},
"responses_websockets": {
"type": "boolean"
},
@@ -1660,6 +1663,9 @@
"request_rule": {
"type": "boolean"
},
"request_user_input_outside_plan_mode": {
"type": "boolean"
},
"responses_websockets": {
"type": "boolean"
},

View File

@@ -8068,6 +8068,9 @@ mod tests {
config.codex_home.clone(),
auth_manager.clone(),
None,
config
.features
.enabled(Feature::RequestUserInputOutsidePlanMode),
));
let model = ModelsManager::get_model_offline_for_tests(config.model.as_deref());
let model_info =
@@ -8143,6 +8146,9 @@ mod tests {
config.codex_home.clone(),
auth_manager.clone(),
None,
config
.features
.enabled(Feature::RequestUserInputOutsidePlanMode),
));
let agent_control = AgentControl::default();
let exec_policy = ExecPolicyManager::default();
@@ -8298,6 +8304,9 @@ mod tests {
config.codex_home.clone(),
auth_manager.clone(),
None,
config
.features
.enabled(Feature::RequestUserInputOutsidePlanMode),
));
let agent_control = AgentControl::default();
let exec_policy = ExecPolicyManager::default();

View File

@@ -138,6 +138,8 @@ pub enum Feature {
/// Enable collaboration modes (Plan, Default).
/// Kept for config backward compatibility; behavior is always collaboration-modes-enabled.
CollaborationModes,
/// Allow request_user_input outside plan mode.
RequestUserInputOutsidePlanMode,
/// Enable personality selection in the TUI.
Personality,
/// Enable voice transcription in the TUI composer.
@@ -631,6 +633,16 @@ pub const FEATURES: &[FeatureSpec] = &[
stage: Stage::Removed,
default_enabled: true,
},
FeatureSpec {
id: Feature::RequestUserInputOutsidePlanMode,
key: "request_user_input_outside_plan_mode",
stage: Stage::Experimental {
name: "Request user input outside Plan mode",
menu_description: "Allow Codex to use request_user_input in Default mode too.",
announcement: "NEW: request_user_input can be enabled outside Plan mode in /experimental.",
},
default_enabled: false,
},
FeatureSpec {
id: Feature::Personality,
key: "personality",

View File

@@ -3,14 +3,21 @@ use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::TUI_VISIBLE_COLLABORATION_MODES;
use codex_protocol::openai_models::ReasoningEffort;
use crate::tools::handlers::request_user_input_allowed_for_mode;
const COLLABORATION_MODE_PLAN: &str = include_str!("../../templates/collaboration_mode/plan.md");
const COLLABORATION_MODE_DEFAULT: &str =
include_str!("../../templates/collaboration_mode/default.md");
const KNOWN_MODE_NAMES_PLACEHOLDER: &str = "{{KNOWN_MODE_NAMES}}";
const REQUEST_USER_INPUT_AVAILABILITY_PLACEHOLDER: &str = "{{REQUEST_USER_INPUT_AVAILABILITY}}";
pub(crate) fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
vec![plan_preset(), default_preset()]
pub(crate) fn builtin_collaboration_mode_presets(
request_user_input_outside_plan_mode: bool,
) -> Vec<CollaborationModeMask> {
vec![
plan_preset(),
default_preset(request_user_input_outside_plan_mode),
]
}
fn plan_preset() -> CollaborationModeMask {
@@ -23,20 +30,24 @@ fn plan_preset() -> CollaborationModeMask {
}
}
fn default_preset() -> CollaborationModeMask {
fn default_preset(request_user_input_outside_plan_mode: bool) -> CollaborationModeMask {
CollaborationModeMask {
name: ModeKind::Default.display_name().to_string(),
mode: Some(ModeKind::Default),
model: None,
reasoning_effort: None,
developer_instructions: Some(Some(default_mode_instructions())),
developer_instructions: Some(Some(default_mode_instructions(
request_user_input_outside_plan_mode,
))),
}
}
fn default_mode_instructions() -> String {
fn default_mode_instructions(request_user_input_outside_plan_mode: bool) -> String {
let known_mode_names = format_mode_names(&TUI_VISIBLE_COLLABORATION_MODES);
let request_user_input_availability =
request_user_input_availability_message(ModeKind::Default);
let request_user_input_availability = request_user_input_availability_message(
ModeKind::Default,
request_user_input_outside_plan_mode,
);
COLLABORATION_MODE_DEFAULT
.replace(KNOWN_MODE_NAMES_PLACEHOLDER, &known_mode_names)
.replace(
@@ -55,9 +66,12 @@ fn format_mode_names(modes: &[ModeKind]) -> String {
}
}
fn request_user_input_availability_message(mode: ModeKind) -> String {
fn request_user_input_availability_message(
mode: ModeKind,
request_user_input_outside_plan_mode: bool,
) -> String {
let mode_name = mode.display_name();
if mode.allows_request_user_input() {
if request_user_input_allowed_for_mode(mode, request_user_input_outside_plan_mode) {
format!("The `request_user_input` tool is available in {mode_name} mode.")
} else {
format!(
@@ -74,7 +88,7 @@ mod tests {
#[test]
fn preset_names_use_mode_display_names() {
assert_eq!(plan_preset().name, ModeKind::Plan.display_name());
assert_eq!(default_preset().name, ModeKind::Default.display_name());
assert_eq!(default_preset(false).name, ModeKind::Default.display_name());
assert_eq!(
plan_preset().reasoning_effort,
Some(Some(ReasoningEffort::Medium))
@@ -83,7 +97,7 @@ mod tests {
#[test]
fn default_mode_instructions_replace_mode_names_placeholder() {
let default_instructions = default_preset()
let default_instructions = default_preset(false)
.developer_instructions
.expect("default preset should include instructions")
.expect("default instructions should be set");
@@ -96,7 +110,19 @@ mod tests {
assert!(default_instructions.contains(&expected_snippet));
let expected_availability_message =
request_user_input_availability_message(ModeKind::Default);
request_user_input_availability_message(ModeKind::Default, false);
assert!(default_instructions.contains(&expected_availability_message));
}
#[test]
fn default_mode_instructions_reflect_request_user_input_flag() {
    // With the flag enabled, the Default preset's developer instructions must
    // include the availability message generated for that configuration.
    let expected = request_user_input_availability_message(ModeKind::Default, true);
    let instructions = default_preset(true)
        .developer_instructions
        .expect("default preset should include instructions")
        .expect("default instructions should be set");
    assert!(instructions.contains(&expected));
}
}

View File

@@ -50,6 +50,7 @@ pub struct ModelsManager {
etag: RwLock<Option<String>>,
cache_manager: ModelsCacheManager,
provider: ModelProviderInfo,
request_user_input_outside_plan_mode: bool,
}
impl ModelsManager {
@@ -62,6 +63,7 @@ impl ModelsManager {
codex_home: PathBuf,
auth_manager: Arc<AuthManager>,
model_catalog: Option<ModelsResponse>,
request_user_input_outside_plan_mode: bool,
) -> Self {
let cache_path = codex_home.join(MODEL_CACHE_FILE);
let cache_manager = ModelsCacheManager::new(cache_path, DEFAULT_MODEL_CACHE_TTL);
@@ -79,6 +81,7 @@ impl ModelsManager {
etag: RwLock::new(None),
cache_manager,
provider: ModelProviderInfo::create_openai_provider(),
request_user_input_outside_plan_mode,
}
}
@@ -97,7 +100,7 @@ impl ModelsManager {
///
/// Returns a static set of presets seeded with the configured model.
pub fn list_collaboration_modes(&self) -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets()
builtin_collaboration_mode_presets(self.request_user_input_outside_plan_mode)
}
/// Attempt to list models without blocking, using the current cached state.
@@ -348,6 +351,7 @@ impl ModelsManager {
etag: RwLock::new(None),
cache_manager,
provider,
request_user_input_outside_plan_mode: false,
}
}
@@ -470,7 +474,8 @@ mod tests {
.expect("load default test config");
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
let manager = ModelsManager::new(codex_home.path().to_path_buf(), auth_manager, None);
let manager =
ModelsManager::new(codex_home.path().to_path_buf(), auth_manager, None, false);
let known_slug = manager
.get_remote_models()
.await
@@ -507,6 +512,7 @@ mod tests {
Some(ModelsResponse {
models: vec![remote_model("gpt-overlay", "Overlay", 0)],
}),
false,
);
let model_info = manager

View File

@@ -85,5 +85,5 @@ pub fn all_model_presets() -> &'static Vec<ModelPreset> {
}
pub fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
collaboration_mode_presets::builtin_collaboration_mode_presets()
collaboration_mode_presets::builtin_collaboration_mode_presets(false)
}

View File

@@ -143,6 +143,7 @@ impl ThreadManager {
auth_manager: Arc<AuthManager>,
session_source: SessionSource,
model_catalog: Option<ModelsResponse>,
request_user_input_outside_plan_mode: bool,
) -> Self {
let (thread_created_tx, _) = broadcast::channel(THREAD_CREATED_CHANNEL_CAPACITY);
let skills_manager = Arc::new(SkillsManager::new(codex_home.clone()));
@@ -155,6 +156,7 @@ impl ThreadManager {
codex_home,
auth_manager.clone(),
model_catalog,
request_user_input_outside_plan_mode,
)),
skills_manager,
file_watcher,

View File

@@ -31,6 +31,7 @@ pub use multi_agents::MultiAgentHandler;
pub use plan::PlanHandler;
pub use read_file::ReadFileHandler;
pub use request_user_input::RequestUserInputHandler;
pub(crate) use request_user_input::request_user_input_allowed_for_mode;
pub(crate) use request_user_input::request_user_input_tool_description;
pub(crate) use search_tool_bm25::DEFAULT_LIMIT as SEARCH_TOOL_BM25_DEFAULT_LIMIT;
pub(crate) use search_tool_bm25::SEARCH_TOOL_BM25_TOOL_NAME;

View File

@@ -1,6 +1,7 @@
use async_trait::async_trait;
use codex_protocol::models::FunctionCallOutputBody;
use crate::features::Feature;
use crate::function_tool::FunctionCallError;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolOutput;
@@ -12,23 +13,35 @@ use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::TUI_VISIBLE_COLLABORATION_MODES;
use codex_protocol::request_user_input::RequestUserInputArgs;
fn format_allowed_modes() -> String {
/// Whether the `request_user_input` tool may be used in `mode`.
///
/// Modes that natively allow the tool (per `ModeKind::allows_request_user_input`)
/// always qualify; the `request_user_input_outside_plan_mode` feature flag
/// opens it up to every remaining mode.
pub(crate) fn request_user_input_allowed_for_mode(
    mode: ModeKind,
    request_user_input_outside_plan_mode: bool,
) -> bool {
    if mode.allows_request_user_input() {
        true
    } else {
        request_user_input_outside_plan_mode
    }
}
fn format_allowed_modes(request_user_input_outside_plan_mode: bool) -> String {
let mode_names: Vec<&str> = TUI_VISIBLE_COLLABORATION_MODES
.into_iter()
.filter(|mode| mode.allows_request_user_input())
.filter(|mode| {
request_user_input_allowed_for_mode(*mode, request_user_input_outside_plan_mode)
})
.map(ModeKind::display_name)
.collect();
match mode_names.as_slice() {
[] => "no modes".to_string(),
[mode] => format!("{mode} mode"),
[first, second] => format!("{first} or {second} mode"),
[..] => format!("modes: {}", mode_names.join(",")),
[first, second] => format!("{first} and {second} modes"),
[..] => format!("modes: {}", mode_names.join(", ")),
}
}
pub(crate) fn request_user_input_unavailable_message(mode: ModeKind) -> Option<String> {
if mode.allows_request_user_input() {
pub(crate) fn request_user_input_unavailable_message(
mode: ModeKind,
request_user_input_outside_plan_mode: bool,
) -> Option<String> {
if request_user_input_allowed_for_mode(mode, request_user_input_outside_plan_mode) {
None
} else {
let mode_name = mode.display_name();
@@ -38,8 +51,10 @@ pub(crate) fn request_user_input_unavailable_message(mode: ModeKind) -> Option<S
}
}
pub(crate) fn request_user_input_tool_description() -> String {
let allowed_modes = format_allowed_modes();
pub(crate) fn request_user_input_tool_description(
request_user_input_outside_plan_mode: bool,
) -> String {
let allowed_modes = format_allowed_modes(request_user_input_outside_plan_mode);
format!(
"Request user input for one to three short questions and wait for the response. This tool is only available in {allowed_modes}."
)
@@ -71,8 +86,13 @@ impl ToolHandler for RequestUserInputHandler {
}
};
let request_user_input_outside_plan_mode = session
.features()
.enabled(Feature::RequestUserInputOutsidePlanMode);
let mode = session.collaboration_mode().await.mode;
if let Some(message) = request_user_input_unavailable_message(mode) {
if let Some(message) =
request_user_input_unavailable_message(mode, request_user_input_outside_plan_mode)
{
return Err(FunctionCallError::RespondToModel(message));
}
@@ -126,17 +146,20 @@ mod tests {
#[test]
fn request_user_input_unavailable_messages_use_default_name_for_default_modes() {
assert_eq!(request_user_input_unavailable_message(ModeKind::Plan), None);
assert_eq!(
request_user_input_unavailable_message(ModeKind::Default),
request_user_input_unavailable_message(ModeKind::Plan, false),
None
);
assert_eq!(
request_user_input_unavailable_message(ModeKind::Default, false),
Some("request_user_input is unavailable in Default mode".to_string())
);
assert_eq!(
request_user_input_unavailable_message(ModeKind::Execute),
request_user_input_unavailable_message(ModeKind::Execute, false),
Some("request_user_input is unavailable in Execute mode".to_string())
);
assert_eq!(
request_user_input_unavailable_message(ModeKind::PairProgramming),
request_user_input_unavailable_message(ModeKind::PairProgramming, false),
Some("request_user_input is unavailable in Pair Programming mode".to_string())
);
}
@@ -144,8 +167,27 @@ mod tests {
#[test]
fn request_user_input_tool_description_mentions_plan_only() {
assert_eq!(
request_user_input_tool_description(),
request_user_input_tool_description(false),
"Request user input for one to three short questions and wait for the response. This tool is only available in Plan mode.".to_string()
);
}
#[test]
fn request_user_input_outside_plan_mode_allows_all_non_plan_modes() {
    // With the feature flag set, every mode should permit the tool.
    for mode in [
        ModeKind::Plan,
        ModeKind::Default,
        ModeKind::Execute,
        ModeKind::PairProgramming,
    ] {
        assert!(request_user_input_allowed_for_mode(mode, true));
    }
    // No "unavailable" message should be produced for a non-Plan mode.
    assert_eq!(
        request_user_input_unavailable_message(ModeKind::Default, true),
        None
    );
    // The tool description now enumerates the modes that allow it.
    assert_eq!(
        request_user_input_tool_description(true),
        "Request user input for one to three short questions and wait for the response. This tool is only available in Default and Plan modes.".to_string()
    );
}
}

View File

@@ -44,6 +44,7 @@ pub(crate) struct ToolsConfig {
pub js_repl_tools_only: bool,
pub collab_tools: bool,
pub collaboration_modes_tools: bool,
pub request_user_input_outside_plan_mode: bool,
pub experimental_supported_tools: Vec<String>,
}
@@ -66,6 +67,8 @@ impl ToolsConfig {
include_js_repl && features.enabled(Feature::JsReplToolsOnly);
let include_collab_tools = features.enabled(Feature::Collab);
let include_collaboration_modes_tools = true;
let request_user_input_outside_plan_mode =
features.enabled(Feature::RequestUserInputOutsidePlanMode);
let include_search_tool = features.enabled(Feature::Apps);
let shell_type = if !features.enabled(Feature::ShellTool) {
@@ -106,6 +109,7 @@ impl ToolsConfig {
js_repl_tools_only: include_js_repl_tools_only,
collab_tools: include_collab_tools,
collaboration_modes_tools: include_collaboration_modes_tools,
request_user_input_outside_plan_mode,
experimental_supported_tools: model_info.experimental_supported_tools.clone(),
}
}
@@ -668,7 +672,7 @@ fn create_wait_tool() -> ToolSpec {
})
}
fn create_request_user_input_tool() -> ToolSpec {
fn create_request_user_input_tool(request_user_input_outside_plan_mode: bool) -> ToolSpec {
let mut option_props = BTreeMap::new();
option_props.insert(
"label".to_string(),
@@ -739,7 +743,7 @@ fn create_request_user_input_tool() -> ToolSpec {
ToolSpec::Function(ResponsesApiTool {
name: "request_user_input".to_string(),
description: request_user_input_tool_description(),
description: request_user_input_tool_description(request_user_input_outside_plan_mode),
strict: false,
parameters: JsonSchema::Object {
properties,
@@ -1520,7 +1524,9 @@ pub(crate) fn build_specs(
}
if config.collaboration_modes_tools {
builder.push_spec(create_request_user_input_tool());
builder.push_spec(create_request_user_input_tool(
config.request_user_input_outside_plan_mode,
));
builder.register_handler("request_user_input", request_user_input_handler);
}
@@ -1836,7 +1842,7 @@ mod tests {
create_exec_command_tool(true),
create_write_stdin_tool(),
PLAN_TOOL.clone(),
create_request_user_input_tool(),
create_request_user_input_tool(false),
create_apply_patch_freeform_tool(),
ToolSpec::WebSearch {
external_web_access: Some(true),

View File

@@ -584,6 +584,9 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
auth_manager,
SessionSource::Exec,
config.model_catalog.clone(),
config
.features
.enabled(Feature::RequestUserInputOutsidePlanMode),
);
let NewThread { thread: codex, .. } = thread_manager
.start_thread(config)

View File

@@ -12,7 +12,7 @@ async fn offline_model_info_without_tool_output_override() {
let auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
let manager = ModelsManager::new(config.codex_home.clone(), auth_manager, None);
let manager = ModelsManager::new(config.codex_home.clone(), auth_manager, None, false);
let model_info = manager.get_model_info("gpt-5.1", &config).await;
@@ -30,7 +30,7 @@ async fn offline_model_info_with_tool_output_override() {
let auth_manager = codex_core::test_support::auth_manager_from_auth(
CodexAuth::create_dummy_chatgpt_auth_for_testing(),
);
let manager = ModelsManager::new(config.codex_home.clone(), auth_manager, None);
let manager = ModelsManager::new(config.codex_home.clone(), auth_manager, None, false);
let model_info = manager.get_model_info("gpt-5.1-codex", &config).await;

View File

@@ -71,10 +71,13 @@ fn call_output_content_and_success(
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_round_trip_resolves_pending() -> anyhow::Result<()> {
request_user_input_round_trip_for_mode(ModeKind::Plan).await
request_user_input_round_trip_for_mode(ModeKind::Plan, false).await
}
async fn request_user_input_round_trip_for_mode(mode: ModeKind) -> anyhow::Result<()> {
async fn request_user_input_round_trip_for_mode(
mode: ModeKind,
request_user_input_outside_plan_mode: bool,
) -> anyhow::Result<()> {
skip_if_no_network!(Ok(()));
let server = start_mock_server().await;
@@ -86,8 +89,13 @@ async fn request_user_input_round_trip_for_mode(mode: ModeKind) -> anyhow::Resul
session_configured,
..
} = builder
.with_config(|config| {
.with_config(move |config| {
config.features.enable(Feature::CollaborationModes);
if request_user_input_outside_plan_mode {
config
.features
.enable(Feature::RequestUserInputOutsidePlanMode);
}
})
.build(&server)
.await?;
@@ -190,6 +198,24 @@ async fn request_user_input_round_trip_for_mode(mode: ModeKind) -> anyhow::Resul
Ok(())
}
// With the RequestUserInputOutsidePlanMode flag enabled, the full round trip
// should also resolve in Default mode (which is Plan-only without the flag).
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_round_trip_resolves_pending_in_default_mode_with_flag()
-> anyhow::Result<()> {
request_user_input_round_trip_for_mode(ModeKind::Default, true).await
}
// Same round trip as the Plan-mode test, but in Execute mode, which is only
// permitted because the RequestUserInputOutsidePlanMode flag is enabled.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_round_trip_resolves_pending_in_execute_mode_with_flag()
-> anyhow::Result<()> {
request_user_input_round_trip_for_mode(ModeKind::Execute, true).await
}
// Same round trip in Pair Programming mode, exercising the third mode that the
// RequestUserInputOutsidePlanMode flag unlocks for request_user_input.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_round_trip_resolves_pending_in_pair_mode_with_flag()
-> anyhow::Result<()> {
request_user_input_round_trip_for_mode(ModeKind::PairProgramming, true).await
}
async fn assert_request_user_input_rejected<F>(mode_name: &str, build_mode: F) -> anyhow::Result<()>
where
F: FnOnce(String) -> CollaborationMode,

View File

@@ -374,6 +374,9 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
auth_manager.clone(),
SessionSource::Exec,
config.model_catalog.clone(),
config
.features
.enabled(codex_core::features::Feature::RequestUserInputOutsidePlanMode),
));
let default_model = thread_manager
.get_models_manager()

View File

@@ -62,6 +62,9 @@ impl MessageProcessor {
auth_manager,
SessionSource::Mcp,
config.model_catalog.clone(),
config
.features
.enabled(codex_core::features::Feature::RequestUserInputOutsidePlanMode),
));
Self {
outgoing,

View File

@@ -1234,6 +1234,9 @@ impl App {
auth_manager.clone(),
SessionSource::Cli,
config.model_catalog.clone(),
config
.features
.enabled(codex_core::features::Feature::RequestUserInputOutsidePlanMode),
));
let mut model = thread_manager
.get_models_manager()

View File

@@ -1632,7 +1632,12 @@ async fn make_chatwidget_manual(
let auth_manager =
codex_core::test_support::auth_manager_from_auth(CodexAuth::from_api_key("test"));
let codex_home = cfg.codex_home.clone();
let models_manager = Arc::new(ModelsManager::new(codex_home, auth_manager.clone(), None));
let models_manager = Arc::new(ModelsManager::new(
codex_home,
auth_manager.clone(),
None,
false,
));
let reasoning_effort = None;
let base_mode = CollaborationMode {
mode: ModeKind::Default,
@@ -1755,6 +1760,7 @@ fn set_chatgpt_auth(chat: &mut ChatWidget) {
chat.config.codex_home.clone(),
chat.auth_manager.clone(),
None,
false,
));
}