Compare commits

..

1 Commit

Author SHA1 Message Date
Ahmed Ibrahim
ad1a8040c7 fix: use platform python for notify test 2026-03-02 10:22:12 -07:00
20 changed files with 22 additions and 172 deletions

View File

@@ -146,8 +146,9 @@ jobs:
shell: bash
run: |
set -euo pipefail
git clone https://git.savannah.gnu.org/git/bash /tmp/bash
git clone --depth 1 https://github.com/bolinfest/bash /tmp/bash
cd /tmp/bash
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
./configure --without-bash-malloc
@@ -187,8 +188,9 @@ jobs:
shell: bash
run: |
set -euo pipefail
git clone https://git.savannah.gnu.org/git/bash /tmp/bash
git clone --depth 1 https://github.com/bolinfest/bash /tmp/bash
cd /tmp/bash
git fetch --depth 1 origin a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/bash-exec-wrapper.patch"
./configure --without-bash-malloc

View File

@@ -189,7 +189,6 @@ impl MessageProcessor {
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
config.plan_mode_developer_instructions.clone(),
));
let cloud_requirements = Arc::new(RwLock::new(cloud_requirements));
let codex_message_processor = CodexMessageProcessor::new(CodexMessageProcessorArgs {

View File

@@ -207,12 +207,13 @@ tmp_path.replace(payload_path)
let notify_script = notify_script
.to_str()
.expect("notify script path should be valid UTF-8");
let notify_command = if cfg!(windows) { "python" } else { "python3" };
create_config_toml_with_extra(
codex_home.path(),
&server.uri(),
"never",
&format!(
"notify = [\"python3\", {}]",
"notify = [\"{notify_command}\", {}]",
toml_basic_string(notify_script)
),
)?;
@@ -261,7 +262,12 @@ tmp_path.replace(payload_path)
)
.await??;
fs_wait::wait_for_path_exists(&notify_file, Duration::from_secs(5)).await?;
let notify_timeout = if cfg!(windows) {
Duration::from_secs(15)
} else {
Duration::from_secs(5)
};
fs_wait::wait_for_path_exists(&notify_file, notify_timeout).await?;
let payload_raw = tokio::fs::read_to_string(&notify_file).await?;
let payload: Value = serde_json::from_str(&payload_raw)?;
assert_eq!(payload["client"], "xcode");

View File

@@ -499,9 +499,6 @@
"personality": {
"$ref": "#/definitions/Personality"
},
"plan_mode_developer_instructions": {
"type": "string"
},
"plan_mode_reasoning_effort": {
"$ref": "#/definitions/ReasoningEffort"
},
@@ -2041,9 +2038,6 @@
],
"description": "Optionally specify a personality for the model"
},
"plan_mode_developer_instructions": {
"type": "string"
},
"plan_mode_reasoning_effort": {
"$ref": "#/definitions/ReasoningEffort"
},

View File

@@ -8130,7 +8130,6 @@ mod tests {
auth_manager.clone(),
None,
CollaborationModesConfig::default(),
None,
));
let model = ModelsManager::get_model_offline_for_tests(config.model.as_deref());
let model_info =
@@ -8218,7 +8217,6 @@ mod tests {
auth_manager.clone(),
None,
CollaborationModesConfig::default(),
None,
));
let agent_control = AgentControl::default();
let exec_policy = ExecPolicyManager::default();
@@ -8387,7 +8385,6 @@ mod tests {
auth_manager.clone(),
None,
CollaborationModesConfig::default(),
None,
));
let agent_control = AgentControl::default();
let exec_policy = ExecPolicyManager::default();

View File

@@ -413,10 +413,6 @@ pub struct Config {
/// Plan preset. The `none` value means "no reasoning" (not "inherit the
/// global default").
pub plan_mode_reasoning_effort: Option<ReasoningEffort>,
/// Optional Plan-mode-specific developer instructions override.
///
/// When unset, Plan mode uses the built-in Plan preset instructions.
pub plan_mode_developer_instructions: Option<String>,
/// Optional value to use for `reasoning.summary` when making a request
/// using the Responses API. When unset, the model catalog default is used.
@@ -1170,7 +1166,6 @@ pub struct ConfigToml {
pub model_reasoning_effort: Option<ReasoningEffort>,
pub plan_mode_reasoning_effort: Option<ReasoningEffort>,
pub plan_mode_developer_instructions: Option<String>,
pub model_reasoning_summary: Option<ReasoningSummary>,
/// Optional verbosity control for GPT-5 models (Responses API `text.verbosity`).
pub model_verbosity: Option<Verbosity>,
@@ -2170,9 +2165,6 @@ impl Config {
plan_mode_reasoning_effort: config_profile
.plan_mode_reasoning_effort
.or(cfg.plan_mode_reasoning_effort),
plan_mode_developer_instructions: config_profile
.plan_mode_developer_instructions
.or(cfg.plan_mode_developer_instructions),
model_reasoning_summary: config_profile
.model_reasoning_summary
.or(cfg.model_reasoning_summary),
@@ -4925,7 +4917,6 @@ model_verbosity = "high"
model_reasoning_effort: Some(ReasoningEffort::High),
plan_mode_reasoning_effort: None,
model_reasoning_summary: Some(ReasoningSummary::Detailed),
plan_mode_developer_instructions: None,
model_supports_reasoning_summaries: None,
model_catalog: None,
model_verbosity: None,
@@ -5054,7 +5045,6 @@ model_verbosity = "high"
model_reasoning_effort: None,
plan_mode_reasoning_effort: None,
model_reasoning_summary: None,
plan_mode_developer_instructions: None,
model_supports_reasoning_summaries: None,
model_catalog: None,
model_verbosity: None,
@@ -5181,7 +5171,6 @@ model_verbosity = "high"
model_reasoning_effort: None,
plan_mode_reasoning_effort: None,
model_reasoning_summary: None,
plan_mode_developer_instructions: None,
model_supports_reasoning_summaries: None,
model_catalog: None,
model_verbosity: None,
@@ -5294,7 +5283,6 @@ model_verbosity = "high"
model_reasoning_effort: Some(ReasoningEffort::High),
plan_mode_reasoning_effort: None,
model_reasoning_summary: Some(ReasoningSummary::Detailed),
plan_mode_developer_instructions: None,
model_supports_reasoning_summaries: None,
model_catalog: None,
model_verbosity: Some(Verbosity::High),

View File

@@ -25,7 +25,6 @@ pub struct ConfigProfile {
pub sandbox_mode: Option<SandboxMode>,
pub model_reasoning_effort: Option<ReasoningEffort>,
pub plan_mode_reasoning_effort: Option<ReasoningEffort>,
pub plan_mode_developer_instructions: Option<String>,
pub model_reasoning_summary: Option<ReasoningSummary>,
pub model_verbosity: Option<Verbosity>,
/// Optional path to a JSON model catalog (applied on startup only).

View File

@@ -6,6 +6,7 @@ mod macos;
mod tests;
use crate::config::ConfigToml;
use crate::config::deserialize_config_toml_with_base;
use crate::config_loader::layer_io::LoadedConfigLayers;
use crate::git_info::resolve_root_git_project_for_trust;
use codex_app_server_protocol::ConfigLayerSource;
@@ -575,11 +576,6 @@ struct ProjectTrustContext {
user_config_file: AbsolutePathBuf,
}
#[derive(Deserialize)]
struct ProjectTrustConfigToml {
projects: Option<std::collections::HashMap<String, crate::config::ProjectConfig>>,
}
struct ProjectTrustDecision {
trust_level: Option<TrustLevel>,
trust_key: String,
@@ -670,16 +666,10 @@ async fn project_trust_context(
config_base_dir: &Path,
user_config_file: &AbsolutePathBuf,
) -> io::Result<ProjectTrustContext> {
let project_trust_config: ProjectTrustConfigToml = {
let _guard = AbsolutePathBufGuard::new(config_base_dir);
merged_config
.clone()
.try_into()
.map_err(|err| std::io::Error::new(std::io::ErrorKind::InvalidData, err))?
};
let config_toml = deserialize_config_toml_with_base(merged_config.clone(), config_base_dir)?;
let project_root = find_project_root(cwd, project_root_markers).await?;
let projects = project_trust_config.projects.unwrap_or_default();
let projects = config_toml.projects.unwrap_or_default();
let project_root_key = project_root.as_path().to_string_lossy().to_string();
let repo_root = resolve_root_git_project_for_trust(cwd.as_path());

View File

@@ -1114,91 +1114,6 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
Ok(())
}
#[tokio::test]
async fn cli_override_can_update_project_local_mcp_server_when_project_is_trusted()
-> std::io::Result<()> {
let tmp = tempdir()?;
let project_root = tmp.path().join("project");
let nested = project_root.join("child");
let dot_codex = project_root.join(".codex");
let codex_home = tmp.path().join("home");
tokio::fs::create_dir_all(&nested).await?;
tokio::fs::create_dir_all(&dot_codex).await?;
tokio::fs::create_dir_all(&codex_home).await?;
tokio::fs::write(project_root.join(".git"), "gitdir: here").await?;
tokio::fs::write(
dot_codex.join(CONFIG_TOML_FILE),
r#"
[mcp_servers.sentry]
url = "https://mcp.sentry.dev/mcp"
enabled = false
"#,
)
.await?;
make_config_for_test(&codex_home, &project_root, TrustLevel::Trusted, None).await?;
let config = ConfigBuilder::default()
.codex_home(codex_home)
.cli_overrides(vec![(
"mcp_servers.sentry.enabled".to_string(),
TomlValue::Boolean(true),
)])
.fallback_cwd(Some(nested))
.build()
.await?;
let server = config
.mcp_servers
.get()
.get("sentry")
.expect("trusted project MCP server should load");
assert!(server.enabled);
Ok(())
}
#[tokio::test]
async fn cli_override_for_disabled_project_local_mcp_server_returns_invalid_transport()
-> std::io::Result<()> {
let tmp = tempdir()?;
let project_root = tmp.path().join("project");
let nested = project_root.join("child");
let dot_codex = project_root.join(".codex");
let codex_home = tmp.path().join("home");
tokio::fs::create_dir_all(&nested).await?;
tokio::fs::create_dir_all(&dot_codex).await?;
tokio::fs::create_dir_all(&codex_home).await?;
tokio::fs::write(project_root.join(".git"), "gitdir: here").await?;
tokio::fs::write(
dot_codex.join(CONFIG_TOML_FILE),
r#"
[mcp_servers.sentry]
url = "https://mcp.sentry.dev/mcp"
enabled = false
"#,
)
.await?;
let err = ConfigBuilder::default()
.codex_home(codex_home)
.cli_overrides(vec![(
"mcp_servers.sentry.enabled".to_string(),
TomlValue::Boolean(true),
)])
.fallback_cwd(Some(nested))
.build()
.await
.expect_err("untrusted project layer should not provide MCP transport");
assert!(
err.to_string().contains("invalid transport")
&& err.to_string().contains("mcp_servers.sentry"),
"unexpected error: {err}"
);
Ok(())
}
#[tokio::test]
async fn invalid_project_config_ignored_when_untrusted_or_unknown() -> std::io::Result<()> {
let tmp = tempdir()?;

View File

@@ -23,25 +23,17 @@ pub struct CollaborationModesConfig {
pub(crate) fn builtin_collaboration_mode_presets(
collaboration_modes_config: CollaborationModesConfig,
plan_mode_developer_instructions: Option<&str>,
) -> Vec<CollaborationModeMask> {
vec![
plan_preset(plan_mode_developer_instructions),
default_preset(collaboration_modes_config),
]
vec![plan_preset(), default_preset(collaboration_modes_config)]
}
fn plan_preset(plan_mode_developer_instructions: Option<&str>) -> CollaborationModeMask {
fn plan_preset() -> CollaborationModeMask {
CollaborationModeMask {
name: ModeKind::Plan.display_name().to_string(),
mode: Some(ModeKind::Plan),
model: None,
reasoning_effort: Some(Some(ReasoningEffort::Medium)),
developer_instructions: Some(Some(
plan_mode_developer_instructions
.unwrap_or(COLLABORATION_MODE_PLAN)
.to_string(),
)),
developer_instructions: Some(Some(COLLABORATION_MODE_PLAN.to_string())),
}
}
@@ -117,27 +109,17 @@ mod tests {
#[test]
fn preset_names_use_mode_display_names() {
assert_eq!(plan_preset(None).name, ModeKind::Plan.display_name());
assert_eq!(plan_preset().name, ModeKind::Plan.display_name());
assert_eq!(
default_preset(CollaborationModesConfig::default()).name,
ModeKind::Default.display_name()
);
assert_eq!(
plan_preset(None).reasoning_effort,
plan_preset().reasoning_effort,
Some(Some(ReasoningEffort::Medium))
);
}
#[test]
fn plan_preset_uses_configured_developer_instructions_override() {
let override_instructions = "Use plan override.";
let plan = plan_preset(Some(override_instructions));
assert_eq!(
plan.developer_instructions,
Some(Some(override_instructions.to_string()))
);
}
#[test]
fn default_mode_instructions_replace_mode_names_placeholder() {
let default_instructions = default_preset(CollaborationModesConfig {

View File

@@ -61,7 +61,6 @@ pub struct ModelsManager {
etag: RwLock<Option<String>>,
cache_manager: ModelsCacheManager,
provider: ModelProviderInfo,
plan_mode_developer_instructions: Option<String>,
}
impl ModelsManager {
@@ -75,7 +74,6 @@ impl ModelsManager {
auth_manager: Arc<AuthManager>,
model_catalog: Option<ModelsResponse>,
collaboration_modes_config: CollaborationModesConfig,
plan_mode_developer_instructions: Option<String>,
) -> Self {
let cache_path = codex_home.join(MODEL_CACHE_FILE);
let cache_manager = ModelsCacheManager::new(cache_path, DEFAULT_MODEL_CACHE_TTL);
@@ -98,7 +96,6 @@ impl ModelsManager {
etag: RwLock::new(None),
cache_manager,
provider: ModelProviderInfo::create_openai_provider(),
plan_mode_developer_instructions,
}
}
@@ -124,10 +121,7 @@ impl ModelsManager {
&self,
collaboration_modes_config: CollaborationModesConfig,
) -> Vec<CollaborationModeMask> {
builtin_collaboration_mode_presets(
collaboration_modes_config,
self.plan_mode_developer_instructions.as_deref(),
)
builtin_collaboration_mode_presets(collaboration_modes_config)
}
/// Attempt to list models without blocking, using the current cached state.
@@ -400,7 +394,6 @@ impl ModelsManager {
etag: RwLock::new(None),
cache_manager,
provider,
plan_mode_developer_instructions: None,
}
}
@@ -528,7 +521,6 @@ mod tests {
auth_manager,
None,
CollaborationModesConfig::default(),
None,
);
let known_slug = manager
.get_remote_models()
@@ -567,7 +559,6 @@ mod tests {
models: vec![remote_model("gpt-overlay", "Overlay", 0)],
}),
CollaborationModesConfig::default(),
None,
);
let model_info = manager
@@ -596,7 +587,6 @@ mod tests {
auth_manager,
None,
CollaborationModesConfig::default(),
None,
);
let known_slug = manager
.get_remote_models()
@@ -628,7 +618,6 @@ mod tests {
auth_manager,
None,
CollaborationModesConfig::default(),
None,
);
let known_slug = manager
.get_remote_models()

View File

@@ -87,6 +87,5 @@ pub fn all_model_presets() -> &'static Vec<ModelPreset> {
pub fn builtin_collaboration_mode_presets() -> Vec<CollaborationModeMask> {
collaboration_mode_presets::builtin_collaboration_mode_presets(
collaboration_mode_presets::CollaborationModesConfig::default(),
None,
)
}

View File

@@ -150,7 +150,6 @@ impl ThreadManager {
session_source: SessionSource,
model_catalog: Option<ModelsResponse>,
collaboration_modes_config: CollaborationModesConfig,
plan_mode_developer_instructions: Option<String>,
) -> Self {
let (thread_created_tx, _) = broadcast::channel(THREAD_CREATED_CHANNEL_CAPACITY);
let plugins_manager = Arc::new(PluginsManager::new(codex_home.clone()));
@@ -169,7 +168,6 @@ impl ThreadManager {
auth_manager.clone(),
model_catalog,
collaboration_modes_config,
plan_mode_developer_instructions,
)),
skills_manager,
plugins_manager,

View File

@@ -702,7 +702,6 @@ async fn prefers_apikey_when_config_prefers_apikey_even_with_chatgpt_tokens() {
.features
.enabled(Feature::DefaultModeRequestUserInput),
},
config.plan_mode_developer_instructions.clone(),
);
let NewThread { thread: codex, .. } = thread_manager
.start_thread(config)

View File

@@ -18,7 +18,6 @@ async fn offline_model_info_without_tool_output_override() {
auth_manager,
None,
CollaborationModesConfig::default(),
None,
);
let model_info = manager.get_model_info("gpt-5.1", &config).await;
@@ -42,7 +41,6 @@ async fn offline_model_info_with_tool_output_override() {
auth_manager,
None,
CollaborationModesConfig::default(),
None,
);
let model_info = manager.get_model_info("gpt-5.1-codex", &config).await;

View File

@@ -410,7 +410,6 @@ pub async fn run_main(cli: Cli, arg0_paths: Arg0DispatchPaths) -> anyhow::Result
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
config.plan_mode_developer_instructions.clone(),
));
let default_model = thread_manager
.get_models_manager()

View File

@@ -68,7 +68,6 @@ impl MessageProcessor {
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
config.plan_mode_developer_instructions.clone(),
));
Self {
outgoing,

View File

@@ -20,7 +20,7 @@ decision to the shell-escalation protocol over a shared file descriptor (specifi
We carry a small patch to `execute_cmd.c` (see `patches/bash-exec-wrapper.patch`) that adds support for `EXEC_WRAPPER`. The original commit message is “add support for BASH_EXEC_WRAPPER” and the patch applies cleanly to `a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b` from https://github.com/bminor/bash. To rebuild manually:
```bash
git clone https://git.savannah.gnu.org/git/bash
git clone https://github.com/bminor/bash
git checkout a8a1c2fac029404d3f42cd39f5a20f24b6e4fe4b
git apply /path/to/patches/bash-exec-wrapper.patch
./configure --without-bash-malloc

View File

@@ -1521,7 +1521,6 @@ impl App {
.features
.enabled(codex_core::features::Feature::DefaultModeRequestUserInput),
},
config.plan_mode_developer_instructions.clone(),
));
let mut model = thread_manager
.get_models_manager()

View File

@@ -1645,7 +1645,6 @@ async fn make_chatwidget_manual(
auth_manager.clone(),
None,
CollaborationModesConfig::default(),
None,
));
let reasoning_effort = None;
let base_mode = CollaborationMode {
@@ -1774,7 +1773,6 @@ fn set_chatgpt_auth(chat: &mut ChatWidget) {
chat.auth_manager.clone(),
None,
CollaborationModesConfig::default(),
None,
));
}