mirror of
https://github.com/openai/codex.git
synced 2026-04-29 08:56:38 +00:00
## Why
`codex-rs/core/src/lib.rs` re-exported a broad set of types and modules
from `codex-protocol` and `codex-shell-command`. That made it easy for
workspace crates to import those APIs through `codex-core`, which in
turn hid dependency edges and made it harder to reduce compile-time
coupling over time.
This change removes those public re-exports so call sites must import
from the source crates directly. Even when a crate still depends on
`codex-core` today, this makes dependency boundaries explicit and
unblocks future work to drop `codex-core` dependencies where possible.
## What Changed
- Removed public re-exports from `codex-rs/core/src/lib.rs` for:
- `codex_protocol::protocol` and related protocol/model types (including
`InitialHistory`)
- `codex_protocol::config_types` (`protocol_config_types`)
- `codex_shell_command::{bash, is_dangerous_command, is_safe_command,
parse_command, powershell}`
- Migrated workspace Rust call sites to import directly from:
- `codex_protocol::protocol`
- `codex_protocol::config_types`
- `codex_protocol::models`
- `codex_shell_command`
- Added explicit `Cargo.toml` dependencies (`codex-protocol` /
`codex-shell-command`) in crates that now import those crates directly.
- Kept `codex-core` internal modules compiling by using `pub(crate)`
aliases in `core/src/lib.rs` (internal-only, not part of the public
API).
- Updated the two utility crates that can already drop a `codex-core`
dependency edge entirely:
- `codex-utils-approval-presets`
- `codex-utils-cli`
## Verification
- `cargo test -p codex-utils-approval-presets`
- `cargo test -p codex-utils-cli`
- `cargo check --workspace --all-targets`
- `just clippy`
211 lines
6.7 KiB
Rust
211 lines
6.7 KiB
Rust
#![allow(clippy::unwrap_used)]
|
|
|
|
use codex_core::features::Feature;
|
|
use codex_protocol::config_types::WebSearchMode;
|
|
use codex_protocol::protocol::SandboxPolicy;
|
|
use core_test_support::responses;
|
|
use core_test_support::responses::start_mock_server;
|
|
use core_test_support::skip_if_no_network;
|
|
use core_test_support::test_codex::test_codex;
|
|
use pretty_assertions::assert_eq;
|
|
use serde_json::Value;
|
|
|
|
#[allow(clippy::expect_used)]
|
|
fn find_web_search_tool(body: &Value) -> &Value {
|
|
body["tools"]
|
|
.as_array()
|
|
.expect("request body should include tools array")
|
|
.iter()
|
|
.find(|tool| tool.get("type").and_then(Value::as_str) == Some("web_search"))
|
|
.expect("tools should include a web_search tool")
|
|
}
|
|
|
|
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
|
async fn web_search_mode_cached_sets_external_web_access_false() {
|
|
skip_if_no_network!();
|
|
|
|
let server = start_mock_server().await;
|
|
let sse = responses::sse(vec![
|
|
responses::ev_response_created("resp-1"),
|
|
responses::ev_completed("resp-1"),
|
|
]);
|
|
let resp_mock = responses::mount_sse_once(&server, sse).await;
|
|
|
|
let mut builder = test_codex()
|
|
.with_model("gpt-5-codex")
|
|
.with_config(|config| {
|
|
config
|
|
.web_search_mode
|
|
.set(WebSearchMode::Cached)
|
|
.expect("test web_search_mode should satisfy constraints");
|
|
});
|
|
let test = builder
|
|
.build(&server)
|
|
.await
|
|
.expect("create test Codex conversation");
|
|
|
|
test.submit_turn_with_policy(
|
|
"hello cached web search",
|
|
SandboxPolicy::new_read_only_policy(),
|
|
)
|
|
.await
|
|
.expect("submit turn");
|
|
|
|
let body = resp_mock.single_request().body_json();
|
|
let tool = find_web_search_tool(&body);
|
|
assert_eq!(
|
|
tool.get("external_web_access").and_then(Value::as_bool),
|
|
Some(false),
|
|
"web_search cached mode should force external_web_access=false"
|
|
);
|
|
}
|
|
|
|
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
|
async fn web_search_mode_takes_precedence_over_legacy_flags() {
|
|
skip_if_no_network!();
|
|
|
|
let server = start_mock_server().await;
|
|
let sse = responses::sse(vec![
|
|
responses::ev_response_created("resp-1"),
|
|
responses::ev_completed("resp-1"),
|
|
]);
|
|
let resp_mock = responses::mount_sse_once(&server, sse).await;
|
|
|
|
let mut builder = test_codex()
|
|
.with_model("gpt-5-codex")
|
|
.with_config(|config| {
|
|
config.features.enable(Feature::WebSearchRequest);
|
|
config
|
|
.web_search_mode
|
|
.set(WebSearchMode::Cached)
|
|
.expect("test web_search_mode should satisfy constraints");
|
|
});
|
|
let test = builder
|
|
.build(&server)
|
|
.await
|
|
.expect("create test Codex conversation");
|
|
|
|
test.submit_turn_with_policy(
|
|
"hello cached+live flags",
|
|
SandboxPolicy::new_read_only_policy(),
|
|
)
|
|
.await
|
|
.expect("submit turn");
|
|
|
|
let body = resp_mock.single_request().body_json();
|
|
let tool = find_web_search_tool(&body);
|
|
assert_eq!(
|
|
tool.get("external_web_access").and_then(Value::as_bool),
|
|
Some(false),
|
|
"web_search mode should win over legacy web_search_request"
|
|
);
|
|
}
|
|
|
|
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
|
async fn web_search_mode_defaults_to_cached_when_features_disabled() {
|
|
skip_if_no_network!();
|
|
|
|
let server = start_mock_server().await;
|
|
let sse = responses::sse(vec![
|
|
responses::ev_response_created("resp-1"),
|
|
responses::ev_completed("resp-1"),
|
|
]);
|
|
let resp_mock = responses::mount_sse_once(&server, sse).await;
|
|
|
|
let mut builder = test_codex()
|
|
.with_model("gpt-5-codex")
|
|
.with_config(|config| {
|
|
config
|
|
.web_search_mode
|
|
.set(WebSearchMode::Cached)
|
|
.expect("test web_search_mode should satisfy constraints");
|
|
config.features.disable(Feature::WebSearchCached);
|
|
config.features.disable(Feature::WebSearchRequest);
|
|
});
|
|
let test = builder
|
|
.build(&server)
|
|
.await
|
|
.expect("create test Codex conversation");
|
|
|
|
test.submit_turn_with_policy(
|
|
"hello default cached web search",
|
|
SandboxPolicy::new_read_only_policy(),
|
|
)
|
|
.await
|
|
.expect("submit turn");
|
|
|
|
let body = resp_mock.single_request().body_json();
|
|
let tool = find_web_search_tool(&body);
|
|
assert_eq!(
|
|
tool.get("external_web_access").and_then(Value::as_bool),
|
|
Some(false),
|
|
"default web_search should be cached when unset"
|
|
);
|
|
}
|
|
|
|
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
|
async fn web_search_mode_updates_between_turns_with_sandbox_policy() {
|
|
skip_if_no_network!();
|
|
|
|
let server = start_mock_server().await;
|
|
let resp_mock = responses::mount_sse_sequence(
|
|
&server,
|
|
vec![
|
|
responses::sse(vec![
|
|
responses::ev_response_created("resp-1"),
|
|
responses::ev_completed("resp-1"),
|
|
]),
|
|
responses::sse(vec![
|
|
responses::ev_response_created("resp-2"),
|
|
responses::ev_completed("resp-2"),
|
|
]),
|
|
],
|
|
)
|
|
.await;
|
|
|
|
let mut builder = test_codex()
|
|
.with_model("gpt-5-codex")
|
|
.with_config(|config| {
|
|
config
|
|
.web_search_mode
|
|
.set(WebSearchMode::Cached)
|
|
.expect("test web_search_mode should satisfy constraints");
|
|
config.features.disable(Feature::WebSearchCached);
|
|
config.features.disable(Feature::WebSearchRequest);
|
|
});
|
|
let test = builder
|
|
.build(&server)
|
|
.await
|
|
.expect("create test Codex conversation");
|
|
|
|
test.submit_turn_with_policy("hello cached", SandboxPolicy::new_read_only_policy())
|
|
.await
|
|
.expect("submit first turn");
|
|
test.submit_turn_with_policy("hello live", SandboxPolicy::DangerFullAccess)
|
|
.await
|
|
.expect("submit second turn");
|
|
|
|
let requests = resp_mock.requests();
|
|
assert_eq!(requests.len(), 2, "expected two response requests");
|
|
|
|
let first_body = requests[0].body_json();
|
|
let first_tool = find_web_search_tool(&first_body);
|
|
assert_eq!(
|
|
first_tool
|
|
.get("external_web_access")
|
|
.and_then(Value::as_bool),
|
|
Some(false),
|
|
"read-only policy should default web_search to cached"
|
|
);
|
|
|
|
let second_body = requests[1].body_json();
|
|
let second_tool = find_web_search_tool(&second_body);
|
|
assert_eq!(
|
|
second_tool
|
|
.get("external_web_access")
|
|
.and_then(Value::as_bool),
|
|
Some(true),
|
|
"danger-full-access policy should default web_search to live"
|
|
);
|
|
}
|