mirror of https://github.com/openai/codex.git, synced 2026-02-03 23:43:39 +00:00

Compare commits
64 commits: codex-work ... feat-sessi
| Author | SHA1 | Date |
|---|---|---|
| | 21f259ced0 | |
| | 24f027b4ab | |
| | 53056a4f8d | |
| | d7bcca0de7 | |
| | d989ad2ade | |
| | 32ad28b660 | |
| | 5a32ef8cd5 | |
| | d17ac501eb | |
| | 50cae2152c | |
| | 4be0d3f71a | |
| | 33f8ea4ec2 | |
| | 560895eab7 | |
| | 8a231bea77 | |
| | a894ef8eb0 | |
| | 8bdbbfa8d0 | |
| | b7a2e470bb | |
| | 5777eccddc | |
| | b83ab19bc8 | |
| | 71613fdfdd | |
| | bbc8af7a15 | |
| | c15a13a3ea | |
| | 5a40f3bfd3 | |
| | 33946d2612 | |
| | e777444283 | |
| | 666f82588a | |
| | 73680a7b4b | |
| | 4b9d4de64b | |
| | 86f96cfa2f | |
| | 66d4b8a950 | |
| | 175feb3048 | |
| | 4e508c4453 | |
| | c36be90c87 | |
| | e44766d3ac | |
| | 2fc5d026b8 | |
| | d9543bf5f4 | |
| | b3a4bae560 | |
| | 30db43f067 | |
| | 6c21aec303 | |
| | 209335b780 | |
| | c563d2d9d1 | |
| | b5ff43f45f | |
| | 2b72cde474 | |
| | df5207dcb3 | |
| | 60b73acf3c | |
| | d41cc94e04 | |
| | a6ca72561c | |
| | 64cc979687 | |
| | e0911d0ed3 | |
| | e27ea34dda | |
| | 121f081d43 | |
| | 5acb68d6c7 | |
| | 7b26f3539e | |
| | 024f49e63e | |
| | 9eff9b835e | |
| | 37a287a3f0 | |
| | 4190aa3a04 | |
| | b267af326d | |
| | c479bff448 | |
| | 231103ba3b | |
| | 061b4d900d | |
| | edc4f8a2d0 | |
| | ffa0486a41 | |
| | 62ea1d6940 | |
| | bdd376dfc9 | |
4  .github/codex/labels/codex-rust-review.md  (vendored)
@@ -15,10 +15,10 @@ Things to look out for when doing the review:

## Code Organization

- Each create in the Cargo workspace in `codex-rs` has a specific purpose: make a note if you believe new code is not introduced in the correct crate.
- Each crate in the Cargo workspace in `codex-rs` has a specific purpose: make a note if you believe new code is not introduced in the correct crate.
- When possible, try to keep the `core` crate as small as possible. Non-core but shared logic is often a good candidate for `codex-rs/common`.
- Be wary of large files and offer suggestions for how to break things into more reasonably-sized files.
- Rust files should generally be organized such that the public parts of the API appear near the top of the file and helper functions go below. This is analagous to the "inverted pyramid" structure that is favored in journalism.
- Rust files should generally be organized such that the public parts of the API appear near the top of the file and helper functions go below. This is analogous to the "inverted pyramid" structure that is favored in journalism.

## Assertions in Tests
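A minimal sketch of the "inverted pyramid" layout convention the guideline above describes; the module and function names here are hypothetical, chosen only to show the ordering:

```rust
// Public API first: this is what a reader of the file scans for.
pub fn summarize(input: &str) -> String {
    let trimmed = normalize(input);
    first_sentence(trimmed).to_string()
}

// Private helpers follow, roughly in the order they are called above.
fn normalize(input: &str) -> &str {
    input.trim()
}

fn first_sentence(text: &str) -> &str {
    text.split('.').next().unwrap_or(text)
}
```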
@@ -1,7 +1,7 @@

<p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install --cask codex</code></p>
<p align="center"><strong>Codex CLI</strong> is a coding agent from OpenAI that runs locally on your computer.
<p align="center">
<img src="./.github/codex-cli-splash.png" alt="Codex CLI splash" width="80%" />
<img src="https://github.com/openai/codex/blob/main/.github/codex-cli-splash.png" alt="Codex CLI splash" width="80%" />
</p>
</br>
If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE.</a>
@@ -14,4 +14,4 @@ target_app = "cli"

[[announcements]]
content = "This is a test announcement"
version_regex = "^0\\.0\\.0$"
to_date = "2026-01-10"
to_date = "2026-05-10"
18  codex-rs/Cargo.lock  (generated)
@@ -1088,6 +1088,7 @@ dependencies = [
"codex-arg0",
"codex-backend-client",
"codex-chatgpt",
"codex-cloud-requirements",
"codex-common",
"codex-core",
"codex-execpolicy",

@@ -1400,6 +1401,7 @@ dependencies = [
"codex-state",
"codex-utils-absolute-path",
"codex-utils-cargo-bin",
"codex-utils-home-dir",
"codex-utils-pty",
"codex-utils-readiness",
"codex-utils-string",

@@ -1407,7 +1409,6 @@ dependencies = [
"core-foundation 0.9.4",
"core_test_support",
"ctor 0.6.3",
"dirs",
"dunce",
"encoding_rs",
"env-flags",

@@ -1426,6 +1427,7 @@ dependencies = [
"multimap",
"once_cell",
"openssl-sys",
"opentelemetry_sdk",
"os_info",
"predicates",
"pretty_assertions",

@@ -1450,6 +1452,7 @@ dependencies = [
"thiserror 2.0.17",
"time",
"tokio",
"tokio-tungstenite",
"tokio-util",
"toml 0.9.5",
"toml_edit 0.24.0+spec-1.1.0",

@@ -1487,6 +1490,7 @@ dependencies = [
"assert_cmd",
"clap",
"codex-arg0",
"codex-cloud-requirements",
"codex-common",
"codex-core",
"codex-protocol",

@@ -1776,6 +1780,7 @@ dependencies = [
"strum_macros 0.27.2",
"thiserror 2.0.17",
"tokio",
"tokio-tungstenite",
"tracing",
"tracing-opentelemetry",
"tracing-subscriber",

@@ -1842,7 +1847,7 @@ dependencies = [
"codex-keyring-store",
"codex-protocol",
"codex-utils-cargo-bin",
"dirs",
"codex-utils-home-dir",
"futures",
"keyring",
"mcp-types",

@@ -2007,6 +2012,15 @@ dependencies = [
"thiserror 2.0.17",
]

[[package]]
name = "codex-utils-home-dir"
version = "0.0.0"
dependencies = [
"dirs",
"pretty_assertions",
"tempfile",
]

[[package]]
name = "codex-utils-image"
version = "0.0.0"
@@ -43,6 +43,7 @@ members = [
"utils/cache",
"utils/image",
"utils/json-to-toml",
"utils/home-dir",
"utils/pty",
"utils/readiness",
"utils/string",

@@ -102,6 +103,7 @@ codex-utils-cache = { path = "utils/cache" }
codex-utils-cargo-bin = { path = "utils/cargo-bin" }
codex-utils-image = { path = "utils/image" }
codex-utils-json-to-toml = { path = "utils/json-to-toml" }
codex-utils-home-dir = { path = "utils/home-dir" }
codex-utils-pty = { path = "utils/pty" }
codex-utils-readiness = { path = "utils/readiness" }
codex-utils-string = { path = "utils/string" }
@@ -497,6 +497,14 @@ pub struct ConfigReadResponse {
pub struct ConfigRequirements {
    pub allowed_approval_policies: Option<Vec<AskForApproval>>,
    pub allowed_sandbox_modes: Option<Vec<SandboxMode>>,
    pub enforce_residency: Option<ResidencyRequirement>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "lowercase")]
#[ts(export_to = "v2/")]
pub enum ResidencyRequirement {
    Us,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
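As a quick illustration of the wire format implied by `#[serde(rename_all = "lowercase")]` on the new enum above, a round-trip sketch (the test module name is hypothetical; only the serde behavior is being shown):

```rust
#[cfg(test)]
mod residency_wire_format {
    use super::ResidencyRequirement;

    #[test]
    fn serializes_as_lowercase_string() {
        // `rename_all = "lowercase"` turns the `Us` variant into "us" on the wire.
        let json = serde_json::to_string(&ResidencyRequirement::Us).unwrap();
        assert_eq!(json, "\"us\"");
        let back: ResidencyRequirement = serde_json::from_str(&json).unwrap();
        assert_eq!(back, ResidencyRequirement::Us);
    }
}
```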
@@ -2113,7 +2121,11 @@ pub enum ThreadItem {
    },
    #[serde(rename_all = "camelCase")]
    #[ts(rename_all = "camelCase")]
    WebSearch { id: String, query: String },
    WebSearch {
        id: String,
        query: String,
        action: Option<WebSearchAction>,
    },
    #[serde(rename_all = "camelCase")]
    #[ts(rename_all = "camelCase")]
    ImageView { id: String, path: String },

@@ -2128,6 +2140,42 @@ pub enum ThreadItem {
    ContextCompaction { id: String },
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type", rename_all = "camelCase")]
pub enum WebSearchAction {
    Search {
        query: Option<String>,
        queries: Option<Vec<String>>,
    },
    OpenPage {
        url: Option<String>,
    },
    FindInPage {
        url: Option<String>,
        pattern: Option<String>,
    },
    #[serde(other)]
    Other,
}

impl From<codex_protocol::models::WebSearchAction> for WebSearchAction {
    fn from(value: codex_protocol::models::WebSearchAction) -> Self {
        match value {
            codex_protocol::models::WebSearchAction::Search { query, queries } => {
                WebSearchAction::Search { query, queries }
            }
            codex_protocol::models::WebSearchAction::OpenPage { url } => {
                WebSearchAction::OpenPage { url }
            }
            codex_protocol::models::WebSearchAction::FindInPage { url, pattern } => {
                WebSearchAction::FindInPage { url, pattern }
            }
            codex_protocol::models::WebSearchAction::Other => WebSearchAction::Other,
        }
    }
}

impl From<CoreTurnItem> for ThreadItem {
    fn from(value: CoreTurnItem) -> Self {
        match value {

@@ -2157,6 +2205,7 @@ impl From<CoreTurnItem> for ThreadItem {
            CoreTurnItem::WebSearch(search) => ThreadItem::WebSearch {
                id: search.id,
                query: search.query,
                action: Some(WebSearchAction::from(search.action)),
            },
            CoreTurnItem::ContextCompaction(compaction) => {
                ThreadItem::ContextCompaction { id: compaction.id }

@@ -2810,7 +2859,7 @@ mod tests {
    use codex_protocol::items::TurnItem;
    use codex_protocol::items::UserMessageItem;
    use codex_protocol::items::WebSearchItem;
    use codex_protocol::models::WebSearchAction;
    use codex_protocol::models::WebSearchAction as CoreWebSearchAction;
    use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
    use codex_protocol::user_input::UserInput as CoreUserInput;
    use pretty_assertions::assert_eq;

@@ -2926,8 +2975,9 @@ mod tests {
    let search_item = TurnItem::WebSearch(WebSearchItem {
        id: "search-1".to_string(),
        query: "docs".to_string(),
        action: WebSearchAction::Search {
        action: CoreWebSearchAction::Search {
            query: Some("docs".to_string()),
            queries: None,
        },
    });

@@ -2936,6 +2986,10 @@ mod tests {
        ThreadItem::WebSearch {
            id: "search-1".to_string(),
            query: "docs".to_string(),
            action: Some(WebSearchAction::Search {
                query: Some("docs".to_string()),
                queries: None,
            }),
        }
    );
}
@@ -19,6 +19,7 @@ workspace = true
anyhow = { workspace = true }
async-trait = { workspace = true }
codex-arg0 = { workspace = true }
codex-cloud-requirements = { workspace = true }
codex-common = { workspace = true, features = ["cli"] }
codex-core = { workspace = true }
codex-backend-client = { workspace = true }
@@ -103,7 +103,7 @@ Example (from OpenAI's official VSCode extension):
- `config/read` — fetch the effective config on disk after resolving config layering.
- `config/value/write` — write a single config key/value to the user's config.toml on disk.
- `config/batchWrite` — apply multiple config edits atomically to the user's config.toml on disk.
- `configRequirements/read` — fetch the loaded requirements allow-lists from `requirements.toml` and/or MDM (or `null` if none are configured).
- `configRequirements/read` — fetch the loaded requirements allow-lists and `enforceResidency` from `requirements.toml` and/or MDM (or `null` if none are configured).

### Example: Start or resume a thread

@@ -450,7 +450,7 @@ Today both notifications carry an empty `items` array even when item events were
- `fileChange` — `{id, changes, status}` describing proposed edits; `changes` list `{path, kind, diff}` and `status` is `inProgress`, `completed`, `failed`, or `declined`.
- `mcpToolCall` — `{id, server, tool, status, arguments, result?, error?}` describing MCP calls; `status` is `inProgress`, `completed`, or `failed`.
- `collabToolCall` — `{id, tool, status, senderThreadId, receiverThreadId?, newThreadId?, prompt?, agentStatus?}` describing collab tool calls (`spawn_agent`, `send_input`, `wait`, `close_agent`); `status` is `inProgress`, `completed`, or `failed`.
- `webSearch` — `{id, query}` for a web search request issued by the agent.
- `webSearch` — `{id, query, action?}` for a web search request issued by the agent; `action` mirrors the Responses API web_search action payload (`search`, `open_page`, `find_in_page`) and may be omitted until completion.
- `imageView` — `{id, path}` emitted when the agent invokes the image viewer tool.
- `enteredReviewMode` — `{id, review}` sent when the reviewer starts; `review` is a short user-facing label such as `"current changes"` or the requested target description.
- `exitedReviewMode` — `{id, review}` emitted when the reviewer finishes; `review` is the full plain-text review (usually, overall notes plus bullet point findings).
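To make the updated `webSearch` shape concrete, here is a sketch of deserializing a hypothetical `action` payload into an enum mirroring the `WebSearchAction` added in this compare; the JSON values are invented for illustration:

```rust
use serde::Deserialize;

// Same serde layout as the enum in the diff: internally tagged by "type",
// camelCase variant names, and `#[serde(other)]` as a forward-compat catch-all.
#[derive(Deserialize, Debug, PartialEq)]
#[serde(tag = "type", rename_all = "camelCase")]
enum WebSearchAction {
    Search { query: Option<String>, queries: Option<Vec<String>> },
    OpenPage { url: Option<String> },
    FindInPage { url: Option<String>, pattern: Option<String> },
    #[serde(other)]
    Other,
}

fn main() {
    // Hypothetical `action` value as it might appear inside a `webSearch` item.
    let payload = r#"{ "type": "openPage", "url": "https://example.com/docs" }"#;
    let action: WebSearchAction = serde_json::from_str(payload).unwrap();
    assert_eq!(
        action,
        WebSearchAction::OpenPage { url: Some("https://example.com/docs".to_string()) }
    );

    // Unknown action types fall back to `Other` instead of failing deserialization.
    let unknown = r#"{ "type": "somethingNew" }"#;
    assert_eq!(
        serde_json::from_str::<WebSearchAction>(unknown).unwrap(),
        WebSearchAction::Other
    );
}
```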
@@ -152,6 +152,7 @@ use codex_core::config::ConfigService;
use codex_core::config::edit::ConfigEdit;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::default_client::get_codex_user_agent;
use codex_core::error::CodexErr;
use codex_core::exec::ExecParams;

@@ -201,6 +202,8 @@ use codex_utils_json_to_toml::json_to_toml;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ffi::OsStr;
use std::fs::FileTimes;
use std::fs::OpenOptions;
use std::io::Error as IoError;
use std::path::Path;
use std::path::PathBuf;

@@ -208,6 +211,7 @@ use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::time::Duration;
use std::time::SystemTime;
use tokio::sync::Mutex;
use tokio::sync::broadcast;
use tokio::sync::oneshot;

@@ -263,6 +267,7 @@ pub(crate) struct CodexMessageProcessor {
    codex_linux_sandbox_exe: Option<PathBuf>,
    config: Arc<Config>,
    cli_overrides: Vec<(String, TomlValue)>,
    cloud_requirements: CloudRequirementsLoader,
    conversation_listeners: HashMap<Uuid, oneshot::Sender<()>>,
    listener_thread_ids_by_subscription: HashMap<Uuid, ThreadId>,
    active_login: Arc<Mutex<Option<ActiveLogin>>>,

@@ -281,6 +286,17 @@ pub(crate) enum ApiVersion {
    V2,
}

pub(crate) struct CodexMessageProcessorArgs {
    pub(crate) auth_manager: Arc<AuthManager>,
    pub(crate) thread_manager: Arc<ThreadManager>,
    pub(crate) outgoing: Arc<OutgoingMessageSender>,
    pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
    pub(crate) config: Arc<Config>,
    pub(crate) cli_overrides: Vec<(String, TomlValue)>,
    pub(crate) cloud_requirements: CloudRequirementsLoader,
    pub(crate) feedback: CodexFeedback,
}

impl CodexMessageProcessor {
    async fn load_thread(
        &self,

@@ -305,15 +321,17 @@ impl CodexMessageProcessor {

        Ok((thread_id, thread))
    }
    pub fn new(
        auth_manager: Arc<AuthManager>,
        thread_manager: Arc<ThreadManager>,
        outgoing: Arc<OutgoingMessageSender>,
        codex_linux_sandbox_exe: Option<PathBuf>,
        config: Arc<Config>,
        cli_overrides: Vec<(String, TomlValue)>,
        feedback: CodexFeedback,
    ) -> Self {
    pub fn new(args: CodexMessageProcessorArgs) -> Self {
        let CodexMessageProcessorArgs {
            auth_manager,
            thread_manager,
            outgoing,
            codex_linux_sandbox_exe,
            config,
            cli_overrides,
            cloud_requirements,
            feedback,
        } = args;
        Self {
            auth_manager,
            thread_manager,

@@ -321,6 +339,7 @@ impl CodexMessageProcessor {
            codex_linux_sandbox_exe,
            config,
            cli_overrides,
            cloud_requirements,
            conversation_listeners: HashMap::new(),
            listener_thread_ids_by_subscription: HashMap::new(),
            active_login: Arc::new(Mutex::new(None)),

@@ -333,7 +352,10 @@ impl CodexMessageProcessor {
    }

    async fn load_latest_config(&self) -> Result<Config, JSONRPCErrorError> {
        Config::load_with_cli_overrides(self.cli_overrides.clone())
        codex_core::config::ConfigBuilder::default()
            .cli_overrides(self.cli_overrides.clone())
            .cloud_requirements(self.cloud_requirements.clone())
            .build()
            .await
            .map_err(|err| JSONRPCErrorError {
                code: INTERNAL_ERROR_CODE,

@@ -1519,6 +1541,7 @@ impl CodexMessageProcessor {
            &self.cli_overrides,
            Some(request_overrides),
            typesafe_overrides,
            &self.cloud_requirements,
        )
        .await
        {

@@ -1603,6 +1626,7 @@ impl CodexMessageProcessor {
            &self.cli_overrides,
            config,
            typesafe_overrides,
            &self.cloud_requirements,
        )
        .await
        {

@@ -1749,7 +1773,7 @@ impl CodexMessageProcessor {
            codex_linux_sandbox_exe: self.codex_linux_sandbox_exe.clone(),
            base_instructions,
            developer_instructions,
            model_personality: personality,
            personality,
            ..Default::default()
        }
    }

@@ -1957,6 +1981,28 @@ impl CodexMessageProcessor {
            message: format!("failed to unarchive thread: {err}"),
            data: None,
        })?;
        tokio::task::spawn_blocking({
            let restored_path = restored_path.clone();
            move || -> std::io::Result<()> {
                let times = FileTimes::new().set_modified(SystemTime::now());
                OpenOptions::new()
                    .append(true)
                    .open(&restored_path)?
                    .set_times(times)?;
                Ok(())
            }
        })
        .await
        .map_err(|err| JSONRPCErrorError {
            code: INTERNAL_ERROR_CODE,
            message: format!("failed to update unarchived thread timestamp: {err}"),
            data: None,
        })?
        .map_err(|err| JSONRPCErrorError {
            code: INTERNAL_ERROR_CODE,
            message: format!("failed to update unarchived thread timestamp: {err}"),
            data: None,
        })?;
        if let Some(ctx) = state_db_ctx {
            let _ = ctx
                .mark_unarchived(thread_id, restored_path.as_path())

@@ -2350,6 +2396,7 @@ impl CodexMessageProcessor {
            request_overrides,
            typesafe_overrides,
            history_cwd,
            &self.cloud_requirements,
        )
        .await
        {

@@ -2542,6 +2589,7 @@ impl CodexMessageProcessor {
            request_overrides,
            typesafe_overrides,
            history_cwd,
            &self.cloud_requirements,
        )
        .await
        {

@@ -3336,6 +3384,7 @@ impl CodexMessageProcessor {
            request_overrides,
            typesafe_overrides,
            history_cwd,
            &self.cloud_requirements,
        )
        .await
        {

@@ -3524,6 +3573,7 @@ impl CodexMessageProcessor {
            request_overrides,
            typesafe_overrides,
            history_cwd,
            &self.cloud_requirements,
        )
        .await
        {

@@ -4794,6 +4844,7 @@ async fn derive_config_from_params(
    cli_overrides: &[(String, TomlValue)],
    request_overrides: Option<HashMap<String, serde_json::Value>>,
    typesafe_overrides: ConfigOverrides,
    cloud_requirements: &CloudRequirementsLoader,
) -> std::io::Result<Config> {
    let merged_cli_overrides = cli_overrides
        .iter()

@@ -4806,7 +4857,11 @@ async fn derive_config_from_params(
    )
    .collect::<Vec<_>>();

    Config::load_with_cli_overrides_and_harness_overrides(merged_cli_overrides, typesafe_overrides)
    codex_core::config::ConfigBuilder::default()
        .cli_overrides(merged_cli_overrides)
        .harness_overrides(typesafe_overrides)
        .cloud_requirements(cloud_requirements.clone())
        .build()
        .await
}

@@ -4815,6 +4870,7 @@ async fn derive_config_for_cwd(
    request_overrides: Option<HashMap<String, serde_json::Value>>,
    typesafe_overrides: ConfigOverrides,
    cwd: Option<PathBuf>,
    cloud_requirements: &CloudRequirementsLoader,
) -> std::io::Result<Config> {
    let merged_cli_overrides = cli_overrides
        .iter()

@@ -4831,6 +4887,7 @@ async fn derive_config_for_cwd(
    .cli_overrides(merged_cli_overrides)
    .harness_overrides(typesafe_overrides)
    .fallback_cwd(cwd)
    .cloud_requirements(cloud_requirements.clone())
    .build()
    .await
}

@@ -12,8 +12,10 @@ use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::SandboxMode;
use codex_core::config::ConfigService;
use codex_core::config::ConfigServiceError;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigRequirementsToml;
use codex_core::config_loader::LoaderOverrides;
use codex_core::config_loader::ResidencyRequirement as CoreResidencyRequirement;
use codex_core::config_loader::SandboxModeRequirement as CoreSandboxModeRequirement;
use serde_json::json;
use std::path::PathBuf;

@@ -29,9 +31,15 @@ impl ConfigApi {
        codex_home: PathBuf,
        cli_overrides: Vec<(String, TomlValue)>,
        loader_overrides: LoaderOverrides,
        cloud_requirements: CloudRequirementsLoader,
    ) -> Self {
        Self {
            service: ConfigService::new(codex_home, cli_overrides, loader_overrides),
            service: ConfigService::new(
                codex_home,
                cli_overrides,
                loader_overrides,
                cloud_requirements,
            ),
        }
    }

@@ -84,6 +92,9 @@ fn map_requirements_toml_to_api(requirements: ConfigRequirementsToml) -> ConfigR
            .filter_map(map_sandbox_mode_requirement_to_api)
            .collect()
        }),
        enforce_residency: requirements
            .enforce_residency
            .map(map_residency_requirement_to_api),
    }
}

@@ -96,6 +107,14 @@ fn map_sandbox_mode_requirement_to_api(mode: CoreSandboxModeRequirement) -> Opti
    }
}

fn map_residency_requirement_to_api(
    residency: CoreResidencyRequirement,
) -> codex_app_server_protocol::ResidencyRequirement {
    match residency {
        CoreResidencyRequirement::Us => codex_app_server_protocol::ResidencyRequirement::Us,
    }
}

fn map_error(err: ConfigServiceError) -> JSONRPCErrorError {
    if let Some(code) = err.write_error_code() {
        return config_write_error(code, err.to_string());

@@ -137,6 +156,7 @@ mod tests {
        ]),
        mcp_servers: None,
        rules: None,
        enforce_residency: Some(CoreResidencyRequirement::Us),
    };

    let mapped = map_requirements_toml_to_api(requirements);

@@ -152,5 +172,9 @@ mod tests {
        mapped.allowed_sandbox_modes,
        Some(vec![SandboxMode::ReadOnly]),
    );
    assert_eq!(
        mapped.enforce_residency,
        Some(codex_app_server_protocol::ResidencyRequirement::Us),
    );
    }
}
@@ -1,17 +1,14 @@
use std::num::NonZero;
use std::num::NonZeroUsize;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

use codex_app_server_protocol::FuzzyFileSearchResult;
use codex_file_search as file_search;
use tokio::task::JoinSet;
use tracing::warn;

const LIMIT_PER_ROOT: usize = 50;
const MATCH_LIMIT: usize = 50;
const MAX_THREADS: usize = 12;
const COMPUTE_INDICES: bool = true;

pub(crate) async fn run_fuzzy_file_search(
    query: String,

@@ -23,64 +20,54 @@ pub(crate) async fn run_fuzzy_file_search(
}

#[expect(clippy::expect_used)]
let limit_per_root =
    NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize");
let limit = NonZero::new(MATCH_LIMIT).expect("MATCH_LIMIT should be a valid non-zero usize");

let cores = std::thread::available_parallelism()
    .map(std::num::NonZero::get)
    .unwrap_or(1);
let threads = cores.min(MAX_THREADS);
let threads_per_root = (threads / roots.len()).max(1);
let threads = NonZero::new(threads_per_root).unwrap_or(NonZeroUsize::MIN);
#[expect(clippy::expect_used)]
let threads = NonZero::new(threads.max(1)).expect("threads should be non-zero");
let search_dirs: Vec<PathBuf> = roots.iter().map(PathBuf::from).collect();

let mut files: Vec<FuzzyFileSearchResult> = Vec::new();
let mut join_set = JoinSet::new();

for root in roots {
    let search_dir = PathBuf::from(&root);
    let query = query.clone();
    let cancel_flag = cancellation_flag.clone();
    join_set.spawn_blocking(move || {
        match file_search::run(
            query.as_str(),
            limit_per_root,
            &search_dir,
            Vec::new(),
let mut files = match tokio::task::spawn_blocking(move || {
    file_search::run(
        query.as_str(),
        search_dirs,
        file_search::FileSearchOptions {
            limit,
            threads,
            cancel_flag,
            COMPUTE_INDICES,
            true,
        ) {
            Ok(res) => Ok((root, res)),
            Err(err) => Err((root, err)),
        }
    });
}

while let Some(res) = join_set.join_next().await {
    match res {
        Ok(Ok((root, res))) => {
            for m in res.matches {
                let path = m.path;
                let file_name = file_search::file_name_from_path(&path);
                let result = FuzzyFileSearchResult {
                    root: root.clone(),
                    path,
                    file_name,
                    score: m.score,
                    indices: m.indices,
                };
                files.push(result);
            compute_indices: true,
            ..Default::default()
        },
        Some(cancellation_flag),
    )
})
.await
{
    Ok(Ok(res)) => res
        .matches
        .into_iter()
        .map(|m| {
            let file_name = m.path.file_name().unwrap_or_default();
            FuzzyFileSearchResult {
                root: m.root.to_string_lossy().to_string(),
                path: m.path.to_string_lossy().to_string(),
                file_name: file_name.to_string_lossy().to_string(),
                score: m.score,
                indices: m.indices,
            }
        }
        Ok(Err((root, err))) => {
            warn!("fuzzy-file-search in dir '{root}' failed: {err}");
        }
        Err(err) => {
            warn!("fuzzy-file-search join_next failed: {err}");
        }
    })
    .collect::<Vec<_>>(),
    Ok(Err(err)) => {
        warn!("fuzzy-file-search failed: {err}");
        Vec::new()
    }
    }
    Err(err) => {
        warn!("fuzzy-file-search join failed: {err}");
        Vec::new()
    }
};

files.sort_by(file_search::cmp_by_score_desc_then_path_asc::<
    FuzzyFileSearchResult,
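Because the split-diff rows above interleave the removed and added implementations, here is the new code path reassembled from only the added rows; treat it as a reading aid reconstructed from the hunk, not authoritative source:

```rust
// New shape: one blocking task searches every root in a single call,
// instead of one JoinSet task per root as before.
let mut files = match tokio::task::spawn_blocking(move || {
    file_search::run(
        query.as_str(),
        search_dirs,
        file_search::FileSearchOptions {
            limit,
            threads,
            compute_indices: true,
            ..Default::default()
        },
        Some(cancellation_flag),
    )
})
.await
{
    Ok(Ok(res)) => res
        .matches
        .into_iter()
        .map(|m| {
            let file_name = m.path.file_name().unwrap_or_default();
            FuzzyFileSearchResult {
                root: m.root.to_string_lossy().to_string(),
                path: m.path.to_string_lossy().to_string(),
                file_name: file_name.to_string_lossy().to_string(),
                score: m.score,
                indices: m.indices,
            }
        })
        .collect::<Vec<_>>(),
    // Search errors and join failures both degrade to an empty result set.
    Ok(Err(err)) => {
        warn!("fuzzy-file-search failed: {err}");
        Vec::new()
    }
    Err(err) => {
        warn!("fuzzy-file-search join failed: {err}");
        Vec::new()
    }
};
```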
@@ -1,8 +1,11 @@
#![deny(clippy::print_stdout, clippy::print_stderr)]

use codex_cloud_requirements::cloud_requirements_loader;
use codex_common::CliConfigOverrides;
use codex_core::AuthManager;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::ConfigLayerStackOrdering;
use codex_core::config_loader::LoaderOverrides;
use std::io::ErrorKind;

@@ -10,6 +13,7 @@ use std::io::Result as IoResult;
use std::path::PathBuf;

use crate::message_processor::MessageProcessor;
use crate::message_processor::MessageProcessorArgs;
use crate::outgoing_message::OutgoingMessage;
use crate::outgoing_message::OutgoingMessageSender;
use codex_app_server_protocol::ConfigLayerSource;

@@ -204,11 +208,49 @@ pub async fn run_main(
        format!("error parsing -c overrides: {e}"),
    )
})?;
let cloud_requirements = match ConfigBuilder::default()
    .cli_overrides(cli_kv_overrides.clone())
    .loader_overrides(loader_overrides.clone())
    .build()
    .await
{
    Ok(config) => {
        let effective_toml = config.config_layer_stack.effective_config();
        match effective_toml.try_into() {
            Ok(config_toml) => {
                if let Err(err) = codex_core::personality_migration::maybe_migrate_personality(
                    &config.codex_home,
                    &config_toml,
                )
                .await
                {
                    warn!(error = %err, "Failed to run personality migration");
                }
            }
            Err(err) => {
                warn!(error = %err, "Failed to deserialize config for personality migration");
            }
        }

        let auth_manager = AuthManager::shared(
            config.codex_home.clone(),
            false,
            config.cli_auth_credentials_store_mode,
        );
        cloud_requirements_loader(auth_manager, config.chatgpt_base_url)
    }
    Err(err) => {
        warn!(error = %err, "Failed to preload config for cloud requirements");
        // TODO(gt): Make cloud requirements preload failures blocking once we can fail-closed.
        CloudRequirementsLoader::default()
    }
};
let loader_overrides_for_config_api = loader_overrides.clone();
let mut config_warnings = Vec::new();
let config = match ConfigBuilder::default()
    .cli_overrides(cli_kv_overrides.clone())
    .loader_overrides(loader_overrides)
    .cloud_requirements(cloud_requirements.clone())
    .build()
    .await
{

@@ -290,15 +332,16 @@ pub async fn run_main(
let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx);
let cli_overrides: Vec<(String, TomlValue)> = cli_kv_overrides.clone();
let loader_overrides = loader_overrides_for_config_api;
let mut processor = MessageProcessor::new(
    outgoing_message_sender,
let mut processor = MessageProcessor::new(MessageProcessorArgs {
    outgoing: outgoing_message_sender,
    codex_linux_sandbox_exe,
    std::sync::Arc::new(config),
    config: std::sync::Arc::new(config),
    cli_overrides,
    loader_overrides,
    feedback.clone(),
    cloud_requirements: cloud_requirements.clone(),
    feedback: feedback.clone(),
    config_warnings,
);
});
let mut thread_created_rx = processor.thread_created_receiver();
async move {
    let mut listen_for_threads = true;
@@ -2,6 +2,7 @@ use std::path::PathBuf;
use std::sync::Arc;

use crate::codex_message_processor::CodexMessageProcessor;
use crate::codex_message_processor::CodexMessageProcessorArgs;
use crate::config_api::ConfigApi;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::OutgoingMessageSender;

@@ -31,10 +32,12 @@ use codex_core::auth::ExternalAuthRefreshReason;
use codex_core::auth::ExternalAuthRefresher;
use codex_core::auth::ExternalAuthTokens;
use codex_core::config::Config;
use codex_core::config_loader::CloudRequirementsLoader;
use codex_core::config_loader::LoaderOverrides;
use codex_core::default_client::SetOriginatorError;
use codex_core::default_client::USER_AGENT_SUFFIX;
use codex_core::default_client::get_codex_user_agent;
use codex_core::default_client::set_default_client_residency_requirement;
use codex_core::default_client::set_default_originator;
use codex_feedback::CodexFeedback;
use codex_protocol::ThreadId;

@@ -102,22 +105,36 @@ pub(crate) struct MessageProcessor {
    outgoing: Arc<OutgoingMessageSender>,
    codex_message_processor: CodexMessageProcessor,
    config_api: ConfigApi,
    config: Arc<Config>,
    initialized: bool,
    config_warnings: Vec<ConfigWarningNotification>,
}

pub(crate) struct MessageProcessorArgs {
    pub(crate) outgoing: OutgoingMessageSender,
    pub(crate) codex_linux_sandbox_exe: Option<PathBuf>,
    pub(crate) config: Arc<Config>,
    pub(crate) cli_overrides: Vec<(String, TomlValue)>,
    pub(crate) loader_overrides: LoaderOverrides,
    pub(crate) cloud_requirements: CloudRequirementsLoader,
    pub(crate) feedback: CodexFeedback,
    pub(crate) config_warnings: Vec<ConfigWarningNotification>,
}

impl MessageProcessor {
    /// Create a new `MessageProcessor`, retaining a handle to the outgoing
    /// `Sender` so handlers can enqueue messages to be written to stdout.
    pub(crate) fn new(
        outgoing: OutgoingMessageSender,
        codex_linux_sandbox_exe: Option<PathBuf>,
        config: Arc<Config>,
        cli_overrides: Vec<(String, TomlValue)>,
        loader_overrides: LoaderOverrides,
        feedback: CodexFeedback,
        config_warnings: Vec<ConfigWarningNotification>,
    ) -> Self {
    pub(crate) fn new(args: MessageProcessorArgs) -> Self {
        let MessageProcessorArgs {
            outgoing,
            codex_linux_sandbox_exe,
            config,
            cli_overrides,
            loader_overrides,
            cloud_requirements,
            feedback,
            config_warnings,
        } = args;
        let outgoing = Arc::new(outgoing);
        let auth_manager = AuthManager::shared(
            config.codex_home.clone(),

@@ -133,21 +150,28 @@ impl MessageProcessor {
            auth_manager.clone(),
            SessionSource::VSCode,
        ));
        let codex_message_processor = CodexMessageProcessor::new(
        let codex_message_processor = CodexMessageProcessor::new(CodexMessageProcessorArgs {
            auth_manager,
            thread_manager,
            outgoing.clone(),
            outgoing: outgoing.clone(),
            codex_linux_sandbox_exe,
            Arc::clone(&config),
            cli_overrides.clone(),
            config: Arc::clone(&config),
            cli_overrides: cli_overrides.clone(),
            cloud_requirements: cloud_requirements.clone(),
            feedback,
        });
        let config_api = ConfigApi::new(
            config.codex_home.clone(),
            cli_overrides,
            loader_overrides,
            cloud_requirements,
        );
        let config_api = ConfigApi::new(config.codex_home.clone(), cli_overrides, loader_overrides);

        Self {
            outgoing,
            codex_message_processor,
            config_api,
            config,
            initialized: false,
            config_warnings,
        }

@@ -220,6 +244,7 @@ impl MessageProcessor {
        }
    }
}
set_default_client_residency_requirement(self.config.enforce_residency.value());
let user_agent_suffix = format!("{name}; {version}");
if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() {
    *suffix = Some(user_agent_suffix);
@@ -402,7 +402,7 @@ async fn thread_resume_accepts_personality_override() -> Result<()> {
    .send_thread_resume_request(ThreadResumeParams {
        thread_id: thread.id.clone(),
        model: Some("gpt-5.2-codex".to_string()),
        personality: Some(Personality::Friendly),
        personality: Some(Personality::Pragmatic),
        ..Default::default()
    })
    .await?;

@@ -11,7 +11,11 @@ use codex_app_server_protocol::ThreadUnarchiveParams;
use codex_app_server_protocol::ThreadUnarchiveResponse;
use codex_core::find_archived_thread_path_by_id_str;
use codex_core::find_thread_path_by_id_str;
use std::fs::FileTimes;
use std::fs::OpenOptions;
use std::path::Path;
use std::time::Duration;
use std::time::SystemTime;
use tempfile::TempDir;
use tokio::time::timeout;

@@ -62,6 +66,16 @@ async fn thread_unarchive_moves_rollout_back_into_sessions_directory() -> Result
    archived_path.exists(),
    "expected {archived_path_display} to exist"
);
let old_time = SystemTime::UNIX_EPOCH + Duration::from_secs(1);
let old_timestamp = old_time
    .duration_since(SystemTime::UNIX_EPOCH)
    .expect("old timestamp")
    .as_secs() as i64;
let times = FileTimes::new().set_modified(old_time);
OpenOptions::new()
    .append(true)
    .open(&archived_path)?
    .set_times(times)?;

let unarchive_id = mcp
    .send_thread_unarchive_request(ThreadUnarchiveParams {

@@ -73,7 +87,13 @@ async fn thread_unarchive_moves_rollout_back_into_sessions_directory() -> Result
    mcp.read_stream_until_response_message(RequestId::Integer(unarchive_id)),
)
.await??;
let _: ThreadUnarchiveResponse = to_response::<ThreadUnarchiveResponse>(unarchive_resp)?;
let ThreadUnarchiveResponse {
    thread: unarchived_thread,
} = to_response::<ThreadUnarchiveResponse>(unarchive_resp)?;
assert!(
    unarchived_thread.updated_at > old_timestamp,
    "expected updated_at to be bumped on unarchive"
);

let rollout_path_display = rollout_path.display();
assert!(
@@ -451,7 +451,7 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
        text: "Hello".to_string(),
        text_elements: Vec::new(),
    }],
    personality: Some(Personality::Friendly),
    personality: Some(Personality::Pragmatic),
    ..Default::default()
})
.await?;

@@ -484,6 +484,117 @@ async fn turn_start_accepts_personality_override_v2() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn turn_start_change_personality_mid_thread_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
    let sse1 = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "Done"),
        responses::ev_completed("resp-1"),
    ]);
    let sse2 = responses::sse(vec![
        responses::ev_response_created("resp-2"),
        responses::ev_assistant_message("msg-2", "Done"),
        responses::ev_completed("resp-2"),
    ]);
    let response_mock = responses::mount_sse_sequence(&server, vec![sse1, sse2]).await;

    let codex_home = TempDir::new()?;
    create_config_toml(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::from([(Feature::Personality, true)]),
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("exp-codex-personality".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            personality: None,
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let _turn: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp)?;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let turn_req2 = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "Hello again".to_string(),
                text_elements: Vec::new(),
            }],
            personality: Some(Personality::Pragmatic),
            ..Default::default()
        })
        .await?;
    let turn_resp2: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req2)),
    )
    .await??;
    let _turn2: TurnStartResponse = to_response::<TurnStartResponse>(turn_resp2)?;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let requests = response_mock.requests();
    assert_eq!(requests.len(), 2, "expected two requests");

    let first_developer_texts = requests[0].message_input_texts("developer");
    assert!(
        first_developer_texts
            .iter()
            .all(|text| !text.contains("<personality_spec>")),
        "expected no personality update message in first request, got {first_developer_texts:?}"
    );

    let second_developer_texts = requests[1].message_input_texts("developer");
    assert!(
        second_developer_texts
            .iter()
            .any(|text| text.contains("<personality_spec>")),
        "expected personality update message in second request, got {second_developer_texts:?}"
    );

    Ok(())
}

#[tokio::test]
async fn turn_start_accepts_local_image_input() -> Result<()> {
    // Two Codex turns hit the mock model (session start + turn/start).
@@ -27,7 +27,7 @@ pub fn run_main() -> i32 {
match std::io::stdin().read_to_string(&mut buf) {
    Ok(_) => {
        if buf.is_empty() {
            eprintln!("Usage: apply_patch 'PATCH'\n echo 'PATCH' | apply-patch");
            eprintln!("Usage: apply_patch 'PATCH'\n echo 'PATCH' | apply_patch");
            return 2;
        }
        buf
@@ -1,7 +1,8 @@
//! Cloud-hosted config requirements for Codex.
//!
//! This crate fetches `requirements.toml` data from the backend as an alternative to loading it
//! from the local filesystem. It only applies to Enterprise ChatGPT customers.
//! from the local filesystem. It only applies to Business (aka Enterprise CBP) or Enterprise ChatGPT
//! customers.
//!
//! Today, fetching is best-effort: on error or timeout, Codex continues without cloud requirements.
//! We expect to tighten this so that Enterprise ChatGPT customers must successfully fetch these

@@ -19,7 +20,7 @@ use std::time::Duration;
use std::time::Instant;
use tokio::time::timeout;

/// This blocks codecs startup, so must be short.
/// This blocks codex startup, so must be short.
const CLOUD_REQUIREMENTS_TIMEOUT: Duration = Duration::from_secs(5);

#[async_trait]

@@ -119,7 +120,12 @@ impl CloudRequirementsService {

async fn fetch(&self) -> Option<ConfigRequirementsToml> {
    let auth = self.auth_manager.auth().await?;
    if !(auth.is_chatgpt_auth() && auth.account_plan_type() == Some(PlanType::Enterprise)) {
    if !auth.is_chatgpt_auth()
        || !matches!(
            auth.account_plan_type(),
            Some(PlanType::Business | PlanType::Enterprise)
        )
    {
        return None;
    }

@@ -269,10 +275,9 @@ mod tests {
}

#[tokio::test]
async fn fetch_cloud_requirements_skips_non_enterprise_plan() {
    let auth_manager = auth_manager_with_plan("pro");
async fn fetch_cloud_requirements_skips_non_business_or_enterprise_plan() {
    let service = CloudRequirementsService::new(
        auth_manager,
        auth_manager_with_plan("pro"),
        Arc::new(StaticFetcher { contents: None }),
        CLOUD_REQUIREMENTS_TIMEOUT,
    );

@@ -280,6 +285,27 @@ mod tests {
    assert!(result.is_none());
}

#[tokio::test]
async fn fetch_cloud_requirements_allows_business_plan() {
    let service = CloudRequirementsService::new(
        auth_manager_with_plan("business"),
        Arc::new(StaticFetcher {
            contents: Some("allowed_approval_policies = [\"never\"]".to_string()),
        }),
        CLOUD_REQUIREMENTS_TIMEOUT,
    );
    assert_eq!(
        service.fetch().await,
        Some(ConfigRequirementsToml {
            allowed_approval_policies: Some(vec![AskForApproval::Never]),
            allowed_sandbox_modes: None,
            mcp_servers: None,
            rules: None,
            enforce_residency: None,
        })
    );
}

#[tokio::test]
async fn fetch_cloud_requirements_handles_missing_contents() {
    let result = parse_for_fetch(None);

@@ -315,6 +341,7 @@ mod tests {
        allowed_sandbox_modes: None,
        mcp_servers: None,
        rules: None,
        enforce_residency: None,
    })
);
}
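The best-effort behavior the crate docs describe boils down to a fetch bounded by a short deadline that degrades to `None`. A minimal standalone sketch of that pattern, assuming nothing beyond tokio; the helper name is illustrative, not the crate's API:

```rust
use std::time::Duration;
use tokio::time::timeout;

// Bound a fetch by a short deadline and treat both errors and timeouts as
// "no cloud requirements", so startup is never blocked for long.
async fn fetch_best_effort<F, T, E>(deadline: Duration, fetch: F) -> Option<T>
where
    F: std::future::Future<Output = Result<T, E>>,
{
    match timeout(deadline, fetch).await {
        Ok(Ok(value)) => Some(value),
        Ok(Err(_)) | Err(_) => None, // fetch failed or the deadline elapsed
    }
}

#[tokio::main]
async fn main() {
    let result = fetch_best_effort(Duration::from_secs(5), async {
        Ok::<_, std::io::Error>("requirements".to_string())
    })
    .await;
    assert_eq!(result.as_deref(), Some("requirements"));
}
```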
@@ -6,6 +6,7 @@ use crate::error::ApiError;
use crate::provider::Provider;
use crate::sse::responses::ResponsesStreamEvent;
use crate::sse::responses::process_responses_event;
use crate::telemetry::WebsocketTelemetry;
use codex_client::TransportError;
use futures::SinkExt;
use futures::StreamExt;

@@ -18,6 +19,7 @@ use std::time::Duration;
use tokio::net::TcpStream;
use tokio::sync::Mutex;
use tokio::sync::mpsc;
use tokio::time::Instant;
use tokio_tungstenite::MaybeTlsStream;
use tokio_tungstenite::WebSocketStream;
use tokio_tungstenite::tungstenite::Error as WsError;

@@ -38,14 +40,21 @@ pub struct ResponsesWebsocketConnection {
    // TODO (pakrym): is this the right place for timeout?
    idle_timeout: Duration,
    server_reasoning_included: bool,
    telemetry: Option<Arc<dyn WebsocketTelemetry>>,
}

impl ResponsesWebsocketConnection {
    fn new(stream: WsStream, idle_timeout: Duration, server_reasoning_included: bool) -> Self {
    fn new(
        stream: WsStream,
        idle_timeout: Duration,
        server_reasoning_included: bool,
        telemetry: Option<Arc<dyn WebsocketTelemetry>>,
    ) -> Self {
        Self {
            stream: Arc::new(Mutex::new(Some(stream))),
            idle_timeout,
            server_reasoning_included,
            telemetry,
        }
    }

@@ -62,6 +71,7 @@ impl ResponsesWebsocketConnection {
    let stream = Arc::clone(&self.stream);
    let idle_timeout = self.idle_timeout;
    let server_reasoning_included = self.server_reasoning_included;
    let telemetry = self.telemetry.clone();
    let request_body = serde_json::to_value(&request).map_err(|err| {
        ApiError::Stream(format!("failed to encode websocket request: {err}"))
    })?;

@@ -87,6 +97,7 @@ impl ResponsesWebsocketConnection {
    tx_event.clone(),
    request_body,
    idle_timeout,
    telemetry,
)
.await
{

@@ -114,6 +125,7 @@ impl<A: AuthProvider> ResponsesWebsocketClient<A> {
    &self,
    extra_headers: HeaderMap,
    turn_state: Option<Arc<OnceLock<String>>>,
    telemetry: Option<Arc<dyn WebsocketTelemetry>>,
) -> Result<ResponsesWebsocketConnection, ApiError> {
    let ws_url = self
        .provider

@@ -130,6 +142,7 @@ impl<A: AuthProvider> ResponsesWebsocketClient<A> {
    stream,
    self.provider.stream_idle_timeout,
    server_reasoning_included,
    telemetry,
))
}
}

@@ -218,6 +231,7 @@ async fn run_websocket_response_stream(
    tx_event: mpsc::Sender<std::result::Result<ResponseEvent, ApiError>>,
    request_body: Value,
    idle_timeout: Duration,
    telemetry: Option<Arc<dyn WebsocketTelemetry>>,
) -> Result<(), ApiError> {
    let request_text = match serde_json::to_string(&request_body) {
        Ok(text) => text,

@@ -228,16 +242,26 @@ async fn run_websocket_response_stream(
    }
};

if let Err(err) = ws_stream.send(Message::Text(request_text.into())).await {
    return Err(ApiError::Stream(format!(
        "failed to send websocket request: {err}"
    )));
let request_start = Instant::now();
let result = ws_stream
    .send(Message::Text(request_text.into()))
    .await
    .map_err(|err| ApiError::Stream(format!("failed to send websocket request: {err}")));

if let Some(t) = telemetry.as_ref() {
    t.on_ws_request(request_start.elapsed(), result.as_ref().err());
}

result?;

loop {
    let poll_start = Instant::now();
    let response = tokio::time::timeout(idle_timeout, ws_stream.next())
        .await
        .map_err(|_| ApiError::Stream("idle timeout waiting for websocket".into()));
    if let Some(t) = telemetry.as_ref() {
        t.on_ws_event(&response, poll_start.elapsed());
    }
    let message = match response {
        Ok(Some(Ok(msg))) => msg,
        Ok(Some(Err(err))) => {
||||
@@ -33,9 +33,11 @@ pub use crate::endpoint::responses_websocket::ResponsesWebsocketConnection;
|
||||
pub use crate::error::ApiError;
|
||||
pub use crate::provider::Provider;
|
||||
pub use crate::provider::WireApi;
|
||||
pub use crate::provider::is_azure_responses_wire_base_url;
|
||||
pub use crate::requests::ChatRequest;
|
||||
pub use crate::requests::ChatRequestBuilder;
|
||||
pub use crate::requests::ResponsesRequest;
|
||||
pub use crate::requests::ResponsesRequestBuilder;
|
||||
pub use crate::sse::stream_from_fixture;
|
||||
pub use crate::telemetry::SseTelemetry;
|
||||
pub use crate::telemetry::WebsocketTelemetry;
|
||||
|
||||
@@ -95,16 +95,7 @@ impl Provider {
|
||||
}
|
||||
|
||||
pub fn is_azure_responses_endpoint(&self) -> bool {
|
||||
if self.wire != WireApi::Responses {
|
||||
return false;
|
||||
}
|
||||
|
||||
if self.name.eq_ignore_ascii_case("azure") {
|
||||
return true;
|
||||
}
|
||||
|
||||
self.base_url.to_ascii_lowercase().contains("openai.azure.")
|
||||
|| matches_azure_responses_base_url(&self.base_url)
|
||||
is_azure_responses_wire_base_url(self.wire.clone(), &self.name, Some(&self.base_url))
|
||||
}
|
||||
|
||||
pub fn websocket_url_for_path(&self, path: &str) -> Result<Url, url::ParseError> {
|
||||
@@ -121,6 +112,23 @@ impl Provider {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_azure_responses_wire_base_url(wire: WireApi, name: &str, base_url: Option<&str>) -> bool {
|
||||
if wire != WireApi::Responses {
|
||||
return false;
|
||||
}
|
||||
|
||||
if name.eq_ignore_ascii_case("azure") {
|
||||
return true;
|
||||
}
|
||||
|
||||
let Some(base_url) = base_url else {
|
||||
return false;
|
||||
};
|
||||
|
||||
let base = base_url.to_ascii_lowercase();
|
||||
base.contains("openai.azure.") || matches_azure_responses_base_url(&base)
|
||||
}
|
||||
|
||||
fn matches_azure_responses_base_url(base_url: &str) -> bool {
|
||||
const AZURE_MARKERS: [&str; 5] = [
|
||||
"cognitiveservices.azure.",
|
||||
@@ -129,6 +137,54 @@ fn matches_azure_responses_base_url(base_url: &str) -> bool {
|
||||
"azurefd.",
|
||||
"windows.net/openai",
|
||||
];
|
||||
let base = base_url.to_ascii_lowercase();
|
||||
AZURE_MARKERS.iter().any(|marker| base.contains(marker))
|
||||
AZURE_MARKERS.iter().any(|marker| base_url.contains(marker))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn detects_azure_responses_base_urls() {
|
||||
let positive_cases = [
|
||||
"https://foo.openai.azure.com/openai",
|
||||
"https://foo.openai.azure.us/openai/deployments/bar",
|
||||
"https://foo.cognitiveservices.azure.cn/openai",
|
||||
"https://foo.aoai.azure.com/openai",
|
||||
"https://foo.openai.azure-api.net/openai",
|
||||
"https://foo.z01.azurefd.net/",
|
||||
];
|
||||
|
||||
for base_url in positive_cases {
|
||||
assert!(
|
||||
is_azure_responses_wire_base_url(WireApi::Responses, "test", Some(base_url)),
|
||||
"expected {base_url} to be detected as Azure"
|
||||
);
|
||||
}
|
||||
|
||||
assert!(is_azure_responses_wire_base_url(
|
||||
WireApi::Responses,
|
||||
"Azure",
|
||||
Some("https://example.com")
|
||||
));
|
||||
|
||||
let negative_cases = [
|
||||
"https://api.openai.com/v1",
|
||||
"https://example.com/openai",
|
||||
"https://myproxy.azurewebsites.net/openai",
|
||||
];
|
||||
|
||||
for base_url in negative_cases {
|
||||
assert!(
|
||||
!is_azure_responses_wire_base_url(WireApi::Responses, "test", Some(base_url)),
|
||||
"expected {base_url} not to be detected as Azure"
|
||||
);
|
||||
}
|
||||
|
||||
assert!(!is_azure_responses_wire_base_url(
|
||||
WireApi::Chat,
|
||||
"Azure",
|
||||
Some("https://foo.openai.azure.com/openai")
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,6 +41,14 @@ pub fn parse_rate_limit(headers: &HeaderMap) -> Option<RateLimitSnapshot> {
    })
}

/// Parses the bespoke Codex rate-limit headers into a `RateLimitSnapshot`.
pub fn parse_promo_message(headers: &HeaderMap) -> Option<String> {
    parse_header_str(headers, "x-codex-promo-message")
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(std::string::ToString::to_string)
}

fn parse_rate_limit_window(
    headers: &HeaderMap,
    used_percent_header: &str,

@@ -157,7 +157,7 @@ struct ResponseCompletedOutputTokensDetails {
#[derive(Deserialize, Debug)]
pub struct ResponsesStreamEvent {
    #[serde(rename = "type")]
    kind: String,
    pub(crate) kind: String,
    response: Option<Value>,
    item: Option<Value>,
    delta: Option<String>,
@@ -1,3 +1,4 @@
use crate::error::ApiError;
use codex_client::Request;
use codex_client::RequestTelemetry;
use codex_client::Response;

@@ -10,6 +11,8 @@ use std::future::Future;
use std::sync::Arc;
use std::time::Duration;
use tokio::time::Instant;
use tokio_tungstenite::tungstenite::Error;
use tokio_tungstenite::tungstenite::Message;

/// Generic telemetry.
pub trait SseTelemetry: Send + Sync {

@@ -28,6 +31,17 @@ pub trait SseTelemetry: Send + Sync {
    );
}

/// Telemetry for Responses WebSocket transport.
pub trait WebsocketTelemetry: Send + Sync {
    fn on_ws_request(&self, duration: Duration, error: Option<&ApiError>);

    fn on_ws_event(
        &self,
        result: &Result<Option<Result<Message, Error>>, ApiError>,
        duration: Duration,
    );
}

pub(crate) trait WithStatus {
    fn status(&self) -> StatusCode;
}
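For context on how a consumer might hook into the new `WebsocketTelemetry` trait, a hypothetical logging implementor; nothing like this ships in the compare, it only exercises the trait surface shown above:

```rust
use std::time::Duration;

// A hypothetical implementor that just records timings via `tracing`.
struct LoggingWsTelemetry;

impl WebsocketTelemetry for LoggingWsTelemetry {
    fn on_ws_request(&self, duration: Duration, error: Option<&ApiError>) {
        match error {
            Some(err) => tracing::warn!(?duration, %err, "websocket request failed"),
            None => tracing::debug!(?duration, "websocket request sent"),
        }
    }

    fn on_ws_event(
        &self,
        result: &Result<Option<Result<Message, Error>>, ApiError>,
        duration: Duration,
    ) {
        // The outer `Err(_)` is the idle-timeout path surfaced by the stream loop.
        tracing::debug!(?duration, ok = result.is_ok(), "websocket event polled");
    }
}
```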
@@ -3,6 +3,7 @@ edition.workspace = true
license.workspace = true
name = "codex-core"
version.workspace = true
build = "build.rs"

[lib]
doctest = false

@@ -39,11 +40,11 @@ codex-protocol = { workspace = true }
codex-rmcp-client = { workspace = true }
codex-state = { workspace = true }
codex-utils-absolute-path = { workspace = true }
codex-utils-home-dir = { workspace = true }
codex-utils-pty = { workspace = true }
codex-utils-readiness = { workspace = true }
codex-utils-string = { workspace = true }
codex-windows-sandbox = { package = "codex-windows-sandbox", path = "../windows-sandbox-rs" }
dirs = { workspace = true }
dunce = { workspace = true }
encoding_rs = { workspace = true }
env-flags = { workspace = true }

@@ -90,6 +91,7 @@ tokio = { workspace = true, features = [
    "signal",
] }
tokio-util = { workspace = true, features = ["rt"] }
tokio-tungstenite = { workspace = true }
toml = { workspace = true }
toml_edit = { workspace = true }
tracing = { workspace = true, features = ["log"] }

@@ -145,6 +147,10 @@ image = { workspace = true, features = ["jpeg", "png"] }
maplit = { workspace = true }
predicates = { workspace = true }
pretty_assertions = { workspace = true }
opentelemetry_sdk = { workspace = true, features = [
    "experimental_metrics_custom_reader",
    "metrics",
] }
serial_test = { workspace = true }
tempfile = { workspace = true }
tracing-subscriber = { workspace = true }
27  codex-rs/core/build.rs  (Normal file)
@@ -0,0 +1,27 @@
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
fn main() {
|
||||
let samples_dir = Path::new("src/skills/assets/samples");
|
||||
if !samples_dir.exists() {
|
||||
return;
|
||||
}
|
||||
|
||||
println!("cargo:rerun-if-changed={}", samples_dir.display());
|
||||
visit_dir(samples_dir);
|
||||
}
|
||||
|
||||
fn visit_dir(dir: &Path) {
|
||||
let entries = match fs::read_dir(dir) {
|
||||
Ok(entries) => entries,
|
||||
Err(_) => return,
|
||||
};
|
||||
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
println!("cargo:rerun-if-changed={}", path.display());
|
||||
if path.is_dir() {
|
||||
visit_dir(&path);
|
||||
}
|
||||
}
|
||||
}
|
||||
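
The build script above prints a cargo:rerun-if-changed line for every file under the samples tree, so editing any sample invalidates the cached build. A minimal sketch of a hypothetical consumer, assuming the samples are embedded at compile time (the constant name and exact path are illustrative, not taken from the diff):

    // Hypothetical consumer; the path is an assumption for illustration.
    // The per-file rerun-if-changed lines emitted by build.rs ensure this
    // embedded copy is refreshed whenever the sample file changes.
    const SAMPLE_SKILL: &str = include_str!("skills/assets/samples/example/SKILL.md");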
@@ -208,6 +208,9 @@
"responses_websockets": {
  "type": "boolean"
},
"runtime_metrics": {
  "type": "boolean"
},
"shell_snapshot": {
  "type": "boolean"
},
@@ -258,9 +261,6 @@
],
"description": "Optional path to a file containing model instructions."
},
"model_personality": {
  "$ref": "#/definitions/Personality"
},
"model_provider": {
  "description": "The key in the `model_providers` map identifying the [`ModelProviderInfo`] to use.",
  "type": "string"
@@ -277,6 +277,9 @@
"oss_provider": {
  "type": "string"
},
"personality": {
  "$ref": "#/definitions/Personality"
},
"sandbox_mode": {
  "$ref": "#/definitions/SandboxMode"
},
@@ -1233,6 +1236,9 @@
"responses_websockets": {
  "type": "boolean"
},
"runtime_metrics": {
  "type": "boolean"
},
"shell_snapshot": {
  "type": "boolean"
},
@@ -1370,14 +1376,6 @@
],
"description": "Optional path to a file containing model instructions that will override the built-in instructions for the selected model. Users are STRONGLY DISCOURAGED from using this field, as deviating from the instructions sanctioned by Codex will likely degrade model performance."
},
"model_personality": {
  "allOf": [
    {
      "$ref": "#/definitions/Personality"
    }
  ],
  "description": "EXPERIMENTAL Optionally specify a personality for the model"
},
"model_provider": {
  "description": "Provider to use from the model_providers map.",
  "type": "string"
@@ -1436,6 +1434,14 @@
],
"description": "OTEL configuration."
},
"personality": {
  "allOf": [
    {
      "$ref": "#/definitions/Personality"
    }
  ],
  "description": "Optionally specify a personality for the model"
},
"profile": {
  "description": "Profile to use from the `profiles` map.",
  "type": "string"

331 codex-rs/core/src/analytics_client.rs Normal file
@@ -0,0 +1,331 @@
use crate::AuthManager;
use crate::config::Config;
use crate::default_client::create_client;
use crate::git_info::collect_git_info;
use crate::git_info::get_git_repo_root;
use codex_protocol::protocol::SkillScope;
use serde::Serialize;
use sha1::Digest;
use sha1::Sha1;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::mpsc;

#[derive(Clone)]
pub(crate) struct TrackEventsContext {
    pub(crate) model_slug: String,
    pub(crate) thread_id: String,
}

pub(crate) fn build_track_events_context(
    model_slug: String,
    thread_id: String,
) -> TrackEventsContext {
    TrackEventsContext {
        model_slug,
        thread_id,
    }
}

pub(crate) struct SkillInvocation {
    pub(crate) skill_name: String,
    pub(crate) skill_scope: SkillScope,
    pub(crate) skill_path: PathBuf,
}

#[derive(Clone)]
pub(crate) struct AnalyticsEventsQueue {
    sender: mpsc::Sender<TrackEventsJob>,
}

pub(crate) struct AnalyticsEventsClient {
    queue: AnalyticsEventsQueue,
    config: Arc<Config>,
}

impl AnalyticsEventsQueue {
    pub(crate) fn new(auth_manager: Arc<AuthManager>) -> Self {
        let (sender, mut receiver) = mpsc::channel(ANALYTICS_EVENTS_QUEUE_SIZE);
        tokio::spawn(async move {
            while let Some(job) = receiver.recv().await {
                send_track_skill_invocations(&auth_manager, job).await;
            }
        });
        Self { sender }
    }

    fn try_send(&self, job: TrackEventsJob) {
        if self.sender.try_send(job).is_err() {
            // TODO: add a metric for this
            tracing::warn!("dropping skill analytics events: queue is full");
        }
    }
}

impl AnalyticsEventsClient {
    pub(crate) fn new(config: Arc<Config>, auth_manager: Arc<AuthManager>) -> Self {
        Self {
            queue: AnalyticsEventsQueue::new(Arc::clone(&auth_manager)),
            config,
        }
    }

    pub(crate) fn track_skill_invocations(
        &self,
        tracking: TrackEventsContext,
        invocations: Vec<SkillInvocation>,
    ) {
        track_skill_invocations(
            &self.queue,
            Arc::clone(&self.config),
            Some(tracking),
            invocations,
        );
    }
}

struct TrackEventsJob {
    config: Arc<Config>,
    tracking: TrackEventsContext,
    invocations: Vec<SkillInvocation>,
}

const ANALYTICS_EVENTS_QUEUE_SIZE: usize = 256;
const ANALYTICS_EVENTS_TIMEOUT: Duration = Duration::from_secs(10);

#[derive(Serialize)]
struct TrackEventsRequest {
    events: Vec<TrackEvent>,
}

#[derive(Serialize)]
struct TrackEvent {
    event_type: &'static str,
    skill_id: String,
    skill_name: String,
    event_params: TrackEventParams,
}

#[derive(Serialize)]
struct TrackEventParams {
    product_client_id: Option<String>,
    skill_scope: Option<String>,
    repo_url: Option<String>,
    thread_id: Option<String>,
    invoke_type: Option<String>,
    model_slug: Option<String>,
}

pub(crate) fn track_skill_invocations(
    queue: &AnalyticsEventsQueue,
    config: Arc<Config>,
    tracking: Option<TrackEventsContext>,
    invocations: Vec<SkillInvocation>,
) {
    if config.analytics_enabled == Some(false) {
        return;
    }
    let Some(tracking) = tracking else {
        return;
    };
    if invocations.is_empty() {
        return;
    }
    let job = TrackEventsJob {
        config,
        tracking,
        invocations,
    };
    queue.try_send(job);
}

async fn send_track_skill_invocations(auth_manager: &AuthManager, job: TrackEventsJob) {
    let TrackEventsJob {
        config,
        tracking,
        invocations,
    } = job;
    let Some(auth) = auth_manager.auth().await else {
        return;
    };
    if !auth.is_chatgpt_auth() {
        return;
    }
    let access_token = match auth.get_token() {
        Ok(token) => token,
        Err(_) => return,
    };
    let Some(account_id) = auth.get_account_id() else {
        return;
    };

    let mut events = Vec::with_capacity(invocations.len());
    for invocation in invocations {
        let skill_scope = match invocation.skill_scope {
            SkillScope::User => "user",
            SkillScope::Repo => "repo",
            SkillScope::System => "system",
            SkillScope::Admin => "admin",
        };
        let repo_root = get_git_repo_root(invocation.skill_path.as_path());
        let repo_url = if let Some(root) = repo_root.as_ref() {
            collect_git_info(root)
                .await
                .and_then(|info| info.repository_url)
        } else {
            None
        };
        let skill_id = skill_id_for_local_skill(
            repo_url.as_deref(),
            repo_root.as_deref(),
            invocation.skill_path.as_path(),
            invocation.skill_name.as_str(),
        );
        events.push(TrackEvent {
            event_type: "skill_invocation",
            skill_id,
            skill_name: invocation.skill_name.clone(),
            event_params: TrackEventParams {
                thread_id: Some(tracking.thread_id.clone()),
                invoke_type: Some("explicit".to_string()),
                model_slug: Some(tracking.model_slug.clone()),
                product_client_id: Some(crate::default_client::originator().value),
                repo_url,
                skill_scope: Some(skill_scope.to_string()),
            },
        });
    }

    let base_url = config.chatgpt_base_url.trim_end_matches('/');
    let url = format!("{base_url}/codex/analytics-events/events");
    let payload = TrackEventsRequest { events };

    let response = create_client()
        .post(&url)
        .timeout(ANALYTICS_EVENTS_TIMEOUT)
        .bearer_auth(&access_token)
        .header("chatgpt-account-id", &account_id)
        .header("Content-Type", "application/json")
        .json(&payload)
        .send()
        .await;

    match response {
        Ok(response) if response.status().is_success() => {}
        Ok(response) => {
            let status = response.status();
            let body = response.text().await.unwrap_or_default();
            tracing::warn!("events failed with status {status}: {body}");
        }
        Err(err) => {
            tracing::warn!("failed to send events request: {err}");
        }
    }
}

fn skill_id_for_local_skill(
    repo_url: Option<&str>,
    repo_root: Option<&Path>,
    skill_path: &Path,
    skill_name: &str,
) -> String {
    let path = normalize_path_for_skill_id(repo_url, repo_root, skill_path);
    let prefix = if let Some(url) = repo_url {
        format!("repo_{url}")
    } else {
        "personal".to_string()
    };
    let raw_id = format!("{prefix}_{path}_{skill_name}");
    let mut hasher = Sha1::new();
    hasher.update(raw_id.as_bytes());
    format!("{:x}", hasher.finalize())
}

/// Returns a normalized path for skill ID construction.
///
/// - Repo-scoped skills use a path relative to the repo root.
/// - User/admin/system skills use an absolute path.
fn normalize_path_for_skill_id(
    repo_url: Option<&str>,
    repo_root: Option<&Path>,
    skill_path: &Path,
) -> String {
    let resolved_path =
        std::fs::canonicalize(skill_path).unwrap_or_else(|_| skill_path.to_path_buf());
    match (repo_url, repo_root) {
        (Some(_), Some(root)) => {
            let resolved_root = std::fs::canonicalize(root).unwrap_or_else(|_| root.to_path_buf());
            resolved_path
                .strip_prefix(&resolved_root)
                .unwrap_or(resolved_path.as_path())
                .to_string_lossy()
                .replace('\\', "/")
        }
        _ => resolved_path.to_string_lossy().replace('\\', "/"),
    }
}

#[cfg(test)]
mod tests {
    use super::normalize_path_for_skill_id;
    use pretty_assertions::assert_eq;
    use std::path::PathBuf;

    fn expected_absolute_path(path: &PathBuf) -> String {
        std::fs::canonicalize(path)
            .unwrap_or_else(|_| path.to_path_buf())
            .to_string_lossy()
            .replace('\\', "/")
    }

    #[test]
    fn normalize_path_for_skill_id_repo_scoped_uses_relative_path() {
        let repo_root = PathBuf::from("/repo/root");
        let skill_path = PathBuf::from("/repo/root/.codex/skills/doc/SKILL.md");

        let path = normalize_path_for_skill_id(
            Some("https://example.com/repo.git"),
            Some(repo_root.as_path()),
            skill_path.as_path(),
        );

        assert_eq!(path, ".codex/skills/doc/SKILL.md");
    }

    #[test]
    fn normalize_path_for_skill_id_user_scoped_uses_absolute_path() {
        let skill_path = PathBuf::from("/Users/abc/.codex/skills/doc/SKILL.md");

        let path = normalize_path_for_skill_id(None, None, skill_path.as_path());
        let expected = expected_absolute_path(&skill_path);

        assert_eq!(path, expected);
    }

    #[test]
    fn normalize_path_for_skill_id_admin_scoped_uses_absolute_path() {
        let skill_path = PathBuf::from("/etc/codex/skills/doc/SKILL.md");

        let path = normalize_path_for_skill_id(None, None, skill_path.as_path());
        let expected = expected_absolute_path(&skill_path);

        assert_eq!(path, expected);
    }

    #[test]
    fn normalize_path_for_skill_id_repo_root_not_in_skill_path_uses_absolute_path() {
        let repo_root = PathBuf::from("/repo/root");
        let skill_path = PathBuf::from("/other/path/.codex/skills/doc/SKILL.md");

        let path = normalize_path_for_skill_id(
            Some("https://example.com/repo.git"),
            Some(repo_root.as_path()),
            skill_path.as_path(),
        );
        let expected = expected_absolute_path(&skill_path);

        assert_eq!(path, expected);
    }
}
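
The skill ID above is a SHA-1 digest over "{prefix}_{path}_{skill_name}", where the prefix is "repo_{url}" for repo-scoped skills and "personal" otherwise. A minimal standalone sketch of the derivation, reusing the fixture values from the tests above (the function name is illustrative):

    // Sketch of the ID derivation for a repo-scoped skill; the inputs come
    // from the test fixtures above, not from real data.
    use sha1::{Digest, Sha1};

    fn demo_skill_id() -> String {
        let raw_id = "repo_https://example.com/repo.git_.codex/skills/doc/SKILL.md_doc";
        let mut hasher = Sha1::new();
        hasher.update(raw_id.as_bytes());
        format!("{:x}", hasher.finalize()) // 40-character hex digest
    }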
@@ -3,6 +3,7 @@ use chrono::Utc;
use codex_api::AuthProvider as ApiAuthProvider;
use codex_api::TransportError;
use codex_api::error::ApiError;
use codex_api::rate_limits::parse_promo_message;
use codex_api::rate_limits::parse_rate_limit;
use http::HeaderMap;
use serde::Deserialize;
@@ -70,6 +71,7 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
    if let Ok(err) = serde_json::from_str::<UsageErrorResponse>(&body_text) {
        if err.error.error_type.as_deref() == Some("usage_limit_reached") {
            let rate_limits = headers.as_ref().and_then(parse_rate_limit);
            let promo_message = headers.as_ref().and_then(parse_promo_message);
            let resets_at = err
                .error
                .resets_at
@@ -78,6 +80,7 @@ pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
                plan_type: err.error.plan_type,
                resets_at,
                rate_limits,
                promo_message,
            });
        } else if err.error.error_type.as_deref() == Some("usage_not_included") {
            return CodexErr::UsageNotIncluded;

@@ -1,3 +1,4 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::OnceLock;

@@ -5,6 +6,7 @@ use crate::api_bridge::CoreAuthProvider;
use crate::api_bridge::auth_provider_from_auth;
use crate::api_bridge::map_api_error;
use crate::auth::UnauthorizedRecovery;
use crate::turn_metadata::build_turn_metadata_header;
use codex_api::AggregateStreamExt;
use codex_api::ChatClient as ApiChatClient;
use codex_api::CompactClient as ApiCompactClient;
@@ -21,6 +23,7 @@ use codex_api::ResponsesWebsocketClient as ApiWebSocketResponsesClient;
use codex_api::ResponsesWebsocketConnection as ApiWebSocketConnection;
use codex_api::SseTelemetry;
use codex_api::TransportError;
use codex_api::WebsocketTelemetry;
use codex_api::build_conversation_headers;
use codex_api::common::Reasoning;
use codex_api::common::ResponsesWsRequest;
@@ -46,6 +49,8 @@ use reqwest::StatusCode;
use serde_json::Value;
use std::time::Duration;
use tokio::sync::mpsc;
use tokio_tungstenite::tungstenite::Error;
use tokio_tungstenite::tungstenite::Message;
use tracing::warn;

use crate::AuthManager;
@@ -69,6 +74,7 @@ use crate::transport_manager::TransportManager;

pub const WEB_SEARCH_ELIGIBLE_HEADER: &str = "x-oai-web-search-eligible";
pub const X_CODEX_TURN_STATE_HEADER: &str = "x-codex-turn-state";
pub const X_CODEX_TURN_METADATA_HEADER: &str = "x-codex-turn-metadata";

#[derive(Debug)]
struct ModelClientState {
@@ -105,6 +111,10 @@ pub struct ModelClientSession {
    /// keep sending it unchanged between turn requests (e.g., for retries, incremental
    /// appends, or continuation requests), and must not send it between different turns.
    turn_state: Arc<OnceLock<String>>,
    /// Turn-scoped metadata attached to every request in the turn.
    turn_metadata_header: Option<HeaderValue>,
    /// Working directory used to lazily compute turn metadata at send time.
    turn_metadata_cwd: Option<PathBuf>,
}

#[allow(clippy::too_many_arguments)]
@@ -137,13 +147,21 @@ impl ModelClient {
        }
    }

    pub fn new_session(&self) -> ModelClientSession {
    pub fn new_session(
        &self,
        turn_metadata_header: Option<String>,
        turn_metadata_cwd: Option<PathBuf>,
    ) -> ModelClientSession {
        let turn_metadata_header =
            turn_metadata_header.and_then(|value| HeaderValue::from_str(&value).ok());
        ModelClientSession {
            state: Arc::clone(&self.state),
            connection: None,
            websocket_last_items: Vec::new(),
            transport_manager: self.state.transport_manager.clone(),
            turn_state: Arc::new(OnceLock::new()),
            turn_metadata_header,
            turn_metadata_cwd,
        }
    }
}
@@ -254,6 +272,21 @@ impl ModelClient {
}

impl ModelClientSession {
    async fn ensure_turn_metadata_header(&mut self) {
        if self.turn_metadata_header.is_some() {
            return;
        }
        let Some(cwd) = self.turn_metadata_cwd.as_deref() else {
            return;
        };
        let Some(value) = build_turn_metadata_header(cwd).await else {
            return;
        };
        if let Ok(header_value) = HeaderValue::from_str(value.as_str()) {
            self.turn_metadata_header = Some(header_value);
        }
    }

    /// Streams a single model turn using either the Responses or Chat
    /// Completions wire API, depending on the configured provider.
    ///
@@ -261,6 +294,9 @@ impl ModelClientSession {
    /// based on the `show_raw_agent_reasoning` flag in the config.
    pub async fn stream(&mut self, prompt: &Prompt) -> Result<ResponseStream> {
        let wire_api = self.state.provider.wire_api;
        if matches!(wire_api, WireApi::Responses) {
            self.ensure_turn_metadata_header().await;
        }
        match wire_api {
            WireApi::Responses => {
                let websocket_enabled = self.responses_websocket_enabled()
@@ -377,7 +413,11 @@ impl ModelClientSession {
            store_override: None,
            conversation_id: Some(conversation_id),
            session_source: Some(self.state.session_source.clone()),
            extra_headers: build_responses_headers(&self.state.config, Some(&self.turn_state)),
            extra_headers: build_responses_headers(
                &self.state.config,
                Some(&self.turn_state),
                self.turn_metadata_header.as_ref(),
            ),
            compression,
            turn_state: Some(Arc::clone(&self.turn_state)),
        }
@@ -451,9 +491,14 @@ impl ModelClientSession {
        if needs_new {
            let mut headers = options.extra_headers.clone();
            headers.extend(build_conversation_headers(options.conversation_id.clone()));
            let websocket_telemetry = self.build_websocket_telemetry();
            let new_conn: ApiWebSocketConnection =
                ApiWebSocketResponsesClient::new(api_provider, api_auth)
                    .connect(headers, options.turn_state.clone())
                    .connect(
                        headers,
                        options.turn_state.clone(),
                        Some(websocket_telemetry),
                    )
                    .await?;
            self.connection = Some(new_conn);
        }
@@ -650,6 +695,13 @@ impl ModelClientSession {
        let sse_telemetry: Arc<dyn SseTelemetry> = telemetry;
        (request_telemetry, sse_telemetry)
    }

    /// Builds telemetry for the Responses API WebSocket transport.
    fn build_websocket_telemetry(&self) -> Arc<dyn WebsocketTelemetry> {
        let telemetry = Arc::new(ApiTelemetry::new(self.state.otel_manager.clone()));
        let websocket_telemetry: Arc<dyn WebsocketTelemetry> = telemetry;
        websocket_telemetry
    }
}

impl ModelClient {
@@ -698,6 +750,7 @@ fn experimental_feature_headers(config: &Config) -> ApiHeaderMap {
fn build_responses_headers(
    config: &Config,
    turn_state: Option<&Arc<OnceLock<String>>>,
    turn_metadata_header: Option<&HeaderValue>,
) -> ApiHeaderMap {
    let mut headers = experimental_feature_headers(config);
    headers.insert(
@@ -716,6 +769,9 @@ fn build_responses_headers(
    {
        headers.insert(X_CODEX_TURN_STATE_HEADER, header_value);
    }
    if let Some(header_value) = turn_metadata_header {
        headers.insert(X_CODEX_TURN_METADATA_HEADER, header_value.clone());
    }
    headers
}

@@ -849,3 +905,19 @@ impl SseTelemetry for ApiTelemetry {
        self.otel_manager.log_sse_event(result, duration);
    }
}

impl WebsocketTelemetry for ApiTelemetry {
    fn on_ws_request(&self, duration: Duration, error: Option<&ApiError>) {
        let error_message = error.map(std::string::ToString::to_string);
        self.otel_manager
            .record_websocket_request(duration, error_message.as_deref());
    }

    fn on_ws_event(
        &self,
        result: &std::result::Result<Option<std::result::Result<Message, Error>>, ApiError>,
        duration: Duration,
    ) {
        self.otel_manager.record_websocket_event(result, duration);
    }
}
@@ -14,6 +14,8 @@ use crate::agent::AgentControl;
use crate::agent::AgentStatus;
use crate::agent::MAX_THREAD_SPAWN_DEPTH;
use crate::agent::agent_status_from_event;
use crate::analytics_client::AnalyticsEventsClient;
use crate::analytics_client::build_track_events_context;
use crate::compact;
use crate::compact::run_inline_auto_compact_task;
use crate::compact::should_use_remote_compact_task;
@@ -49,6 +51,7 @@ use codex_protocol::items::PlanItem;
use codex_protocol::items::TurnItem;
use codex_protocol::items::UserMessageItem;
use codex_protocol::models::BaseInstructions;
use codex_protocol::models::format_allow_prefixes;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::HasLegacyEvent;
@@ -116,6 +119,7 @@ use crate::error::Result as CodexResult;
use crate::exec::StreamOutput;
use crate::exec_policy::ExecPolicyUpdateError;
use crate::feedback_tags;
use crate::git_info::get_git_repo_root;
use crate::instructions::UserInstructions;
use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
use crate::mcp::auth::compute_auth_statuses;
@@ -210,7 +214,6 @@ use codex_protocol::models::ContentItem;
use codex_protocol::models::DeveloperInstructions;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::models::render_command_prefix_list;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::user_input::UserInput;
@@ -330,7 +333,7 @@ impl Codex {
            .base_instructions
            .clone()
            .or_else(|| conversation_history.get_base_instructions().map(|s| s.text))
            .unwrap_or_else(|| model_info.get_model_instructions(config.model_personality));
            .unwrap_or_else(|| model_info.get_model_instructions(config.personality));
        // Respect explicit thread-start tools; fall back to persisted tools when resuming a thread.
        let dynamic_tools = if dynamic_tools.is_empty() {
            conversation_history.get_dynamic_tools().unwrap_or_default()
@@ -354,7 +357,7 @@ impl Codex {
            model_reasoning_summary: config.model_reasoning_summary,
            developer_instructions: config.developer_instructions.clone(),
            user_instructions,
            personality: config.model_personality,
            personality: config.personality,
            base_instructions,
            compact_prompt: config.compact_prompt.clone(),
            approval_policy: config.approval_policy.clone(),
@@ -488,7 +491,7 @@ pub(crate) struct TurnContext {
    pub(crate) developer_instructions: Option<String>,
    pub(crate) compact_prompt: Option<String>,
    pub(crate) user_instructions: Option<String>,
    pub(crate) collaboration_mode_kind: ModeKind,
    pub(crate) collaboration_mode: CollaborationMode,
    pub(crate) personality: Option<Personality>,
    pub(crate) approval_policy: AskForApproval,
    pub(crate) sandbox_policy: SandboxPolicy,
@@ -501,8 +504,9 @@ pub(crate) struct TurnContext {
    pub(crate) tool_call_gate: Arc<ReadinessFlag>,
    pub(crate) truncation_policy: TruncationPolicy,
    pub(crate) dynamic_tools: Vec<DynamicToolSpec>,
    /// Per-turn metadata serialized as a header value for outbound model requests.
    pub(crate) turn_metadata_header: Option<String>,
}

impl TurnContext {
    pub(crate) fn resolve_path(&self, path: Option<String>) -> PathBuf {
        path.as_ref()
@@ -632,9 +636,10 @@ impl Session {
        per_turn_config.model_reasoning_effort =
            session_configuration.collaboration_mode.reasoning_effort();
        per_turn_config.model_reasoning_summary = session_configuration.model_reasoning_summary;
        per_turn_config.model_personality = session_configuration.personality;
        per_turn_config.personality = session_configuration.personality;
        per_turn_config.web_search_mode = Some(resolve_web_search_mode_for_turn(
            per_turn_config.web_search_mode,
            session_configuration.provider.is_azure_responses_endpoint(),
            session_configuration.sandbox_policy.get(),
        ));
        per_turn_config.features = config.features.clone();
@@ -689,7 +694,7 @@ impl Session {
            developer_instructions: session_configuration.developer_instructions.clone(),
            compact_prompt: session_configuration.compact_prompt.clone(),
            user_instructions: session_configuration.user_instructions.clone(),
            collaboration_mode_kind: session_configuration.collaboration_mode.mode,
            collaboration_mode: session_configuration.collaboration_mode.clone(),
            personality: session_configuration.personality,
            approval_policy: session_configuration.approval_policy.value(),
            sandbox_policy: session_configuration.sandbox_policy.get().clone(),
@@ -702,6 +707,7 @@ impl Session {
            tool_call_gate: Arc::new(ReadinessFlag::new()),
            truncation_policy: model_info.truncation_policy.into(),
            dynamic_tools: session_configuration.dynamic_tools.clone(),
            turn_metadata_header: None,
        }
    }

@@ -903,6 +909,10 @@ impl Session {
            mcp_connection_manager: Arc::new(RwLock::new(McpConnectionManager::default())),
            mcp_startup_cancellation_token: Mutex::new(CancellationToken::new()),
            unified_exec_manager: UnifiedExecProcessManager::default(),
            analytics_events_client: AnalyticsEventsClient::new(
                Arc::clone(&config),
                Arc::clone(&auth_manager),
            ),
            notifier: UserNotifier::new(config.notify.clone()),
            rollout: Mutex::new(rollout_recorder),
            user_shell: Arc::new(default_shell),
@@ -1349,16 +1359,14 @@ impl Session {

    fn build_collaboration_mode_update_item(
        &self,
        previous_collaboration_mode: &CollaborationMode,
        next_collaboration_mode: Option<&CollaborationMode>,
        previous: Option<&Arc<TurnContext>>,
        next: &TurnContext,
    ) -> Option<ResponseItem> {
        if let Some(next_mode) = next_collaboration_mode {
            if previous_collaboration_mode == next_mode {
                return None;
            }
        let prev = previous?;
        if prev.collaboration_mode != next.collaboration_mode {
            // If the next mode has empty developer instructions, this returns None and we emit no
            // update, so prior collaboration instructions remain in the prompt history.
            Some(DeveloperInstructions::from_collaboration_mode(next_mode)?.into())
            Some(DeveloperInstructions::from_collaboration_mode(&next.collaboration_mode)?.into())
        } else {
            None
        }
@@ -1368,8 +1376,6 @@ impl Session {
        &self,
        previous_context: Option<&Arc<TurnContext>>,
        current_context: &TurnContext,
        previous_collaboration_mode: &CollaborationMode,
        next_collaboration_mode: Option<&CollaborationMode>,
    ) -> Vec<ResponseItem> {
        let mut update_items = Vec::new();
        if let Some(env_item) =
@@ -1382,10 +1388,9 @@ impl Session {
        {
            update_items.push(permissions_item);
        }
        if let Some(collaboration_mode_item) = self.build_collaboration_mode_update_item(
            previous_collaboration_mode,
            next_collaboration_mode,
        ) {
        if let Some(collaboration_mode_item) =
            self.build_collaboration_mode_update_item(previous_context, current_context)
        {
            update_items.push(collaboration_mode_item);
        }
        if let Some(personality_item) =
@@ -1515,7 +1520,7 @@ impl Session {
        sub_id: &str,
        amendment: &ExecPolicyAmendment,
    ) {
        let Some(prefixes) = render_command_prefix_list([amendment.command.as_slice()]) else {
        let Some(prefixes) = format_allow_prefixes(vec![amendment.command.clone()]) else {
            warn!("execpolicy amendment for {sub_id} had no command prefix");
            return;
        };
@@ -2566,18 +2571,6 @@ mod handlers {
        sub_id: String,
        updates: SessionSettingsUpdate,
    ) {
        let previous_context = sess
            .new_default_turn_with_sub_id(sess.next_internal_sub_id())
            .await;
        let previous_collaboration_mode = sess
            .state
            .lock()
            .await
            .session_configuration
            .collaboration_mode
            .clone();
        let next_collaboration_mode = updates.collaboration_mode.clone();

        if let Err(err) = sess.update_settings(updates).await {
            sess.send_event_raw(Event {
                id: sub_id,
@@ -2587,24 +2580,6 @@ mod handlers {
                }),
            })
            .await;
            return;
        }

        let initial_context_seeded = sess.state.lock().await.initial_context_seeded;
        if !initial_context_seeded {
            return;
        }

        let current_context = sess.new_default_turn_with_sub_id(sub_id).await;
        let update_items = sess.build_settings_update_items(
            Some(&previous_context),
            &current_context,
            &previous_collaboration_mode,
            next_collaboration_mode.as_ref(),
        );
        if !update_items.is_empty() {
            sess.record_conversation_items(&current_context, &update_items)
                .await;
        }
    }

@@ -2664,14 +2639,6 @@ mod handlers {
        _ => unreachable!(),
    };

    let previous_collaboration_mode = sess
        .state
        .lock()
        .await
        .session_configuration
        .collaboration_mode
        .clone();
    let next_collaboration_mode = updates.collaboration_mode.clone();
    let Ok(current_context) = sess.new_turn_with_sub_id(sub_id, updates).await else {
        // new_turn_with_sub_id already emits the error event.
        return;
@@ -2684,12 +2651,8 @@ mod handlers {
    // Attempt to inject input into current task
    if let Err(items) = sess.inject_input(items).await {
        sess.seed_initial_context_if_needed(&current_context).await;
        let update_items = sess.build_settings_update_items(
            previous_context.as_ref(),
            &current_context,
            &previous_collaboration_mode,
            next_collaboration_mode.as_ref(),
        );
        let update_items =
            sess.build_settings_update_items(previous_context.as_ref(), &current_context);
        if !update_items.is_empty() {
            sess.record_conversation_items(&current_context, &update_items)
                .await;
@@ -3204,7 +3167,7 @@ async fn spawn_review_thread(
    developer_instructions: None,
    user_instructions: None,
    compact_prompt: parent_turn_context.compact_prompt.clone(),
    collaboration_mode_kind: parent_turn_context.collaboration_mode_kind,
    collaboration_mode: parent_turn_context.collaboration_mode.clone(),
    personality: parent_turn_context.personality,
    approval_policy: parent_turn_context.approval_policy,
    sandbox_policy: parent_turn_context.sandbox_policy.clone(),
@@ -3216,6 +3179,7 @@ async fn spawn_review_thread(
    tool_call_gate: Arc::new(ReadinessFlag::new()),
    dynamic_tools: parent_turn_context.dynamic_tools.clone(),
    truncation_policy: model_info.truncation_policy.into(),
    turn_metadata_header: None,
};

// Seed the child task with the review prompt as the initial user message.
@@ -3319,7 +3283,7 @@ pub(crate) async fn run_turn(
    let total_usage_tokens = sess.get_total_token_usage().await;
    let event = EventMsg::TurnStarted(TurnStartedEvent {
        model_context_window: turn_context.client.get_model_context_window(),
        collaboration_mode_kind: turn_context.collaboration_mode_kind,
        collaboration_mode_kind: turn_context.collaboration_mode.mode,
    });
    sess.send_event(&turn_context, event).await;
    if total_usage_tokens >= auto_compact_limit {
@@ -3384,10 +3348,18 @@ pub(crate) async fn run_turn(
        .await;

    let otel_manager = turn_context.client.get_otel_manager();
    let thread_id = sess.conversation_id.to_string();
    let tracking = build_track_events_context(turn_context.client.get_model(), thread_id);
    let SkillInjections {
        items: skill_items,
        warnings: skill_warnings,
    } = build_skill_injections(&mentioned_skills, Some(&otel_manager)).await;
    } = build_skill_injections(
        &mentioned_skills,
        Some(&otel_manager),
        &sess.services.analytics_events_client,
        tracking.clone(),
    )
    .await;

    for message in skill_warnings {
        sess.send_event(&turn_context, EventMsg::Warning(WarningEvent { message }))
@@ -3411,7 +3383,10 @@ pub(crate) async fn run_turn(
    // many turns, from the perspective of the user, it is a single turn.
    let turn_diff_tracker = Arc::new(tokio::sync::Mutex::new(TurnDiffTracker::new()));

    let mut client_session = turn_context.client.new_session();
    let mut client_session = turn_context.client.new_session(
        turn_context.turn_metadata_header.clone(),
        Some(turn_context.cwd.clone()),
    );

    loop {
        // Note that pending_input would be something like a message the user
@@ -4224,7 +4199,7 @@ async fn try_run_sampling_request(
    let mut last_agent_message: Option<String> = None;
    let mut active_item: Option<TurnItem> = None;
    let mut should_emit_turn_diff = false;
    let plan_mode = turn_context.collaboration_mode_kind == ModeKind::Plan;
    let plan_mode = turn_context.collaboration_mode.mode == ModeKind::Plan;
    let mut plan_mode_state = plan_mode.then(|| PlanModeStreamState::new(&turn_context.sub_id));
    let receiving_span = trace_span!("receiving_stream");
    let outcome: CodexResult<SamplingRequestResult> = loop {
@@ -4474,8 +4449,6 @@ pub(super) fn get_last_assistant_message_from_turn(responses: &[ResponseItem]) -

#[cfg(test)]
pub(crate) use tests::make_session_and_context;

use crate::git_info::get_git_repo_root;
#[cfg(test)]
pub(crate) use tests::make_session_and_context_with_rx;

@@ -4960,11 +4933,11 @@ mod tests {
    model_reasoning_summary: config.model_reasoning_summary,
    developer_instructions: config.developer_instructions.clone(),
    user_instructions: config.user_instructions.clone(),
    personality: config.model_personality,
    personality: config.personality,
    base_instructions: config
        .base_instructions
        .clone()
        .unwrap_or_else(|| model_info.get_model_instructions(config.model_personality)),
        .unwrap_or_else(|| model_info.get_model_instructions(config.personality)),
    compact_prompt: config.compact_prompt.clone(),
    approval_policy: config.approval_policy.clone(),
    sandbox_policy: config.sandbox_policy.clone(),
@@ -5043,11 +5016,11 @@ mod tests {
    model_reasoning_summary: config.model_reasoning_summary,
    developer_instructions: config.developer_instructions.clone(),
    user_instructions: config.user_instructions.clone(),
    personality: config.model_personality,
    personality: config.personality,
    base_instructions: config
        .base_instructions
        .clone()
        .unwrap_or_else(|| model_info.get_model_instructions(config.model_personality)),
        .unwrap_or_else(|| model_info.get_model_instructions(config.personality)),
    compact_prompt: config.compact_prompt.clone(),
    approval_policy: config.approval_policy.clone(),
    sandbox_policy: config.sandbox_policy.clone(),
@@ -5310,11 +5283,11 @@ mod tests {
    model_reasoning_summary: config.model_reasoning_summary,
    developer_instructions: config.developer_instructions.clone(),
    user_instructions: config.user_instructions.clone(),
    personality: config.model_personality,
    personality: config.personality,
    base_instructions: config
        .base_instructions
        .clone()
        .unwrap_or_else(|| model_info.get_model_instructions(config.model_personality)),
        .unwrap_or_else(|| model_info.get_model_instructions(config.personality)),
    compact_prompt: config.compact_prompt.clone(),
    approval_policy: config.approval_policy.clone(),
    sandbox_policy: config.sandbox_policy.clone(),
@@ -5346,6 +5319,10 @@ mod tests {
    mcp_connection_manager: Arc::new(RwLock::new(McpConnectionManager::default())),
    mcp_startup_cancellation_token: Mutex::new(CancellationToken::new()),
    unified_exec_manager: UnifiedExecProcessManager::default(),
    analytics_events_client: AnalyticsEventsClient::new(
        Arc::clone(&config),
        Arc::clone(&auth_manager),
    ),
    notifier: UserNotifier::new(None),
    rollout: Mutex::new(None),
    user_shell: Arc::new(default_user_shell()),
@@ -5426,11 +5403,11 @@ mod tests {
    model_reasoning_summary: config.model_reasoning_summary,
    developer_instructions: config.developer_instructions.clone(),
    user_instructions: config.user_instructions.clone(),
    personality: config.model_personality,
    personality: config.personality,
    base_instructions: config
        .base_instructions
        .clone()
        .unwrap_or_else(|| model_info.get_model_instructions(config.model_personality)),
        .unwrap_or_else(|| model_info.get_model_instructions(config.personality)),
    compact_prompt: config.compact_prompt.clone(),
    approval_policy: config.approval_policy.clone(),
    sandbox_policy: config.sandbox_policy.clone(),
@@ -5462,6 +5439,10 @@ mod tests {
    mcp_connection_manager: Arc::new(RwLock::new(McpConnectionManager::default())),
    mcp_startup_cancellation_token: Mutex::new(CancellationToken::new()),
    unified_exec_manager: UnifiedExecProcessManager::default(),
    analytics_events_client: AnalyticsEventsClient::new(
        Arc::clone(&config),
        Arc::clone(&auth_manager),
    ),
    notifier: UserNotifier::new(None),
    rollout: Mutex::new(None),
    user_shell: Arc::new(default_user_shell()),

@@ -61,7 +61,7 @@ pub(crate) async fn run_compact_task(
) {
    let start_event = EventMsg::TurnStarted(TurnStartedEvent {
        model_context_window: turn_context.client.get_model_context_window(),
        collaboration_mode_kind: turn_context.collaboration_mode_kind,
        collaboration_mode_kind: turn_context.collaboration_mode.mode,
    });
    sess.send_event(&turn_context, start_event).await;
    run_compact_task_inner(sess.clone(), turn_context, input).await;
@@ -335,7 +335,10 @@ async fn drain_to_completed(
    turn_context: &TurnContext,
    prompt: &Prompt,
) -> CodexResult<()> {
    let mut client_session = turn_context.client.new_session();
    let mut client_session = turn_context.client.new_session(
        turn_context.turn_metadata_header.clone(),
        Some(turn_context.cwd.clone()),
    );
    let mut stream = client_session.stream(prompt).await?;
    loop {
        let maybe_event = stream.next().await;

@@ -22,7 +22,7 @@ pub(crate) async fn run_inline_remote_auto_compact_task(
pub(crate) async fn run_remote_compact_task(sess: Arc<Session>, turn_context: Arc<TurnContext>) {
    let start_event = EventMsg::TurnStarted(TurnStartedEvent {
        model_context_window: turn_context.client.get_model_context_window(),
        collaboration_mode_kind: turn_context.collaboration_mode_kind,
        collaboration_mode_kind: turn_context.collaboration_mode.mode,
    });
    sess.send_event(&turn_context, start_event).await;

@@ -278,7 +278,7 @@ impl ConfigDocument {
        mutated
    }),
    ConfigEdit::SetModelPersonality { personality } => Ok(self.write_profile_value(
        &["model_personality"],
        &["personality"],
        personality.map(|personality| value(personality.to_string())),
    )),
    ConfigEdit::SetNoticeHideFullAccessWarning(acknowledged) => Ok(self.write_value(
@@ -724,7 +724,7 @@ impl ConfigEditsBuilder {
        self
    }

    pub fn set_model_personality(mut self, personality: Option<Personality>) -> Self {
    pub fn set_personality(mut self, personality: Option<Personality>) -> Self {
        self.edits
            .push(ConfigEdit::SetModelPersonality { personality });
        self

@@ -24,6 +24,7 @@ use crate::config_loader::ConfigRequirements;
use crate::config_loader::LoaderOverrides;
use crate::config_loader::McpServerIdentity;
use crate::config_loader::McpServerRequirement;
use crate::config_loader::ResidencyRequirement;
use crate::config_loader::Sourced;
use crate::config_loader::load_config_layers_state;
use crate::features::Feature;
@@ -57,7 +58,6 @@ use codex_protocol::openai_models::ReasoningEffort;
use codex_rmcp_client::OAuthCredentialsStoreMode;
use codex_utils_absolute_path::AbsolutePathBuf;
use codex_utils_absolute_path::AbsolutePathBufGuard;
use dirs::home_dir;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
@@ -134,13 +134,18 @@ pub struct Config {
    pub model_provider: ModelProviderInfo,

    /// Optionally specify the personality of the model
    pub model_personality: Option<Personality>,
    pub personality: Option<Personality>,

    /// Approval policy for executing commands.
    pub approval_policy: Constrained<AskForApproval>,

    pub sandbox_policy: Constrained<SandboxPolicy>,

    /// enforce_residency means web traffic cannot be routed outside of a
    /// particular geography. HTTP clients should direct their requests
    /// using backend-specific headers or URLs to enforce this.
    pub enforce_residency: Constrained<Option<ResidencyRequirement>>,

    /// True if the user passed in an override or set a value in config.toml
    /// for either of approval_policy or sandbox_mode.
    pub did_user_set_custom_approval_policy_or_sandbox_mode: bool,
@@ -367,7 +372,7 @@ pub struct ConfigBuilder {
    cli_overrides: Option<Vec<(String, TomlValue)>>,
    harness_overrides: Option<ConfigOverrides>,
    loader_overrides: Option<LoaderOverrides>,
    cloud_requirements: Option<CloudRequirementsLoader>,
    cloud_requirements: CloudRequirementsLoader,
    fallback_cwd: Option<PathBuf>,
}

@@ -393,7 +398,7 @@ impl ConfigBuilder {
    }

    pub fn cloud_requirements(mut self, cloud_requirements: CloudRequirementsLoader) -> Self {
        self.cloud_requirements = Some(cloud_requirements);
        self.cloud_requirements = cloud_requirements;
        self
    }

@@ -524,7 +529,7 @@ pub async fn load_config_as_toml_with_cli_overrides(
        Some(cwd.clone()),
        &cli_overrides,
        LoaderOverrides::default(),
        None,
        CloudRequirementsLoader::default(),
    )
    .await?;

@@ -628,7 +633,7 @@ pub async fn load_global_mcp_servers(
        cwd,
        &cli_overrides,
        LoaderOverrides::default(),
        None,
        CloudRequirementsLoader::default(),
    )
    .await?;
    let merged_toml = config_layer_stack.effective_config();
@@ -907,9 +912,8 @@ pub struct ConfigToml {
    /// Override to force-enable reasoning summaries for the configured model.
    pub model_supports_reasoning_summaries: Option<bool>,

    /// EXPERIMENTAL
    /// Optionally specify a personality for the model
    pub model_personality: Option<Personality>,
    pub personality: Option<Personality>,

    /// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
    pub chatgpt_base_url: Option<String>,
@@ -1188,7 +1192,7 @@ pub struct ConfigOverrides {
    pub codex_linux_sandbox_exe: Option<PathBuf>,
    pub base_instructions: Option<String>,
    pub developer_instructions: Option<String>,
    pub model_personality: Option<Personality>,
    pub personality: Option<Personality>,
    pub compact_prompt: Option<String>,
    pub include_apply_patch_tool: Option<bool>,
    pub show_raw_agent_reasoning: Option<bool>,
@@ -1246,11 +1250,15 @@ fn resolve_web_search_mode(

pub(crate) fn resolve_web_search_mode_for_turn(
    explicit_mode: Option<WebSearchMode>,
    is_azure_responses_endpoint: bool,
    sandbox_policy: &SandboxPolicy,
) -> WebSearchMode {
    if let Some(mode) = explicit_mode {
        return mode;
    }
    if is_azure_responses_endpoint {
        return WebSearchMode::Disabled;
    }
    if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
        WebSearchMode::Live
    } else {
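
The hunk above is truncated at the hunk boundary, so the fall-through arm is not visible; the tests further down imply it returns WebSearchMode::Cached. A sketch of the full resolution order, assembled from the diff and its tests:

    // Resolution order inferred from the hunk and the tests below; the
    // Cached fall-through is an inference from the "defaults to cached" test.
    fn resolve_web_search_mode_sketch(
        explicit_mode: Option<WebSearchMode>,
        is_azure_responses_endpoint: bool,
        sandbox_policy: &SandboxPolicy,
    ) -> WebSearchMode {
        if let Some(mode) = explicit_mode {
            return mode; // an explicit setting always wins
        }
        if is_azure_responses_endpoint {
            return WebSearchMode::Disabled; // Azure Responses endpoints opt out
        }
        if matches!(sandbox_policy, SandboxPolicy::DangerFullAccess) {
            WebSearchMode::Live
        } else {
            WebSearchMode::Cached
        }
    }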
@@ -1291,7 +1299,7 @@ impl Config {
|
||||
codex_linux_sandbox_exe,
|
||||
base_instructions,
|
||||
developer_instructions,
|
||||
model_personality,
|
||||
personality,
|
||||
compact_prompt,
|
||||
include_apply_patch_tool: include_apply_patch_tool_override,
|
||||
show_raw_agent_reasoning,
|
||||
@@ -1488,9 +1496,14 @@ impl Config {
|
||||
Self::try_read_non_empty_file(model_instructions_path, "model instructions file")?;
|
||||
let base_instructions = base_instructions.or(file_base_instructions);
|
||||
let developer_instructions = developer_instructions.or(cfg.developer_instructions);
|
||||
let model_personality = model_personality
|
||||
.or(config_profile.model_personality)
|
||||
.or(cfg.model_personality);
|
||||
let personality = personality
|
||||
.or(config_profile.personality)
|
||||
.or(cfg.personality)
|
||||
.or_else(|| {
|
||||
features
|
||||
.enabled(Feature::Personality)
|
||||
.then_some(Personality::Friendly)
|
||||
});
|
||||
|
||||
let experimental_compact_prompt_path = config_profile
|
||||
.experimental_compact_prompt_file
|
||||
@@ -1513,6 +1526,7 @@ impl Config {
|
||||
sandbox_policy: mut constrained_sandbox_policy,
|
||||
mcp_servers,
|
||||
exec_policy: _,
|
||||
enforce_residency,
|
||||
} = requirements;
|
||||
|
||||
constrained_approval_policy
|
||||
@@ -1535,13 +1549,14 @@ impl Config {
|
||||
cwd: resolved_cwd,
|
||||
approval_policy: constrained_approval_policy,
|
||||
sandbox_policy: constrained_sandbox_policy,
|
||||
enforce_residency,
|
||||
did_user_set_custom_approval_policy_or_sandbox_mode,
|
||||
forced_auto_mode_downgraded_on_windows,
|
||||
shell_environment_policy,
|
||||
notify: cfg.notify,
|
||||
user_instructions,
|
||||
base_instructions,
|
||||
model_personality,
|
||||
personality,
|
||||
developer_instructions,
|
||||
compact_prompt,
|
||||
// The config.toml omits "_mode" because it's a config file. However, "_mode"
|
||||
@@ -1749,27 +1764,12 @@ fn toml_uses_deprecated_instructions_file(value: &TomlValue) -> bool {
|
||||
/// specified by the `CODEX_HOME` environment variable. If not set, defaults to
|
||||
/// `~/.codex`.
|
||||
///
|
||||
/// - If `CODEX_HOME` is set, the value will be canonicalized and this
|
||||
/// function will Err if the path does not exist.
|
||||
/// - If `CODEX_HOME` is set, the value must exist and be a directory. The
|
||||
/// value will be canonicalized and this function will Err otherwise.
|
||||
/// - If `CODEX_HOME` is not set, this function does not verify that the
|
||||
/// directory exists.
|
||||
pub fn find_codex_home() -> std::io::Result<PathBuf> {
|
||||
// Honor the `CODEX_HOME` environment variable when it is set to allow users
|
||||
// (and tests) to override the default location.
|
||||
if let Ok(val) = std::env::var("CODEX_HOME")
|
||||
&& !val.is_empty()
|
||||
{
|
||||
return PathBuf::from(val).canonicalize();
|
||||
}
|
||||
|
||||
let mut p = home_dir().ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
"Could not find home directory",
|
||||
)
|
||||
})?;
|
||||
p.push(".codex");
|
||||
Ok(p)
|
||||
codex_utils_home_dir::find_codex_home()
|
||||
}
|
||||
|
||||
/// Returns the path to the folder where Codex logs are stored. Does not verify
|
||||
@@ -2347,14 +2347,14 @@ trust_level = "trusted"
|
||||
|
||||
#[test]
|
||||
fn web_search_mode_for_turn_defaults_to_cached_when_unset() {
|
||||
let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::ReadOnly);
|
||||
let mode = resolve_web_search_mode_for_turn(None, false, &SandboxPolicy::ReadOnly);
|
||||
|
||||
assert_eq!(mode, WebSearchMode::Cached);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_mode_for_turn_defaults_to_live_for_danger_full_access() {
|
||||
let mode = resolve_web_search_mode_for_turn(None, &SandboxPolicy::DangerFullAccess);
|
||||
let mode = resolve_web_search_mode_for_turn(None, false, &SandboxPolicy::DangerFullAccess);
|
||||
|
||||
assert_eq!(mode, WebSearchMode::Live);
|
||||
}
|
||||
@@ -2363,12 +2363,20 @@ trust_level = "trusted"
|
||||
fn web_search_mode_for_turn_prefers_explicit_value() {
|
||||
let mode = resolve_web_search_mode_for_turn(
|
||||
Some(WebSearchMode::Cached),
|
||||
false,
|
||||
&SandboxPolicy::DangerFullAccess,
|
||||
);
|
||||
|
||||
assert_eq!(mode, WebSearchMode::Cached);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_search_mode_for_turn_disables_for_azure_responses_endpoint() {
|
||||
let mode = resolve_web_search_mode_for_turn(None, true, &SandboxPolicy::DangerFullAccess);
|
||||
|
||||
assert_eq!(mode, WebSearchMode::Disabled);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn profile_legacy_toggles_override_base() -> std::io::Result<()> {
|
||||
let codex_home = TempDir::new()?;
|
||||
@@ -2631,9 +2639,14 @@ profile = "project"
|
||||
};
|
||||
|
||||
let cwd = AbsolutePathBuf::try_from(codex_home.path())?;
|
||||
let config_layer_stack =
|
||||
load_config_layers_state(codex_home.path(), Some(cwd), &Vec::new(), overrides, None)
|
||||
.await?;
|
||||
let config_layer_stack = load_config_layers_state(
|
||||
codex_home.path(),
|
||||
Some(cwd),
|
||||
&Vec::new(),
|
||||
overrides,
|
||||
CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
let cfg = deserialize_config_toml_with_base(
|
||||
config_layer_stack.effective_config(),
|
||||
codex_home.path(),
|
||||
@@ -2760,7 +2773,7 @@ profile = "project"
|
||||
Some(cwd),
|
||||
&[("model".to_string(), TomlValue::String("cli".to_string()))],
|
||||
overrides,
|
||||
None,
|
||||
CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -3770,6 +3783,7 @@ model_verbosity = "high"
|
||||
model_provider: fixture.openai_provider.clone(),
|
||||
approval_policy: Constrained::allow_any(AskForApproval::Never),
|
||||
sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
|
||||
enforce_residency: Constrained::allow_any(None),
|
||||
did_user_set_custom_approval_policy_or_sandbox_mode: true,
|
||||
forced_auto_mode_downgraded_on_windows: false,
|
||||
shell_environment_policy: ShellEnvironmentPolicy::default(),
|
||||
@@ -3797,7 +3811,7 @@ model_verbosity = "high"
|
||||
model_reasoning_summary: ReasoningSummary::Detailed,
|
||||
model_supports_reasoning_summaries: None,
|
||||
model_verbosity: None,
|
||||
model_personality: None,
|
||||
personality: Some(Personality::Friendly),
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
base_instructions: None,
|
||||
developer_instructions: None,
|
||||
@@ -3854,6 +3868,7 @@ model_verbosity = "high"
|
||||
model_provider: fixture.openai_chat_completions_provider.clone(),
|
||||
approval_policy: Constrained::allow_any(AskForApproval::UnlessTrusted),
|
||||
sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
|
||||
enforce_residency: Constrained::allow_any(None),
|
||||
did_user_set_custom_approval_policy_or_sandbox_mode: true,
|
||||
forced_auto_mode_downgraded_on_windows: false,
|
||||
shell_environment_policy: ShellEnvironmentPolicy::default(),
|
||||
@@ -3881,7 +3896,7 @@ model_verbosity = "high"
|
||||
model_reasoning_summary: ReasoningSummary::default(),
|
||||
model_supports_reasoning_summaries: None,
|
||||
model_verbosity: None,
|
||||
model_personality: None,
|
||||
personality: Some(Personality::Friendly),
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
base_instructions: None,
|
||||
developer_instructions: None,
|
||||
@@ -3953,6 +3968,7 @@ model_verbosity = "high"
|
||||
model_provider: fixture.openai_provider.clone(),
|
||||
approval_policy: Constrained::allow_any(AskForApproval::OnFailure),
|
||||
sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
|
||||
enforce_residency: Constrained::allow_any(None),
|
||||
did_user_set_custom_approval_policy_or_sandbox_mode: true,
|
||||
forced_auto_mode_downgraded_on_windows: false,
|
||||
shell_environment_policy: ShellEnvironmentPolicy::default(),
|
||||
@@ -3980,7 +3996,7 @@ model_verbosity = "high"
|
||||
model_reasoning_summary: ReasoningSummary::default(),
|
||||
model_supports_reasoning_summaries: None,
|
||||
model_verbosity: None,
|
||||
model_personality: None,
|
||||
personality: Some(Personality::Friendly),
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
base_instructions: None,
|
||||
developer_instructions: None,
|
||||
@@ -4038,6 +4054,7 @@ model_verbosity = "high"
|
||||
model_provider: fixture.openai_provider.clone(),
|
||||
approval_policy: Constrained::allow_any(AskForApproval::OnFailure),
|
||||
sandbox_policy: Constrained::allow_any(SandboxPolicy::new_read_only_policy()),
|
||||
enforce_residency: Constrained::allow_any(None),
|
||||
did_user_set_custom_approval_policy_or_sandbox_mode: true,
|
||||
forced_auto_mode_downgraded_on_windows: false,
|
||||
shell_environment_policy: ShellEnvironmentPolicy::default(),
|
||||
@@ -4065,7 +4082,7 @@ model_verbosity = "high"
|
||||
model_reasoning_summary: ReasoningSummary::Detailed,
|
||||
model_supports_reasoning_summaries: None,
|
||||
model_verbosity: Some(Verbosity::High),
|
||||
model_personality: None,
|
||||
personality: Some(Personality::Friendly),
|
||||
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
|
||||
base_instructions: None,
|
||||
developer_instructions: None,
|
||||
|
||||
@@ -25,7 +25,7 @@ pub struct ConfigProfile {
|
||||
pub model_reasoning_effort: Option<ReasoningEffort>,
|
||||
pub model_reasoning_summary: Option<ReasoningSummary>,
|
||||
pub model_verbosity: Option<Verbosity>,
|
||||
pub model_personality: Option<Personality>,
|
||||
pub personality: Option<Personality>,
|
||||
pub chatgpt_base_url: Option<String>,
|
||||
/// Optional path to a file containing model instructions.
|
||||
pub model_instructions_file: Option<AbsolutePathBuf>,
|
||||
|
||||
@@ -2,6 +2,7 @@ use super::CONFIG_TOML_FILE;
|
||||
use super::ConfigToml;
|
||||
use crate::config::edit::ConfigEdit;
|
||||
use crate::config::edit::ConfigEditsBuilder;
|
||||
use crate::config_loader::CloudRequirementsLoader;
|
||||
use crate::config_loader::ConfigLayerEntry;
|
||||
use crate::config_loader::ConfigLayerStack;
|
||||
use crate::config_loader::ConfigLayerStackOrdering;
|
||||
@@ -109,6 +110,7 @@ pub struct ConfigService {
|
||||
codex_home: PathBuf,
|
||||
cli_overrides: Vec<(String, TomlValue)>,
|
||||
loader_overrides: LoaderOverrides,
|
||||
cloud_requirements: CloudRequirementsLoader,
|
||||
}
|
||||
|
||||
impl ConfigService {
|
||||
@@ -116,11 +118,13 @@ impl ConfigService {
|
||||
codex_home: PathBuf,
|
||||
cli_overrides: Vec<(String, TomlValue)>,
|
||||
loader_overrides: LoaderOverrides,
|
||||
cloud_requirements: CloudRequirementsLoader,
|
||||
) -> Self {
|
||||
Self {
|
||||
codex_home,
|
||||
cli_overrides,
|
||||
loader_overrides,
|
||||
cloud_requirements,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -129,6 +133,7 @@ impl ConfigService {
|
||||
codex_home,
|
||||
cli_overrides: Vec::new(),
|
||||
loader_overrides: LoaderOverrides::default(),
|
||||
cloud_requirements: CloudRequirementsLoader::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -146,6 +151,7 @@ impl ConfigService {
|
||||
.cli_overrides(self.cli_overrides.clone())
|
||||
.loader_overrides(self.loader_overrides.clone())
|
||||
.fallback_cwd(Some(cwd.to_path_buf()))
|
||||
.cloud_requirements(self.cloud_requirements.clone())
|
||||
.build()
|
||||
.await
|
||||
.map_err(|err| {
|
||||
@@ -376,7 +382,7 @@ impl ConfigService {
|
||||
cwd,
|
||||
&self.cli_overrides,
|
||||
self.loader_overrides.clone(),
|
||||
-            None,
+            self.cloud_requirements.clone(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
@@ -814,6 +820,7 @@ remote_compaction = true
|
||||
managed_preferences_base64: None,
|
||||
macos_managed_config_requirements_base64: None,
|
||||
},
|
||||
CloudRequirementsLoader::default(),
|
||||
);
|
||||
|
||||
let response = service
|
||||
@@ -896,6 +903,7 @@ remote_compaction = true
|
||||
managed_preferences_base64: None,
|
||||
macos_managed_config_requirements_base64: None,
|
||||
},
|
||||
CloudRequirementsLoader::default(),
|
||||
);
|
||||
|
||||
let result = service
|
||||
@@ -1000,6 +1008,7 @@ remote_compaction = true
|
||||
managed_preferences_base64: None,
|
||||
macos_managed_config_requirements_base64: None,
|
||||
},
|
||||
CloudRequirementsLoader::default(),
|
||||
);
|
||||
|
||||
let error = service
|
||||
@@ -1048,6 +1057,7 @@ remote_compaction = true
|
||||
managed_preferences_base64: None,
|
||||
macos_managed_config_requirements_base64: None,
|
||||
},
|
||||
CloudRequirementsLoader::default(),
|
||||
);
|
||||
|
||||
let response = service
|
||||
@@ -1095,6 +1105,7 @@ remote_compaction = true
|
||||
managed_preferences_base64: None,
|
||||
macos_managed_config_requirements_base64: None,
|
||||
},
|
||||
CloudRequirementsLoader::default(),
|
||||
);
|
||||
|
||||
let result = service
|
||||
|
||||
@@ -32,6 +32,12 @@ impl fmt::Debug for CloudRequirementsLoader {
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for CloudRequirementsLoader {
|
||||
fn default() -> Self {
|
||||
Self::new(async { None })
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
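Aside: the `Default` impl above hinges on wrapping a one-shot future that may or may not yield requirements. A minimal sketch of that shape, with names and bounds assumed (the real `CloudRequirementsLoader` is shared and cloneable; this toy version consumes itself on `get`):

```rust
use std::future::Future;
use std::pin::Pin;

// Hypothetical stand-in for CloudRequirementsLoader: it owns a boxed one-shot
// future, and `Default` simply resolves to "no cloud requirements".
struct Loader<T> {
    fut: Pin<Box<dyn Future<Output = Option<T>> + Send>>,
}

impl<T> Loader<T> {
    fn new(fut: impl Future<Output = Option<T>> + Send + 'static) -> Self {
        Self { fut: Box::pin(fut) }
    }

    async fn get(self) -> Option<T> {
        self.fut.await
    }
}

impl<T: Send + 'static> Default for Loader<T> {
    fn default() -> Self {
        // Default = "nothing to enforce", mirroring `Self::new(async { None })` above.
        Self::new(async { None })
    }
}
```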
|
||||
|
||||
@@ -52,6 +52,7 @@ pub struct ConfigRequirements {
|
||||
pub sandbox_policy: Constrained<SandboxPolicy>,
|
||||
pub mcp_servers: Option<Sourced<BTreeMap<String, McpServerRequirement>>>,
|
||||
pub(crate) exec_policy: Option<Sourced<RequirementsExecPolicy>>,
|
||||
pub enforce_residency: Constrained<Option<ResidencyRequirement>>,
|
||||
}
|
||||
|
||||
impl Default for ConfigRequirements {
|
||||
@@ -61,6 +62,7 @@ impl Default for ConfigRequirements {
|
||||
sandbox_policy: Constrained::allow_any(SandboxPolicy::ReadOnly),
|
||||
mcp_servers: None,
|
||||
exec_policy: None,
|
||||
enforce_residency: Constrained::allow_any(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -84,6 +86,7 @@ pub struct ConfigRequirementsToml {
|
||||
pub allowed_sandbox_modes: Option<Vec<SandboxModeRequirement>>,
|
||||
pub mcp_servers: Option<BTreeMap<String, McpServerRequirement>>,
|
||||
pub rules: Option<RequirementsExecPolicyToml>,
|
||||
pub enforce_residency: Option<ResidencyRequirement>,
|
||||
}
|
||||
|
||||
/// Value paired with the requirement source it came from, for better error
|
||||
@@ -114,6 +117,7 @@ pub struct ConfigRequirementsWithSources {
|
||||
pub allowed_sandbox_modes: Option<Sourced<Vec<SandboxModeRequirement>>>,
|
||||
pub mcp_servers: Option<Sourced<BTreeMap<String, McpServerRequirement>>>,
|
||||
pub rules: Option<Sourced<RequirementsExecPolicyToml>>,
|
||||
pub enforce_residency: Option<Sourced<ResidencyRequirement>>,
|
||||
}
|
||||
|
||||
impl ConfigRequirementsWithSources {
|
||||
@@ -146,6 +150,7 @@ impl ConfigRequirementsWithSources {
|
||||
allowed_sandbox_modes,
|
||||
mcp_servers,
|
||||
rules,
|
||||
enforce_residency,
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -156,12 +161,14 @@ impl ConfigRequirementsWithSources {
|
||||
allowed_sandbox_modes,
|
||||
mcp_servers,
|
||||
rules,
|
||||
enforce_residency,
|
||||
} = self;
|
||||
ConfigRequirementsToml {
|
||||
allowed_approval_policies: allowed_approval_policies.map(|sourced| sourced.value),
|
||||
allowed_sandbox_modes: allowed_sandbox_modes.map(|sourced| sourced.value),
|
||||
mcp_servers: mcp_servers.map(|sourced| sourced.value),
|
||||
rules: rules.map(|sourced| sourced.value),
|
||||
enforce_residency: enforce_residency.map(|sourced| sourced.value),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -193,12 +200,19 @@ impl From<SandboxMode> for SandboxModeRequirement {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum ResidencyRequirement {
|
||||
Us,
|
||||
}
|
||||
|
||||
impl ConfigRequirementsToml {
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.allowed_approval_policies.is_none()
|
||||
&& self.allowed_sandbox_modes.is_none()
|
||||
&& self.mcp_servers.is_none()
|
||||
&& self.rules.is_none()
|
||||
&& self.enforce_residency.is_none()
|
||||
}
|
||||
}
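Aside: given the `#[serde(rename_all = "lowercase")]` attribute above, the requirement deserializes from lowercase TOML. A self-contained round-trip sketch with the types mirrored locally (names assumed):

```rust
// Local mirror of the enum above; the lowercase rename maps TOML "us" onto Us.
#[derive(serde::Deserialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
enum ResidencyRequirement {
    Us,
}

#[derive(serde::Deserialize)]
struct Requirements {
    enforce_residency: Option<ResidencyRequirement>,
}

fn main() {
    let parsed: Requirements = toml::from_str(r#"enforce_residency = "us""#).unwrap();
    assert_eq!(parsed.enforce_residency, Some(ResidencyRequirement::Us));
}
```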
|
||||
|
||||
@@ -211,6 +225,7 @@ impl TryFrom<ConfigRequirementsWithSources> for ConfigRequirements {
|
||||
allowed_sandbox_modes,
|
||||
mcp_servers,
|
||||
rules,
|
||||
enforce_residency,
|
||||
} = toml;
|
||||
|
||||
let approval_policy: Constrained<AskForApproval> = match allowed_approval_policies {
|
||||
@@ -298,11 +313,33 @@ impl TryFrom<ConfigRequirementsWithSources> for ConfigRequirements {
|
||||
None => None,
|
||||
};
|
||||
|
||||
let enforce_residency: Constrained<Option<ResidencyRequirement>> = match enforce_residency {
|
||||
Some(Sourced {
|
||||
value: residency,
|
||||
source: requirement_source,
|
||||
}) => {
|
||||
let required = Some(residency);
|
||||
Constrained::new(required, move |candidate| {
|
||||
if candidate == &required {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ConstraintError::InvalidValue {
|
||||
field_name: "enforce_residency",
|
||||
candidate: format!("{candidate:?}"),
|
||||
allowed: format!("{required:?}"),
|
||||
requirement_source: requirement_source.clone(),
|
||||
})
|
||||
}
|
||||
})?
|
||||
}
|
||||
None => Constrained::allow_any(None),
|
||||
};
|
||||
Ok(ConfigRequirements {
|
||||
approval_policy,
|
||||
sandbox_policy,
|
||||
mcp_servers,
|
||||
exec_policy,
|
||||
enforce_residency,
|
||||
})
|
||||
}
|
||||
}
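Aside: a minimal sketch of the `Constrained` pattern driving this conversion, assuming a simplified `String` error in place of `ConstraintError` (the real type carries the field name, candidate, allowed values, and requirement source):

```rust
// Sketch: a value plus a predicate that decides whether a candidate may replace it.
struct Constrained<T> {
    value: T,
    check: Box<dyn Fn(&T) -> Result<(), String>>,
}

impl<T> Constrained<T> {
    /// No constraint: any candidate is accepted.
    fn allow_any(value: T) -> Self {
        Self { value, check: Box::new(|_| Ok(())) }
    }

    /// Constrained: the initial value must itself pass the check, hence the `?`
    /// after `Constrained::new(...)` in the code above.
    fn new(value: T, check: impl Fn(&T) -> Result<(), String> + 'static) -> Result<Self, String> {
        check(&value)?;
        Ok(Self { value, check: Box::new(check) })
    }

    fn value(&self) -> &T {
        &self.value
    }

    fn can_set(&self, candidate: &T) -> Result<(), String> {
        (self.check)(candidate)
    }
}
```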
|
||||
@@ -329,6 +366,7 @@ mod tests {
|
||||
allowed_sandbox_modes,
|
||||
mcp_servers,
|
||||
rules,
|
||||
enforce_residency,
|
||||
} = toml;
|
||||
ConfigRequirementsWithSources {
|
||||
allowed_approval_policies: allowed_approval_policies
|
||||
@@ -337,6 +375,8 @@ mod tests {
|
||||
.map(|value| Sourced::new(value, RequirementSource::Unknown)),
|
||||
mcp_servers: mcp_servers.map(|value| Sourced::new(value, RequirementSource::Unknown)),
|
||||
rules: rules.map(|value| Sourced::new(value, RequirementSource::Unknown)),
|
||||
enforce_residency: enforce_residency
|
||||
.map(|value| Sourced::new(value, RequirementSource::Unknown)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -350,6 +390,8 @@ mod tests {
|
||||
SandboxModeRequirement::WorkspaceWrite,
|
||||
SandboxModeRequirement::DangerFullAccess,
|
||||
];
|
||||
let enforce_residency = ResidencyRequirement::Us;
|
||||
let enforce_source = source.clone();
|
||||
|
||||
// Intentionally constructed without `..Default::default()` so adding a new field to
|
||||
// `ConfigRequirementsToml` forces this test to be updated.
|
||||
@@ -358,6 +400,7 @@ mod tests {
|
||||
allowed_sandbox_modes: Some(allowed_sandbox_modes.clone()),
|
||||
mcp_servers: None,
|
||||
rules: None,
|
||||
enforce_residency: Some(enforce_residency),
|
||||
};
|
||||
|
||||
target.merge_unset_fields(source.clone(), other);
|
||||
@@ -372,6 +415,7 @@ mod tests {
|
||||
allowed_sandbox_modes: Some(Sourced::new(allowed_sandbox_modes, source)),
|
||||
mcp_servers: None,
|
||||
rules: None,
|
||||
enforce_residency: Some(Sourced::new(enforce_residency, enforce_source)),
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -401,6 +445,7 @@ mod tests {
|
||||
allowed_sandbox_modes: None,
|
||||
mcp_servers: None,
|
||||
rules: None,
|
||||
enforce_residency: None,
|
||||
}
|
||||
);
|
||||
Ok(())
|
||||
@@ -438,6 +483,7 @@ mod tests {
|
||||
allowed_sandbox_modes: None,
|
||||
mcp_servers: None,
|
||||
rules: None,
|
||||
enforce_residency: None,
|
||||
}
|
||||
);
|
||||
Ok(())
|
||||
|
||||
@@ -25,6 +25,7 @@ use codex_protocol::config_types::TrustLevel;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use codex_utils_absolute_path::AbsolutePathBufGuard;
|
||||
use dunce::canonicalize as normalize_path;
|
||||
use serde::Deserialize;
|
||||
use std::io;
|
||||
use std::path::Path;
|
||||
@@ -36,6 +37,7 @@ pub use config_requirements::ConfigRequirementsToml;
|
||||
pub use config_requirements::McpServerIdentity;
|
||||
pub use config_requirements::McpServerRequirement;
|
||||
pub use config_requirements::RequirementSource;
|
||||
pub use config_requirements::ResidencyRequirement;
|
||||
pub use config_requirements::SandboxModeRequirement;
|
||||
pub use config_requirements::Sourced;
|
||||
pub use diagnostics::ConfigError;
|
||||
@@ -100,7 +102,7 @@ pub async fn load_config_layers_state(
|
||||
cwd: Option<AbsolutePathBuf>,
|
||||
cli_overrides: &[(String, TomlValue)],
|
||||
overrides: LoaderOverrides,
|
||||
-    cloud_requirements: Option<CloudRequirementsLoader>, // TODO(gt): Once exec and app-server are wired up, we can remove the option.
+    cloud_requirements: CloudRequirementsLoader,
|
||||
) -> io::Result<ConfigLayerStack> {
|
||||
let mut config_requirements_toml = ConfigRequirementsWithSources::default();
|
||||
|
||||
@@ -113,9 +115,7 @@ pub async fn load_config_layers_state(
|
||||
)
|
||||
.await?;
|
||||
|
||||
-    if let Some(loader) = cloud_requirements
-        && let Some(requirements) = loader.get().await
-    {
+    if let Some(requirements) = cloud_requirements.get().await {
|
||||
config_requirements_toml
|
||||
.merge_unset_fields(RequirementSource::CloudRequirements, requirements);
|
||||
}
|
||||
@@ -236,6 +236,7 @@ pub async fn load_config_layers_state(
|
||||
&cwd,
|
||||
&project_trust_context.project_root,
|
||||
&project_trust_context,
|
||||
codex_home,
|
||||
)
|
||||
.await?;
|
||||
layers.extend(project_layers);
|
||||
@@ -424,7 +425,9 @@ async fn load_requirements_from_legacy_scheme(
|
||||
/// empty array, which indicates that root detection should be disabled).
|
||||
/// - Returns an error if `project_root_markers` is specified but is not an
|
||||
/// array of strings.
|
||||
-fn project_root_markers_from_config(config: &TomlValue) -> io::Result<Option<Vec<String>>> {
+pub(crate) fn project_root_markers_from_config(
+    config: &TomlValue,
+) -> io::Result<Option<Vec<String>>> {
|
||||
let Some(table) = config.as_table() else {
|
||||
return Ok(None);
|
||||
};
|
||||
@@ -453,7 +456,7 @@ fn project_root_markers_from_config(config: &TomlValue) -> io::Result<Option<Vec
|
||||
Ok(Some(markers))
|
||||
}
|
||||
|
||||
-fn default_project_root_markers() -> Vec<String> {
+pub(crate) fn default_project_root_markers() -> Vec<String> {
|
||||
DEFAULT_PROJECT_ROOT_MARKERS
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
@@ -671,7 +674,11 @@ async fn load_project_layers(
|
||||
cwd: &AbsolutePathBuf,
|
||||
project_root: &AbsolutePathBuf,
|
||||
trust_context: &ProjectTrustContext,
|
||||
codex_home: &Path,
|
||||
) -> io::Result<Vec<ConfigLayerEntry>> {
|
||||
let codex_home_abs = AbsolutePathBuf::from_absolute_path(codex_home)?;
|
||||
let codex_home_normalized =
|
||||
normalize_path(codex_home_abs.as_path()).unwrap_or_else(|_| codex_home_abs.to_path_buf());
|
||||
let mut dirs = cwd
|
||||
.as_path()
|
||||
.ancestors()
|
||||
@@ -702,6 +709,11 @@ async fn load_project_layers(
|
||||
let layer_dir = AbsolutePathBuf::from_absolute_path(dir)?;
|
||||
let decision = trust_context.decision_for_dir(&layer_dir);
|
||||
let dot_codex_abs = AbsolutePathBuf::from_absolute_path(&dot_codex)?;
|
||||
let dot_codex_normalized =
|
||||
normalize_path(dot_codex_abs.as_path()).unwrap_or_else(|_| dot_codex_abs.to_path_buf());
|
||||
if dot_codex_abs == codex_home_abs || dot_codex_normalized == codex_home_normalized {
|
||||
continue;
|
||||
}
|
||||
let config_file = dot_codex_abs.join(CONFIG_TOML_FILE)?;
|
||||
match tokio::fs::read_to_string(&config_file).await {
|
||||
Ok(contents) => {
|
||||
|
||||
@@ -69,7 +69,7 @@ async fn returns_config_error_for_invalid_user_config_toml() {
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await
|
||||
.expect_err("expected error");
|
||||
@@ -99,7 +99,7 @@ async fn returns_config_error_for_invalid_managed_config_toml() {
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
overrides,
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await
|
||||
.expect_err("expected error");
|
||||
@@ -188,7 +188,7 @@ extra = true
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
overrides,
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await
|
||||
.expect("load config");
|
||||
@@ -225,7 +225,7 @@ async fn returns_empty_when_all_layers_missing() {
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
overrides,
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await
|
||||
.expect("load layers");
|
||||
@@ -323,7 +323,7 @@ flag = false
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
overrides,
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await
|
||||
.expect("load config");
|
||||
@@ -363,7 +363,7 @@ allowed_sandbox_modes = ["read-only"]
|
||||
),
|
||||
),
|
||||
},
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -424,7 +424,7 @@ allowed_approval_policies = ["never"]
|
||||
),
|
||||
),
|
||||
},
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -451,6 +451,7 @@ async fn load_requirements_toml_produces_expected_constraints() -> anyhow::Resul
|
||||
&requirements_file,
|
||||
r#"
|
||||
allowed_approval_policies = ["never", "on-request"]
|
||||
enforce_residency = "us"
|
||||
"#,
|
||||
)
|
||||
.await?;
|
||||
@@ -465,7 +466,6 @@ allowed_approval_policies = ["never", "on-request"]
|
||||
.cloned(),
|
||||
Some(vec![AskForApproval::Never, AskForApproval::OnRequest])
|
||||
);
|
||||
|
||||
let config_requirements: ConfigRequirements = config_requirements_toml.try_into()?;
|
||||
assert_eq!(
|
||||
config_requirements.approval_policy.value(),
|
||||
@@ -480,6 +480,10 @@ allowed_approval_policies = ["never", "on-request"]
|
||||
.can_set(&AskForApproval::OnFailure)
|
||||
.is_err()
|
||||
);
|
||||
assert_eq!(
|
||||
config_requirements.enforce_residency.value(),
|
||||
Some(crate::config_loader::ResidencyRequirement::Us)
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -503,6 +507,7 @@ allowed_approval_policies = ["on-request"]
|
||||
allowed_sandbox_modes: None,
|
||||
mcp_servers: None,
|
||||
rules: None,
|
||||
enforce_residency: None,
|
||||
},
|
||||
);
|
||||
load_requirements_toml(&mut config_requirements_toml, &requirements_file).await?;
|
||||
@@ -537,6 +542,7 @@ async fn load_config_layers_includes_cloud_requirements() -> anyhow::Result<()>
|
||||
allowed_sandbox_modes: None,
|
||||
mcp_servers: None,
|
||||
rules: None,
|
||||
enforce_residency: None,
|
||||
};
|
||||
let expected = requirements.clone();
|
||||
let cloud_requirements = CloudRequirementsLoader::new(async move { Some(requirements) });
|
||||
@@ -546,7 +552,7 @@ async fn load_config_layers_includes_cloud_requirements() -> anyhow::Result<()>
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
-        Some(cloud_requirements),
+        cloud_requirements,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -599,7 +605,7 @@ async fn project_layers_prefer_closest_cwd() -> std::io::Result<()> {
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -731,7 +737,7 @@ async fn project_layer_is_added_when_dot_codex_exists_without_config_toml() -> s
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -755,6 +761,103 @@ async fn project_layer_is_added_when_dot_codex_exists_without_config_toml() -> s
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn codex_home_is_not_loaded_as_project_layer_from_home_dir() -> std::io::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let home_dir = tmp.path().join("home");
|
||||
let codex_home = home_dir.join(".codex");
|
||||
tokio::fs::create_dir_all(&codex_home).await?;
|
||||
tokio::fs::write(codex_home.join(CONFIG_TOML_FILE), "foo = \"user\"\n").await?;
|
||||
|
||||
let cwd = AbsolutePathBuf::from_absolute_path(&home_dir)?;
|
||||
let layers = load_config_layers_state(
|
||||
&codex_home,
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let project_layers: Vec<_> = layers
|
||||
.get_layers(
|
||||
super::ConfigLayerStackOrdering::HighestPrecedenceFirst,
|
||||
true,
|
||||
)
|
||||
.into_iter()
|
||||
.filter(|layer| matches!(layer.name, super::ConfigLayerSource::Project { .. }))
|
||||
.collect();
|
||||
let expected: Vec<&ConfigLayerEntry> = Vec::new();
|
||||
assert_eq!(expected, project_layers);
|
||||
assert_eq!(
|
||||
layers.effective_config().get("foo"),
|
||||
Some(&TomlValue::String("user".to_string()))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn codex_home_within_project_tree_is_not_double_loaded() -> std::io::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
let project_root = tmp.path().join("project");
|
||||
let nested = project_root.join("child");
|
||||
let project_dot_codex = project_root.join(".codex");
|
||||
let nested_dot_codex = nested.join(".codex");
|
||||
|
||||
tokio::fs::create_dir_all(&nested_dot_codex).await?;
|
||||
tokio::fs::create_dir_all(project_root.join(".git")).await?;
|
||||
tokio::fs::write(nested_dot_codex.join(CONFIG_TOML_FILE), "foo = \"child\"\n").await?;
|
||||
|
||||
tokio::fs::create_dir_all(&project_dot_codex).await?;
|
||||
make_config_for_test(&project_dot_codex, &project_root, TrustLevel::Trusted, None).await?;
|
||||
let user_config_path = project_dot_codex.join(CONFIG_TOML_FILE);
|
||||
let user_config_contents = tokio::fs::read_to_string(&user_config_path).await?;
|
||||
tokio::fs::write(
|
||||
&user_config_path,
|
||||
format!("foo = \"user\"\n{user_config_contents}"),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let cwd = AbsolutePathBuf::from_absolute_path(&nested)?;
|
||||
let layers = load_config_layers_state(
|
||||
&project_dot_codex,
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let project_layers: Vec<_> = layers
|
||||
.get_layers(
|
||||
super::ConfigLayerStackOrdering::HighestPrecedenceFirst,
|
||||
true,
|
||||
)
|
||||
.into_iter()
|
||||
.filter(|layer| matches!(layer.name, super::ConfigLayerSource::Project { .. }))
|
||||
.collect();
|
||||
|
||||
let child_config: TomlValue = toml::from_str("foo = \"child\"\n").expect("parse child config");
|
||||
assert_eq!(
|
||||
vec![&ConfigLayerEntry {
|
||||
name: super::ConfigLayerSource::Project {
|
||||
dot_codex_folder: AbsolutePathBuf::from_absolute_path(&nested_dot_codex)?,
|
||||
},
|
||||
config: child_config.clone(),
|
||||
version: version_for_toml(&child_config),
|
||||
disabled_reason: None,
|
||||
}],
|
||||
project_layers
|
||||
);
|
||||
assert_eq!(
|
||||
layers.effective_config().get("foo"),
|
||||
Some(&TomlValue::String("child".to_string()))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<()> {
|
||||
let tmp = tempdir()?;
|
||||
@@ -791,7 +894,7 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
|
||||
Some(cwd.clone()),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
let project_layers_untrusted: Vec<_> = layers_untrusted
|
||||
@@ -829,7 +932,7 @@ async fn project_layers_disabled_when_untrusted_or_unknown() -> std::io::Result<
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
let project_layers_unknown: Vec<_> = layers_unknown
|
||||
@@ -890,7 +993,7 @@ async fn invalid_project_config_ignored_when_untrusted_or_unknown() -> std::io::
|
||||
Some(cwd.clone()),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
let project_layers: Vec<_> = layers
|
||||
@@ -946,7 +1049,7 @@ async fn cli_overrides_with_relative_paths_do_not_break_trust_check() -> std::io
|
||||
Some(cwd),
|
||||
&cli_overrides,
|
||||
LoaderOverrides::default(),
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -988,7 +1091,7 @@ async fn project_root_markers_supports_alternate_markers() -> std::io::Result<()
|
||||
Some(cwd),
|
||||
&[] as &[(String, TomlValue)],
|
||||
LoaderOverrides::default(),
|
||||
-        None,
+        CloudRequirementsLoader::default(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
||||
@@ -88,7 +88,7 @@ impl ContextManager {
|
||||
let model_info = turn_context.client.get_model_info();
|
||||
let personality = turn_context
|
||||
.personality
|
||||
-            .or(turn_context.client.config().model_personality);
+            .or(turn_context.client.config().personality);
|
||||
let base_instructions = model_info.get_model_instructions(personality);
|
||||
let base_tokens = i64::try_from(approx_token_count(&base_instructions)).unwrap_or(i64::MAX);
|
||||
|
||||
@@ -266,7 +266,7 @@ impl ContextManager {
|
||||
}
|
||||
|
||||
fn process_item(&self, item: &ResponseItem, policy: TruncationPolicy) -> ResponseItem {
|
||||
-        let policy_with_serialization_budget = policy.mul(1.2);
+        let policy_with_serialization_budget = policy * 1.2;
|
||||
match item {
|
||||
ResponseItem::FunctionCallOutput { call_id, output } => {
|
||||
let truncated =
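Aside (the excerpt cuts off here): the switch from `policy.mul(1.2)` to `policy * 1.2` implies an operator impl on the policy type. A hypothetical sketch of what such a `Mul<f64>` impl could look like; the `max_tokens` field is assumed, not taken from the source:

```rust
use std::ops::Mul;

// Assumed shape: a truncation budget that can be scaled by a serialization
// overhead factor, enabling the `policy * 1.2` syntax above.
#[derive(Clone, Copy)]
struct TruncationPolicy {
    max_tokens: usize,
}

impl Mul<f64> for TruncationPolicy {
    type Output = TruncationPolicy;

    fn mul(self, factor: f64) -> TruncationPolicy {
        TruncationPolicy {
            // Scale the budget and round back to a whole token count.
            max_tokens: (self.max_tokens as f64 * factor).round() as usize,
        }
    }
}
```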
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use crate::config_loader::ResidencyRequirement;
|
||||
use crate::spawn::CODEX_SANDBOX_ENV_VAR;
|
||||
use codex_client::CodexHttpClient;
|
||||
pub use codex_client::CodexRequestBuilder;
|
||||
use reqwest::header::HeaderMap;
|
||||
use reqwest::header::HeaderValue;
|
||||
use std::sync::LazyLock;
|
||||
use std::sync::Mutex;
|
||||
@@ -24,6 +26,7 @@ use std::sync::RwLock;
|
||||
pub static USER_AGENT_SUFFIX: LazyLock<Mutex<Option<String>>> = LazyLock::new(|| Mutex::new(None));
|
||||
pub const DEFAULT_ORIGINATOR: &str = "codex_cli_rs";
|
||||
pub const CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR: &str = "CODEX_INTERNAL_ORIGINATOR_OVERRIDE";
|
||||
pub const RESIDENCY_HEADER_NAME: &str = "x-openai-internal-codex-residency";
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Originator {
|
||||
@@ -31,6 +34,8 @@ pub struct Originator {
|
||||
pub header_value: HeaderValue,
|
||||
}
|
||||
static ORIGINATOR: LazyLock<RwLock<Option<Originator>>> = LazyLock::new(|| RwLock::new(None));
|
||||
static REQUIREMENTS_RESIDENCY: LazyLock<RwLock<Option<ResidencyRequirement>>> =
|
||||
LazyLock::new(|| RwLock::new(None));
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SetOriginatorError {
|
||||
@@ -74,6 +79,14 @@ pub fn set_default_originator(value: String) -> Result<(), SetOriginatorError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn set_default_client_residency_requirement(enforce_residency: Option<ResidencyRequirement>) {
|
||||
let Ok(mut guard) = REQUIREMENTS_RESIDENCY.write() else {
|
||||
tracing::warn!("Failed to acquire requirements residency lock");
|
||||
return;
|
||||
};
|
||||
*guard = enforce_residency;
|
||||
}
|
||||
|
||||
pub fn originator() -> Originator {
|
||||
if let Ok(guard) = ORIGINATOR.read()
|
||||
&& let Some(originator) = guard.as_ref()
|
||||
@@ -166,10 +179,17 @@ pub fn create_client() -> CodexHttpClient {
|
||||
}
|
||||
|
||||
pub fn build_reqwest_client() -> reqwest::Client {
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert("originator", originator().header_value);
|
||||
if let Ok(guard) = REQUIREMENTS_RESIDENCY.read()
|
||||
&& let Some(requirement) = guard.as_ref()
|
||||
&& !headers.contains_key(RESIDENCY_HEADER_NAME)
|
||||
{
|
||||
let value = match requirement {
|
||||
ResidencyRequirement::Us => HeaderValue::from_static("us"),
|
||||
};
|
||||
headers.insert(RESIDENCY_HEADER_NAME, value);
|
||||
}
|
||||
let ua = get_codex_user_agent();
|
||||
|
||||
let mut builder = reqwest::Client::builder()
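Aside: a reduced sketch of how the residency header ends up on every request via `default_headers`; the header name and value mirror the constants above, everything else (globals, user agent, error handling) is simplified away:

```rust
use reqwest::header::{HeaderMap, HeaderValue};

fn client_with_residency() -> reqwest::Client {
    let mut headers = HeaderMap::new();
    // Same header the RESIDENCY_HEADER_NAME constant names above.
    headers.insert(
        "x-openai-internal-codex-residency",
        HeaderValue::from_static("us"),
    );
    // Default headers are attached to every request this client sends.
    reqwest::Client::builder()
        .default_headers(headers)
        .build()
        .expect("reqwest client")
}
```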
|
||||
@@ -214,6 +234,8 @@ mod tests {
|
||||
async fn test_create_client_sets_default_headers() {
|
||||
skip_if_no_network!();
|
||||
|
||||
set_default_client_residency_requirement(Some(ResidencyRequirement::Us));
|
||||
|
||||
use wiremock::Mock;
|
||||
use wiremock::MockServer;
|
||||
use wiremock::ResponseTemplate;
|
||||
@@ -256,6 +278,13 @@ mod tests {
|
||||
.get("user-agent")
|
||||
.expect("user-agent header missing");
|
||||
assert_eq!(ua_header.to_str().unwrap(), expected_ua);
|
||||
|
||||
let residency_header = headers
|
||||
.get(RESIDENCY_HEADER_NAME)
|
||||
.expect("residency header missing");
|
||||
assert_eq!(residency_header.to_str().unwrap(), "us");
|
||||
|
||||
set_default_client_residency_requirement(None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -28,6 +28,7 @@ impl EnvironmentContext {
|
||||
cwd,
|
||||
// should compare all fields except shell
|
||||
shell: _,
|
||||
..
|
||||
} = other;
|
||||
|
||||
self.cwd == *cwd
|
||||
@@ -66,6 +67,7 @@ impl EnvironmentContext {
|
||||
|
||||
let shell_name = self.shell.name();
|
||||
lines.push(format!(" <shell>{shell_name}</shell>"));
|
||||
|
||||
lines.push(ENVIRONMENT_CONTEXT_CLOSE_TAG.to_string());
|
||||
lines.join("\n")
|
||||
}
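Aside: a sketch of the tag-per-field rendering used here, with the open/close tag constants inlined (assumption: they are the literal `<environment_context>` tags):

```rust
// Each field becomes one indented XML-style element inside the context block.
fn render_shell(shell_name: &str) -> String {
    let mut lines = vec!["<environment_context>".to_string()];
    lines.push(format!("  <shell>{shell_name}</shell>"));
    lines.push("</environment_context>".to_string());
    lines.join("\n")
}
```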
|
||||
|
||||
@@ -126,7 +126,7 @@ pub enum CodexErr {
|
||||
QuotaExceeded,
|
||||
|
||||
#[error(
|
||||
"To use Codex with your ChatGPT plan, upgrade to Plus: https://openai.com/chatgpt/pricing."
|
||||
"To use Codex with your ChatGPT plan, upgrade to Plus: https://chatgpt.com/explore/plus."
|
||||
)]
|
||||
UsageNotIncluded,
|
||||
|
||||
@@ -360,13 +360,22 @@ pub struct UsageLimitReachedError {
|
||||
pub(crate) plan_type: Option<PlanType>,
|
||||
pub(crate) resets_at: Option<DateTime<Utc>>,
|
||||
pub(crate) rate_limits: Option<RateLimitSnapshot>,
|
||||
pub(crate) promo_message: Option<String>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for UsageLimitReachedError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
if let Some(promo_message) = &self.promo_message {
|
||||
return write!(
|
||||
f,
|
||||
"You've hit your usage limit. {promo_message},{}",
|
||||
retry_suffix_after_or(self.resets_at.as_ref())
|
||||
);
|
||||
}
|
||||
|
||||
let message = match self.plan_type.as_ref() {
|
||||
Some(PlanType::Known(KnownPlan::Plus)) => format!(
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit https://chatgpt.com/codex/settings/usage to purchase more credits{}",
|
||||
"You've hit your usage limit. Upgrade to Pro (https://chatgpt.com/explore/pro), visit https://chatgpt.com/codex/settings/usage to purchase more credits{}",
|
||||
retry_suffix_after_or(self.resets_at.as_ref())
|
||||
),
|
||||
Some(PlanType::Known(KnownPlan::Team)) | Some(PlanType::Known(KnownPlan::Business)) => {
|
||||
@@ -377,7 +386,7 @@ impl std::fmt::Display for UsageLimitReachedError {
|
||||
}
|
||||
Some(PlanType::Known(KnownPlan::Free)) | Some(PlanType::Known(KnownPlan::Go)) => {
|
||||
format!(
|
||||
"You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing),{}",
|
||||
"You've hit your usage limit. Upgrade to Plus to continue using Codex (https://chatgpt.com/explore/plus),{}",
|
||||
retry_suffix_after_or(self.resets_at.as_ref())
|
||||
)
|
||||
}
|
||||
@@ -670,10 +679,11 @@ mod tests {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Plus)),
|
||||
resets_at: None,
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit https://chatgpt.com/codex/settings/usage to purchase more credits or try again later."
|
||||
"You've hit your usage limit. Upgrade to Pro (https://chatgpt.com/explore/pro), visit https://chatgpt.com/codex/settings/usage to purchase more credits or try again later."
|
||||
);
|
||||
}
|
||||
|
||||
@@ -816,10 +826,11 @@ mod tests {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Free)),
|
||||
resets_at: None,
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing), or try again later."
|
||||
"You've hit your usage limit. Upgrade to Plus to continue using Codex (https://chatgpt.com/explore/plus), or try again later."
|
||||
);
|
||||
}
|
||||
|
||||
@@ -829,10 +840,11 @@ mod tests {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Go)),
|
||||
resets_at: None,
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"You've hit your usage limit. Upgrade to Plus to continue using Codex (https://openai.com/chatgpt/pricing), or try again later."
|
||||
"You've hit your usage limit. Upgrade to Plus to continue using Codex (https://chatgpt.com/explore/plus), or try again later."
|
||||
);
|
||||
}
|
||||
|
||||
@@ -842,6 +854,7 @@ mod tests {
|
||||
plan_type: None,
|
||||
resets_at: None,
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
@@ -859,6 +872,7 @@ mod tests {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Team)),
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
let expected = format!(
|
||||
"You've hit your usage limit. To get more access now, send a request to your admin or try again at {expected_time}."
|
||||
@@ -873,6 +887,7 @@ mod tests {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Business)),
|
||||
resets_at: None,
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
@@ -886,6 +901,7 @@ mod tests {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Enterprise)),
|
||||
resets_at: None,
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
@@ -903,6 +919,7 @@ mod tests {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Pro)),
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
let expected = format!(
|
||||
"You've hit your usage limit. Visit https://chatgpt.com/codex/settings/usage to purchase more credits or try again at {expected_time}."
|
||||
@@ -921,6 +938,7 @@ mod tests {
|
||||
plan_type: None,
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
let expected = format!("You've hit your usage limit. Try again at {expected_time}.");
|
||||
assert_eq!(err.to_string(), expected);
|
||||
@@ -972,9 +990,10 @@ mod tests {
|
||||
plan_type: Some(PlanType::Known(KnownPlan::Plus)),
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
let expected = format!(
|
||||
"You've hit your usage limit. Upgrade to Pro (https://openai.com/chatgpt/pricing), visit https://chatgpt.com/codex/settings/usage to purchase more credits or try again at {expected_time}."
|
||||
"You've hit your usage limit. Upgrade to Pro (https://chatgpt.com/explore/pro), visit https://chatgpt.com/codex/settings/usage to purchase more credits or try again at {expected_time}."
|
||||
);
|
||||
assert_eq!(err.to_string(), expected);
|
||||
});
|
||||
@@ -991,6 +1010,7 @@ mod tests {
|
||||
plan_type: None,
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
let expected = format!("You've hit your usage limit. Try again at {expected_time}.");
|
||||
assert_eq!(err.to_string(), expected);
|
||||
@@ -1007,9 +1027,31 @@ mod tests {
|
||||
plan_type: None,
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: None,
|
||||
};
|
||||
let expected = format!("You've hit your usage limit. Try again at {expected_time}.");
|
||||
assert_eq!(err.to_string(), expected);
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn usage_limit_reached_with_promo_message() {
|
||||
let base = Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap();
|
||||
let resets_at = base + ChronoDuration::seconds(30);
|
||||
with_now_override(base, move || {
|
||||
let expected_time = format_retry_timestamp(&resets_at);
|
||||
let err = UsageLimitReachedError {
|
||||
plan_type: None,
|
||||
resets_at: Some(resets_at),
|
||||
rate_limits: Some(rate_limit_snapshot()),
|
||||
promo_message: Some(
|
||||
"To continue using Codex, start a free trial of <PLAN> today".to_string(),
|
||||
),
|
||||
};
|
||||
let expected = format!(
|
||||
"You've hit your usage limit. To continue using Codex, start a free trial of <PLAN> today, or try again at {expected_time}."
|
||||
);
|
||||
assert_eq!(err.to_string(), expected);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -426,6 +426,7 @@ mod tests {
|
||||
status: Some("completed".to_string()),
|
||||
action: Some(WebSearchAction::Search {
|
||||
query: Some("weather".to_string()),
|
||||
queries: None,
|
||||
}),
|
||||
};
|
||||
|
||||
@@ -439,6 +440,7 @@ mod tests {
|
||||
query: "weather".to_string(),
|
||||
action: WebSearchAction::Search {
|
||||
query: Some("weather".to_string()),
|
||||
queries: None,
|
||||
},
|
||||
}
|
||||
),
|
||||
|
||||
@@ -101,6 +101,8 @@ pub enum Feature {
|
||||
RemoteModels,
|
||||
/// Experimental shell snapshotting.
|
||||
ShellSnapshot,
|
||||
/// Enable runtime metrics snapshots via a manual reader.
|
||||
RuntimeMetrics,
|
||||
/// Persist rollout metadata to a local SQLite database.
|
||||
Sqlite,
|
||||
/// Append additional AGENTS.md guidance to user instructions.
|
||||
@@ -437,6 +439,12 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
},
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::RuntimeMetrics,
|
||||
key: "runtime_metrics",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Sqlite,
|
||||
key: "sqlite",
|
||||
@@ -464,12 +472,8 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
FeatureSpec {
|
||||
id: Feature::RequestRule,
|
||||
key: "request_rule",
|
||||
-        stage: Stage::Experimental {
-            name: "Smart approvals",
-            menu_description: "Get smarter \"Don't ask again\" rule requests.",
-            announcement: "NEW: Try Smart approvals to get smarter \"Don't ask again\" requests. Enable in /experimental!",
-        },
-        default_enabled: false,
+        stage: Stage::Stable,
+        default_enabled: true,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::WindowsSandbox,
|
||||
@@ -499,11 +503,7 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
id: Feature::PowershellUtf8,
|
||||
key: "powershell_utf8",
|
||||
-        #[cfg(windows)]
-        stage: Stage::Experimental {
-            name: "Powershell UTF-8 support",
-            menu_description: "Enable UTF-8 output in Powershell.",
-            announcement: "Codex now supports UTF-8 output in Powershell. If you are seeing problems, disable in /experimental.",
-        },
+        stage: Stage::Stable,
|
||||
#[cfg(windows)]
|
||||
default_enabled: true,
|
||||
#[cfg(not(windows))]
|
||||
@@ -558,18 +558,14 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
FeatureSpec {
|
||||
id: Feature::CollaborationModes,
|
||||
key: "collaboration_modes",
|
||||
-        stage: Stage::UnderDevelopment,
-        default_enabled: false,
+        stage: Stage::Stable,
+        default_enabled: true,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Personality,
|
||||
key: "personality",
|
||||
-        stage: Stage::Experimental {
-            name: "Personality",
-            menu_description: "Choose a communication style for Codex.",
-            announcement: "NEW: Pick a personality for Codex. Enable in /experimental!",
-        },
-        default_enabled: false,
+        stage: Stage::Stable,
+        default_enabled: true,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::ResponsesWebsockets,
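Aside: a minimal sketch of the stage model these hunks are editing; graduating a feature to stable is exactly the swap shown above (types reduced for illustration, metadata fields trimmed):

```rust
// Reduced stage model: a stable feature needs no experimental-menu metadata.
#[allow(dead_code)]
enum Stage {
    UnderDevelopment,
    Experimental { name: &'static str },
    Stable,
}

struct FeatureSpec {
    key: &'static str,
    stage: Stage,
    default_enabled: bool,
}

// Graduation = swap the stage and flip the default, as in the diffs above.
const PERSONALITY: FeatureSpec = FeatureSpec {
    key: "personality",
    stage: Stage::Stable,
    default_enabled: true,
};
```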
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
@@ -109,6 +110,92 @@ pub async fn collect_git_info(cwd: &Path) -> Option<GitInfo> {
|
||||
Some(git_info)
|
||||
}
|
||||
|
||||
/// Collect fetch remotes in a multi-root-friendly format: {"origin": "https://..."}.
|
||||
pub async fn get_git_remote_urls(cwd: &Path) -> Option<BTreeMap<String, String>> {
|
||||
let is_git_repo = run_git_command_with_timeout(&["rev-parse", "--git-dir"], cwd)
|
||||
.await?
|
||||
.status
|
||||
.success();
|
||||
if !is_git_repo {
|
||||
return None;
|
||||
}
|
||||
|
||||
get_git_remote_urls_assume_git_repo(cwd).await
|
||||
}
|
||||
|
||||
/// Collect fetch remotes without checking whether `cwd` is in a git repo.
|
||||
pub async fn get_git_remote_urls_assume_git_repo(cwd: &Path) -> Option<BTreeMap<String, String>> {
|
||||
get_git_remote_urls_assume_git_repo_with_timeout(cwd, GIT_COMMAND_TIMEOUT).await
|
||||
}
|
||||
|
||||
/// Collect fetch remotes without checking whether `cwd` is in a git repo,
|
||||
/// using the provided timeout.
|
||||
pub async fn get_git_remote_urls_assume_git_repo_with_timeout(
|
||||
cwd: &Path,
|
||||
timeout_dur: TokioDuration,
|
||||
) -> Option<BTreeMap<String, String>> {
|
||||
let output = run_git_command_with_timeout_override(&["remote", "-v"], cwd, timeout_dur).await?;
|
||||
if !output.status.success() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout).ok()?;
|
||||
parse_git_remote_urls(stdout.as_str())
|
||||
}
|
||||
|
||||
/// Return the current HEAD commit hash without checking whether `cwd` is in a git repo.
|
||||
pub async fn get_head_commit_hash(cwd: &Path) -> Option<String> {
|
||||
get_head_commit_hash_with_timeout(cwd, GIT_COMMAND_TIMEOUT).await
|
||||
}
|
||||
|
||||
/// Return the current HEAD commit hash without checking whether `cwd` is in a
|
||||
/// git repo, using the provided timeout.
|
||||
pub async fn get_head_commit_hash_with_timeout(
|
||||
cwd: &Path,
|
||||
timeout_dur: TokioDuration,
|
||||
) -> Option<String> {
|
||||
let output =
|
||||
run_git_command_with_timeout_override(&["rev-parse", "HEAD"], cwd, timeout_dur).await?;
|
||||
if !output.status.success() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout).ok()?;
|
||||
let hash = stdout.trim();
|
||||
if hash.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(hash.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_git_remote_urls(stdout: &str) -> Option<BTreeMap<String, String>> {
|
||||
let mut remotes = BTreeMap::new();
|
||||
for line in stdout.lines() {
|
||||
let Some(fetch_line) = line.strip_suffix(" (fetch)") else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let Some((name, url_part)) = fetch_line
|
||||
.split_once('\t')
|
||||
.or_else(|| fetch_line.split_once(' '))
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let url = url_part.trim_start();
|
||||
if !url.is_empty() {
|
||||
remotes.insert(name.to_string(), url.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if remotes.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(remotes)
|
||||
}
|
||||
}
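Aside: a worked example of the parsing rules above, assuming the function is in scope: only `(fetch)` lines count, the first tab (or space) splits the remote name from its URL, and push URLs are ignored:

```rust
fn main() {
    // Typical `git remote -v` output: tab-separated, one fetch and one push
    // line per remote; the third remote uses a space separator instead.
    let stdout = "origin\thttps://example.com/repo.git (fetch)\n\
                  origin\thttps://example.com/repo.git (push)\n\
                  upstream https://example.com/up.git (fetch)\n";
    let remotes = parse_git_remote_urls(stdout).expect("two fetch remotes");
    assert_eq!(remotes["origin"], "https://example.com/repo.git");
    assert_eq!(remotes["upstream"], "https://example.com/up.git");
}
```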
|
||||
|
||||
/// A minimal commit summary entry used for pickers (subject + timestamp + sha).
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct CommitLogEntry {
|
||||
@@ -185,11 +272,19 @@ pub async fn git_diff_to_remote(cwd: &Path) -> Option<GitDiffToRemote> {
|
||||
|
||||
/// Run a git command with a timeout to prevent blocking on large repositories
|
||||
async fn run_git_command_with_timeout(args: &[&str], cwd: &Path) -> Option<std::process::Output> {
|
||||
-    let result = timeout(
-        GIT_COMMAND_TIMEOUT,
-        Command::new("git").args(args).current_dir(cwd).output(),
-    )
-    .await;
+    run_git_command_with_timeout_override(args, cwd, GIT_COMMAND_TIMEOUT).await
|
||||
}
|
||||
|
||||
/// Run a git command with a caller-provided timeout.
|
||||
async fn run_git_command_with_timeout_override(
|
||||
args: &[&str],
|
||||
cwd: &Path,
|
||||
timeout_dur: TokioDuration,
|
||||
) -> Option<std::process::Output> {
|
||||
let mut command = Command::new("git");
|
||||
command.args(args).current_dir(cwd);
|
||||
command.kill_on_drop(true);
|
||||
let result = timeout(timeout_dur, command.output()).await;
|
||||
|
||||
match result {
|
||||
Ok(Ok(output)) => Some(output),
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
// the TUI or the tracing stack).
|
||||
#![deny(clippy::print_stdout, clippy::print_stderr)]
|
||||
|
||||
mod analytics_client;
|
||||
pub mod api_bridge;
|
||||
mod apply_patch;
|
||||
pub mod auth;
|
||||
@@ -48,6 +49,7 @@ mod message_history;
|
||||
mod model_provider_info;
|
||||
pub mod parse_command;
|
||||
pub mod path_utils;
|
||||
pub mod personality_migration;
|
||||
pub mod powershell;
|
||||
mod proposed_plan_parser;
|
||||
pub mod sandboxing;
|
||||
@@ -99,6 +101,7 @@ pub mod state_db;
|
||||
pub mod terminal;
|
||||
mod tools;
|
||||
pub mod turn_diff_tracker;
|
||||
mod turn_metadata;
|
||||
pub use rollout::ARCHIVED_SESSIONS_SUBDIR;
|
||||
pub use rollout::INTERACTIVE_SESSION_SOURCES;
|
||||
pub use rollout::RolloutRecorder;
|
||||
@@ -128,6 +131,7 @@ pub mod util;
|
||||
|
||||
pub use apply_patch::CODEX_APPLY_PATCH_ARG1;
|
||||
pub use client::WEB_SEARCH_ELIGIBLE_HEADER;
|
||||
pub use client::X_CODEX_TURN_METADATA_HEADER;
|
||||
pub use command_safety::is_dangerous_command;
|
||||
pub use command_safety::is_safe_command;
|
||||
pub use exec_policy::ExecPolicyError;
|
||||
|
||||
@@ -9,6 +9,7 @@ use crate::auth::AuthMode;
|
||||
use crate::error::EnvVarError;
|
||||
use codex_api::Provider as ApiProvider;
|
||||
use codex_api::WireApi as ApiWireApi;
|
||||
use codex_api::is_azure_responses_wire_base_url;
|
||||
use codex_api::provider::RetryConfig as ApiRetryConfig;
|
||||
use http::HeaderMap;
|
||||
use http::header::HeaderName;
|
||||
@@ -170,6 +171,15 @@ impl ModelProviderInfo {
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn is_azure_responses_endpoint(&self) -> bool {
|
||||
let wire = match self.wire_api {
|
||||
WireApi::Responses => ApiWireApi::Responses,
|
||||
WireApi::Chat => ApiWireApi::Chat,
|
||||
};
|
||||
|
||||
is_azure_responses_wire_base_url(wire, &self.name, self.base_url.as_deref())
|
||||
}
|
||||
|
||||
/// If `env_key` is Some, returns the API key for this provider if present
|
||||
/// (and non-empty) in the environment. If `env_key` is required but
|
||||
/// cannot be found, returns an error.
|
||||
@@ -432,87 +442,4 @@ env_http_headers = { "X-Example-Env-Header" = "EXAMPLE_ENV_VAR" }
|
||||
let provider: ModelProviderInfo = toml::from_str(azure_provider_toml).unwrap();
|
||||
assert_eq!(expected_provider, provider);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn detects_azure_responses_base_urls() {
|
||||
let positive_cases = [
|
||||
"https://foo.openai.azure.com/openai",
|
||||
"https://foo.openai.azure.us/openai/deployments/bar",
|
||||
"https://foo.cognitiveservices.azure.cn/openai",
|
||||
"https://foo.aoai.azure.com/openai",
|
||||
"https://foo.openai.azure-api.net/openai",
|
||||
"https://foo.z01.azurefd.net/",
|
||||
];
|
||||
for base_url in positive_cases {
|
||||
let provider = ModelProviderInfo {
|
||||
name: "test".into(),
|
||||
base_url: Some(base_url.into()),
|
||||
env_key: None,
|
||||
env_key_instructions: None,
|
||||
experimental_bearer_token: None,
|
||||
wire_api: WireApi::Responses,
|
||||
query_params: None,
|
||||
http_headers: None,
|
||||
env_http_headers: None,
|
||||
request_max_retries: None,
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
let api = provider.to_api_provider(None).expect("api provider");
|
||||
assert!(
|
||||
api.is_azure_responses_endpoint(),
|
||||
"expected {base_url} to be detected as Azure"
|
||||
);
|
||||
}
|
||||
|
||||
let named_provider = ModelProviderInfo {
|
||||
name: "Azure".into(),
|
||||
base_url: Some("https://example.com".into()),
|
||||
env_key: None,
|
||||
env_key_instructions: None,
|
||||
experimental_bearer_token: None,
|
||||
wire_api: WireApi::Responses,
|
||||
query_params: None,
|
||||
http_headers: None,
|
||||
env_http_headers: None,
|
||||
request_max_retries: None,
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
let named_api = named_provider.to_api_provider(None).expect("api provider");
|
||||
assert!(named_api.is_azure_responses_endpoint());
|
||||
|
||||
let negative_cases = [
|
||||
"https://api.openai.com/v1",
|
||||
"https://example.com/openai",
|
||||
"https://myproxy.azurewebsites.net/openai",
|
||||
];
|
||||
for base_url in negative_cases {
|
||||
let provider = ModelProviderInfo {
|
||||
name: "test".into(),
|
||||
base_url: Some(base_url.into()),
|
||||
env_key: None,
|
||||
env_key_instructions: None,
|
||||
experimental_bearer_token: None,
|
||||
wire_api: WireApi::Responses,
|
||||
query_params: None,
|
||||
http_headers: None,
|
||||
env_http_headers: None,
|
||||
request_max_retries: None,
|
||||
stream_max_retries: None,
|
||||
stream_idle_timeout_ms: None,
|
||||
requires_openai_auth: false,
|
||||
supports_websockets: false,
|
||||
};
|
||||
let api = provider.to_api_provider(None).expect("api provider");
|
||||
assert!(
|
||||
!api.is_azure_responses_endpoint(),
|
||||
"expected {base_url} not to be detected as Azure"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ use crate::config::Config;
|
||||
use crate::config::types::OtelExporterKind as Kind;
|
||||
use crate::config::types::OtelHttpProtocol as Protocol;
|
||||
use crate::default_client::originator;
|
||||
use crate::features::Feature;
|
||||
use codex_otel::config::OtelExporter;
|
||||
use codex_otel::config::OtelHttpProtocol;
|
||||
use codex_otel::config::OtelSettings;
|
||||
@@ -77,6 +78,7 @@ pub fn build_provider(
|
||||
|
||||
let originator = originator();
|
||||
let service_name = service_name_override.unwrap_or(originator.value.as_str());
|
||||
let runtime_metrics = config.features.enabled(Feature::RuntimeMetrics);
|
||||
|
||||
OtelProvider::from(&OtelSettings {
|
||||
service_name: service_name.to_string(),
|
||||
@@ -86,6 +88,7 @@ pub fn build_provider(
|
||||
exporter,
|
||||
trace_exporter,
|
||||
metrics_exporter,
|
||||
runtime_metrics,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
codex-rs/core/src/personality_migration.rs (new file, 265 lines)
@@ -0,0 +1,265 @@
|
||||
use crate::config::ConfigToml;
|
||||
use crate::config::edit::ConfigEditsBuilder;
|
||||
use crate::rollout::ARCHIVED_SESSIONS_SUBDIR;
|
||||
use crate::rollout::SESSIONS_SUBDIR;
|
||||
use crate::rollout::list::ThreadListConfig;
|
||||
use crate::rollout::list::ThreadListLayout;
|
||||
use crate::rollout::list::ThreadSortKey;
|
||||
use crate::rollout::list::get_threads_in_root;
|
||||
use crate::state_db;
|
||||
use codex_protocol::config_types::Personality;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use std::io;
|
||||
use std::path::Path;
|
||||
use tokio::fs::OpenOptions;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
pub const PERSONALITY_MIGRATION_FILENAME: &str = ".personality_migration";
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum PersonalityMigrationStatus {
|
||||
SkippedMarker,
|
||||
SkippedExplicitPersonality,
|
||||
SkippedNoSessions,
|
||||
Applied,
|
||||
}
|
||||
|
||||
pub async fn maybe_migrate_personality(
|
||||
codex_home: &Path,
|
||||
config_toml: &ConfigToml,
|
||||
) -> io::Result<PersonalityMigrationStatus> {
|
||||
let marker_path = codex_home.join(PERSONALITY_MIGRATION_FILENAME);
|
||||
if tokio::fs::try_exists(&marker_path).await? {
|
||||
return Ok(PersonalityMigrationStatus::SkippedMarker);
|
||||
}
|
||||
|
||||
let config_profile = config_toml
|
||||
.get_config_profile(None)
|
||||
.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
|
||||
if config_toml.personality.is_some() || config_profile.personality.is_some() {
|
||||
create_marker(&marker_path).await?;
|
||||
return Ok(PersonalityMigrationStatus::SkippedExplicitPersonality);
|
||||
}
|
||||
|
||||
let model_provider_id = config_profile
|
||||
.model_provider
|
||||
.or_else(|| config_toml.model_provider.clone())
|
||||
.unwrap_or_else(|| "openai".to_string());
|
||||
|
||||
if !has_recorded_sessions(codex_home, model_provider_id.as_str()).await? {
|
||||
create_marker(&marker_path).await?;
|
||||
return Ok(PersonalityMigrationStatus::SkippedNoSessions);
|
||||
}
|
||||
|
||||
ConfigEditsBuilder::new(codex_home)
|
||||
.set_personality(Some(Personality::Pragmatic))
|
||||
.apply()
|
||||
.await
|
||||
.map_err(|err| {
|
||||
io::Error::other(format!("failed to persist personality migration: {err}"))
|
||||
})?;
|
||||
|
||||
create_marker(&marker_path).await?;
|
||||
Ok(PersonalityMigrationStatus::Applied)
|
||||
}
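Aside: a hedged sketch of how a caller might drive this migration once at startup (paths and error handling simplified; `ConfigToml` parsing as in the tests below):

```rust
async fn run_migration(codex_home: &std::path::Path) -> std::io::Result<()> {
    // A missing config.toml is treated as an empty config.
    let contents = tokio::fs::read_to_string(codex_home.join("config.toml"))
        .await
        .unwrap_or_default();
    let config_toml: ConfigToml = toml::from_str(&contents)
        .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?;
    let status = maybe_migrate_personality(codex_home, &config_toml).await?;
    tracing::debug!(?status, "personality migration");
    Ok(())
}
```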
|
||||
|
||||
async fn has_recorded_sessions(codex_home: &Path, default_provider: &str) -> io::Result<bool> {
|
||||
let allowed_sources: &[SessionSource] = &[];
|
||||
|
||||
if let Some(state_db_ctx) = state_db::open_if_present(codex_home, default_provider).await
|
||||
&& let Some(ids) = state_db::list_thread_ids_db(
|
||||
Some(state_db_ctx.as_ref()),
|
||||
codex_home,
|
||||
1,
|
||||
None,
|
||||
ThreadSortKey::CreatedAt,
|
||||
allowed_sources,
|
||||
None,
|
||||
false,
|
||||
"personality_migration",
|
||||
)
|
||||
.await
|
||||
&& !ids.is_empty()
|
||||
{
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let sessions = get_threads_in_root(
|
||||
codex_home.join(SESSIONS_SUBDIR),
|
||||
1,
|
||||
None,
|
||||
ThreadSortKey::CreatedAt,
|
||||
ThreadListConfig {
|
||||
allowed_sources,
|
||||
model_providers: None,
|
||||
default_provider,
|
||||
layout: ThreadListLayout::NestedByDate,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
if !sessions.items.is_empty() {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let archived_sessions = get_threads_in_root(
|
||||
codex_home.join(ARCHIVED_SESSIONS_SUBDIR),
|
||||
1,
|
||||
None,
|
||||
ThreadSortKey::CreatedAt,
|
||||
ThreadListConfig {
|
||||
allowed_sources,
|
||||
model_providers: None,
|
||||
default_provider,
|
||||
layout: ThreadListLayout::Flat,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
Ok(!archived_sessions.items.is_empty())
|
||||
}
|
||||
|
||||
async fn create_marker(marker_path: &Path) -> io::Result<()> {
|
||||
match OpenOptions::new()
|
||||
.create_new(true)
|
||||
.write(true)
|
||||
.open(marker_path)
|
||||
.await
|
||||
{
|
||||
Ok(mut file) => file.write_all(b"v1\n").await,
|
||||
Err(err) if err.kind() == io::ErrorKind::AlreadyExists => Ok(()),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
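Aside: one test that could be added to pin down the idempotency of the marker: `create_new(true)` makes the first call atomic, and `AlreadyExists` turns repeats into no-ops:

```rust
#[tokio::test]
async fn marker_creation_is_idempotent() -> std::io::Result<()> {
    let dir = tempfile::tempdir()?;
    let marker = dir.path().join(".personality_migration");
    create_marker(&marker).await?;
    create_marker(&marker).await?; // second call is a no-op, not an error
    assert!(marker.exists());
    Ok(())
}
```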
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
use codex_protocol::protocol::RolloutLine;
|
||||
use codex_protocol::protocol::SessionMeta;
|
||||
use codex_protocol::protocol::SessionMetaLine;
|
||||
use codex_protocol::protocol::SessionSource;
|
||||
use codex_protocol::protocol::UserMessageEvent;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
    const TEST_TIMESTAMP: &str = "2025-01-01T00-00-00";

    async fn read_config_toml(codex_home: &Path) -> io::Result<ConfigToml> {
        let contents = tokio::fs::read_to_string(codex_home.join("config.toml")).await?;
        toml::from_str(&contents).map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))
    }

    async fn write_session_with_user_event(codex_home: &Path) -> io::Result<()> {
        let thread_id = ThreadId::new();
        let dir = codex_home
            .join(SESSIONS_SUBDIR)
            .join("2025")
            .join("01")
            .join("01");
        tokio::fs::create_dir_all(&dir).await?;
        let file_path = dir.join(format!("rollout-{TEST_TIMESTAMP}-{thread_id}.jsonl"));
        let mut file = tokio::fs::File::create(&file_path).await?;

        let session_meta = SessionMetaLine {
            meta: SessionMeta {
                id: thread_id,
                forked_from_id: None,
                timestamp: TEST_TIMESTAMP.to_string(),
                cwd: std::path::PathBuf::from("."),
                originator: "test_originator".to_string(),
                cli_version: "test_version".to_string(),
                source: SessionSource::Cli,
                model_provider: None,
                base_instructions: None,
                dynamic_tools: None,
            },
            git: None,
        };
        let meta_line = RolloutLine {
            timestamp: TEST_TIMESTAMP.to_string(),
            item: RolloutItem::SessionMeta(session_meta),
        };
        let user_event = RolloutLine {
            timestamp: TEST_TIMESTAMP.to_string(),
            item: RolloutItem::EventMsg(EventMsg::UserMessage(UserMessageEvent {
                message: "hello".to_string(),
                images: None,
                local_images: Vec::new(),
                text_elements: Vec::new(),
            })),
        };

        file.write_all(format!("{}\n", serde_json::to_string(&meta_line)?).as_bytes())
            .await?;
        file.write_all(format!("{}\n", serde_json::to_string(&user_event)?).as_bytes())
            .await?;
        Ok(())
    }

    #[tokio::test]
    async fn applies_when_sessions_exist_and_no_personality() -> io::Result<()> {
        let temp = TempDir::new()?;
        write_session_with_user_event(temp.path()).await?;

        let config_toml = ConfigToml::default();
        let status = maybe_migrate_personality(temp.path(), &config_toml).await?;

        assert_eq!(status, PersonalityMigrationStatus::Applied);
        assert!(temp.path().join(PERSONALITY_MIGRATION_FILENAME).exists());

        let persisted = read_config_toml(temp.path()).await?;
        assert_eq!(persisted.personality, Some(Personality::Pragmatic));
        Ok(())
    }

    #[tokio::test]
    async fn skips_when_marker_exists() -> io::Result<()> {
        let temp = TempDir::new()?;
        create_marker(&temp.path().join(PERSONALITY_MIGRATION_FILENAME)).await?;

        let config_toml = ConfigToml::default();
        let status = maybe_migrate_personality(temp.path(), &config_toml).await?;

        assert_eq!(status, PersonalityMigrationStatus::SkippedMarker);
        assert!(!temp.path().join("config.toml").exists());
        Ok(())
    }

    #[tokio::test]
    async fn skips_when_personality_explicit() -> io::Result<()> {
        let temp = TempDir::new()?;
        ConfigEditsBuilder::new(temp.path())
            .set_personality(Some(Personality::Friendly))
            .apply()
            .await
            .map_err(|err| io::Error::other(format!("failed to write config: {err}")))?;

        let config_toml = read_config_toml(temp.path()).await?;
        let status = maybe_migrate_personality(temp.path(), &config_toml).await?;

        assert_eq!(
            status,
            PersonalityMigrationStatus::SkippedExplicitPersonality
        );
        assert!(temp.path().join(PERSONALITY_MIGRATION_FILENAME).exists());

        let persisted = read_config_toml(temp.path()).await?;
        assert_eq!(persisted.personality, Some(Personality::Friendly));
        Ok(())
    }

    #[tokio::test]
    async fn skips_when_no_sessions() -> io::Result<()> {
        let temp = TempDir::new()?;
        let config_toml = ConfigToml::default();
        let status = maybe_migrate_personality(temp.path(), &config_toml).await?;

        assert_eq!(status, PersonalityMigrationStatus::SkippedNoSessions);
        assert!(temp.path().join(PERSONALITY_MIGRATION_FILENAME).exists());
        assert!(!temp.path().join("config.toml").exists());
        Ok(())
    }
}

@@ -516,7 +516,7 @@ mod tests {
        )
        .unwrap_or_else(|_| cfg.codex_home.join("skills/pdf-processing/SKILL.md"));
        let expected_path_str = expected_path.to_string_lossy().replace('\\', "/");
let usage_rules = "- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.\n- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.\n- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.\n- How to use a skill (progressive disclosure):\n 1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.\n 2) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.\n 3) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.\n 4) If `assets/` or templates exist, reuse them instead of recreating from scratch.\n- Coordination and sequencing:\n - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.\n - Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.\n- Context hygiene:\n - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.\n - Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.\n - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.\n- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue.";
let usage_rules = "- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.\n- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.\n- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.\n- How to use a skill (progressive disclosure):\n 1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.\n 2) When `SKILL.md` references relative paths (e.g., `scripts/foo.py`), resolve them relative to the skill directory listed above first, and only consider other paths if needed.\n 3) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.\n 4) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.\n 5) If `assets/` or templates exist, reuse them instead of recreating from scratch.\n- Coordination and sequencing:\n - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.\n - Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.\n- Context hygiene:\n - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.\n - Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.\n - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.\n- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue.";
        let expected = format!(
            "base doc\n\n## Skills\nA skill is a set of local instructions to follow that is stored in a `SKILL.md` file. Below is the list of skills that can be used. Each entry includes a name, description, and file path so you can open the source for full instructions when using a specific skill.\n### Available skills\n- pdf-processing: extract from pdfs (file: {expected_path_str})\n### How to use skills\n{usage_rules}"
        );

@@ -540,7 +540,7 @@ mod tests {
        dunce::canonicalize(cfg.codex_home.join("skills/linting/SKILL.md").as_path())
            .unwrap_or_else(|_| cfg.codex_home.join("skills/linting/SKILL.md"));
        let expected_path_str = expected_path.to_string_lossy().replace('\\', "/");
let usage_rules = "- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.\n- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.\n- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.\n- How to use a skill (progressive disclosure):\n 1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.\n 2) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.\n 3) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.\n 4) If `assets/` or templates exist, reuse them instead of recreating from scratch.\n- Coordination and sequencing:\n - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.\n - Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.\n- Context hygiene:\n - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.\n - Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.\n - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.\n- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue.";
let usage_rules = "- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.\n- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.\n- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.\n- How to use a skill (progressive disclosure):\n 1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.\n 2) When `SKILL.md` references relative paths (e.g., `scripts/foo.py`), resolve them relative to the skill directory listed above first, and only consider other paths if needed.\n 3) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.\n 4) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.\n 5) If `assets/` or templates exist, reuse them instead of recreating from scratch.\n- Coordination and sequencing:\n - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.\n - Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.\n- Context hygiene:\n - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.\n - Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.\n - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.\n- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue.";
        let expected = format!(
            "## Skills\nA skill is a set of local instructions to follow that is stored in a `SKILL.md` file. Below is the list of skills that can be used. Each entry includes a name, description, and file path so you can open the source for full instructions when using a specific skill.\n### Available skills\n- linting: run clippy (file: {expected_path_str})\n### How to use skills\n{usage_rules}"
        );

@@ -6,8 +6,6 @@ use std::num::NonZero;
use std::ops::ControlFlow;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use time::OffsetDateTime;
use time::PrimitiveDateTime;
use time::format_description::FormatItem;

@@ -1075,30 +1073,17 @@ async fn find_thread_path_by_id_str_in_subdir(
    // This is safe because we know the values are valid.
    #[allow(clippy::unwrap_used)]
    let limit = NonZero::new(1).unwrap();
    // This is safe because we know the values are valid.
    #[allow(clippy::unwrap_used)]
    let threads = NonZero::new(2).unwrap();
    let cancel = Arc::new(AtomicBool::new(false));
    let exclude: Vec<String> = Vec::new();
    let compute_indices = false;

    let results = file_search::run(
        id_str,
        limit,
        &root,
        exclude,
        threads,
        cancel,
        compute_indices,
        false,
    )
    .map_err(|e| io::Error::other(format!("file search failed: {e}")))?;

    let found = results
        .matches
        .into_iter()
        .next()
        .map(|m| root.join(m.path));

    let options = file_search::FileSearchOptions {
        limit,
        exclude,
        threads,
        cancel,
        compute_indices: false,
        respect_gitignore: false,
        ..Default::default()
    };

    let results = file_search::run(id_str, vec![root], options, None)
        .map_err(|e| io::Error::other(format!("file search failed: {e}")))?;

    let found = results.matches.into_iter().next().map(|m| m.full_path());

    // Checking if DB is at parity.
    // TODO(jif): sqlite migration phase 1

@@ -1,6 +1,7 @@
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::process::Stdio;
use std::sync::Arc;
use std::time::Duration;
use std::time::SystemTime;

@@ -187,6 +188,7 @@ async fn run_script_with_timeout(
    // returns a ref of handler.
    let mut handler = Command::new(&args[0]);
    handler.args(&args[1..]);
    handler.stdin(Stdio::null());
    #[cfg(unix)]
    unsafe {
        handler.pre_exec(|| {

@@ -477,6 +479,62 @@ mod tests {

    use tempfile::tempdir;

    #[cfg(unix)]
    struct BlockingStdinPipe {
        original: i32,
        write_end: i32,
    }

    #[cfg(unix)]
    impl BlockingStdinPipe {
        fn install() -> Result<Self> {
            let mut fds = [0i32; 2];
            if unsafe { libc::pipe(fds.as_mut_ptr()) } == -1 {
                return Err(std::io::Error::last_os_error()).context("create stdin pipe");
            }

            let original = unsafe { libc::dup(libc::STDIN_FILENO) };
            if original == -1 {
                let err = std::io::Error::last_os_error();
                unsafe {
                    libc::close(fds[0]);
                    libc::close(fds[1]);
                }
                return Err(err).context("dup stdin");
            }

            if unsafe { libc::dup2(fds[0], libc::STDIN_FILENO) } == -1 {
                let err = std::io::Error::last_os_error();
                unsafe {
                    libc::close(fds[0]);
                    libc::close(fds[1]);
                    libc::close(original);
                }
                return Err(err).context("replace stdin");
            }

            unsafe {
                libc::close(fds[0]);
            }

            Ok(Self {
                original,
                write_end: fds[1],
            })
        }
    }

    #[cfg(unix)]
    impl Drop for BlockingStdinPipe {
        fn drop(&mut self) {
            unsafe {
                libc::dup2(self.original, libc::STDIN_FILENO);
                libc::close(self.original);
                libc::close(self.write_end);
            }
        }
    }

    #[cfg(not(target_os = "windows"))]
    fn assert_posix_snapshot_sections(snapshot: &str) {
        assert!(snapshot.contains("# Snapshot file"));
@@ -557,6 +615,38 @@ mod tests {
        Ok(())
    }

    #[cfg(unix)]
    #[tokio::test]
    async fn snapshot_shell_does_not_inherit_stdin() -> Result<()> {
        let _stdin_guard = BlockingStdinPipe::install()?;

        let dir = tempdir()?;
        let home = dir.path();
        fs::write(home.join(".bashrc"), "read -r ignored\n").await?;

        let shell = Shell {
            shell_type: ShellType::Bash,
            shell_path: PathBuf::from("/bin/bash"),
            shell_snapshot: crate::shell::empty_shell_snapshot_receiver(),
        };

        let home_display = home.display();
        let script = format!(
            "HOME=\"{home_display}\"; export HOME; {}",
            bash_snapshot_script()
        );
        let output = run_script_with_timeout(&shell, &script, Duration::from_millis(500), true)
            .await
            .context("run snapshot command")?;

        assert!(
            output.contains("# Snapshot file"),
            "expected snapshot marker in output; output={output:?}"
        );

        Ok(())
    }

    #[cfg(target_os = "linux")]
    #[tokio::test]
    async fn timed_out_snapshot_shell_is_terminated() -> Result<()> {

@@ -11,7 +11,7 @@ This skill provides guidance for creating effective skills.

## About Skills

Skills are modular, self-contained packages that extend Codex's capabilities by providing
Skills are modular, self-contained folders that extend Codex's capabilities by providing
specialized knowledge, workflows, and tools. Think of them as "onboarding guides" for specific
domains or tasks—they transform Codex from a general-purpose agent into a specialized agent
equipped with procedural knowledge that no model can fully possess.

@@ -56,6 +56,8 @@ skill-name/
│   │   ├── name: (required)
│   │   └── description: (required)
│   └── Markdown instructions (required)
├── agents/ (recommended)
│   └── openai.yaml - UI metadata for skill lists and chips
└── Bundled Resources (optional)
    ├── scripts/ - Executable code (Python/Bash/etc.)
    ├── references/ - Documentation intended to be loaded into context as needed

@@ -69,6 +71,16 @@ Every SKILL.md consists of:

- **Frontmatter** (YAML): Contains `name` and `description` fields. These are the only fields that Codex reads to determine when the skill gets used, so it is very important to be clear and comprehensive in describing what the skill is and when it should be used.
- **Body** (Markdown): Instructions and guidance for using the skill. Only loaded AFTER the skill triggers (if at all).
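
To make that split concrete, here is a minimal sketch of the discovery-time parse, assuming PyYAML; the helper name is illustrative, but the frontmatter regex matches the one used by this skill's own scripts:

```python
import re

import yaml  # PyYAML


def read_frontmatter(skill_md_text: str) -> dict:
    # The frontmatter is the YAML block between the two leading "---" fences.
    match = re.match(r"^---\n(.*?)\n---", skill_md_text, re.DOTALL)
    if not match:
        raise ValueError("SKILL.md is missing YAML frontmatter")
    meta = yaml.safe_load(match.group(1))
    # Only `name` and `description` influence whether the skill triggers.
    return {"name": meta.get("name"), "description": meta.get("description")}
```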

#### Agents metadata (recommended)

- UI-facing metadata for skill lists and chips
- Read references/openai_yaml.md before generating values and follow its descriptions and constraints
- Create: human-facing `display_name`, `short_description`, and `default_prompt` by reading the skill
- Generate deterministically by passing the values as `--interface key=value` to `scripts/generate_openai_yaml.py` or `scripts/init_skill.py`
- On updates: validate `agents/openai.yaml` still matches SKILL.md; regenerate if stale
- Only include other optional interface fields (icons, brand color) if explicitly provided
- See references/openai_yaml.md for field definitions and examples

#### Bundled Resources (optional)

##### Scripts (`scripts/`)

@@ -208,7 +220,7 @@ Skill creation involves these steps:

2. Plan reusable skill contents (scripts, references, assets)
3. Initialize the skill (run init_skill.py)
4. Edit the skill (implement resources and write SKILL.md)
5. Package the skill (run package_skill.py)
5. Validate the skill (run quick_validate.py)
6. Iterate based on real usage

Follow these steps in order, skipping only if there is a clear reason why they are not applicable.

@@ -266,7 +278,7 @@ To establish the skill's contents, analyze each concrete example to create a lis

At this point, it is time to actually create the skill.

Skip this step only if the skill being developed already exists, and iteration or packaging is needed. In this case, continue to the next step.
Skip this step only if the skill being developed already exists. In this case, continue to the next step.

When creating a new skill from scratch, always run the `init_skill.py` script. The script conveniently generates a new template skill directory that automatically includes everything a skill requires, making the skill creation process much more efficient and reliable.
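
As a sketch, a scripted invocation could look like the following (the paths and values are illustrative; the flags follow the script's usage string):

```python
import subprocess

# Scaffold a new skill with resource dirs and UI metadata overrides.
subprocess.run(
    [
        "python", "scripts/init_skill.py", "my-new-skill",
        "--path", "skills/public",
        "--resources", "scripts,references",
        "--interface", "display_name=My New Skill",
        "--interface", "short_description=Draft release notes from commits",
    ],
    check=True,
)
```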

@@ -288,11 +300,20 @@ The script:

- Creates the skill directory at the specified path
- Generates a SKILL.md template with proper frontmatter and TODO placeholders
- Creates `agents/openai.yaml` using agent-generated `display_name`, `short_description`, and `default_prompt` passed via `--interface key=value`
- Optionally creates resource directories based on `--resources`
- Optionally adds example files when `--examples` is set

After initialization, customize the SKILL.md and add resources as needed. If you used `--examples`, replace or delete placeholder files.

Generate `display_name`, `short_description`, and `default_prompt` by reading the skill, then pass them as `--interface key=value` to `init_skill.py` or regenerate with:

```bash
scripts/generate_openai_yaml.py <path/to/skill-folder> --interface key=value
```

Only include other optional interface fields when the user explicitly provides them. For full field descriptions and examples, see references/openai_yaml.md.
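
The same generation is also available programmatically; init_skill.py imports it directly. A sketch, assuming the scripts directory is importable (the path and prompt text are illustrative):

```python
from generate_openai_yaml import write_openai_yaml

# Overrides use the same key=value strings as the CLI's --interface flag.
result = write_openai_yaml(
    "skills/public/my-new-skill",  # skill directory
    "my-new-skill",                # skill name from frontmatter
    ["default_prompt=Use $my-new-skill to draft release notes."],
)
if result is None:
    raise SystemExit("openai.yaml generation failed; see printed errors")
```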

### Step 4: Edit the Skill

When editing the (newly-generated or existing) skill, remember that the skill is being created for another instance of Codex to use. Include information that would be beneficial and non-obvious to Codex. Consider what procedural knowledge, domain-specific details, or reusable assets would help another Codex instance execute these tasks more effectively.

@@ -328,40 +349,21 @@ Write the YAML frontmatter with `name` and `description`:

- Include all "when to use" information here, not in the body. The body is only loaded after triggering, so "When to Use This Skill" sections in the body are not helpful to Codex.
- Example description for a `docx` skill: "Comprehensive document creation, editing, and analysis with support for tracked changes, comments, formatting preservation, and text extraction. Use when Codex needs to work with professional documents (.docx files) for: (1) Creating new documents, (2) Modifying or editing content, (3) Working with tracked changes, (4) Adding comments, or any other document tasks"

Ensure the frontmatter is valid YAML. Keep `name` and `description` as single-line scalars. If either could be interpreted as YAML syntax, wrap it in quotes.

Do not include any other fields in YAML frontmatter.
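
The quoting rule matters because YAML will reinterpret unquoted punctuation. A quick illustration with PyYAML:

```python
import yaml

# Quoted: a single-line string scalar, as required.
print(yaml.safe_load('description: "docx skill: edits documents"'))
# -> {'description': 'docx skill: edits documents'}

# Unquoted, the second colon is a YAML syntax error.
try:
    yaml.safe_load("description: docx skill: edits documents")
except yaml.YAMLError as exc:
    print(f"invalid frontmatter: {exc}")
```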

##### Body

Write instructions for using the skill and its bundled resources.

### Step 5: Packaging a Skill
### Step 5: Validate the Skill

Once development of the skill is complete, it must be packaged into a distributable .skill file that gets shared with the user. The packaging process automatically validates the skill first to ensure it meets all requirements:
Once development of the skill is complete, validate the skill folder to catch basic issues early:

```bash
scripts/package_skill.py <path/to/skill-folder>
scripts/quick_validate.py <path/to/skill-folder>
```

Optional output directory specification:

```bash
scripts/package_skill.py <path/to/skill-folder> ./dist
```

The packaging script will:

1. **Validate** the skill automatically, checking:

   - YAML frontmatter format and required fields
   - Skill naming conventions and directory structure
   - Description completeness and quality
   - File organization and resource references

2. **Package** the skill if validation passes, creating a .skill file named after the skill (e.g., `my-skill.skill`) that includes all files and maintains the proper directory structure for distribution. The .skill file is a zip file with a .skill extension.

If validation fails, the script will report the errors and exit without creating a package. Fix any validation errors and run the packaging command again.
The validation script checks YAML frontmatter format, required fields, and naming rules. If validation fails, fix the reported issues and run the command again.
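
A hedged sketch of the kind of checks involved; the real rules live in quick_validate.py, and the name pattern below is an assumption:

```python
import re
from pathlib import Path

import yaml


def quick_checks(skill_dir: str) -> list[str]:
    """Hypothetical mini-validator; scripts/quick_validate.py holds the real rules."""
    skill_md = Path(skill_dir) / "SKILL.md"
    if not skill_md.exists():
        return ["SKILL.md not found"]
    match = re.match(r"^---\n(.*?)\n---", skill_md.read_text(), re.DOTALL)
    if not match:
        return ["missing or malformed YAML frontmatter"]
    meta = yaml.safe_load(match.group(1))
    if not isinstance(meta, dict):
        return ["frontmatter must be a YAML dictionary"]
    errors = []
    if not meta.get("name"):
        errors.append("frontmatter 'name' is missing")
    elif not re.fullmatch(r"[a-z0-9][a-z0-9-]*", str(meta["name"])):
        errors.append("name is not lowercase-hyphenated (assumed rule)")
    if not meta.get("description"):
        errors.append("frontmatter 'description' is missing")
    return errors
```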

### Step 6: Iterate

@@ -0,0 +1,5 @@
interface:
  display_name: "Skill Creator"
  short_description: "Create or update a skill"
  icon_small: "./assets/skill-creator-small.svg"
  icon_large: "./assets/skill-creator.png"
@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" fill="currentColor" viewBox="0 0 20 20">
<path fill="#0D0D0D" d="M12.03 4.113a3.612 3.612 0 0 1 5.108 5.108l-6.292 6.29c-.324.324-.56.561-.791.752l-.235.176c-.205.14-.422.261-.65.36l-.229.093a4.136 4.136 0 0 1-.586.16l-.764.134-2.394.4c-.142.024-.294.05-.423.06-.098.007-.232.01-.378-.026l-.149-.05a1.081 1.081 0 0 1-.521-.474l-.046-.093a1.104 1.104 0 0 1-.075-.527c.01-.129.035-.28.06-.422l.398-2.394c.1-.602.162-.987.295-1.35l.093-.23c.1-.228.22-.445.36-.65l.176-.235c.19-.232.428-.467.751-.79l6.292-6.292Zm-5.35 7.232c-.35.35-.534.535-.66.688l-.11.147a2.67 2.67 0 0 0-.24.433l-.062.154c-.08.22-.124.462-.232 1.112l-.398 2.394-.001.001h.003l2.393-.399.717-.126a2.63 2.63 0 0 0 .394-.105l.154-.063a2.65 2.65 0 0 0 .433-.24l.147-.11c.153-.126.339-.31.688-.66l4.988-4.988-3.227-3.226-4.987 4.988Zm9.517-6.291a2.281 2.281 0 0 0-3.225 0l-.364.362 3.226 3.227.363-.364c.89-.89.89-2.334 0-3.225ZM4.583 1.783a.3.3 0 0 1 .294.241c.117.585.347 1.092.707 1.48.357.385.859.668 1.549.783a.3.3 0 0 1 0 .592c-.69.115-1.192.398-1.549.783-.315.34-.53.77-.657 1.265l-.05.215a.3.3 0 0 1-.588 0c-.117-.585-.347-1.092-.707-1.48-.357-.384-.859-.668-1.549-.783a.3.3 0 0 1 0-.592c.69-.115 1.192-.398 1.549-.783.36-.388.59-.895.707-1.48l.015-.05a.3.3 0 0 1 .279-.19Z"/>
</svg>
After Width: | Height: | Size: 1.3 KiB |
Binary file not shown.
After Width: | Height: | Size: 1.5 KiB |
@@ -0,0 +1,43 @@
# openai.yaml fields (full example + descriptions)

`agents/openai.yaml` is an extended, product-specific config intended for the machine/harness to read, not the agent. Other product-specific config can also live in the `agents/` folder.

## Full example

```yaml
interface:
  display_name: "Optional user-facing name"
  short_description: "Optional user-facing description"
  icon_small: "./assets/small-400px.png"
  icon_large: "./assets/large-logo.svg"
  brand_color: "#3B82F6"
  default_prompt: "Optional surrounding prompt to use the skill with"

dependencies:
  tools:
    - type: "mcp"
      value: "github"
      description: "GitHub MCP server"
      transport: "streamable_http"
      url: "https://api.githubcopilot.com/mcp/"
```

## Field descriptions and constraints

Top-level constraints:

- Quote all string values.
- Keep keys unquoted.
- For `interface.default_prompt`: generate a helpful, short (typically 1 sentence) example starting prompt based on the skill. It must explicitly mention the skill as `$skill-name` (e.g., "Use $skill-name-here to draft a concise weekly status update.").

- `interface.display_name`: Human-facing title shown in UI skill lists and chips.
- `interface.short_description`: Human-facing short UI blurb (25–64 chars) for quick scanning.
- `interface.icon_small`: Path to a small icon asset (relative to skill dir). Default to `./assets/` and place icons in the skill's `assets/` folder.
- `interface.icon_large`: Path to a larger logo asset (relative to skill dir). Default to `./assets/` and place icons in the skill's `assets/` folder.
- `interface.brand_color`: Hex color used for UI accents (e.g., badges).
- `interface.default_prompt`: Default prompt snippet inserted when invoking the skill.
- `dependencies.tools[].type`: Dependency category. Only `mcp` is supported for now.
- `dependencies.tools[].value`: Identifier of the tool or dependency.
- `dependencies.tools[].description`: Human-readable explanation of the dependency.
- `dependencies.tools[].transport`: Connection type when `type` is `mcp`.
- `dependencies.tools[].url`: MCP server URL when `type` is `mcp`.
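
A hedged sketch of checking these constraints on an existing file, assuming PyYAML:

```python
import yaml
from pathlib import Path

# Illustrative check against the constraints listed above.
data = yaml.safe_load(Path("agents/openai.yaml").read_text())
interface = data.get("interface", {})

short = interface.get("short_description", "")
assert 25 <= len(short) <= 64, f"short_description length {len(short)} is outside 25-64"

prompt = interface.get("default_prompt", "")
assert "$" in prompt, "default_prompt should mention the skill as $skill-name"
```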

@@ -0,0 +1,225 @@
#!/usr/bin/env python3
"""
OpenAI YAML Generator - Creates agents/openai.yaml for a skill folder.

Usage:
    generate_openai_yaml.py <skill_dir> [--name <skill_name>] [--interface key=value]
"""

import argparse
import re
import sys
from pathlib import Path

import yaml

ACRONYMS = {
    "GH",
    "MCP",
    "API",
    "CI",
    "CLI",
    "LLM",
    "PDF",
    "PR",
    "UI",
    "URL",
    "SQL",
}

BRANDS = {
    "openai": "OpenAI",
    "openapi": "OpenAPI",
    "github": "GitHub",
    "pagerduty": "PagerDuty",
    "datadog": "DataDog",
    "sqlite": "SQLite",
    "fastapi": "FastAPI",
}

SMALL_WORDS = {"and", "or", "to", "up", "with"}

ALLOWED_INTERFACE_KEYS = {
    "display_name",
    "short_description",
    "icon_small",
    "icon_large",
    "brand_color",
    "default_prompt",
}


def yaml_quote(value):
    escaped = value.replace("\\", "\\\\").replace('"', '\\"').replace("\n", "\\n")
    return f'"{escaped}"'


def format_display_name(skill_name):
    words = [word for word in skill_name.split("-") if word]
    formatted = []
    for index, word in enumerate(words):
        lower = word.lower()
        upper = word.upper()
        if upper in ACRONYMS:
            formatted.append(upper)
            continue
        if lower in BRANDS:
            formatted.append(BRANDS[lower])
            continue
        if index > 0 and lower in SMALL_WORDS:
            formatted.append(lower)
            continue
        formatted.append(word.capitalize())
    return " ".join(formatted)


def generate_short_description(display_name):
    description = f"Help with {display_name} tasks"

    if len(description) < 25:
        description = f"Help with {display_name} tasks and workflows"
    if len(description) < 25:
        description = f"Help with {display_name} tasks with guidance"

    if len(description) > 64:
        description = f"Help with {display_name}"
    if len(description) > 64:
        description = f"{display_name} helper"
    if len(description) > 64:
        description = f"{display_name} tools"
    if len(description) > 64:
        suffix = " helper"
        max_name_length = 64 - len(suffix)
        trimmed = display_name[:max_name_length].rstrip()
        description = f"{trimmed}{suffix}"
    if len(description) > 64:
        description = description[:64].rstrip()

    if len(description) < 25:
        description = f"{description} workflows"
    if len(description) > 64:
        description = description[:64].rstrip()

    return description


def read_frontmatter_name(skill_dir):
    skill_md = Path(skill_dir) / "SKILL.md"
    if not skill_md.exists():
        print(f"[ERROR] SKILL.md not found in {skill_dir}")
        return None
    content = skill_md.read_text()
    match = re.match(r"^---\n(.*?)\n---", content, re.DOTALL)
    if not match:
        print("[ERROR] Invalid SKILL.md frontmatter format.")
        return None
    frontmatter_text = match.group(1)
    try:
        frontmatter = yaml.safe_load(frontmatter_text)
    except yaml.YAMLError as exc:
        print(f"[ERROR] Invalid YAML frontmatter: {exc}")
        return None
    if not isinstance(frontmatter, dict):
        print("[ERROR] Frontmatter must be a YAML dictionary.")
        return None
    name = frontmatter.get("name", "")
    if not isinstance(name, str) or not name.strip():
        print("[ERROR] Frontmatter 'name' is missing or invalid.")
        return None
    return name.strip()


def parse_interface_overrides(raw_overrides):
    overrides = {}
    optional_order = []
    for item in raw_overrides:
        if "=" not in item:
            print(f"[ERROR] Invalid interface override '{item}'. Use key=value.")
            return None, None
        key, value = item.split("=", 1)
        key = key.strip()
        value = value.strip()
        if not key:
            print(f"[ERROR] Invalid interface override '{item}'. Key is empty.")
            return None, None
        if key not in ALLOWED_INTERFACE_KEYS:
            allowed = ", ".join(sorted(ALLOWED_INTERFACE_KEYS))
            print(f"[ERROR] Unknown interface field '{key}'. Allowed: {allowed}")
            return None, None
        overrides[key] = value
        if key not in ("display_name", "short_description") and key not in optional_order:
            optional_order.append(key)
    return overrides, optional_order


def write_openai_yaml(skill_dir, skill_name, raw_overrides):
    overrides, optional_order = parse_interface_overrides(raw_overrides)
    if overrides is None:
        return None

    display_name = overrides.get("display_name") or format_display_name(skill_name)
    short_description = overrides.get("short_description") or generate_short_description(display_name)

    if not (25 <= len(short_description) <= 64):
        print(
            "[ERROR] short_description must be 25-64 characters "
            f"(got {len(short_description)})."
        )
        return None

    interface_lines = [
        "interface:",
        f"  display_name: {yaml_quote(display_name)}",
        f"  short_description: {yaml_quote(short_description)}",
    ]

    for key in optional_order:
        value = overrides.get(key)
        if value is not None:
            interface_lines.append(f"  {key}: {yaml_quote(value)}")

    agents_dir = Path(skill_dir) / "agents"
    agents_dir.mkdir(parents=True, exist_ok=True)
    output_path = agents_dir / "openai.yaml"
    output_path.write_text("\n".join(interface_lines) + "\n")
    print("[OK] Created agents/openai.yaml")
    return output_path


def main():
    parser = argparse.ArgumentParser(
        description="Create agents/openai.yaml for a skill directory.",
    )
    parser.add_argument("skill_dir", help="Path to the skill directory")
    parser.add_argument(
        "--name",
        help="Skill name override (defaults to SKILL.md frontmatter)",
    )
    parser.add_argument(
        "--interface",
        action="append",
        default=[],
        help="Interface override in key=value format (repeatable)",
    )
    args = parser.parse_args()

    skill_dir = Path(args.skill_dir).resolve()
    if not skill_dir.exists():
        print(f"[ERROR] Skill directory not found: {skill_dir}")
        sys.exit(1)
    if not skill_dir.is_dir():
        print(f"[ERROR] Path is not a directory: {skill_dir}")
        sys.exit(1)

    skill_name = args.name or read_frontmatter_name(skill_dir)
    if not skill_name:
        sys.exit(1)

    result = write_openai_yaml(skill_dir, skill_name, args.interface)
    if result:
        sys.exit(0)
    sys.exit(1)


if __name__ == "__main__":
    main()

@@ -3,13 +3,14 @@
Skill Initializer - Creates a new skill from template

Usage:
    init_skill.py <skill-name> --path <path> [--resources scripts,references,assets] [--examples]
    init_skill.py <skill-name> --path <path> [--resources scripts,references,assets] [--examples] [--interface key=value]

Examples:
    init_skill.py my-new-skill --path skills/public
    init_skill.py my-new-skill --path skills/public --resources scripts,references
    init_skill.py my-api-helper --path skills/private --resources scripts --examples
    init_skill.py custom-skill --path /custom/location
    init_skill.py my-skill --path skills/public --interface short_description="Short UI label"
"""

import argparse
@@ -17,6 +18,8 @@ import re
import sys
from pathlib import Path

from generate_openai_yaml import write_openai_yaml

MAX_SKILL_NAME_LENGTH = 64
ALLOWED_RESOURCES = {"scripts", "references", "assets"}

@@ -252,7 +255,7 @@ def create_resource_dirs(skill_dir, skill_name, skill_title, resources, include_
    print("[OK] Created assets/")


def init_skill(skill_name, path, resources, include_examples):
def init_skill(skill_name, path, resources, include_examples, interface_overrides):
    """
    Initialize a new skill directory with template SKILL.md.

@@ -293,6 +296,15 @@ def init_skill(skill_name, path, resources, include_examples):
        print(f"[ERROR] Error creating SKILL.md: {e}")
        return None

    # Create agents/openai.yaml
    try:
        result = write_openai_yaml(skill_dir, skill_name, interface_overrides)
        if not result:
            return None
    except Exception as e:
        print(f"[ERROR] Error creating agents/openai.yaml: {e}")
        return None

    # Create resource directories if requested
    if resources:
        try:
@@ -312,7 +324,8 @@ def init_skill(skill_name, path, resources, include_examples):
        print("2. Add resources to scripts/, references/, and assets/ as needed")
    else:
        print("2. Create resource directories only if needed (scripts/, references/, assets/)")
    print("3. Run the validator when ready to check the skill structure")
    print("3. Update agents/openai.yaml if the UI metadata should differ")
    print("4. Run the validator when ready to check the skill structure")

    return skill_dir

@@ -333,6 +346,12 @@ def main():
        action="store_true",
        help="Create example files inside the selected resource directories",
    )
    parser.add_argument(
        "--interface",
        action="append",
        default=[],
        help="Interface override in key=value format (repeatable)",
    )
    args = parser.parse_args()

    raw_skill_name = args.skill_name
@@ -366,7 +385,7 @@ def main():
    print(" Resources: none (create as needed)")
    print()

    result = init_skill(skill_name, path, resources, args.examples)
    result = init_skill(skill_name, path, resources, args.examples, args.interface)

    if result:
        sys.exit(0)

@@ -1,111 +0,0 @@
#!/usr/bin/env python3
"""
Skill Packager - Creates a distributable .skill file of a skill folder

Usage:
    python utils/package_skill.py <path/to/skill-folder> [output-directory]

Example:
    python utils/package_skill.py skills/public/my-skill
    python utils/package_skill.py skills/public/my-skill ./dist
"""

import sys
import zipfile
from pathlib import Path

from quick_validate import validate_skill


def package_skill(skill_path, output_dir=None):
    """
    Package a skill folder into a .skill file.

    Args:
        skill_path: Path to the skill folder
        output_dir: Optional output directory for the .skill file (defaults to current directory)

    Returns:
        Path to the created .skill file, or None if error
    """
    skill_path = Path(skill_path).resolve()

    # Validate skill folder exists
    if not skill_path.exists():
        print(f"[ERROR] Skill folder not found: {skill_path}")
        return None

    if not skill_path.is_dir():
        print(f"[ERROR] Path is not a directory: {skill_path}")
        return None

    # Validate SKILL.md exists
    skill_md = skill_path / "SKILL.md"
    if not skill_md.exists():
        print(f"[ERROR] SKILL.md not found in {skill_path}")
        return None

    # Run validation before packaging
    print("Validating skill...")
    valid, message = validate_skill(skill_path)
    if not valid:
        print(f"[ERROR] Validation failed: {message}")
        print("  Please fix the validation errors before packaging.")
        return None
    print(f"[OK] {message}\n")

    # Determine output location
    skill_name = skill_path.name
    if output_dir:
        output_path = Path(output_dir).resolve()
        output_path.mkdir(parents=True, exist_ok=True)
    else:
        output_path = Path.cwd()

    skill_filename = output_path / f"{skill_name}.skill"

    # Create the .skill file (zip format)
    try:
        with zipfile.ZipFile(skill_filename, "w", zipfile.ZIP_DEFLATED) as zipf:
            # Walk through the skill directory
            for file_path in skill_path.rglob("*"):
                if file_path.is_file():
                    # Calculate the relative path within the zip
                    arcname = file_path.relative_to(skill_path.parent)
                    zipf.write(file_path, arcname)
                    print(f"  Added: {arcname}")

        print(f"\n[OK] Successfully packaged skill to: {skill_filename}")
        return skill_filename

    except Exception as e:
        print(f"[ERROR] Error creating .skill file: {e}")
        return None


def main():
    if len(sys.argv) < 2:
        print("Usage: python utils/package_skill.py <path/to/skill-folder> [output-directory]")
        print("\nExample:")
        print("  python utils/package_skill.py skills/public/my-skill")
        print("  python utils/package_skill.py skills/public/my-skill ./dist")
        sys.exit(1)

    skill_path = sys.argv[1]
    output_dir = sys.argv[2] if len(sys.argv) > 2 else None

    print(f"Packaging skill: {skill_path}")
    if output_dir:
        print(f"  Output directory: {output_dir}")
    print()

    result = package_skill(skill_path, output_dir)

    if result:
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == "__main__":
    main()
@@ -7,10 +7,10 @@ metadata:

# Skill Installer

Helps install skills. By default these are from https://github.com/openai/skills/tree/main/skills/.curated, but users can also provide other locations.
Helps install skills. By default these are from https://github.com/openai/skills/tree/main/skills/.curated, but users can also provide other locations. Experimental skills live in https://github.com/openai/skills/tree/main/skills/.experimental and can be installed the same way.

Use the helper scripts based on the task:
- List curated skills when the user asks what is available, or if the user uses this skill without specifying what to do.
- List skills when the user asks what is available, or if the user uses this skill without specifying what to do. Default listing is `.curated`, but you can pass `--path skills/.experimental` when they ask about experimental skills.
- Install from the curated list when the user provides a skill name.
- Install from another repo when the user provides a GitHub repo/path (including private repos).

@@ -18,7 +18,7 @@ Install skills with the helper scripts.

## Communication

When listing curated skills, output approximately as follows, depending on the context of the user's request:
When listing skills, output approximately as follows, depending on the context of the user's request. If they ask about experimental skills, list from `.experimental` instead of `.curated` and label the source accordingly:
"""
Skills from {repo}:
1. skill-1

@@ -33,10 +33,12 @@ After installing a skill, tell the user: "Restart Codex to pick up new skills."

All of these scripts use network, so when running in the sandbox, request escalation when running them.

- `scripts/list-curated-skills.py` (prints curated list with installed annotations)
- `scripts/list-curated-skills.py --format json`
- `scripts/list-skills.py` (prints skills list with installed annotations)
- `scripts/list-skills.py --format json`
  - Example (experimental list): `scripts/list-skills.py --path skills/.experimental`
- `scripts/install-skill-from-github.py --repo <owner>/<repo> --path <path/to/skill> [<path/to/skill> ...]`
- `scripts/install-skill-from-github.py --url https://github.com/<owner>/<repo>/tree/<ref>/<path>`
  - Example (experimental skill): `scripts/install-skill-from-github.py --repo openai/skills --path skills/.experimental/<skill-name>`
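
For instance, a harness could shell out to the lister like this sketch (the printed output format is whatever the script emits; no JSON shape is assumed here):

```python
import subprocess

# List experimental skills; the script prints the listing to stdout.
result = subprocess.run(
    ["python", "scripts/list-skills.py", "--path", "skills/.experimental"],
    capture_output=True,
    text=True,
    check=True,
)
print(result.stdout)
```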

## Behavior and Options

@@ -0,0 +1,5 @@
interface:
  display_name: "Skill Installer"
  short_description: "Install curated skills from openai/skills or other repos"
  icon_small: "./assets/skill-installer-small.svg"
  icon_large: "./assets/skill-installer.png"
@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" viewBox="0 0 16 16">
<path fill="#0D0D0D" d="M2.145 3.959a2.033 2.033 0 0 1 2.022-1.824h5.966c.551 0 .997 0 1.357.029.367.03.692.093.993.246l.174.098c.397.243.72.593.932 1.01l.053.114c.116.269.168.557.194.878.03.36.03.805.03 1.357v4.3a2.365 2.365 0 0 1-2.366 2.365h-1.312a2.198 2.198 0 0 1-4.377 0H4.167A2.032 2.032 0 0 1 2.135 10.5V9.333l.004-.088A.865.865 0 0 1 3 8.468l.116-.006A1.135 1.135 0 0 0 3 6.199a.865.865 0 0 1-.865-.864V4.167l.01-.208Zm1.054 1.186a2.198 2.198 0 0 1 0 4.376v.98c0 .534.433.967.968.967H6l.089.004a.866.866 0 0 1 .776.861 1.135 1.135 0 0 0 2.27 0c0-.478.387-.865.865-.865h1.5c.719 0 1.301-.583 1.301-1.301v-4.3c0-.57 0-.964-.025-1.27a1.933 1.933 0 0 0-.09-.493L12.642 4a1.47 1.47 0 0 0-.541-.585l-.102-.056c-.126-.065-.295-.11-.596-.135a17.31 17.31 0 0 0-1.27-.025H4.167a.968.968 0 0 0-.968.968v.978Z"/>
</svg>
After Width: | Height: | Size: 923 B |
Binary file not shown.
After Width: | Height: | Size: 1.1 KiB |
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
"""List curated skills from a GitHub repo path."""
"""List skills from a GitHub repo path."""

from __future__ import annotations

@@ -47,28 +47,32 @@ def _installed_skills() -> set[str]:
    return entries


def _list_curated(repo: str, path: str, ref: str) -> list[str]:
def _list_skills(repo: str, path: str, ref: str) -> list[str]:
    api_url = github_api_contents_url(repo, path, ref)
    try:
        payload = _request(api_url)
    except urllib.error.HTTPError as exc:
        if exc.code == 404:
            raise ListError(
                "Curated skills path not found: "
                "Skills path not found: "
                f"https://github.com/{repo}/tree/{ref}/{path}"
            ) from exc
        raise ListError(f"Failed to fetch curated skills: HTTP {exc.code}") from exc
        raise ListError(f"Failed to fetch skills: HTTP {exc.code}") from exc
    data = json.loads(payload.decode("utf-8"))
    if not isinstance(data, list):
        raise ListError("Unexpected curated listing response.")
        raise ListError("Unexpected skills listing response.")
    skills = [item["name"] for item in data if item.get("type") == "dir"]
    return sorted(skills)


def _parse_args(argv: list[str]) -> Args:
    parser = argparse.ArgumentParser(description="List curated skills.")
    parser = argparse.ArgumentParser(description="List skills.")
    parser.add_argument("--repo", default=DEFAULT_REPO)
    parser.add_argument("--path", default=DEFAULT_PATH)
    parser.add_argument(
        "--path",
        default=DEFAULT_PATH,
        help="Repo path to list (default: skills/.curated)",
    )
    parser.add_argument("--ref", default=DEFAULT_REF)
    parser.add_argument(
        "--format",
@@ -82,7 +86,7 @@ def _parse_args(argv: list[str]) -> Args:

def main(argv: list[str]) -> int:
    args = _parse_args(argv)
    try:
        skills = _list_curated(args.repo, args.path, args.ref)
        skills = _list_skills(args.repo, args.path, args.ref)
        installed = _installed_skills()
        if args.format == "json":
            payload = [
@@ -2,6 +2,9 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::path::PathBuf;

use crate::analytics_client::AnalyticsEventsClient;
use crate::analytics_client::SkillInvocation;
use crate::analytics_client::TrackEventsContext;
use crate::instructions::SkillInstructions;
use crate::skills::SkillMetadata;
use codex_otel::OtelManager;
@@ -18,6 +21,8 @@ pub(crate) struct SkillInjections {
pub(crate) async fn build_skill_injections(
    mentioned_skills: &[SkillMetadata],
    otel: Option<&OtelManager>,
    analytics_client: &AnalyticsEventsClient,
    tracking: TrackEventsContext,
) -> SkillInjections {
    if mentioned_skills.is_empty() {
        return SkillInjections::default();
@@ -27,11 +32,17 @@ pub(crate) async fn build_skill_injections(
        items: Vec::with_capacity(mentioned_skills.len()),
        warnings: Vec::new(),
    };
    let mut invocations = Vec::new();

    for skill in mentioned_skills {
        match fs::read_to_string(&skill.path).await {
            Ok(contents) => {
                emit_skill_injected_metric(otel, skill, "ok");
                invocations.push(SkillInvocation {
                    skill_name: skill.name.clone(),
                    skill_scope: skill.scope,
                    skill_path: skill.path.clone(),
                });
                result.items.push(ResponseItem::from(SkillInstructions {
                    name: skill.name.clone(),
                    path: skill.path.to_string_lossy().into_owned(),
@@ -50,6 +61,8 @@ pub(crate) async fn build_skill_injections(
        }
    }

    analytics_client.track_skill_invocations(tracking, invocations);

    result
}

@@ -1,6 +1,9 @@
use crate::config::Config;
use crate::config_loader::ConfigLayerStack;
use crate::config_loader::ConfigLayerStackOrdering;
use crate::config_loader::default_project_root_markers;
use crate::config_loader::merge_toml_values;
use crate::config_loader::project_root_markers_from_config;
use crate::skills::model::SkillDependencies;
use crate::skills::model::SkillError;
use crate::skills::model::SkillInterface;
@@ -20,6 +23,7 @@ use std::fs;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use toml::Value as TomlValue;
use tracing::error;

#[derive(Debug, Deserialize)]
@@ -72,6 +76,7 @@ struct DependencyTool {
}

const SKILLS_FILENAME: &str = "SKILL.md";
const AGENTS_DIR_NAME: &str = ".agents";
const SKILLS_METADATA_DIR: &str = "agents";
const SKILLS_METADATA_FILENAME: &str = "openai.yaml";
const SKILLS_DIR_NAME: &str = "skills";

@@ -209,15 +214,104 @@ fn skill_roots_from_layer_stack_inner(config_layer_stack: &ConfigLayerStack) ->
}

fn skill_roots(config: &Config) -> Vec<SkillRoot> {
    skill_roots_from_layer_stack_inner(&config.config_layer_stack)
    skill_roots_from_layer_stack_with_agents(&config.config_layer_stack, &config.cwd)
}

#[cfg(test)]
pub(crate) fn skill_roots_from_layer_stack(
    config_layer_stack: &ConfigLayerStack,
) -> Vec<SkillRoot> {
    skill_roots_from_layer_stack_inner(config_layer_stack)
}

pub(crate) fn skill_roots_from_layer_stack_with_agents(
    config_layer_stack: &ConfigLayerStack,
    cwd: &Path,
) -> Vec<SkillRoot> {
    let mut roots = skill_roots_from_layer_stack_inner(config_layer_stack);
    roots.extend(repo_agents_skill_roots(config_layer_stack, cwd));
    dedupe_skill_roots_by_path(&mut roots);
    roots
}

fn dedupe_skill_roots_by_path(roots: &mut Vec<SkillRoot>) {
    let mut seen: HashSet<PathBuf> = HashSet::new();
    roots.retain(|root| seen.insert(root.path.clone()));
}

fn repo_agents_skill_roots(config_layer_stack: &ConfigLayerStack, cwd: &Path) -> Vec<SkillRoot> {
    let project_root_markers = project_root_markers_from_stack(config_layer_stack);
    let project_root = find_project_root(cwd, &project_root_markers);
    let dirs = dirs_between_project_root_and_cwd(cwd, &project_root);
    let mut roots = Vec::new();
    for dir in dirs {
        let agents_skills = dir.join(AGENTS_DIR_NAME).join(SKILLS_DIR_NAME);
        if agents_skills.is_dir() {
            roots.push(SkillRoot {
                path: agents_skills,
                scope: SkillScope::Repo,
            });
        }
    }
    roots
}

fn project_root_markers_from_stack(config_layer_stack: &ConfigLayerStack) -> Vec<String> {
    let mut merged = TomlValue::Table(toml::map::Map::new());
    for layer in
        config_layer_stack.get_layers(ConfigLayerStackOrdering::LowestPrecedenceFirst, false)
    {
        if matches!(layer.name, ConfigLayerSource::Project { .. }) {
            continue;
        }
        merge_toml_values(&mut merged, &layer.config);
    }

    match project_root_markers_from_config(&merged) {
        Ok(Some(markers)) => markers,
        Ok(None) => default_project_root_markers(),
        Err(err) => {
            tracing::warn!("invalid project_root_markers: {err}");
            default_project_root_markers()
        }
    }
}

fn find_project_root(cwd: &Path, project_root_markers: &[String]) -> PathBuf {
    if project_root_markers.is_empty() {
        return cwd.to_path_buf();
    }

    for ancestor in cwd.ancestors() {
        for marker in project_root_markers {
            let marker_path = ancestor.join(marker);
            if marker_path.exists() {
                return ancestor.to_path_buf();
            }
        }
    }

    cwd.to_path_buf()
}

fn dirs_between_project_root_and_cwd(cwd: &Path, project_root: &Path) -> Vec<PathBuf> {
    let mut dirs = cwd
        .ancestors()
        .scan(false, |done, a| {
            if *done {
                None
            } else {
                if a == project_root {
                    *done = true;
                }
                Some(a.to_path_buf())
            }
        })
        .collect::<Vec<_>>();
    dirs.reverse();
    dirs
}

fn discover_skills_under_root(root: &Path, scope: SkillScope, outcome: &mut SkillLoadOutcome) {
    let Ok(root) = canonicalize_path(root) else {
        return;
@@ -1615,6 +1709,40 @@ interface:
    );
}

#[tokio::test]
async fn loads_skills_from_agents_dir_without_codex_dir() {
    let codex_home = tempfile::tempdir().expect("tempdir");
    let repo_dir = tempfile::tempdir().expect("tempdir");
    mark_as_git_repo(repo_dir.path());

    let skill_path = write_skill_at(
        &repo_dir.path().join(AGENTS_DIR_NAME).join(SKILLS_DIR_NAME),
        "agents",
        "agents-skill",
        "from agents",
    );
    let cfg = make_config_for_cwd(&codex_home, repo_dir.path().to_path_buf()).await;

    let outcome = load_skills(&cfg);
    assert!(
        outcome.errors.is_empty(),
        "unexpected errors: {:?}",
        outcome.errors
    );
    assert_eq!(
        outcome.skills,
        vec![SkillMetadata {
            name: "agents-skill".to_string(),
            description: "from agents".to_string(),
            short_description: None,
            interface: None,
            dependencies: None,
            path: normalized(&skill_path),
            scope: SkillScope::Repo,
        }]
    );
}

#[tokio::test]
async fn loads_skills_from_all_codex_dirs_under_project_root() {
    let codex_home = tempfile::tempdir().expect("tempdir");

@@ -10,11 +10,12 @@ use tracing::warn;

use crate::config::Config;
use crate::config::types::SkillsConfig;
use crate::config_loader::CloudRequirementsLoader;
use crate::config_loader::LoaderOverrides;
use crate::config_loader::load_config_layers_state;
use crate::skills::SkillLoadOutcome;
use crate::skills::loader::load_skills_from_roots;
use crate::skills::loader::skill_roots_from_layer_stack;
use crate::skills::loader::skill_roots_from_layer_stack_with_agents;
use crate::skills::system::install_system_skills;

pub struct SkillsManager {
@@ -46,7 +47,8 @@ impl SkillsManager {
            return outcome;
        }

        let roots = skill_roots_from_layer_stack(&config.config_layer_stack);
        let roots =
            skill_roots_from_layer_stack_with_agents(&config.config_layer_stack, &config.cwd);
        let mut outcome = load_skills_from_roots(roots);
        outcome.disabled_paths = disabled_paths_from_stack(&config.config_layer_stack);
        match self.cache_by_cwd.write() {
@@ -88,7 +90,7 @@ impl SkillsManager {
            Some(cwd_abs),
            &cli_overrides,
            LoaderOverrides::default(),
            None,
            CloudRequirementsLoader::default(),
        )
        .await
        {
@@ -104,7 +106,7 @@ impl SkillsManager {
            }
        };

        let roots = skill_roots_from_layer_stack(&config_layer_stack);
        let roots = skill_roots_from_layer_stack_with_agents(&config_layer_stack, cwd);
        let mut outcome = load_skills_from_roots(roots);
        outcome.disabled_paths = disabled_paths_from_stack(&config_layer_stack);
        match self.cache_by_cwd.write() {

@@ -24,9 +24,10 @@ pub fn render_skills_section(skills: &[SkillMetadata]) -> Option<String> {
- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.
- How to use a skill (progressive disclosure):
  1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.
  2) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.
  3) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.
  4) If `assets/` or templates exist, reuse them instead of recreating from scratch.
  2) When `SKILL.md` references relative paths (e.g., `scripts/foo.py`), resolve them relative to the skill directory listed above first, and only consider other paths if needed.
  3) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.
  4) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.
  5) If `assets/` or templates exist, reuse them instead of recreating from scratch.
- Coordination and sequencing:
  - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.
  - Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.

@@ -86,21 +86,8 @@ fn read_marker(path: &AbsolutePathBuf) -> Result<String, SystemSkillsError> {
}

fn embedded_system_skills_fingerprint() -> String {
    let mut items: Vec<(String, Option<u64>)> = SYSTEM_SKILLS_DIR
        .entries()
        .iter()
        .map(|entry| match entry {
            include_dir::DirEntry::Dir(dir) => (dir.path().to_string_lossy().to_string(), None),
            include_dir::DirEntry::File(file) => {
                let mut file_hasher = DefaultHasher::new();
                file.contents().hash(&mut file_hasher);
                (
                    file.path().to_string_lossy().to_string(),
                    Some(file_hasher.finish()),
                )
            }
        })
        .collect();
    let mut items = Vec::new();
    collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut items);
    items.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));

    let mut hasher = DefaultHasher::new();
@@ -112,6 +99,25 @@ fn embedded_system_skills_fingerprint() -> String {
    format!("{:x}", hasher.finish())
}

fn collect_fingerprint_items(dir: &Dir<'_>, items: &mut Vec<(String, Option<u64>)>) {
    for entry in dir.entries() {
        match entry {
            include_dir::DirEntry::Dir(subdir) => {
                items.push((subdir.path().to_string_lossy().to_string(), None));
                collect_fingerprint_items(subdir, items);
            }
            include_dir::DirEntry::File(file) => {
                let mut file_hasher = DefaultHasher::new();
                file.contents().hash(&mut file_hasher);
                items.push((
                    file.path().to_string_lossy().to_string(),
                    Some(file_hasher.finish()),
                ));
            }
        }
    }
}

/// Writes the embedded `include_dir::Dir` to disk under `dest`.
///
/// Preserves the embedded directory structure.
@@ -163,3 +169,28 @@ impl SystemSkillsError {
        Self::Io { action, source }
    }
}

#[cfg(test)]
mod tests {
    use super::SYSTEM_SKILLS_DIR;
    use super::collect_fingerprint_items;

    #[test]
    fn fingerprint_traverses_nested_entries() {
        let mut items = Vec::new();
        collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut items);
        let mut paths: Vec<String> = items.into_iter().map(|(path, _)| path).collect();
        paths.sort_unstable();

        assert!(
            paths
                .binary_search_by(|probe| probe.as_str().cmp("skill-creator/SKILL.md"))
                .is_ok()
        );
        assert!(
            paths
                .binary_search_by(|probe| probe.as_str().cmp("skill-creator/scripts/init_skill.py"))
                .is_ok()
        );
    }
}
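The fingerprint fix above replaces a flat pass over `SYSTEM_SKILLS_DIR.entries()` with a recursive walk, so files nested below the top level now contribute to the hash. A minimal, self-contained sketch of the same pattern, using a toy `Entry` type invented here (the real code walks an embedded `include_dir::Dir`):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Toy stand-in for include_dir's directory tree.
enum Entry {
    Dir(String, Vec<Entry>),
    File(String, &'static [u8]),
}

fn collect(entry: &Entry, items: &mut Vec<(String, Option<u64>)>) {
    match entry {
        Entry::Dir(path, children) => {
            items.push((path.clone(), None));
            // The recursive step the old flat iteration lacked.
            for child in children {
                collect(child, items);
            }
        }
        Entry::File(path, contents) => {
            let mut hasher = DefaultHasher::new();
            contents.hash(&mut hasher);
            items.push((path.clone(), Some(hasher.finish())));
        }
    }
}

fn main() {
    let tree = Entry::Dir(
        "skill-creator".to_string(),
        vec![
            Entry::Dir(
                "skill-creator/scripts".to_string(),
                vec![Entry::File(
                    "skill-creator/scripts/init_skill.py".to_string(),
                    b"print('hi')",
                )],
            ),
            Entry::File("skill-creator/SKILL.md".to_string(), b"docs"),
        ],
    );
    let mut items = Vec::new();
    collect(&tree, &mut items);
    // Both directories and both files are visited: 4 entries total,
    // so a change to a nested script now changes the fingerprint.
    assert_eq!(items.len(), 4);
}
```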
@@ -3,6 +3,7 @@ use std::sync::Arc;
use crate::AuthManager;
use crate::RolloutRecorder;
use crate::agent::AgentControl;
use crate::analytics_client::AnalyticsEventsClient;
use crate::exec_policy::ExecPolicyManager;
use crate::mcp_connection_manager::McpConnectionManager;
use crate::models_manager::manager::ModelsManager;
@@ -21,6 +22,7 @@ pub(crate) struct SessionServices {
    pub(crate) mcp_connection_manager: Arc<RwLock<McpConnectionManager>>,
    pub(crate) mcp_startup_cancellation_token: Mutex<CancellationToken>,
    pub(crate) unified_exec_manager: UnifiedExecProcessManager,
    pub(crate) analytics_events_client: AnalyticsEventsClient,
    pub(crate) notifier: UserNotifier,
    pub(crate) rollout: Mutex<Option<RolloutRecorder>>,
    pub(crate) user_shell: Arc<crate::shell::Shell>,

@@ -48,7 +48,7 @@ pub(crate) async fn handle_output_item_done(
    previously_active_item: Option<TurnItem>,
) -> Result<OutputItemResult> {
    let mut output = OutputItemResult::default();
    let plan_mode = ctx.turn_context.collaboration_mode_kind == ModeKind::Plan;
    let plan_mode = ctx.turn_context.collaboration_mode.mode == ModeKind::Plan;

    match ToolRouter::build_tool_call(ctx.sess.as_ref(), item.clone()).await {
        // The model emitted a tool call; log it, persist the item immediately, and queue the tool execution.

@@ -67,7 +67,7 @@ impl SessionTask for UserShellCommandTask {

        let event = EventMsg::TurnStarted(TurnStartedEvent {
            model_context_window: turn_context.client.get_model_context_window(),
            collaboration_mode_kind: turn_context.collaboration_mode_kind,
            collaboration_mode_kind: turn_context.collaboration_mode.mode,
        });
        let session = session.clone_session();
        session.send_event(turn_context.as_ref(), event).await;

@@ -104,7 +104,7 @@ pub(crate) async fn handle_update_plan(
    arguments: String,
    _call_id: String,
) -> Result<String, FunctionCallError> {
    if turn_context.collaboration_mode_kind == ModeKind::Plan {
    if turn_context.collaboration_mode.mode == ModeKind::Plan {
        return Err(FunctionCallError::RespondToModel(
            "update_plan is a TODO/checklist tool and is not allowed in Plan mode".to_string(),
        ));

@@ -1576,7 +1576,7 @@ mod tests {
        // Build expected from the same helpers used by the builder.
        let mut expected: BTreeMap<String, ToolSpec> = BTreeMap::from([]);
        for spec in [
            create_exec_command_tool(false),
            create_exec_command_tool(true),
            create_write_stdin_tool(),
            create_list_mcp_resources_tool(),
            create_list_mcp_resource_templates_tool(),
@@ -2410,7 +2410,7 @@ mod tests {

    #[test]
    fn test_shell_tool() {
        let tool = super::create_shell_tool(false);
        let tool = super::create_shell_tool(true);
        let ToolSpec::Function(ResponsesApiTool {
            description, name, ..
        }) = &tool
@@ -2440,7 +2440,7 @@ Examples of valid command strings:

    #[test]
    fn test_shell_command_tool() {
        let tool = super::create_shell_command_tool(false);
        let tool = super::create_shell_command_tool(true);
        let ToolSpec::Function(ResponsesApiTool {
            description, name, ..
        }) = &tool

@@ -34,18 +34,6 @@ impl From<TruncationPolicyConfig> for TruncationPolicy {
}

impl TruncationPolicy {
    /// Scale the underlying budget by `multiplier`, rounding up to avoid under-budgeting.
    pub fn mul(self, multiplier: f64) -> Self {
        match self {
            TruncationPolicy::Bytes(bytes) => {
                TruncationPolicy::Bytes((bytes as f64 * multiplier).ceil() as usize)
            }
            TruncationPolicy::Tokens(tokens) => {
                TruncationPolicy::Tokens((tokens as f64 * multiplier).ceil() as usize)
            }
        }
    }

    /// Returns a token budget derived from this policy.
    ///
    /// - For `Tokens`, this is the explicit token limit.
@@ -73,6 +61,21 @@ impl TruncationPolicy {
    }
}

impl std::ops::Mul<f64> for TruncationPolicy {
    type Output = Self;

    fn mul(self, multiplier: f64) -> Self::Output {
        match self {
            TruncationPolicy::Bytes(bytes) => {
                TruncationPolicy::Bytes((bytes as f64 * multiplier).ceil() as usize)
            }
            TruncationPolicy::Tokens(tokens) => {
                TruncationPolicy::Tokens((tokens as f64 * multiplier).ceil() as usize)
            }
        }
    }
}

pub(crate) fn formatted_truncate_text(content: &str, policy: TruncationPolicy) -> String {
    if content.len() <= policy.byte_budget() {
        return content.to_string();
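With the inherent `mul` method replaced by a `std::ops::Mul<f64>` impl, call sites can use the `*` operator directly. A small usage sketch (enum and operator body copied from the diff, stripped to what the operator needs; the derives are added here only so the asserts compile):

```rust
use std::ops::Mul;

#[derive(Debug, Clone, Copy, PartialEq)]
enum TruncationPolicy {
    Bytes(usize),
    Tokens(usize),
}

impl Mul<f64> for TruncationPolicy {
    type Output = Self;

    fn mul(self, multiplier: f64) -> Self::Output {
        match self {
            // `.ceil()` rounds up so scaling never under-budgets.
            TruncationPolicy::Bytes(bytes) => {
                TruncationPolicy::Bytes((bytes as f64 * multiplier).ceil() as usize)
            }
            TruncationPolicy::Tokens(tokens) => {
                TruncationPolicy::Tokens((tokens as f64 * multiplier).ceil() as usize)
            }
        }
    }
}

fn main() {
    // What was `policy.mul(1.5)` at old call sites becomes `policy * 1.5`.
    assert_eq!(
        TruncationPolicy::Tokens(1_000) * 1.5,
        TruncationPolicy::Tokens(1_500)
    );
    // Rounding up: 10 bytes * 0.25 = 2.5, which ceils to 3.
    assert_eq!(TruncationPolicy::Bytes(10) * 0.25, TruncationPolicy::Bytes(3));
}
```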
codex-rs/core/src/turn_metadata.rs (Normal file, 47 lines)
@@ -0,0 +1,47 @@
use std::collections::BTreeMap;
use std::path::Path;

use serde::Serialize;
use tokio::time::Duration as TokioDuration;

use crate::git_info::get_git_remote_urls_assume_git_repo_with_timeout;
use crate::git_info::get_git_repo_root;
use crate::git_info::get_head_commit_hash_with_timeout;

const TURN_METADATA_TIMEOUT: TokioDuration = TokioDuration::from_millis(150);

#[derive(Serialize)]
struct TurnMetadataWorkspace {
    #[serde(skip_serializing_if = "Option::is_none")]
    associated_remote_urls: Option<BTreeMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    latest_git_commit_hash: Option<String>,
}

#[derive(Serialize)]
struct TurnMetadata {
    workspaces: BTreeMap<String, TurnMetadataWorkspace>,
}

/// this happens at request send time only
pub(crate) async fn build_turn_metadata_header(cwd: &Path) -> Option<String> {
    let repo_root = get_git_repo_root(cwd)?;

    let (latest_git_commit_hash, associated_remote_urls) = tokio::join!(
        get_head_commit_hash_with_timeout(cwd, TURN_METADATA_TIMEOUT),
        get_git_remote_urls_assume_git_repo_with_timeout(cwd, TURN_METADATA_TIMEOUT)
    );
    if latest_git_commit_hash.is_none() && associated_remote_urls.is_none() {
        return None;
    }

    let mut workspaces = BTreeMap::new();
    workspaces.insert(
        repo_root.to_string_lossy().into_owned(),
        TurnMetadataWorkspace {
            associated_remote_urls,
            latest_git_commit_hash,
        },
    );
    serde_json::to_string(&TurnMetadata { workspaces }).ok()
}
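For reference, the header value produced by `build_turn_metadata_header` is a one-line JSON document keyed by repo root, with `None` fields skipped. A runnable sketch of the payload shape (types mirrored from the file above; the `/repo` path and `abc123` hash are made-up values, matching the test later in this diff):

```rust
use std::collections::BTreeMap;

use serde::Serialize;

#[derive(Serialize)]
struct TurnMetadataWorkspace {
    #[serde(skip_serializing_if = "Option::is_none")]
    associated_remote_urls: Option<BTreeMap<String, String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    latest_git_commit_hash: Option<String>,
}

#[derive(Serialize)]
struct TurnMetadata {
    workspaces: BTreeMap<String, TurnMetadataWorkspace>,
}

fn main() {
    let mut workspaces = BTreeMap::new();
    workspaces.insert(
        "/repo".to_string(),
        TurnMetadataWorkspace {
            associated_remote_urls: None, // skipped entirely in the output
            latest_git_commit_hash: Some("abc123".to_string()),
        },
    );
    let header = serde_json::to_string(&TurnMetadata { workspaces }).expect("serialize");
    assert_eq!(
        header,
        r#"{"workspaces":{"/repo":{"latest_git_commit_hash":"abc123"}}}"#
    );
}
```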
@@ -1,8 +1,23 @@
use codex_protocol::models::WebSearchAction;

fn search_action_detail(query: &Option<String>, queries: &Option<Vec<String>>) -> String {
    query.clone().filter(|q| !q.is_empty()).unwrap_or_else(|| {
        let items = queries.as_ref();
        let first = items
            .and_then(|queries| queries.first())
            .cloned()
            .unwrap_or_default();
        if items.is_some_and(|queries| queries.len() > 1) && !first.is_empty() {
            format!("{first} ...")
        } else {
            first
        }
    })
}

pub fn web_search_action_detail(action: &WebSearchAction) -> String {
    match action {
        WebSearchAction::Search { query } => query.clone().unwrap_or_default(),
        WebSearchAction::Search { query, queries } => search_action_detail(query, queries),
        WebSearchAction::OpenPage { url } => url.clone().unwrap_or_default(),
        WebSearchAction::FindInPage { url, pattern } => match (pattern, url) {
            (Some(pattern), Some(url)) => format!("'{pattern}' in {url}"),
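The new helper prefers a non-empty singular `query`, then falls back to the first entry of `queries`, appending `...` when more than one query is present. A quick check of that order (function body copied from the diff; the example inputs are invented):

```rust
fn search_action_detail(query: &Option<String>, queries: &Option<Vec<String>>) -> String {
    query.clone().filter(|q| !q.is_empty()).unwrap_or_else(|| {
        let items = queries.as_ref();
        let first = items
            .and_then(|queries| queries.first())
            .cloned()
            .unwrap_or_default();
        if items.is_some_and(|queries| queries.len() > 1) && !first.is_empty() {
            format!("{first} ...")
        } else {
            first
        }
    })
}

fn main() {
    // A non-empty `query` wins outright.
    assert_eq!(search_action_detail(&Some("rust".into()), &None), "rust");
    // Otherwise the first of `queries` is used, with "..." when more follow.
    let many = Some(vec!["first".to_string(), "second".to_string()]);
    assert_eq!(search_action_detail(&None, &many), "first ...");
    let one = Some(vec!["only".to_string()]);
    assert_eq!(search_action_detail(&None, &one), "only");
}
```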
@@ -32,7 +32,6 @@ Actions that gather truth, reduce ambiguity, or validate feasibility without cha
Actions that implement the plan or change repo-tracked state. Examples:

* Editing or writing files
* Generating, updating, or accepting snapshots
* Running formatters or linters that rewrite files
* Applying patches, migrations, or codegen that updates repo-tracked files
* Side-effectful commands whose purpose is to carry out the plan rather than refine it
@@ -45,6 +44,8 @@ Begin by grounding yourself in the actual environment. Eliminate unknowns in the

Before asking the user any question, perform at least one targeted non-mutating exploration pass (for example: search relevant files, inspect likely entrypoints/configs, confirm current implementation shape), unless no local environment/repo is available.

Exception: you may ask clarifying questions about the user's prompt before exploring, ONLY if there are obvious ambiguities or contradictions in the prompt itself. However, if ambiguity might be resolved by exploring, always prefer exploring first.

Do not ask questions that can be answered from the repo or system (for example, "where is this struct?" or "which UI component should we use?" when exploration can make it clear). Only ask once you have exhausted reasonable non-mutating exploration.

## PHASE 2 — Intent chat (what they actually want)
@@ -56,23 +57,13 @@ Do not ask questions that can be answered from the repo or system (for example,

* Once intent is stable, keep asking until the spec is decision complete: approach, interfaces (APIs/schemas/I/O), data flow, edge cases/failure modes, testing + acceptance criteria, rollout/monitoring, and any migrations/compat constraints.

## Hard interaction rule (critical)
## Asking questions

Every assistant turn MUST be exactly one of:
A) a `request_user_input` tool call (questions/options only), OR
B) a non-final status update with no questions and no plan content, OR
C) the final output: a titled, plan-only document.
Critical rules:

Rules:

* No questions in free text (only via `request_user_input`).
* Never mix a `request_user_input` call with plan content.
* Status updates must not include questions or plan content.
* Internal tool/repo exploration is allowed privately before A, B, or C.

Status updates should be frequent during exploration. Provide 1-2 sentence updates that summarize discoveries, assumption changes, or why you are changing direction. Use Parallel tools for exploration.

## Ask a lot, but never ask trivia
* Strongly prefer using the `request_user_input` tool to ask any questions.
* Offer only meaningful multiple‑choice options; don’t include filler choices that are obviously wrong or irrelevant.
* In rare cases where an unavoidable, important question can’t be expressed with reasonable multiple‑choice options (due to extreme ambiguity), you may ask it directly without the tool.

You SHOULD ask many questions, but each question must:

@@ -113,19 +104,15 @@ When you present the official plan, wrap it in a `<proposed_plan>` block so the
Example:

<proposed_plan>
# Plan title
- Step 1
- Step 2
plan content
</proposed_plan>

The final plan must be plan-only and include:
plan content should be human and agent digestible. The final plan must be plan-only and include:

* A clear title
* A TL;DR section (3–5 bullets)
* Exact file paths to change
* Exact structures or shapes to introduce or modify
* Exact function, method, type, and variable names and signatures
* Test cases
* A brief summary section
* Important changes or additions to public APIs/interfaces/types
* Test cases and scenarios
* Explicit assumptions and defaults chosen where needed

Do not ask "should I proceed?" in the final output. The user can easily switch out of Plan mode and request implementation if you have included a `<proposed_plan>` block in your response. Alternatively, they can decide to stay in Plan mode and continue refining the plan.
@@ -102,7 +102,7 @@ async fn run_request(input: Vec<ResponseItem>) -> Value {
        SessionSource::Exec,
        TransportManager::new(),
    )
    .new_session();
    .new_session(None, None);

    let mut prompt = Prompt::default();
    prompt.input = input;

@@ -103,7 +103,7 @@ async fn run_stream_with_bytes(sse_body: &[u8]) -> Vec<ResponseEvent> {
        SessionSource::Exec,
        TransportManager::new(),
    )
    .new_session();
    .new_session(None, None);

    let mut prompt = Prompt::default();
    prompt.input = vec![ResponseItem::Message {

@@ -12,6 +12,7 @@ use codex_core::ResponseItem;
use codex_core::TransportManager;
use codex_core::WEB_SEARCH_ELIGIBLE_HEADER;
use codex_core::WireApi;
use codex_core::X_CODEX_TURN_METADATA_HEADER;
use codex_core::models_manager::manager::ModelsManager;
use codex_otel::OtelManager;
use codex_protocol::ThreadId;
@@ -23,6 +24,7 @@ use core_test_support::load_default_config_for_test;
use core_test_support::responses;
use core_test_support::test_codex::test_codex;
use futures::StreamExt;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use wiremock::matchers::header;

@@ -98,7 +100,7 @@ async fn responses_stream_includes_subagent_header_on_review() {
        session_source,
        TransportManager::new(),
    )
    .new_session();
    .new_session(None, None);

    let mut prompt = Prompt::default();
    prompt.input = vec![ResponseItem::Message {
@@ -124,6 +126,101 @@ async fn responses_stream_includes_subagent_header_on_review() {
    );
}

#[tokio::test]
async fn responses_stream_includes_turn_metadata_header_when_set() {
    core_test_support::skip_if_no_network!();

    let server = responses::start_mock_server().await;
    let response_body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_completed("resp-1"),
    ]);

    let request_recorder = responses::mount_sse_once(&server, response_body).await;

    let provider = ModelProviderInfo {
        name: "mock".into(),
        base_url: Some(format!("{}/v1", server.uri())),
        env_key: None,
        env_key_instructions: None,
        experimental_bearer_token: None,
        wire_api: WireApi::Responses,
        query_params: None,
        http_headers: None,
        env_http_headers: None,
        request_max_retries: Some(0),
        stream_max_retries: Some(0),
        stream_idle_timeout_ms: Some(5_000),
        requires_openai_auth: false,
        supports_websockets: false,
    };

    let codex_home = TempDir::new().expect("failed to create TempDir");
    let mut config = load_default_config_for_test(&codex_home).await;
    config.model_provider_id = provider.name.clone();
    config.model_provider = provider.clone();
    let effort = config.model_reasoning_effort;
    let summary = config.model_reasoning_summary;
    let model = ModelsManager::get_model_offline(config.model.as_deref());
    config.model = Some(model.clone());
    let config = Arc::new(config);

    let conversation_id = ThreadId::new();
    let auth_mode = AuthMode::Chatgpt;
    let session_source = SessionSource::SubAgent(SubAgentSource::Review);
    let model_info = ModelsManager::construct_model_info_offline(model.as_str(), &config);
    let otel_manager = OtelManager::new(
        conversation_id,
        model.as_str(),
        model_info.slug.as_str(),
        None,
        Some("test@test.com".to_string()),
        Some(auth_mode),
        false,
        "test".to_string(),
        session_source.clone(),
    );

    let turn_metadata =
        r#"{"workspaces":{"/repo":{"latest_git_commit_hash":"abc123"}}}"#.to_string();
    let mut client_session = ModelClient::new(
        Arc::clone(&config),
        None,
        model_info,
        otel_manager,
        provider,
        effort,
        summary,
        conversation_id,
        session_source,
        TransportManager::new(),
    )
    .new_session_with_turn_metadata(Some(turn_metadata.clone()));

    let mut prompt = Prompt::default();
    prompt.input = vec![ResponseItem::Message {
        id: None,
        role: "user".into(),
        content: vec![ContentItem::InputText {
            text: "hello".into(),
        }],
        end_turn: None,
    }];

    let mut stream = client_session.stream(&prompt).await.expect("stream failed");
    while let Some(event) = stream.next().await {
        if matches!(event, Ok(ResponseEvent::Completed { .. })) {
            break;
        }
    }

    let request = request_recorder.single_request();
    assert_eq!(
        request.header(X_CODEX_TURN_METADATA_HEADER).as_deref(),
        Some(turn_metadata.as_str())
    );
}

#[tokio::test]
async fn responses_stream_includes_subagent_header_on_other() {
    core_test_support::skip_if_no_network!();
@@ -197,7 +294,7 @@ async fn responses_stream_includes_subagent_header_on_other() {
        session_source,
        TransportManager::new(),
    )
    .new_session();
    .new_session(None, None);

    let mut prompt = Prompt::default();
    prompt.input = vec![ResponseItem::Message {
@@ -354,7 +451,7 @@ async fn responses_respects_model_info_overrides_from_config() {
        session_source,
        TransportManager::new(),
    )
    .new_session();
    .new_session(None, None);

    let mut prompt = Prompt::default();
    prompt.input = vec![ResponseItem::Message {
@@ -320,7 +320,7 @@ async fn resume_includes_initial_messages_and_sends_prior_items() {
        .expect("prior assistant message");
    let pos_permissions = messages
        .iter()
        .position(|(role, text)| role == "developer" && text.contains("`approval_policy`"))
        .position(|(role, text)| role == "developer" && text.contains("<permissions instructions>"))
        .expect("permissions message");
    let pos_user_instructions = messages
        .iter()
@@ -1190,7 +1190,7 @@ async fn azure_responses_request_includes_store_and_reasoning_ids() {
        SessionSource::Exec,
        TransportManager::new(),
    )
    .new_session();
    .new_session(None, None);

    let mut prompt = Prompt::default();
    prompt.input.push(ResponseItem::Reasoning {
@@ -1216,6 +1216,7 @@
        status: Some("completed".into()),
        action: Some(WebSearchAction::Search {
            query: Some("weather".into()),
            queries: None,
        }),
    });
    prompt.input.push(ResponseItem::FunctionCall {

@@ -14,6 +14,8 @@ use codex_core::features::Feature;
use codex_core::models_manager::manager::ModelsManager;
use codex_core::protocol::SessionSource;
use codex_otel::OtelManager;
use codex_otel::metrics::MetricsClient;
use codex_otel::metrics::MetricsConfig;
use codex_protocol::ThreadId;
use codex_protocol::config_types::ReasoningSummary;
use core_test_support::load_default_config_for_test;
@@ -25,15 +27,19 @@ use core_test_support::responses::start_websocket_server;
use core_test_support::responses::start_websocket_server_with_headers;
use core_test_support::skip_if_no_network;
use futures::StreamExt;
use opentelemetry_sdk::metrics::InMemoryMetricExporter;
use pretty_assertions::assert_eq;
use std::sync::Arc;
use std::time::Duration;
use tempfile::TempDir;
use tracing_test::traced_test;

const MODEL: &str = "gpt-5.2-codex";

struct WebsocketTestHarness {
    _codex_home: TempDir,
    client: ModelClient,
    otel_manager: OtelManager,
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
@@ -47,7 +53,7 @@ async fn responses_websocket_streams_request() {
    .await;

    let harness = websocket_harness(&server).await;
    let mut session = harness.client.new_session();
    let mut session = harness.client.new_session(None, None);
    let prompt = prompt_with_input(vec![message_item("hello")]);

    stream_until_complete(&mut session, &prompt).await;
@@ -64,6 +70,38 @@ async fn responses_websocket_streams_request() {
    server.shutdown().await;
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
#[traced_test]
async fn responses_websocket_emits_websocket_telemetry_events() {
    skip_if_no_network!();

    let server = start_websocket_server(vec![vec![vec![
        ev_response_created("resp-1"),
        ev_completed("resp-1"),
    ]]])
    .await;

    let harness = websocket_harness(&server).await;
    harness.otel_manager.reset_runtime_metrics();
    let mut session = harness.client.new_session(None, None);
    let prompt = prompt_with_input(vec![message_item("hello")]);

    stream_until_complete(&mut session, &prompt).await;

    tokio::time::sleep(Duration::from_millis(10)).await;

    let summary = harness
        .otel_manager
        .runtime_metrics_summary()
        .expect("runtime metrics summary");
    assert_eq!(summary.api_calls.count, 0);
    assert_eq!(summary.streaming_events.count, 0);
    assert_eq!(summary.websocket_calls.count, 1);
    assert_eq!(summary.websocket_events.count, 2);

    server.shutdown().await;
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn responses_websocket_emits_reasoning_included_event() {
    skip_if_no_network!();
@@ -75,7 +113,7 @@ async fn responses_websocket_emits_reasoning_included_event() {
    .await;

    let harness = websocket_harness(&server).await;
    let mut session = harness.client.new_session();
    let mut session = harness.client.new_session(None, None);
    let prompt = prompt_with_input(vec![message_item("hello")]);

    let mut stream = session
@@ -109,7 +147,7 @@ async fn responses_websocket_appends_on_prefix() {
    .await;

    let harness = websocket_harness(&server).await;
    let mut session = harness.client.new_session();
    let mut session = harness.client.new_session(None, None);
    let prompt_one = prompt_with_input(vec![message_item("hello")]);
    let prompt_two = prompt_with_input(vec![message_item("hello"), message_item("second")]);

@@ -145,7 +183,7 @@ async fn responses_websocket_creates_on_non_prefix() {
    .await;

    let harness = websocket_harness(&server).await;
    let mut session = harness.client.new_session();
    let mut session = harness.client.new_session(None, None);
    let prompt_one = prompt_with_input(vec![message_item("hello")]);
    let prompt_two = prompt_with_input(vec![message_item("different")]);

@@ -211,6 +249,12 @@ async fn websocket_harness(server: &WebSocketTestServer) -> WebsocketTestHarness
    let model_info = ModelsManager::construct_model_info_offline(MODEL, &config);
    let conversation_id = ThreadId::new();
    let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("Test API Key"));
    let exporter = InMemoryMetricExporter::default();
    let metrics = MetricsClient::new(
        MetricsConfig::in_memory("test", "codex-core", env!("CARGO_PKG_VERSION"), exporter)
            .with_runtime_reader(),
    )
    .expect("in-memory metrics client");
    let otel_manager = OtelManager::new(
        conversation_id,
        MODEL,
@@ -221,12 +265,13 @@ async fn websocket_harness(server: &WebSocketTestServer) -> WebsocketTestHarness
        false,
        "test".to_string(),
        SessionSource::Exec,
    );
    )
    .with_metrics(metrics);
    let client = ModelClient::new(
        Arc::clone(&config),
        None,
        model_info,
        otel_manager,
        otel_manager.clone(),
        provider.clone(),
        None,
        ReasoningSummary::Auto,
@@ -238,6 +283,7 @@ async fn websocket_harness(server: &WebSocketTestServer) -> WebsocketTestHarness
    WebsocketTestHarness {
        _codex_home: codex_home,
        client,
        otel_manager,
    }
}

@@ -22,9 +22,12 @@ fn sse_completed(id: &str) -> String {
    sse(vec![ev_response_created(id), ev_completed(id)])
}

fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMode {
fn collab_mode_with_mode_and_instructions(
    mode: ModeKind,
    instructions: Option<&str>,
) -> CollaborationMode {
    CollaborationMode {
        mode: ModeKind::Custom,
        mode,
        settings: Settings {
            model: "gpt-5.1".to_string(),
            reasoning_effort: None,
@@ -33,6 +36,10 @@ fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMod
    }
}

fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMode {
    collab_mode_with_mode_and_instructions(ModeKind::Custom, instructions)
}

fn developer_texts(input: &[Value]) -> Vec<String> {
    input
        .iter()
@@ -83,7 +90,7 @@ async fn no_collaboration_instructions_by_default() -> Result<()> {
    let input = req.single_request().input();
    let dev_texts = developer_texts(&input);
    assert_eq!(dev_texts.len(), 1);
    assert!(dev_texts[0].contains("`approval_policy`"));
    assert!(dev_texts[0].contains("<permissions instructions>"));

    Ok(())
}
@@ -171,7 +178,7 @@ async fn collaboration_instructions_added_on_user_turn() -> Result<()> {
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_then_user_turn_uses_updated_collaboration_instructions() -> Result<()> {
async fn override_then_next_turn_uses_updated_collaboration_instructions() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
@@ -196,20 +203,12 @@ async fn override_then_user_turn_uses_updated_collaboration_instructions() -> Re
        .await?;

    test.codex
        .submit(Op::UserTurn {
        .submit(Op::UserInput {
            items: vec![UserInput::Text {
                text: "hello".into(),
                text_elements: Vec::new(),
            }],
            cwd: test.config.cwd.clone(),
            approval_policy: test.config.approval_policy.value(),
            sandbox_policy: test.config.sandbox_policy.get().clone(),
            model: test.session_configured.model.clone(),
            effort: None,
            summary: test.config.model_reasoning_summary,
            collaboration_mode: None,
            final_output_json_schema: None,
            personality: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
@@ -272,7 +271,7 @@ async fn user_turn_overrides_collaboration_instructions_after_override() -> Resu
    let dev_texts = developer_texts(&input);
    let base_text = collab_xml(base_text);
    let turn_text = collab_xml(turn_text);
    assert_eq!(count_exact(&dev_texts, &base_text), 1);
    assert_eq!(count_exact(&dev_texts, &base_text), 0);
    assert_eq!(count_exact(&dev_texts, &turn_text), 1);

    Ok(())
@@ -419,6 +418,159 @@ async fn collaboration_mode_update_noop_does_not_append() -> Result<()> {
    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn collaboration_mode_update_emits_new_instruction_message_when_mode_changes() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let _req1 = mount_sse_once(&server, sse_completed("resp-1")).await;
    let req2 = mount_sse_once(&server, sse_completed("resp-2")).await;

    let test = test_codex().build(&server).await?;
    let code_text = "code mode instructions";
    let plan_text = "plan mode instructions";

    test.codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            windows_sandbox_level: None,
            model: None,
            effort: None,
            summary: None,
            collaboration_mode: Some(collab_mode_with_mode_and_instructions(
                ModeKind::Code,
                Some(code_text),
            )),
            personality: None,
        })
        .await?;

    test.codex
        .submit(Op::UserInput {
            items: vec![UserInput::Text {
                text: "hello 1".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    test.codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            windows_sandbox_level: None,
            model: None,
            effort: None,
            summary: None,
            collaboration_mode: Some(collab_mode_with_mode_and_instructions(
                ModeKind::Plan,
                Some(plan_text),
            )),
            personality: None,
        })
        .await?;

    test.codex
        .submit(Op::UserInput {
            items: vec![UserInput::Text {
                text: "hello 2".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    let input = req2.single_request().input();
    let dev_texts = developer_texts(&input);
    let code_text = collab_xml(code_text);
    let plan_text = collab_xml(plan_text);
    assert_eq!(count_exact(&dev_texts, &code_text), 1);
    assert_eq!(count_exact(&dev_texts, &plan_text), 1);

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn collaboration_mode_update_noop_does_not_append_when_mode_is_unchanged() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
    let _req1 = mount_sse_once(&server, sse_completed("resp-1")).await;
    let req2 = mount_sse_once(&server, sse_completed("resp-2")).await;

    let test = test_codex().build(&server).await?;
    let collab_text = "mode-stable instructions";

    test.codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            windows_sandbox_level: None,
            model: None,
            effort: None,
            summary: None,
            collaboration_mode: Some(collab_mode_with_mode_and_instructions(
                ModeKind::Code,
                Some(collab_text),
            )),
            personality: None,
        })
        .await?;

    test.codex
        .submit(Op::UserInput {
            items: vec![UserInput::Text {
                text: "hello 1".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    test.codex
        .submit(Op::OverrideTurnContext {
            cwd: None,
            approval_policy: None,
            sandbox_policy: None,
            windows_sandbox_level: None,
            model: None,
            effort: None,
            summary: None,
            collaboration_mode: Some(collab_mode_with_mode_and_instructions(
                ModeKind::Code,
                Some(collab_text),
            )),
            personality: None,
        })
        .await?;

    test.codex
        .submit(Op::UserInput {
            items: vec![UserInput::Text {
                text: "hello 2".into(),
                text_elements: Vec::new(),
            }],
            final_output_json_schema: None,
        })
        .await?;
    wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;

    let input = req2.single_request().input();
    let dev_texts = developer_texts(&input);
    let collab_text = collab_xml(collab_text);
    assert_eq!(count_exact(&dev_texts, &collab_text), 1);

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn resume_replays_collaboration_instructions() -> Result<()> {
    skip_if_no_network!(Ok(()));

@@ -255,6 +255,7 @@ async fn web_search_item_is_emitted() -> anyhow::Result<()> {
        completed.action,
        WebSearchAction::Search {
            query: Some("weather seattle".to_string()),
            queries: None,
        }
    );

@@ -49,6 +49,7 @@ mod otel;
mod pending_input;
mod permissions_messages;
mod personality;
mod personality_migration;
mod prompt_caching;
mod quota_exceeded;
mod read_file;
@@ -80,5 +81,5 @@ mod unstable_features_warning;
mod user_notification;
mod user_shell_cmd;
mod view_image;
mod web_search_cached;
mod web_search;
mod websocket_fallback;

@@ -18,7 +18,6 @@ use core_test_support::skip_if_no_network;
use core_test_support::test_codex::test_codex;
use core_test_support::wait_for_event;
use pretty_assertions::assert_eq;
use std::collections::HashSet;
use std::path::Path;
use std::time::Duration;
use tempfile::TempDir;
@@ -104,7 +103,7 @@ fn rollout_environment_texts(text: &str) -> Vec<String> {
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_turn_context_records_permissions_update() -> Result<()> {
async fn override_turn_context_without_user_turn_does_not_record_permissions_update() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
@@ -138,19 +137,15 @@ async fn override_turn_context_records_permissions_update() -> Result<()> {
        .filter(|text| text.contains("`approval_policy`"))
        .collect();
    assert!(
        approval_texts
            .iter()
            .any(|text| text.contains("`approval_policy` is `never`")),
        "expected updated approval policy instructions in rollout"
        approval_texts.is_empty(),
        "did not expect permissions updates before a new user turn: {approval_texts:?}"
    );
    let unique: HashSet<&String> = approval_texts.iter().copied().collect();
    assert_eq!(unique.len(), 2);

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_turn_context_records_environment_update() -> Result<()> {
async fn override_turn_context_without_user_turn_does_not_record_environment_update() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
@@ -177,17 +172,16 @@ async fn override_turn_context_records_environment_update() -> Result<()> {
    let rollout_path = test.codex.rollout_path().expect("rollout path");
    let rollout_text = read_rollout_text(&rollout_path).await?;
    let env_texts = rollout_environment_texts(&rollout_text);
    let new_cwd_text = new_cwd.path().display().to_string();
    assert!(
        env_texts.iter().any(|text| text.contains(&new_cwd_text)),
        "expected environment update with new cwd in rollout"
        env_texts.is_empty(),
        "did not expect environment updates before a new user turn: {env_texts:?}"
    );

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn override_turn_context_records_collaboration_update() -> Result<()> {
async fn override_turn_context_without_user_turn_does_not_record_collaboration_update() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = start_mock_server().await;
@@ -220,7 +214,7 @@ async fn override_turn_context_records_collaboration_update() -> Result<()> {
        .iter()
        .filter(|text| text.as_str() == collab_text.as_str())
        .count();
    assert_eq!(collab_count, 1);
    assert_eq!(collab_count, 0);

    Ok(())
}

@@ -34,7 +34,7 @@ fn permissions_texts(input: &[serde_json::Value]) -> Vec<String> {
        .first()?
        .get("text")?
        .as_str()?;
    if text.contains("`approval_policy`") {
    if text.contains("<permissions instructions>") {
        Some(text.to_string())
    } else {
        None
@@ -136,7 +136,7 @@ async fn permissions_message_added_on_override_change() -> Result<()> {
    let permissions_2 = permissions_texts(input2);

    assert_eq!(permissions_1.len(), 1);
    assert_eq!(permissions_2.len(), 3);
    assert_eq!(permissions_2.len(), 2);
    let unique = permissions_2.into_iter().collect::<HashSet<String>>();
    assert_eq!(unique.len(), 2);

@@ -267,7 +267,7 @@ async fn resume_replays_permissions_messages() -> Result<()> {
    let body3 = req3.single_request().body_json();
    let input = body3["input"].as_array().expect("input array");
    let permissions = permissions_texts(input);
    assert_eq!(permissions.len(), 4);
    assert_eq!(permissions.len(), 3);
    let unique = permissions.into_iter().collect::<HashSet<String>>();
    assert_eq!(unique.len(), 2);

@@ -337,7 +337,7 @@ async fn resume_and_fork_append_permissions_messages() -> Result<()> {
    let body2 = req2.single_request().body_json();
    let input2 = body2["input"].as_array().expect("input array");
    let permissions_base = permissions_texts(input2);
    assert_eq!(permissions_base.len(), 3);
    assert_eq!(permissions_base.len(), 2);

    builder = builder.with_config(|config| {
        config.approval_policy = Constrained::allow_any(AskForApproval::UnlessTrusted);
@@ -439,7 +439,7 @@ async fn permissions_message_includes_writable_roots() -> Result<()> {
        &sandbox_policy,
        AskForApproval::OnRequest,
        &Policy::empty(),
        false,
        true,
        test.config.cwd.as_path(),
    )
    .into_text();

@@ -39,21 +39,22 @@ use wiremock::MockServer;
|
||||
|
||||
const LOCAL_FRIENDLY_TEMPLATE: &str =
|
||||
"You optimize for team morale and being a supportive teammate as much as code quality.";
|
||||
const LOCAL_PRAGMATIC_TEMPLATE: &str = "You are a deeply pragmatic, effective software engineer.";
|
||||
|
||||
fn sse_completed(id: &str) -> String {
|
||||
sse(vec![ev_response_created(id), ev_completed(id)])
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn model_personality_does_not_mutate_base_instructions_without_template() {
|
||||
async fn personality_does_not_mutate_base_instructions_without_template() {
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.features.enable(Feature::Personality);
|
||||
config.model_personality = Some(Personality::Friendly);
|
||||
config.personality = Some(Personality::Friendly);
|
||||
|
||||
let model_info = ModelsManager::construct_model_info_offline("gpt-5.1", &config);
|
||||
assert_eq!(
|
||||
model_info.get_model_instructions(config.model_personality),
|
||||
model_info.get_model_instructions(config.personality),
|
||||
model_info.base_instructions
|
||||
);
|
||||
}
|
||||
@@ -63,14 +64,14 @@ async fn base_instructions_override_disables_personality_template() {
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.features.enable(Feature::Personality);
|
||||
config.model_personality = Some(Personality::Friendly);
|
||||
config.personality = Some(Personality::Friendly);
|
||||
config.base_instructions = Some("override instructions".to_string());
|
||||
|
||||
let model_info = ModelsManager::construct_model_info_offline("gpt-5.2-codex", &config);
|
||||
|
||||
assert_eq!(model_info.base_instructions, "override instructions");
|
||||
assert_eq!(
|
||||
model_info.get_model_instructions(config.model_personality),
|
||||
model_info.get_model_instructions(config.personality),
|
||||
"override instructions"
|
||||
);
|
||||
}
|
||||
@@ -132,7 +133,7 @@ async fn config_personality_some_sets_instructions_template() -> anyhow::Result<
|
||||
.with_config(|config| {
|
||||
config.features.disable(Feature::RemoteModels);
|
||||
config.features.enable(Feature::Personality);
|
||||
config.model_personality = Some(Personality::Friendly);
|
||||
config.personality = Some(Personality::Friendly);
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
@@ -223,7 +224,7 @@ async fn user_turn_personality_some_adds_update_message() -> anyhow::Result<()>
|
||||
effort: None,
|
||||
summary: None,
|
||||
collaboration_mode: None,
|
||||
personality: Some(Personality::Friendly),
|
||||
personality: Some(Personality::Pragmatic),
|
||||
})
|
||||
.await?;
|
||||
|
||||
@@ -264,8 +265,99 @@ async fn user_turn_personality_some_adds_update_message() -> anyhow::Result<()>
|
||||
"expected personality update preamble, got {personality_text:?}"
|
||||
);
|
||||
assert!(
|
||||
personality_text.contains(LOCAL_FRIENDLY_TEMPLATE),
|
||||
"expected personality update to include the local friendly template, got: {personality_text:?}"
|
||||
personality_text.contains(LOCAL_PRAGMATIC_TEMPLATE),
|
||||
"expected personality update to include the local pragmatic template, got: {personality_text:?}"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn user_turn_personality_same_value_does_not_add_update_message() -> anyhow::Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = start_mock_server().await;
|
||||
let resp_mock = mount_sse_sequence(
|
||||
&server,
|
||||
vec![sse_completed("resp-1"), sse_completed("resp-2")],
|
||||
)
|
||||
.await;
|
||||
let mut builder = test_codex()
|
||||
.with_model("exp-codex-personality")
|
||||
.with_config(|config| {
|
||||
config.features.disable(Feature::RemoteModels);
|
||||
config.features.enable(Feature::Personality);
|
||||
config.personality = Some(Personality::Pragmatic);
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
test.codex
|
||||
.submit(Op::UserTurn {
|
||||
items: vec![UserInput::Text {
|
||||
text: "hello".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
final_output_json_schema: None,
|
||||
cwd: test.cwd_path().to_path_buf(),
|
||||
approval_policy: test.config.approval_policy.value(),
|
||||
sandbox_policy: SandboxPolicy::ReadOnly,
|
||||
model: test.session_configured.model.clone(),
|
||||
effort: test.config.model_reasoning_effort,
|
||||
summary: ReasoningSummary::Auto,
|
||||
collaboration_mode: None,
|
||||
personality: None,
|
||||
})
|
||||
.await?;
|
||||
|
||||
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
|
||||
|
||||
test.codex
|
||||
.submit(Op::OverrideTurnContext {
|
||||
cwd: None,
|
||||
approval_policy: None,
|
||||
sandbox_policy: None,
|
||||
windows_sandbox_level: None,
|
||||
model: None,
|
||||
effort: None,
|
||||
summary: None,
|
||||
collaboration_mode: None,
|
||||
personality: Some(Personality::Pragmatic),
|
||||
})
|
||||
.await?;
|
||||
|
||||
test.codex
|
||||
.submit(Op::UserTurn {
|
||||
items: vec![UserInput::Text {
|
||||
text: "hello".into(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
final_output_json_schema: None,
|
||||
cwd: test.cwd_path().to_path_buf(),
|
||||
approval_policy: test.config.approval_policy.value(),
|
||||
sandbox_policy: SandboxPolicy::ReadOnly,
|
||||
model: test.session_configured.model.clone(),
|
||||
effort: test.config.model_reasoning_effort,
|
||||
summary: ReasoningSummary::Auto,
|
||||
collaboration_mode: None,
|
||||
personality: None,
|
||||
})
|
||||
.await?;
|
||||
|
||||
wait_for_event(&test.codex, |ev| matches!(ev, EventMsg::TurnComplete(_))).await;
|
||||
|
||||
let requests = resp_mock.requests();
|
||||
assert_eq!(requests.len(), 2, "expected two requests");
|
||||
let request = requests
|
||||
.last()
|
||||
.expect("expected second request after personality override");
|
||||
|
||||
let developer_texts = request.message_input_texts("developer");
|
||||
let personality_text = developer_texts
|
||||
.iter()
|
||||
.find(|text| text.contains("<personality_spec>"));
|
||||
assert!(
|
||||
personality_text.is_none(),
|
||||
"expected no personality preamble for unchanged personality, got {personality_text:?}"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
@@ -276,11 +368,11 @@ async fn instructions_uses_base_if_feature_disabled() -> anyhow::Result<()> {
|
||||
let codex_home = TempDir::new().expect("create temp dir");
|
||||
let mut config = load_default_config_for_test(&codex_home).await;
|
||||
config.features.disable(Feature::Personality);
|
||||
config.model_personality = Some(Personality::Friendly);
|
||||
config.personality = Some(Personality::Friendly);
|
||||
|
||||
let model_info = ModelsManager::construct_model_info_offline("gpt-5.2-codex", &config);
|
||||
assert_eq!(
|
||||
model_info.get_model_instructions(config.model_personality),
|
||||
model_info.get_model_instructions(config.personality),
|
||||
model_info.base_instructions
|
||||
);
|
||||
|
||||
@@ -335,7 +427,7 @@ async fn user_turn_personality_skips_if_feature_disabled() -> anyhow::Result<()>
|
||||
effort: None,
|
||||
summary: None,
|
||||
collaboration_mode: None,
|
||||
personality: Some(Personality::Friendly),
|
||||
personality: Some(Personality::Pragmatic),
|
||||
})
|
||||
.await?;
|
||||
|
||||
@@ -377,7 +469,7 @@ async fn user_turn_personality_skips_if_feature_disabled() -> anyhow::Result<()>
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn ignores_remote_model_personality_if_remote_models_disabled() -> anyhow::Result<()> {
|
||||
async fn ignores_remote_personality_if_remote_models_disabled() -> anyhow::Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = MockServer::builder()
|
||||
@@ -403,9 +495,7 @@ async fn ignores_remote_model_personality_if_remote_models_disabled() -> anyhow:
|
||||
upgrade: None,
|
||||
base_instructions: "base instructions".to_string(),
|
||||
model_messages: Some(ModelMessages {
|
||||
instructions_template: Some(
|
||||
"Base instructions\n{{ personality_message }}\n".to_string(),
|
||||
),
|
||||
instructions_template: Some("Base instructions\n{{ personality }}\n".to_string()),
|
||||
instructions_variables: Some(ModelInstructionsVariables {
|
||||
personality_default: None,
|
||||
personality_friendly: Some(remote_personality_message.to_string()),
|
||||
@@ -440,7 +530,7 @@ async fn ignores_remote_model_personality_if_remote_models_disabled() -> anyhow:
|
||||
config.features.disable(Feature::RemoteModels);
|
||||
config.features.enable(Feature::Personality);
|
||||
config.model = Some(remote_slug.to_string());
|
||||
config.model_personality = Some(Personality::Friendly);
|
||||
config.personality = Some(Personality::Friendly);
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
@@ -485,7 +575,7 @@ async fn ignores_remote_model_personality_if_remote_models_disabled() -> anyhow:
|
||||
"expected instructions to include the local friendly personality template, got: {instructions_text:?}"
|
||||
);
|
||||
assert!(
|
||||
!instructions_text.contains("{{ personality_message }}"),
|
||||
!instructions_text.contains("{{ personality }}"),
|
||||
"expected legacy personality placeholder to be replaced, got: {instructions_text:?}"
|
||||
);
|
||||
|
||||
@@ -493,7 +583,7 @@ async fn ignores_remote_model_personality_if_remote_models_disabled() -> anyhow:
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn remote_model_default_personality_instructions_with_feature() -> anyhow::Result<()> {
|
||||
async fn remote_model_friendly_personality_instructions_with_feature() -> anyhow::Result<()> {
|
||||
skip_if_no_network!(Ok(()));
|
||||
|
||||
let server = MockServer::builder()
|
||||
@@ -503,6 +593,7 @@ async fn remote_model_default_personality_instructions_with_feature() -> anyhow:
|
||||
|
||||
let remote_slug = "codex-remote-default-personality";
|
||||
let default_personality_message = "Default from remote template";
|
||||
let friendly_personality_message = "Friendly variant";
|
||||
let remote_model = ModelInfo {
|
||||
slug: remote_slug.to_string(),
|
||||
display_name: "Remote default personality test".to_string(),
|
||||
@@ -522,7 +613,7 @@ async fn remote_model_default_personality_instructions_with_feature() -> anyhow:
|
||||
instructions_template: Some("Base instructions\n{{ personality }}\n".to_string()),
|
||||
instructions_variables: Some(ModelInstructionsVariables {
|
||||
personality_default: Some(default_personality_message.to_string()),
|
||||
personality_friendly: Some("Friendly variant".to_string()),
|
||||
                personality_friendly: Some(friendly_personality_message.to_string()),
                personality_pragmatic: Some("Pragmatic variant".to_string()),
            }),
        }),
@@ -554,6 +645,7 @@ async fn remote_model_default_personality_instructions_with_feature() -> anyhow:
        config.features.enable(Feature::RemoteModels);
        config.features.enable(Feature::Personality);
        config.model = Some(remote_slug.to_string());
        config.personality = Some(Personality::Friendly);
    });
    let test = builder.build(&server).await?;

@@ -578,7 +670,7 @@ async fn remote_model_default_personality_instructions_with_feature() -> anyhow:
            effort: test.config.model_reasoning_effort,
            summary: ReasoningSummary::Auto,
            collaboration_mode: None,
            personality: None,
            personality: Some(Personality::Friendly),
        })
        .await?;

@@ -588,8 +680,12 @@ async fn remote_model_default_personality_instructions_with_feature() -> anyhow:
    let instructions_text = request.instructions_text();

    assert!(
        instructions_text.contains(default_personality_message),
        "expected instructions to include the remote default personality template, got: {instructions_text:?}"
        instructions_text.contains(friendly_personality_message),
        "expected instructions to include the remote friendly personality template, got: {instructions_text:?}"
    );
    assert!(
        !instructions_text.contains(default_personality_message),
        "expected instructions to skip the remote default personality template, got: {instructions_text:?}"
    );

    Ok(())
@@ -606,7 +702,8 @@ async fn user_turn_personality_remote_model_template_includes_update_message() -
    .await;

    let remote_slug = "codex-remote-personality";
    let remote_personality_message = "Friendly from remote template";
    let remote_friendly_message = "Friendly from remote template";
    let remote_pragmatic_message = "Pragmatic from remote template";
    let remote_model = ModelInfo {
        slug: remote_slug.to_string(),
        display_name: "Remote personality test".to_string(),
@@ -623,13 +720,11 @@ async fn user_turn_personality_remote_model_template_includes_update_message() -
        upgrade: None,
        base_instructions: "base instructions".to_string(),
        model_messages: Some(ModelMessages {
            instructions_template: Some(
                "Base instructions\n{{ personality_message }}\n".to_string(),
            ),
            instructions_template: Some("Base instructions\n{{ personality }}\n".to_string()),
            instructions_variables: Some(ModelInstructionsVariables {
                personality_default: None,
                personality_friendly: Some(remote_personality_message.to_string()),
                personality_pragmatic: None,
                personality_friendly: Some(remote_friendly_message.to_string()),
                personality_pragmatic: Some(remote_pragmatic_message.to_string()),
            }),
        }),
        supports_reasoning_summaries: false,
@@ -704,7 +799,7 @@ async fn user_turn_personality_remote_model_template_includes_update_message() -
            effort: None,
            summary: None,
            collaboration_mode: None,
            personality: Some(Personality::Friendly),
            personality: Some(Personality::Pragmatic),
        })
        .await?;

@@ -744,7 +839,7 @@ async fn user_turn_personality_remote_model_template_includes_update_message() -
        "expected personality update preamble, got {personality_text:?}"
    );
    assert!(
        personality_text.contains(remote_personality_message),
        personality_text.contains(remote_pragmatic_message),
        "expected personality update to include remote template, got: {personality_text:?}"
    );

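The fixtures above boil down to a small templating contract: the remote model publishes an `instructions_template` containing a `{{ personality }}` placeholder plus one message variable per personality, and the client substitutes the variable that matches the active personality. A minimal sketch of that substitution, using hypothetical stand-in types; only the placeholder and the variable roles come from the diff, and the real renderer in codex-rs may work differently:

```rust
// Hypothetical stand-ins for the codex-core types exercised above; only the
// `{{ personality }}` placeholder and the per-personality variables are taken
// from the diff.
#[derive(Clone, Copy)]
enum Personality {
    Friendly,
    Pragmatic,
}

fn render_instructions(
    template: &str,
    default_message: &str,
    friendly_message: &str,
    pragmatic_message: &str,
    personality: Option<Personality>,
) -> String {
    // Pick the variable matching the active personality, falling back to the
    // default variant when none was requested.
    let message = match personality {
        Some(Personality::Friendly) => friendly_message,
        Some(Personality::Pragmatic) => pragmatic_message,
        None => default_message,
    };
    template.replace("{{ personality }}", message)
}

fn main() {
    let rendered = render_instructions(
        "Base instructions\n{{ personality }}\n",
        "Default variant",
        "Friendly from remote template",
        "Pragmatic from remote template",
        Some(Personality::Friendly),
    );
    // Mirrors the pair of assertions in the first test above.
    assert!(rendered.contains("Friendly from remote template"));
    assert!(!rendered.contains("Default variant"));
}
```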
154 codex-rs/core/tests/suite/personality_migration.rs Normal file
@@ -0,0 +1,154 @@
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
use codex_core::SESSIONS_SUBDIR;
use codex_core::config::ConfigToml;
use codex_core::personality_migration::PERSONALITY_MIGRATION_FILENAME;
use codex_core::personality_migration::PersonalityMigrationStatus;
use codex_core::personality_migration::maybe_migrate_personality;
use codex_protocol::ThreadId;
use codex_protocol::config_types::Personality;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionMeta;
use codex_protocol::protocol::SessionMetaLine;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::UserMessageEvent;
use pretty_assertions::assert_eq;
use std::io;
use std::path::Path;
use tempfile::TempDir;
use tokio::io::AsyncWriteExt;

const TEST_TIMESTAMP: &str = "2025-01-01T00-00-00";

async fn read_config_toml(codex_home: &Path) -> io::Result<ConfigToml> {
    let contents = tokio::fs::read_to_string(codex_home.join("config.toml")).await?;
    toml::from_str(&contents).map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))
}

async fn write_session_with_user_event(codex_home: &Path) -> io::Result<()> {
    let thread_id = ThreadId::new();
    let dir = codex_home
        .join(SESSIONS_SUBDIR)
        .join("2025")
        .join("01")
        .join("01");
    write_rollout_with_user_event(&dir, thread_id).await
}

async fn write_archived_session_with_user_event(codex_home: &Path) -> io::Result<()> {
    let thread_id = ThreadId::new();
    let dir = codex_home.join(ARCHIVED_SESSIONS_SUBDIR);
    write_rollout_with_user_event(&dir, thread_id).await
}

async fn write_rollout_with_user_event(dir: &Path, thread_id: ThreadId) -> io::Result<()> {
    tokio::fs::create_dir_all(&dir).await?;
    let file_path = dir.join(format!("rollout-{TEST_TIMESTAMP}-{thread_id}.jsonl"));
    let mut file = tokio::fs::File::create(&file_path).await?;

    let session_meta = SessionMetaLine {
        meta: SessionMeta {
            id: thread_id,
            forked_from_id: None,
            timestamp: TEST_TIMESTAMP.to_string(),
            cwd: std::path::PathBuf::from("."),
            originator: "test_originator".to_string(),
            cli_version: "test_version".to_string(),
            source: SessionSource::Cli,
            model_provider: None,
            base_instructions: None,
            dynamic_tools: None,
        },
        git: None,
    };
    let meta_line = RolloutLine {
        timestamp: TEST_TIMESTAMP.to_string(),
        item: RolloutItem::SessionMeta(session_meta),
    };
    let user_event = RolloutLine {
        timestamp: TEST_TIMESTAMP.to_string(),
        item: RolloutItem::EventMsg(EventMsg::UserMessage(UserMessageEvent {
            message: "hello".to_string(),
            images: None,
            local_images: Vec::new(),
            text_elements: Vec::new(),
        })),
    };

    let meta_json = serde_json::to_string(&meta_line)?;
    file.write_all(format!("{meta_json}\n").as_bytes()).await?;
    let user_json = serde_json::to_string(&user_event)?;
    file.write_all(format!("{user_json}\n").as_bytes()).await?;
    Ok(())
}

#[tokio::test]
async fn migration_marker_exists_no_sessions_no_change() -> io::Result<()> {
    let temp = TempDir::new()?;
    let marker_path = temp.path().join(PERSONALITY_MIGRATION_FILENAME);
    tokio::fs::write(&marker_path, "v1\n").await?;

    let status = maybe_migrate_personality(temp.path(), &ConfigToml::default()).await?;

    assert_eq!(status, PersonalityMigrationStatus::SkippedMarker);
    assert_eq!(
        tokio::fs::try_exists(temp.path().join("config.toml")).await?,
        false
    );
    Ok(())
}

#[tokio::test]
async fn no_marker_no_sessions_no_change() -> io::Result<()> {
    let temp = TempDir::new()?;

    let status = maybe_migrate_personality(temp.path(), &ConfigToml::default()).await?;

    assert_eq!(status, PersonalityMigrationStatus::SkippedNoSessions);
    assert_eq!(
        tokio::fs::try_exists(temp.path().join(PERSONALITY_MIGRATION_FILENAME)).await?,
        true
    );
    assert_eq!(
        tokio::fs::try_exists(temp.path().join("config.toml")).await?,
        false
    );
    Ok(())
}

#[tokio::test]
async fn no_marker_sessions_sets_personality() -> io::Result<()> {
    let temp = TempDir::new()?;
    write_session_with_user_event(temp.path()).await?;

    let status = maybe_migrate_personality(temp.path(), &ConfigToml::default()).await?;

    assert_eq!(status, PersonalityMigrationStatus::Applied);
    assert_eq!(
        tokio::fs::try_exists(temp.path().join(PERSONALITY_MIGRATION_FILENAME)).await?,
        true
    );

    let persisted = read_config_toml(temp.path()).await?;
    assert_eq!(persisted.personality, Some(Personality::Pragmatic));
    Ok(())
}

#[tokio::test]
async fn no_marker_archived_sessions_sets_personality() -> io::Result<()> {
    let temp = TempDir::new()?;
    write_archived_session_with_user_event(temp.path()).await?;

    let status = maybe_migrate_personality(temp.path(), &ConfigToml::default()).await?;

    assert_eq!(status, PersonalityMigrationStatus::Applied);
    assert_eq!(
        tokio::fs::try_exists(temp.path().join(PERSONALITY_MIGRATION_FILENAME)).await?,
        true
    );

    let persisted = read_config_toml(temp.path()).await?;
    assert_eq!(persisted.personality, Some(Personality::Pragmatic));
    Ok(())
}
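These four tests pin down the migration contract: an existing marker file short-circuits with `SkippedMarker`; no recorded sessions writes the marker and returns `SkippedNoSessions` without touching config.toml; and any prior session, active or archived, persists `personality = "pragmatic"` and writes the marker (`Applied`). A minimal sketch of that control flow, assuming session detection is simply "any rollout file exists" and using a naive TOML append; the real `codex_core::personality_migration` implementation may differ on both counts, and the constant values here are made up:

```rust
use std::fs;
use std::io;
use std::path::Path;

// Assumed values for the sketch; only the names and the three-way status
// mirror the API exercised by the tests above.
const PERSONALITY_MIGRATION_FILENAME: &str = "personality_migration";
const SESSIONS_SUBDIR: &str = "sessions";
const ARCHIVED_SESSIONS_SUBDIR: &str = "archived_sessions";

#[derive(Debug, PartialEq)]
enum PersonalityMigrationStatus {
    SkippedMarker,
    SkippedNoSessions,
    Applied,
}

fn maybe_migrate_personality(codex_home: &Path) -> io::Result<PersonalityMigrationStatus> {
    let marker = codex_home.join(PERSONALITY_MIGRATION_FILENAME);
    // A marker left by a previous run means the decision was already made.
    if marker.exists() {
        return Ok(PersonalityMigrationStatus::SkippedMarker);
    }
    // Look for any prior rollout, active or archived.
    let has_sessions = has_any_file(&codex_home.join(SESSIONS_SUBDIR))?
        || has_any_file(&codex_home.join(ARCHIVED_SESSIONS_SUBDIR))?;
    if !has_sessions {
        // Fresh install: record the marker so we never scan again, change nothing.
        fs::write(&marker, "v1\n")?;
        return Ok(PersonalityMigrationStatus::SkippedNoSessions);
    }
    // Existing user: pin the old default personality in config.toml. (A naive
    // append; real code would edit the TOML document properly.)
    let config_path = codex_home.join("config.toml");
    let mut contents = fs::read_to_string(&config_path).unwrap_or_default();
    contents.push_str("personality = \"pragmatic\"\n");
    fs::write(&config_path, contents)?;
    fs::write(&marker, "v1\n")?;
    Ok(PersonalityMigrationStatus::Applied)
}

// Recursively check whether `dir` contains any regular file.
fn has_any_file(dir: &Path) -> io::Result<bool> {
    if !dir.exists() {
        return Ok(false);
    }
    for entry in fs::read_dir(dir)? {
        let entry = entry?;
        let file_type = entry.file_type()?;
        if file_type.is_file() || (file_type.is_dir() && has_any_file(&entry.path())?) {
            return Ok(true);
        }
    }
    Ok(false)
}

fn main() -> io::Result<()> {
    let home = std::env::temp_dir().join("codex-personality-migration-demo");
    fs::create_dir_all(&home)?;
    // First run: SkippedNoSessions (marker written); second run: SkippedMarker.
    println!("{:?}", maybe_migrate_personality(&home)?);
    Ok(())
}
```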
@@ -388,17 +388,14 @@ async fn overrides_turn_context_but_keeps_cached_prefix_and_key_constant() -> an
    });
    let expected_permissions_msg = body1["input"][0].clone();
    let body1_input = body1["input"].as_array().expect("input array");
    // After overriding the turn context, emit two updated permissions messages.
    // After overriding the turn context, emit one updated permissions message.
    let expected_permissions_msg_2 = body2["input"][body1_input.len()].clone();
    let expected_permissions_msg_3 = body2["input"][body1_input.len() + 1].clone();
    assert_ne!(
        expected_permissions_msg_2, expected_permissions_msg,
        "expected updated permissions message after override"
    );
    assert_eq!(expected_permissions_msg_2, expected_permissions_msg_3);
    let mut expected_body2 = body1_input.to_vec();
    expected_body2.push(expected_permissions_msg_2);
    expected_body2.push(expected_permissions_msg_3);
    expected_body2.push(expected_user_message_2);
    assert_eq!(body2["input"], serde_json::Value::Array(expected_body2));

@@ -1,5 +1,7 @@
#![allow(clippy::unwrap_used)]

use codex_core::WireApi;
use codex_core::built_in_model_providers;
use codex_core::features::Feature;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::WebSearchMode;
@@ -25,6 +27,15 @@ fn find_web_search_tool(body: &Value) -> &Value {
        .expect("tools should include a web_search tool")
}

#[allow(clippy::expect_used)]
fn has_web_search_tool(body: &Value) -> bool {
    body["tools"]
        .as_array()
        .expect("request body should include tools array")
        .iter()
        .any(|tool| tool.get("type").and_then(Value::as_str) == Some("web_search"))
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn web_search_mode_cached_sets_external_web_access_false() {
    skip_if_no_network!();
@@ -174,3 +185,45 @@ async fn web_search_mode_updates_between_turns_with_sandbox_policy() {
        "danger-full-access policy should default web_search to live"
    );
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn web_search_mode_defaults_to_disabled_for_azure_responses() {
    skip_if_no_network!();

    let server = start_mock_server().await;
    let sse = sse_completed("resp-1");
    let resp_mock = responses::mount_sse_once(&server, sse).await;

    let mut builder = test_codex()
        .with_model("gpt-5-codex")
        .with_config(|config| {
            let base_url = config.model_provider.base_url.clone();
            let mut provider = built_in_model_providers()["openai"].clone();
            provider.name = "Azure".to_string();
            provider.base_url = base_url;
            provider.wire_api = WireApi::Responses;
            config.model_provider_id = provider.name.clone();
            config.model_provider = provider;
            config.web_search_mode = None;
            config.features.disable(Feature::WebSearchCached);
            config.features.disable(Feature::WebSearchRequest);
        });
    let test = builder
        .build(&server)
        .await
        .expect("create test Codex conversation");

    test.submit_turn_with_policy(
        "hello azure default web search",
        SandboxPolicy::DangerFullAccess,
    )
    .await
    .expect("submit turn");

    let body = resp_mock.single_request().body_json();
    assert_eq!(
        has_web_search_tool(&body),
        false,
        "azure responses requests should disable web_search by default"
    );
}
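The new `has_web_search_tool` helper is the boolean counterpart to `find_web_search_tool`: instead of panicking when the tool is absent, it reports whether any entry in the request's `tools` array is a `web_search` tool, which is exactly what the negative Azure assertion needs. A quick illustrative check with hand-built bodies; the `json!` payloads here are made up for the example, shaped like the requests the mock server records:

```rust
use serde_json::{Value, json};

// Copied from the diff above; returns true when any tool entry is web_search.
fn has_web_search_tool(body: &Value) -> bool {
    body["tools"]
        .as_array()
        .expect("request body should include tools array")
        .iter()
        .any(|tool| tool.get("type").and_then(Value::as_str) == Some("web_search"))
}

fn main() {
    let with_search = json!({ "tools": [{ "type": "web_search" }, { "type": "function" }] });
    let without_search = json!({ "tools": [{ "type": "function" }] });
    assert!(has_web_search_tool(&with_search));
    assert!(!has_web_search_tool(&without_search));
}
```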
@@ -22,6 +22,11 @@ rustPlatform.buildRustPackage (_: {
  cargoLock.outputHashes = {
    "ratatui-0.29.0" = "sha256-HBvT5c8GsiCxMffNjJGLmHnvG77A6cqEL+1ARurBXho=";
    "crossterm-0.28.1" = "sha256-6qCtfSMuXACKFb9ATID39XyFDIEMFDmbx6SSmNe+728=";
    "nucleo-0.5.0" = "sha256-Hm4SxtTSBrcWpXrtSqeO0TACbUxq3gizg1zD/6Yw/sI=";
    "nucleo-matcher-0.3.1" = "sha256-Hm4SxtTSBrcWpXrtSqeO0TACbUxq3gizg1zD/6Yw/sI=";
    "runfiles-0.1.0" = "sha256-uJpVLcQh8wWZA3GPv9D8Nt43EOirajfDJ7eq/FB+tek=";
    "tokio-tungstenite-0.28.0" = "sha256-vJZ3S41gHtRt4UAODsjAoSCaTksgzCALiBmbWgyDCi8=";
    "tungstenite-0.28.0" = "sha256-CyXZp58zGlUhEor7WItjQoS499IoSP55uWqr++ia+0A=";
  };

  meta = with lib; {

@@ -241,7 +241,7 @@ async fn load_exec_policy() -> anyhow::Result<Policy> {
        cwd,
        &cli_overrides,
        overrides,
        None,
        codex_core::config_loader::CloudRequirementsLoader::default(),
    )
    .await?;

@@ -19,6 +19,7 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-arg0 = { workspace = true }
codex-cloud-requirements = { workspace = true }
codex-common = { workspace = true, features = [
    "cli",
    "elapsed",

@@ -13,6 +13,7 @@ pub mod exec_events;
pub use cli::Cli;
pub use cli::Command;
pub use cli::ReviewArgs;
use codex_cloud_requirements::cloud_requirements_loader;
use codex_common::oss::ensure_oss_provider_ready;
use codex_common::oss::get_default_model_for_oss_provider;
use codex_common::oss::ollama_chat_deprecation_notice;
@@ -24,6 +25,7 @@ use codex_core::OLLAMA_OSS_PROVIDER_ID;
use codex_core::ThreadManager;
use codex_core::auth::enforce_login_restrictions;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
use codex_core::config::find_codex_home;
use codex_core::config::load_config_as_toml_with_cli_overrides;
@@ -64,6 +66,7 @@ use uuid::Uuid;
use crate::cli::Command as ExecCommand;
use crate::event_processor::CodexStatus;
use crate::event_processor::EventProcessor;
use codex_core::default_client::set_default_client_residency_requirement;
use codex_core::default_client::set_default_originator;
use codex_core::find_thread_path_by_id_str;
use codex_core::find_thread_path_by_name_str;
@@ -159,41 +162,52 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any

    // we load config.toml here to determine project state.
    #[allow(clippy::print_stderr)]
    let config_toml = {
        let codex_home = match find_codex_home() {
            Ok(codex_home) => codex_home,
            Err(err) => {
                eprintln!("Error finding codex home: {err}");
                std::process::exit(1);
            }
        };

        match load_config_as_toml_with_cli_overrides(
            &codex_home,
            &config_cwd,
            cli_kv_overrides.clone(),
        )
        .await
        {
            Ok(config_toml) => config_toml,
            Err(err) => {
                let config_error = err
                    .get_ref()
                    .and_then(|err| err.downcast_ref::<ConfigLoadError>())
                    .map(ConfigLoadError::config_error);
                if let Some(config_error) = config_error {
                    eprintln!(
                        "Error loading config.toml:\n{}",
                        format_config_error_with_source(config_error)
                    );
                } else {
                    eprintln!("Error loading config.toml: {err}");
                }
                std::process::exit(1);
            }
    let codex_home = match find_codex_home() {
        Ok(codex_home) => codex_home,
        Err(err) => {
            eprintln!("Error finding codex home: {err}");
            std::process::exit(1);
        }
    };

    #[allow(clippy::print_stderr)]
    let config_toml = match load_config_as_toml_with_cli_overrides(
        &codex_home,
        &config_cwd,
        cli_kv_overrides.clone(),
    )
    .await
    {
        Ok(config_toml) => config_toml,
        Err(err) => {
            let config_error = err
                .get_ref()
                .and_then(|err| err.downcast_ref::<ConfigLoadError>())
                .map(ConfigLoadError::config_error);
            if let Some(config_error) = config_error {
                eprintln!(
                    "Error loading config.toml:\n{}",
                    format_config_error_with_source(config_error)
                );
            } else {
                eprintln!("Error loading config.toml: {err}");
            }
            std::process::exit(1);
        }
    };

    let cloud_auth_manager = AuthManager::shared(
        codex_home.clone(),
        false,
        config_toml.cli_auth_credentials_store.unwrap_or_default(),
    );
    let chatgpt_base_url = config_toml
        .chatgpt_base_url
        .clone()
        .unwrap_or_else(|| "https://chatgpt.com/backend-api/".to_string());
    // TODO(gt): Make cloud requirements failures blocking once we can fail-closed.
    let cloud_requirements = cloud_requirements_loader(cloud_auth_manager, chatgpt_base_url);

    let model_provider = if oss {
        let resolved = resolve_oss_provider(
            oss_provider.as_deref(),
@@ -237,7 +251,7 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
        codex_linux_sandbox_exe,
        base_instructions: None,
        developer_instructions: None,
        model_personality: None,
        personality: None,
        compact_prompt: None,
        include_apply_patch_tool: None,
        show_raw_agent_reasoning: oss.then_some(true),
@@ -246,8 +260,13 @@ pub async fn run_main(cli: Cli, codex_linux_sandbox_exe: Option<PathBuf>) -> any
        additional_writable_roots: add_dir,
    };

    let config =
        Config::load_with_cli_overrides_and_harness_overrides(cli_kv_overrides, overrides).await?;
    let config = ConfigBuilder::default()
        .cli_overrides(cli_kv_overrides)
        .harness_overrides(overrides)
        .cloud_requirements(cloud_requirements)
        .build()
        .await?;
    set_default_client_residency_requirement(config.enforce_residency.value());

    if let Err(err) = enforce_login_restrictions(&config) {
        eprintln!("{err}");

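The final hunk replaces the one-shot `Config::load_with_cli_overrides_and_harness_overrides` call with a `ConfigBuilder`, so the new cloud-requirements loader can be threaded in without widening yet another function signature. The builder's definition is not part of this diff; the sketch below is only the shape implied by the call site, with stubbed placeholder types:

```rust
#![allow(dead_code)]

// Illustrative stubs: the real Config, ConfigOverrides, and
// CloudRequirementsLoader live in codex-rs and look different.
#[derive(Debug, Default)]
struct Config;
#[derive(Debug, Default)]
struct ConfigOverrides;
#[derive(Debug, Default)]
struct CloudRequirementsLoader;

#[derive(Default)]
struct ConfigBuilder {
    cli_overrides: Vec<(String, String)>,
    harness_overrides: Option<ConfigOverrides>,
    cloud_requirements: Option<CloudRequirementsLoader>,
}

impl ConfigBuilder {
    // Each setter takes `self` by value and returns it, so call sites chain.
    fn cli_overrides(mut self, overrides: Vec<(String, String)>) -> Self {
        self.cli_overrides = overrides;
        self
    }
    fn harness_overrides(mut self, overrides: ConfigOverrides) -> Self {
        self.harness_overrides = Some(overrides);
        self
    }
    fn cloud_requirements(mut self, loader: CloudRequirementsLoader) -> Self {
        self.cloud_requirements = Some(loader);
        self
    }
    // Async because the real build resolves config sources (files, network).
    async fn build(self) -> Result<Config, String> {
        // A real build would merge CLI overrides over config.toml, apply
        // harness overrides, then consult the cloud requirements loader.
        Ok(Config)
    }
}

fn main() {
    // Chaining reads the same as at the call site above; driving the async
    // `build` needs a runtime (the real caller is already async).
    let _builder = ConfigBuilder::default()
        .cli_overrides(vec![("model".into(), "gpt-5-codex".into())])
        .harness_overrides(ConfigOverrides::default())
        .cloud_requirements(CloudRequirementsLoader::default());
}
```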
Some files were not shown because too many files have changed in this diff.