Compare commits

...

11 Commits

Author SHA1 Message Date
Owen Lin
ea9c1f0a84 attempt at separating json schema generation to a v2 namespace 2025-11-01 14:01:30 -07:00
Owen Lin
cd9d93fd73 fix test 2025-11-01 13:35:56 -07:00
Owen Lin
11c87546c3 use macros to generate notifications 2025-11-01 13:34:22 -07:00
Owen Lin
a4be03d62b expose public struct for rate limits 2025-11-01 13:15:47 -07:00
Owen Lin
59e3e7431e move rate limits updated notification to v2 2025-11-01 11:22:44 -07:00
Owen Lin
62be8a3cde add stub handlers in message processor and fix list models test 2025-11-01 11:02:59 -07:00
Owen Lin
276c7fbb5d also standardize method names to follow LSP order 2025-11-01 10:50:37 -07:00
Owen Lin
7f781c6746 add thread/turn API definitions, unify List API patterns 2025-11-01 10:46:26 -07:00
Owen Lin
31f61823c9 wip thread/turn/item types 2025-11-01 10:16:23 -07:00
Owen Lin
473600a75b remove manual list for json schema 2025-11-01 10:00:24 -07:00
Owen Lin
2bd507c6fb [app-server] generate API v2 types in its own namespace 2025-11-01 09:11:37 -07:00
10 changed files with 953 additions and 210 deletions

codex-rs/Cargo.lock generated
View File

@@ -871,6 +871,7 @@ dependencies = [
"anyhow",
"clap",
"codex-protocol",
"mcp-types",
"paste",
"pretty_assertions",
"schemars 0.8.22",

View File

@@ -14,6 +14,7 @@ workspace = true
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-protocol = { workspace = true }
mcp-types = { workspace = true }
paste = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }

View File

@@ -2,8 +2,12 @@ use crate::ClientNotification;
use crate::ClientRequest;
use crate::ServerNotification;
use crate::ServerRequest;
use crate::export_client_notification_schemas;
use crate::export_client_param_schemas;
use crate::export_client_response_schemas;
use crate::export_client_responses;
use crate::export_server_notification_schemas;
use crate::export_server_param_schemas;
use crate::export_server_response_schemas;
use crate::export_server_responses;
use anyhow::Context;
@@ -27,83 +31,7 @@ use std::process::Command;
use ts_rs::TS;
const HEADER: &str = "// GENERATED CODE! DO NOT MODIFY BY HAND!\n\n";
macro_rules! for_each_schema_type {
($macro:ident) => {
$macro!(crate::RequestId);
$macro!(crate::JSONRPCMessage);
$macro!(crate::JSONRPCRequest);
$macro!(crate::JSONRPCNotification);
$macro!(crate::JSONRPCResponse);
$macro!(crate::JSONRPCError);
$macro!(crate::JSONRPCErrorError);
$macro!(crate::AddConversationListenerParams);
$macro!(crate::AddConversationSubscriptionResponse);
$macro!(crate::ApplyPatchApprovalParams);
$macro!(crate::ApplyPatchApprovalResponse);
$macro!(crate::ArchiveConversationParams);
$macro!(crate::ArchiveConversationResponse);
$macro!(crate::AuthMode);
$macro!(crate::AuthStatusChangeNotification);
$macro!(crate::CancelLoginChatGptParams);
$macro!(crate::CancelLoginChatGptResponse);
$macro!(crate::ClientInfo);
$macro!(crate::ClientNotification);
$macro!(crate::ClientRequest);
$macro!(crate::ConversationSummary);
$macro!(crate::ExecCommandApprovalParams);
$macro!(crate::ExecCommandApprovalResponse);
$macro!(crate::ExecOneOffCommandParams);
$macro!(crate::ExecOneOffCommandResponse);
$macro!(crate::FuzzyFileSearchParams);
$macro!(crate::FuzzyFileSearchResponse);
$macro!(crate::FuzzyFileSearchResult);
$macro!(crate::GetAuthStatusParams);
$macro!(crate::GetAuthStatusResponse);
$macro!(crate::GetUserAgentResponse);
$macro!(crate::GetUserSavedConfigResponse);
$macro!(crate::GitDiffToRemoteParams);
$macro!(crate::GitDiffToRemoteResponse);
$macro!(crate::GitSha);
$macro!(crate::InitializeParams);
$macro!(crate::InitializeResponse);
$macro!(crate::InputItem);
$macro!(crate::InterruptConversationParams);
$macro!(crate::InterruptConversationResponse);
$macro!(crate::ListConversationsParams);
$macro!(crate::ListConversationsResponse);
$macro!(crate::LoginApiKeyParams);
$macro!(crate::LoginApiKeyResponse);
$macro!(crate::LoginChatGptCompleteNotification);
$macro!(crate::LoginChatGptResponse);
$macro!(crate::LogoutChatGptParams);
$macro!(crate::LogoutChatGptResponse);
$macro!(crate::NewConversationParams);
$macro!(crate::NewConversationResponse);
$macro!(crate::Profile);
$macro!(crate::RemoveConversationListenerParams);
$macro!(crate::RemoveConversationSubscriptionResponse);
$macro!(crate::ResumeConversationParams);
$macro!(crate::ResumeConversationResponse);
$macro!(crate::SandboxSettings);
$macro!(crate::SendUserMessageParams);
$macro!(crate::SendUserMessageResponse);
$macro!(crate::SendUserTurnParams);
$macro!(crate::SendUserTurnResponse);
$macro!(crate::ServerNotification);
$macro!(crate::ServerRequest);
$macro!(crate::SessionConfiguredNotification);
$macro!(crate::SetDefaultModelParams);
$macro!(crate::SetDefaultModelResponse);
$macro!(crate::Tools);
$macro!(crate::UserInfoResponse);
$macro!(crate::UserSavedConfig);
$macro!(codex_protocol::protocol::EventMsg);
$macro!(codex_protocol::protocol::FileChange);
$macro!(codex_protocol::parse_command::ParsedCommand);
$macro!(codex_protocol::protocol::SandboxPolicy);
};
}
type JsonSchemaEmitter = fn(&Path) -> Result<RootSchema>;
pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
generate_ts(out_dir, prettier)?;
@@ -112,7 +40,9 @@ pub fn generate_types(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
}
pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
let v2_out_dir = out_dir.join("v2");
ensure_dir(out_dir)?;
ensure_dir(&v2_out_dir)?;
ClientRequest::export_all_to(out_dir)?;
export_client_responses(out_dir)?;
@@ -123,12 +53,15 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
ServerNotification::export_all_to(out_dir)?;
generate_index_ts(out_dir)?;
generate_index_ts(&v2_out_dir)?;
let ts_files = ts_files_in(out_dir)?;
// Ensure our header is present on all TS files (root + subdirs like v2/).
let ts_files = ts_files_in_recursive(out_dir)?;
for file in &ts_files {
prepend_header_if_missing(file)?;
}
// Optionally run Prettier on all generated TS files.
if let Some(prettier_bin) = prettier
&& !ts_files.is_empty()
{
@@ -147,20 +80,57 @@ pub fn generate_ts(out_dir: &Path, prettier: Option<&Path>) -> Result<()> {
pub fn generate_json(out_dir: &Path) -> Result<()> {
ensure_dir(out_dir)?;
// Emit schemas for core envelope/wire types that may not be referenced
// by params/responses but are useful as standalone schemas:
let mut bundle: BTreeMap<String, RootSchema> = BTreeMap::new();
macro_rules! add_schema {
($ty:path) => {{
let name = type_basename(stringify!($ty));
let schema = write_json_schema_with_return::<$ty>(out_dir, &name)?;
bundle.insert(name, schema);
}};
let envelope_emitters: &[(&str, JsonSchemaEmitter)] = &[
("RequestId", |d| {
write_json_schema_with_return::<crate::RequestId>(d, "RequestId")
}),
("JSONRPCMessage", |d| {
write_json_schema_with_return::<crate::JSONRPCMessage>(d, "JSONRPCMessage")
}),
("JSONRPCRequest", |d| {
write_json_schema_with_return::<crate::JSONRPCRequest>(d, "JSONRPCRequest")
}),
("JSONRPCNotification", |d| {
write_json_schema_with_return::<crate::JSONRPCNotification>(d, "JSONRPCNotification")
}),
("JSONRPCResponse", |d| {
write_json_schema_with_return::<crate::JSONRPCResponse>(d, "JSONRPCResponse")
}),
("JSONRPCError", |d| {
write_json_schema_with_return::<crate::JSONRPCError>(d, "JSONRPCError")
}),
("JSONRPCErrorError", |d| {
write_json_schema_with_return::<crate::JSONRPCErrorError>(d, "JSONRPCErrorError")
}),
("ClientRequest", |d| {
write_json_schema_with_return::<crate::ClientRequest>(d, "ClientRequest")
}),
("ServerRequest", |d| {
write_json_schema_with_return::<crate::ServerRequest>(d, "ServerRequest")
}),
("ClientNotification", |d| {
write_json_schema_with_return::<crate::ClientNotification>(d, "ClientNotification")
}),
("ServerNotification", |d| {
write_json_schema_with_return::<crate::ServerNotification>(d, "ServerNotification")
}),
];
for (name, emit) in envelope_emitters {
let schema = emit(out_dir)?;
bundle.insert((*name).to_string(), schema);
}
for_each_schema_type!(add_schema);
// Have the macros generate per-type JSON for params, responses, and notifications.
export_client_param_schemas(out_dir)?;
export_client_response_schemas(out_dir)?;
export_server_param_schemas(out_dir)?;
export_server_response_schemas(out_dir)?;
export_client_notification_schemas(out_dir)?;
export_server_notification_schemas(out_dir)?;
let mut definitions = Map::new();
@@ -176,22 +146,70 @@ pub fn generate_json(out_dir: &Path) -> Result<()> {
"ServerRequest",
];
for (name, schema) in bundle {
let mut schema_value = serde_json::to_value(schema)?;
// Merge all generated per-type JSON files (including the envelopes above)
// into a single definitions bundle.
// Collect all generated JSON files recursively (including v2/),
// excluding the bundle we're about to write.
let mut all_json_files = json_files_in_recursive(out_dir)?;
all_json_files.retain(|p| {
p.file_name().and_then(OsStr::to_str) != Some("codex_app_server_protocol.schemas.json")
});
for path in all_json_files {
// Determine a namespace by the immediate subfolder under out_dir
// (currently we only expect `v2`).
let namespace = relative_first_component(&path, out_dir).and_then(|c| {
if c == OsStr::new("v2") {
Some("v2")
} else {
None
}
});
let name = path
.file_stem()
.and_then(OsStr::to_str)
.map(str::to_owned)
.ok_or_else(|| anyhow!(format!("Invalid schema file name {}", path.display())))?;
let mut schema_value: Value = serde_json::from_str(
&fs::read_to_string(&path)
.with_context(|| format!("Failed to read {}", path.display()))?,
)
.with_context(|| format!("Failed to parse JSON from {}", path.display()))?;
// Ensure the same normalization logic applies when building the bundle.
annotate_schema(&mut schema_value, Some(name.as_str()));
// If this is a v2 schema, prefix its internal definitions and $refs
// with the namespace (e.g., v2.) so it won't collide with legacy.
if let Some(namespace) = namespace {
prefix_defs_and_refs(&mut schema_value, namespace);
}
if let Value::Object(ref mut obj) = schema_value
&& let Some(defs) = obj.remove("definitions")
&& let Value::Object(defs_obj) = defs
{
for (def_name, mut def_schema) in defs_obj {
if !SPECIAL_DEFINITIONS.contains(&def_name.as_str()) {
let def_key = if let Some(namespace) = namespace {
format!("{namespace}.{def_name}")
} else {
def_name.clone()
};
annotate_schema(&mut def_schema, Some(def_name.as_str()));
definitions.insert(def_name, def_schema);
definitions.insert(def_key, def_schema);
}
}
}
definitions.insert(name, schema_value);
let top_key = if let Some(namespace) = namespace {
format!("{namespace}.{name}")
} else {
name
};
definitions.insert(top_key, schema_value);
}
let mut root = Map::new();
@@ -214,15 +232,28 @@ pub fn generate_json(out_dir: &Path) -> Result<()> {
Ok(())
}
fn write_json_schema_with_return<T>(out_dir: &Path, name: &str) -> Result<RootSchema>
fn write_json_schema_with_return<T>(out_dir: &Path, _name: &str) -> Result<RootSchema>
where
T: JsonSchema,
{
let file_stem = name.trim();
// Determine the fully-qualified Rust type name to infer v2 placement and
// derive a clean file name (last path segment only).
let type_name = std::any::type_name::<T>();
let is_v2 = type_name.contains("::protocol::v2::");
// File stem should be just the last segment of the type (e.g., RateLimitSnapshot).
let file_stem = type_name.rsplit("::").next().unwrap_or(type_name).trim();
let schema = schema_for!(T);
let mut schema_value = serde_json::to_value(schema)?;
annotate_schema(&mut schema_value, Some(file_stem));
write_pretty_json(out_dir.join(format!("{file_stem}.json")), &schema_value)
let target_dir = if is_v2 {
out_dir.join("v2")
} else {
out_dir.to_path_buf()
};
ensure_dir(&target_dir)?;
write_pretty_json(target_dir.join(format!("{file_stem}.json")), &schema_value)
.with_context(|| format!("Failed to write JSON schema for {file_stem}"))?;
let annotated_schema = serde_json::from_value(schema_value)?;
Ok(annotated_schema)
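Editorial note: the v2 placement above is inferred purely from the Rust type path. A minimal standalone sketch of that inference, using a hypothetical module layout rather than the real crate, looks like this:

mod protocol {
    pub mod v2 {
        pub struct RateLimitSnapshot;
    }
}

// Mirrors the check in write_json_schema_with_return: the full path from
// std::any::type_name decides the output subfolder, and its last segment
// becomes the file stem.
fn placement_for<T>() -> (bool, &'static str) {
    let type_name = std::any::type_name::<T>();
    let is_v2 = type_name.contains("::protocol::v2::");
    let stem = type_name.rsplit("::").next().unwrap_or(type_name);
    (is_v2, stem)
}

fn main() {
    let (is_v2, stem) = placement_for::<protocol::v2::RateLimitSnapshot>();
    assert!(is_v2);
    assert_eq!(stem, "RateLimitSnapshot");
}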
@@ -241,14 +272,6 @@ fn write_pretty_json(path: PathBuf, value: &impl Serialize) -> Result<()> {
fs::write(&path, json).with_context(|| format!("Failed to write {}", path.display()))?;
Ok(())
}
fn type_basename(type_path: &str) -> String {
type_path
.rsplit_once("::")
.map(|(_, name)| name)
.unwrap_or(type_path)
.trim()
.to_string()
}
fn variant_definition_name(base: &str, variant: &Value) -> Option<String> {
if let Some(props) = variant.get("properties").and_then(Value::as_object) {
@@ -504,6 +527,98 @@ fn ts_files_in(dir: &Path) -> Result<Vec<PathBuf>> {
Ok(files)
}
fn ts_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
let mut files = Vec::new();
let mut stack = vec![dir.to_path_buf()];
while let Some(d) = stack.pop() {
for entry in
fs::read_dir(&d).with_context(|| format!("Failed to read dir {}", d.display()))?
{
let entry = entry?;
let path = entry.path();
if path.is_dir() {
stack.push(path);
} else if path.is_file() && path.extension() == Some(OsStr::new("ts")) {
files.push(path);
}
}
}
files.sort();
Ok(files)
}
fn json_files_in_recursive(dir: &Path) -> Result<Vec<PathBuf>> {
let mut files = Vec::new();
let mut stack = vec![dir.to_path_buf()];
while let Some(d) = stack.pop() {
for entry in
fs::read_dir(&d).with_context(|| format!("Failed to read dir {}", d.display()))?
{
let entry = entry?;
let path = entry.path();
if path.is_dir() {
stack.push(path);
} else if path.is_file() && path.extension() == Some(OsStr::new("json")) {
files.push(path);
}
}
}
files.sort();
Ok(files)
}
fn relative_first_component<'a>(path: &'a Path, root: &Path) -> Option<&'a OsStr> {
let rel = path.strip_prefix(root).ok()?;
rel.components().next().map(|c| match c {
std::path::Component::Normal(s) => s,
_ => OsStr::new(""),
})
}
fn prefix_defs_and_refs(value: &mut Value, namespace: &str) {
if let Some(obj) = value.as_object_mut()
&& let Some(Value::Object(defs)) = obj.get_mut("definitions")
{
let mut renamed = Map::new();
let old = std::mem::take(defs);
for (k, v) in old {
let new_key = if k.starts_with(&format!("{namespace}.")) {
k
} else {
format!("{namespace}.{k}")
};
renamed.insert(new_key, v);
}
*defs = renamed;
}
rewrite_refs(value, namespace);
}
fn rewrite_refs(value: &mut Value, namespace: &str) {
match value {
Value::Object(map) => {
if let Some(Value::String(r)) = map.get_mut("$ref") {
const PREFIX: &str = "#/definitions/";
if let Some(rest) = r.strip_prefix(PREFIX)
&& !rest.starts_with(&format!("{namespace}."))
{
*r = format!("{PREFIX}{namespace}.{rest}");
}
}
for v in map.values_mut() {
rewrite_refs(v, namespace);
}
}
Value::Array(arr) => {
for v in arr.iter_mut() {
rewrite_refs(v, namespace);
}
}
_ => {}
}
}
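Editorial note: for reference, a hedged sketch of the transformation prefix_defs_and_refs performs on a v2 schema. The JSON below is illustrative, not an actual generated file:

use serde_json::json;

fn main() {
    // Illustrative input: a v2 schema with a local definition and a $ref to it.
    let before = json!({
        "properties": {
            "rateLimits": { "$ref": "#/definitions/RateLimitSnapshot" }
        },
        "definitions": {
            "RateLimitSnapshot": { "type": "object" }
        }
    });
    // After prefix_defs_and_refs(&mut schema, "v2"), both the definition key
    // and the $ref carry the "v2." prefix, so they cannot collide with a
    // legacy definition of the same name once everything is merged into the
    // single bundle.
    let after = json!({
        "properties": {
            "rateLimits": { "$ref": "#/definitions/v2.RateLimitSnapshot" }
        },
        "definitions": {
            "v2.RateLimitSnapshot": { "type": "object" }
        }
    });
    println!("{before:#}\n{after:#}");
}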
fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
let mut entries: Vec<String> = Vec::new();
let mut stems: Vec<String> = ts_files_in(out_dir)?
@@ -520,6 +635,14 @@ fn generate_index_ts(out_dir: &Path) -> Result<PathBuf> {
entries.push(format!("export type {{ {name} }} from \"./{name}\";\n"));
}
// If this is the root out_dir and a ./v2 folder exists with TS files,
// expose it as a namespace to avoid symbol collisions at the root.
let v2_dir = out_dir.join("v2");
let has_v2_ts = ts_files_in(&v2_dir).map(|v| !v.is_empty()).unwrap_or(false);
if has_v2_ts {
entries.push("export * as v2 from \"./v2\";\n".to_string());
}
let mut content =
String::with_capacity(HEADER.len() + entries.iter().map(String::len).sum::<usize>());
content.push_str(HEADER);
@@ -546,6 +669,7 @@ mod tests {
#[test]
fn generated_ts_has_no_optional_nullable_fields() -> Result<()> {
// Assert that there are no types of the form "?: T | null" in the generated TS files.
let output_dir = std::env::temp_dir().join(format!("codex_ts_types_{}", Uuid::now_v7()));
fs::create_dir(&output_dir)?;
@@ -734,4 +858,64 @@ mod tests {
Ok(())
}
#[test]
fn v2_json_schema_namespacing_and_refs() -> Result<()> {
// Generate JSON schemas into a temp directory and assert that:
// - v2 schemas are emitted under a `v2/` subdirectory, and
// - the bundled definitions include namespaced keys (e.g., v2.GetAccountRateLimitsResponse), and
// - refs inside v2 types point at `#/definitions/v2.*`.
let output_dir =
std::env::temp_dir().join(format!("codex_json_schemas_{}", Uuid::now_v7()));
fs::create_dir(&output_dir)?;
struct TempDirGuard(PathBuf);
impl Drop for TempDirGuard {
fn drop(&mut self) {
let _ = fs::remove_dir_all(&self.0);
}
}
let _guard = TempDirGuard(output_dir.clone());
generate_json(&output_dir)?;
// Ensure a v2 subdirectory exists with at least one schema file.
let v2_dir = output_dir.join("v2");
let v2_entries = fs::read_dir(&v2_dir)
.with_context(|| format!("Expected v2 dir at {}", v2_dir.display()))?;
let has_v2_file = v2_entries
.filter_map(std::result::Result::ok)
.any(|e| e.path().extension() == Some(OsStr::new("json")));
assert!(has_v2_file, "Expected at least one v2 JSON schema file");
// Read the bundle and assert namespaced definitions and refs exist.
let bundle_path = output_dir.join("codex_app_server_protocol.schemas.json");
let bundle: Value = serde_json::from_str(&fs::read_to_string(&bundle_path)?)?;
let defs = bundle
.get("definitions")
.and_then(Value::as_object)
.expect("bundle definitions map");
// A concrete v2 top-level schema that should be present.
assert!(
defs.contains_key("v2.GetAccountRateLimitsResponse"),
"v2.GetAccountRateLimitsResponse missing from bundle definitions"
);
let v2_resp = defs.get("v2.GetAccountRateLimitsResponse").unwrap();
let rate_limits_ref = v2_resp
.get("properties")
.and_then(Value::as_object)
.and_then(|p| p.get("rateLimits"))
.and_then(Value::as_object)
.and_then(|o| o.get("$ref"))
.and_then(Value::as_str)
.unwrap_or("");
assert_eq!(
rate_limits_ref, "#/definitions/v2.RateLimitSnapshot",
"v2 $ref should be prefixed in bundle"
);
Ok(())
}
}

View File

@@ -9,7 +9,6 @@ use crate::protocol::v2;
use codex_protocol::ConversationId;
use codex_protocol::parse_command::ParsedCommand;
use codex_protocol::protocol::FileChange;
use codex_protocol::protocol::RateLimitSnapshot;
use codex_protocol::protocol::ReviewDecision;
use codex_protocol::protocol::SandboxCommandAssessment;
use paste::paste;
@@ -82,16 +81,71 @@ macro_rules! client_request_definitions {
)*
Ok(())
}
pub fn export_client_param_schemas(
out_dir: &::std::path::Path,
) -> ::anyhow::Result<()> {
$(
crate::export::write_json_schema::<$params>(out_dir, stringify!($params))?;
)*
Ok(())
}
};
}
client_request_definitions! {
/// NEW APIs
// Thread lifecycle
#[serde(rename = "thread/start")]
#[ts(rename = "thread/start")]
ThreadStart {
params: v2::ThreadStartParams,
response: v2::ThreadStartResponse,
},
#[serde(rename = "thread/resume")]
#[ts(rename = "thread/resume")]
ThreadResume {
params: v2::ThreadResumeParams,
response: v2::ThreadResumeResponse,
},
#[serde(rename = "thread/archive")]
#[ts(rename = "thread/archive")]
ThreadArchive {
params: v2::ThreadArchiveParams,
response: v2::ThreadArchiveResponse,
},
#[serde(rename = "thread/list")]
#[ts(rename = "thread/list")]
ThreadList {
params: v2::ThreadListParams,
response: v2::ThreadListResponse,
},
#[serde(rename = "thread/compact")]
#[ts(rename = "thread/compact")]
ThreadCompact {
params: v2::ThreadCompactParams,
response: v2::ThreadCompactResponse,
},
// Turn lifecycle
#[serde(rename = "turn/start")]
#[ts(rename = "turn/start")]
TurnStart {
params: v2::TurnStartParams,
response: v2::TurnStartResponse,
},
#[serde(rename = "turn/interrupt")]
#[ts(rename = "turn/interrupt")]
TurnInterrupt {
params: v2::TurnInterruptParams,
response: v2::TurnInterruptResponse,
},
#[serde(rename = "model/list")]
#[ts(rename = "model/list")]
ListModels {
params: v2::ListModelsParams,
response: v2::ListModelsResponse,
ModelList {
params: v2::ModelListParams,
response: v2::ModelListResponse,
},
#[serde(rename = "account/login")]
@@ -117,9 +171,9 @@ client_request_definitions! {
#[serde(rename = "feedback/upload")]
#[ts(rename = "feedback/upload")]
UploadFeedback {
params: v2::UploadFeedbackParams,
response: v2::UploadFeedbackResponse,
FeedbackUpload {
params: v2::FeedbackUploadParams,
response: v2::FeedbackUploadResponse,
},
#[serde(rename = "account/read")]
@@ -284,6 +338,93 @@ macro_rules! server_request_definitions {
}
Ok(())
}
pub fn export_server_param_schemas(
out_dir: &::std::path::Path,
) -> ::anyhow::Result<()> {
paste! {
$(crate::export::write_json_schema::<[<$variant Params>]>(out_dir, stringify!([<$variant Params>]))?;)*
}
Ok(())
}
};
}
/// Generates `ServerNotification` enum and helpers, including a JSON Schema
/// exporter for each variant payload type.
macro_rules! server_notification_definitions {
(
$(
$(#[$variant_meta:meta])*
$variant:ident ( $payload:ty )
),* $(,)?
) => {
/// Notification sent from the server to the client.
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ServerNotification {
$(
$(#[$variant_meta])*
$variant($payload),
)*
}
impl ServerNotification {
pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
match self {
$(Self::$variant(params) => serde_json::to_value(params),)*
}
}
}
impl TryFrom<JSONRPCNotification> for ServerNotification {
type Error = serde_json::Error;
fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
serde_json::from_value(serde_json::to_value(value)?)
}
}
pub fn export_server_notification_schemas(
_out_dir: &::std::path::Path,
) -> ::anyhow::Result<()> {
$(
crate::export::write_json_schema::<$payload>(_out_dir, stringify!($payload))?;
)*
Ok(())
}
};
}
/// Generates `ClientNotification` enum and a JSON Schema exporter for
/// any payload-bearing variants (unit variants produce no payload schema).
macro_rules! client_notification_definitions {
(
$(
$(#[$variant_meta:meta])*
$variant:ident $( ( $payload:ty ) )?
),* $(,)?
) => {
/// Notification sent from the client to the server.
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ClientNotification {
$(
$(#[$variant_meta])*
$variant $( ( $payload ) )? ,
)*
}
pub fn export_client_notification_schemas(
_out_dir: &::std::path::Path,
) -> ::anyhow::Result<()> {
$(
$(crate::export::write_json_schema::<$payload>(_out_dir, stringify!($payload))?;)?
)*
Ok(())
}
};
}
@@ -366,16 +507,12 @@ pub struct FuzzyFileSearchResponse {
pub files: Vec<FuzzyFileSearchResult>,
}
/// Notification sent from the server to the client.
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ServerNotification {
server_notification_definitions! {
/// NEW NOTIFICATIONS
#[serde(rename = "account/rateLimits/updated")]
#[ts(rename = "account/rateLimits/updated")]
#[strum(serialize = "account/rateLimits/updated")]
AccountRateLimitsUpdated(RateLimitSnapshot),
AccountRateLimitsUpdated(v2::AccountRateLimitsUpdatedNotification),
/// DEPRECATED NOTIFICATIONS below
/// Authentication status changed
@@ -388,30 +525,7 @@ pub enum ServerNotification {
SessionConfigured(v1::SessionConfiguredNotification),
}
impl ServerNotification {
pub fn to_params(self) -> Result<serde_json::Value, serde_json::Error> {
match self {
ServerNotification::AccountRateLimitsUpdated(params) => serde_json::to_value(params),
ServerNotification::AuthStatusChange(params) => serde_json::to_value(params),
ServerNotification::LoginChatGptComplete(params) => serde_json::to_value(params),
ServerNotification::SessionConfigured(params) => serde_json::to_value(params),
}
}
}
impl TryFrom<JSONRPCNotification> for ServerNotification {
type Error = serde_json::Error;
fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> {
serde_json::from_value(serde_json::to_value(value)?)
}
}
/// Notification sent from the client to the server.
#[derive(Serialize, Deserialize, Debug, Clone, JsonSchema, TS, Display)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ClientNotification {
client_notification_definitions! {
Initialized,
}
@@ -665,17 +779,18 @@ mod tests {
#[test]
fn serialize_list_models() -> Result<()> {
let request = ClientRequest::ListModels {
let request = ClientRequest::ModelList {
request_id: RequestId::Integer(6),
params: v2::ListModelsParams::default(),
params: v2::ModelListParams::default(),
};
assert_eq!(
json!({
"method": "model/list",
"id": 6,
"params": {
"pageSize": null,
"cursor": null
"limit": null,
"cursor": null,
"order": null,
}
}),
serde_json::to_value(&request)?,

View File

@@ -1,16 +1,25 @@
use codex_protocol::ConversationId;
use codex_protocol::account::PlanType;
use codex_protocol::config_types::ReasoningEffort;
use codex_protocol::protocol::RateLimitSnapshot;
use mcp_types::ContentBlock as McpContentBlock;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value as JsonValue;
use ts_rs::TS;
use uuid::Uuid;
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub enum SortOrder {
Asc,
Desc,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum Account {
#[serde(rename = "apiKey", rename_all = "camelCase")]
#[ts(rename = "apiKey", rename_all = "camelCase")]
@@ -27,6 +36,7 @@ pub enum Account {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum LoginAccountParams {
#[serde(rename = "apiKey")]
#[ts(rename = "apiKey")]
@@ -42,6 +52,7 @@ pub enum LoginAccountParams {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct LoginAccountResponse {
/// Only set if the login method is ChatGPT.
#[schemars(with = "String")]
@@ -54,31 +65,38 @@ pub struct LoginAccountResponse {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct LogoutAccountResponse {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct GetAccountRateLimitsResponse {
pub rate_limits: RateLimitSnapshot,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct GetAccountResponse {
pub account: Account,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListModelsParams {
/// Optional page size; defaults to a reasonable server-side value.
pub page_size: Option<usize>,
#[ts(export_to = "v2/")]
pub struct ModelListParams {
/// Opaque pagination cursor returned by a previous call.
pub cursor: Option<String>,
/// Optional page size; defaults to a reasonable server-side value.
pub limit: Option<i32>,
/// Optional sort order; defaults to descending.
pub order: Option<SortOrder>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct Model {
pub id: String,
pub model: String,
@@ -92,6 +110,7 @@ pub struct Model {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ReasoningEffortOption {
pub reasoning_effort: ReasoningEffort,
pub description: String,
@@ -99,8 +118,9 @@ pub struct ReasoningEffortOption {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct ListModelsResponse {
pub items: Vec<Model>,
#[ts(export_to = "v2/")]
pub struct ModelListResponse {
pub data: Vec<Model>,
/// Opaque cursor to pass to the next call to continue after the last item.
/// if None, there are no more items to return.
pub next_cursor: Option<String>,
@@ -108,7 +128,8 @@ pub struct ListModelsResponse {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UploadFeedbackParams {
#[ts(export_to = "v2/")]
pub struct FeedbackUploadParams {
pub classification: String,
pub reason: Option<String>,
pub conversation_id: Option<ConversationId>,
@@ -117,6 +138,344 @@ pub struct UploadFeedbackParams {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
pub struct UploadFeedbackResponse {
#[ts(export_to = "v2/")]
pub struct FeedbackUploadResponse {
pub thread_id: String,
}
// === Threads, Turns, and Items ===
// Thread APIs
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadStartParams {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadStartResponse {
pub thread_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadResumeParams {
pub thread_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadResumeResponse {
pub thread: Thread,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadArchiveParams {
pub thread_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadArchiveResponse {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadListParams {
/// Opaque pagination cursor returned by a previous call.
pub cursor: Option<String>,
/// Optional page size; defaults to a reasonable server-side value.
pub limit: Option<i32>,
/// Optional sort order; defaults to descending.
pub order: Option<SortOrder>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadListResponse {
pub data: Vec<Thread>,
/// Opaque cursor to pass to the next call to continue after the last item.
/// if None, there are no more items to return.
pub next_cursor: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadCompactParams {
pub thread_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadCompactResponse {}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct Thread {
pub id: String,
pub turn: Vec<Turn>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct Turn {
pub items: Vec<ThreadItem>,
pub status: TurnStatus,
pub error: Option<TurnError>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnError {
pub message: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum TurnStatus {
Completed,
Interrupted,
Failed,
InProgress,
}
// Turn APIs
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnStartParams {
pub thread_id: String,
pub input: Vec<UserInput>,
pub model: String,
pub effort: ReasoningEffort,
pub summary: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnStartResponse {
pub turn_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnInterruptParams {
pub turn_id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TurnInterruptResponse {}
// User input types
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum UserInput {
Text(String),
Image(ImageInput),
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum ImageInput {
#[serde(rename = "image")]
Image { url: String },
}
// Thread items
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type")]
#[ts(export_to = "v2/")]
pub enum ThreadItem {
UserMessage {
id: String,
content: Vec<UserInput>,
},
AgentMessage {
id: String,
text: String,
},
Reasoning {
id: String,
text: String,
},
CommandExecution {
id: String,
command: String,
aggregated_output: String,
exit_code: Option<i32>,
status: CommandExecutionStatus,
duration_ms: Option<i64>,
},
FileChange {
id: String,
changes: Vec<FileUpdateChange>,
status: PatchApplyStatus,
},
McpToolCall {
id: String,
server: String,
tool: String,
status: McpToolCallStatus,
arguments: JsonValue,
result: Option<McpToolCallResult>,
error: Option<McpToolCallError>,
},
WebSearch {
id: String,
query: String,
},
TodoList {
id: String,
items: Vec<TodoItem>,
},
ImageView {
id: String,
path: String,
},
CodeReview {
id: String,
review: String,
},
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum CommandExecutionStatus {
InProgress,
Completed,
Failed,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct FileUpdateChange {
pub path: String,
pub kind: PatchChangeKind,
pub diff: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum PatchChangeKind {
Add,
Delete,
Update,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum PatchApplyStatus {
Completed,
Failed,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub enum McpToolCallStatus {
InProgress,
Completed,
Failed,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpToolCallResult {
pub content: Vec<McpContentBlock>,
pub structured_content: JsonValue,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpToolCallError {
pub message: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct TodoItem {
pub id: String,
pub text: String,
pub completed: bool,
}
// === Server Notifications ===
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct AccountRateLimitsUpdatedNotification {
pub rate_limits: RateLimitSnapshot,
}
// CamelCased copy of codex_protocol::protocol::{RateLimitSnapshot, RateLimitWindow}
// for the v2 surface. This avoids leaking snake_case into the v2 wire format.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct RateLimitSnapshot {
pub primary: Option<RateLimitWindow>,
pub secondary: Option<RateLimitWindow>,
}
impl From<codex_protocol::protocol::RateLimitSnapshot> for RateLimitSnapshot {
fn from(value: codex_protocol::protocol::RateLimitSnapshot) -> Self {
Self {
primary: value.primary.map(RateLimitWindow::from),
secondary: value.secondary.map(RateLimitWindow::from),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct RateLimitWindow {
/// Percentage (0-100) of the window that has been consumed.
pub used_percent: f64,
/// Rolling window duration, in minutes.
#[ts(type = "number | null")]
pub window_minutes: Option<i64>,
/// Unix timestamp (seconds since epoch) when the window resets.
#[ts(type = "number | null")]
pub resets_at: Option<i64>,
}
impl From<codex_protocol::protocol::RateLimitWindow> for RateLimitWindow {
fn from(value: codex_protocol::protocol::RateLimitWindow) -> Self {
Self {
used_percent: value.used_percent,
window_minutes: value.window_minutes,
resets_at: value.resets_at,
}
}
}
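Editorial note: a small usage sketch of the structs above, with assumed field values, showing why the v2 copy exists; the notification test later in this change asserts the same camelCase shape:

fn camel_case_example() -> serde_json::Result<()> {
    // Uses the v2 RateLimitWindow defined above; the values are illustrative.
    let window = RateLimitWindow {
        used_percent: 25.0,
        window_minutes: Some(15),
        resets_at: Some(123),
    };
    // rename_all = "camelCase" puts camelCase keys on the wire, unlike the
    // snake_case codex_protocol::protocol::RateLimitWindow it mirrors.
    assert_eq!(
        serde_json::to_value(&window)?,
        serde_json::json!({
            "usedPercent": 25.0,
            "windowMinutes": 15,
            "resetsAt": 123
        })
    );
    Ok(())
}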

View File

@@ -4,6 +4,7 @@ use crate::fuzzy_file_search::run_fuzzy_file_search;
use crate::models::supported_models;
use crate::outgoing_message::OutgoingMessageSender;
use crate::outgoing_message::OutgoingNotification;
use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
use codex_app_server_protocol::AddConversationListenerParams;
use codex_app_server_protocol::AddConversationSubscriptionResponse;
use codex_app_server_protocol::ApplyPatchApprovalParams;
@@ -18,6 +19,8 @@ use codex_app_server_protocol::ExecCommandApprovalParams;
use codex_app_server_protocol::ExecCommandApprovalResponse;
use codex_app_server_protocol::ExecOneOffCommandParams;
use codex_app_server_protocol::ExecOneOffCommandResponse;
use codex_app_server_protocol::FeedbackUploadParams;
use codex_app_server_protocol::FeedbackUploadResponse;
use codex_app_server_protocol::FuzzyFileSearchParams;
use codex_app_server_protocol::FuzzyFileSearchResponse;
use codex_app_server_protocol::GetAccountRateLimitsResponse;
@@ -32,14 +35,15 @@ use codex_app_server_protocol::InterruptConversationResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::ListConversationsParams;
use codex_app_server_protocol::ListConversationsResponse;
use codex_app_server_protocol::ListModelsParams;
use codex_app_server_protocol::ListModelsResponse;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::LoginApiKeyResponse;
use codex_app_server_protocol::LoginChatGptCompleteNotification;
use codex_app_server_protocol::LoginChatGptResponse;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::ModelListResponse;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::RateLimitSnapshot;
use codex_app_server_protocol::RemoveConversationListenerParams;
use codex_app_server_protocol::RemoveConversationSubscriptionResponse;
use codex_app_server_protocol::RequestId;
@@ -54,8 +58,7 @@ use codex_app_server_protocol::ServerRequestPayload;
use codex_app_server_protocol::SessionConfiguredNotification;
use codex_app_server_protocol::SetDefaultModelParams;
use codex_app_server_protocol::SetDefaultModelResponse;
use codex_app_server_protocol::UploadFeedbackParams;
use codex_app_server_protocol::UploadFeedbackResponse;
use codex_app_server_protocol::SortOrder;
use codex_app_server_protocol::UserInfoResponse;
use codex_app_server_protocol::UserSavedConfig;
use codex_backend_client::Client as BackendClient;
@@ -174,6 +177,56 @@ impl CodexMessageProcessor {
ClientRequest::Initialize { .. } => {
panic!("Initialize should be handled in MessageProcessor");
}
// === v2 Thread/Turn APIs (stubs) ===
ClientRequest::ThreadStart {
request_id,
params: _,
} => {
self.send_unimplemented_error(request_id, "thread/start")
.await;
}
ClientRequest::ThreadResume {
request_id,
params: _,
} => {
self.send_unimplemented_error(request_id, "thread/resume")
.await;
}
ClientRequest::ThreadArchive {
request_id,
params: _,
} => {
self.send_unimplemented_error(request_id, "thread/archive")
.await;
}
ClientRequest::ThreadList {
request_id,
params: _,
} => {
self.send_unimplemented_error(request_id, "thread/list")
.await;
}
ClientRequest::ThreadCompact {
request_id,
params: _,
} => {
self.send_unimplemented_error(request_id, "thread/compact")
.await;
}
ClientRequest::TurnStart {
request_id,
params: _,
} => {
self.send_unimplemented_error(request_id, "turn/start")
.await;
}
ClientRequest::TurnInterrupt {
request_id,
params: _,
} => {
self.send_unimplemented_error(request_id, "turn/interrupt")
.await;
}
ClientRequest::NewConversation { request_id, params } => {
// Do not tokio::spawn() to process new_conversation()
// asynchronously because we need to ensure the conversation is
@@ -186,7 +239,7 @@ impl CodexMessageProcessor {
ClientRequest::ListConversations { request_id, params } => {
self.handle_list_conversations(request_id, params).await;
}
ClientRequest::ListModels { request_id, params } => {
ClientRequest::ModelList { request_id, params } => {
self.list_models(request_id, params).await;
}
ClientRequest::LoginAccount {
@@ -288,7 +341,7 @@ impl CodexMessageProcessor {
} => {
self.get_account_rate_limits(request_id).await;
}
ClientRequest::UploadFeedback { request_id, params } => {
ClientRequest::FeedbackUpload { request_id, params } => {
self.upload_feedback(request_id, params).await;
}
}
@@ -595,7 +648,9 @@ impl CodexMessageProcessor {
async fn get_account_rate_limits(&self, request_id: RequestId) {
match self.fetch_account_rate_limits().await {
Ok(rate_limits) => {
let response = GetAccountRateLimitsResponse { rate_limits };
let response = GetAccountRateLimitsResponse {
rate_limits: rate_limits.into(),
};
self.outgoing.send_response(request_id, response).await;
}
Err(error) => {
@@ -952,21 +1007,35 @@ impl CodexMessageProcessor {
self.outgoing.send_response(request_id, response).await;
}
async fn list_models(&self, request_id: RequestId, params: ListModelsParams) {
let ListModelsParams { page_size, cursor } = params;
let models = supported_models();
async fn list_models(&self, request_id: RequestId, params: ModelListParams) {
let ModelListParams {
cursor,
limit,
order,
} = params;
let mut models = supported_models();
// Sort models according to requested order; default to descending.
match order.unwrap_or(SortOrder::Desc) {
SortOrder::Asc => models.sort_by(|a, b| a.id.cmp(&b.id)),
SortOrder::Desc => models.sort_by(|a, b| b.id.cmp(&a.id)),
}
let total = models.len();
if total == 0 {
let response = ListModelsResponse {
items: Vec::new(),
let response = ModelListResponse {
data: Vec::new(),
next_cursor: None,
};
self.outgoing.send_response(request_id, response).await;
return;
}
let effective_page_size = page_size.unwrap_or(total).max(1).min(total);
// Determine pagination window
let default_limit = total as i32;
let effective_limit = limit.unwrap_or(default_limit).max(1).min(default_limit) as usize;
let start = match cursor {
Some(cursor) => match cursor.parse::<usize>() {
Ok(idx) => idx,
@@ -993,14 +1062,15 @@ impl CodexMessageProcessor {
return;
}
let end = start.saturating_add(effective_page_size).min(total);
let items = models[start..end].to_vec();
let end = start.saturating_add(effective_limit).min(total);
let data = models[start..end].to_vec();
let next_cursor = if end < total {
Some(end.to_string())
} else {
None
};
let response = ListModelsResponse { items, next_cursor };
let response = ModelListResponse { data, next_cursor };
self.outgoing.send_response(request_id, response).await;
}
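Editorial note: the cursor handling above boils down to index-based pagination. A simplified, standalone sketch of that scheme, outside the processor, behaves like this:

// Simplified sketch of the cursor scheme used by list_models: the cursor is
// the stringified index of the next item and the limit is clamped to the
// number of available items.
fn page<'a>(items: &'a [&'a str], cursor: Option<&str>, limit: usize) -> (&'a [&'a str], Option<String>) {
    let start = cursor
        .and_then(|c| c.parse::<usize>().ok())
        .unwrap_or(0)
        .min(items.len());
    let end = start.saturating_add(limit).min(items.len());
    // A next_cursor is only returned while more items remain.
    let next = (end < items.len()).then(|| end.to_string());
    (&items[start..end], next)
}

fn main() {
    let models = ["gpt-5-codex", "gpt-5"];
    let (first, cursor) = page(&models, None, 1);
    assert_eq!(first, ["gpt-5-codex"]);
    let (second, cursor) = page(&models, cursor.as_deref(), 1);
    assert_eq!(second, ["gpt-5"]);
    assert!(cursor.is_none());
}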
@@ -1590,8 +1660,8 @@ impl CodexMessageProcessor {
self.outgoing.send_response(request_id, response).await;
}
async fn upload_feedback(&self, request_id: RequestId, params: UploadFeedbackParams) {
let UploadFeedbackParams {
async fn upload_feedback(&self, request_id: RequestId, params: FeedbackUploadParams) {
let FeedbackUploadParams {
classification,
reason,
conversation_id,
@@ -1636,7 +1706,7 @@ impl CodexMessageProcessor {
match upload_result {
Ok(()) => {
let response = UploadFeedbackResponse { thread_id };
let response = FeedbackUploadResponse { thread_id };
self.outgoing.send_response(request_id, response).await;
}
Err(err) => {
@@ -1720,9 +1790,12 @@ async fn apply_bespoke_event_handling(
}
EventMsg::TokenCount(token_count_event) => {
if let Some(rate_limits) = token_count_event.rate_limits {
let snapshot: RateLimitSnapshot = rate_limits.into();
outgoing
.send_server_notification(ServerNotification::AccountRateLimitsUpdated(
rate_limits,
AccountRateLimitsUpdatedNotification {
rate_limits: snapshot,
},
))
.await;
}

View File

@@ -142,8 +142,8 @@ pub(crate) struct OutgoingError {
#[cfg(test)]
mod tests {
use codex_app_server_protocol::LoginChatGptCompleteNotification;
use codex_protocol::protocol::RateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow;
use codex_app_server_protocol::RateLimitSnapshot;
use codex_app_server_protocol::RateLimitWindow;
use pretty_assertions::assert_eq;
use serde_json::json;
use uuid::Uuid;
@@ -177,26 +177,32 @@ mod tests {
#[test]
fn verify_account_rate_limits_notification_serialization() {
let notification = ServerNotification::AccountRateLimitsUpdated(RateLimitSnapshot {
primary: Some(RateLimitWindow {
used_percent: 25.0,
window_minutes: Some(15),
resets_at: Some(123),
}),
secondary: None,
});
let notification = ServerNotification::AccountRateLimitsUpdated(
codex_app_server_protocol::AccountRateLimitsUpdatedNotification {
rate_limits: RateLimitSnapshot {
primary: Some(RateLimitWindow {
used_percent: 25.0,
window_minutes: Some(15),
resets_at: Some(123),
}),
secondary: None,
},
},
);
let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification);
assert_eq!(
json!({
"method": "account/rateLimits/updated",
"params": {
"primary": {
"used_percent": 25.0,
"window_minutes": 15,
"resets_at": 123,
},
"secondary": null,
"rateLimits": {
"primary": {
"usedPercent": 25.0,
"windowMinutes": 15,
"resetsAt": 123
},
"secondary": null
}
},
}),
serde_json::to_value(jsonrpc_notification)

View File

@@ -17,12 +17,13 @@ use codex_app_server_protocol::ArchiveConversationParams;
use codex_app_server_protocol::CancelLoginChatGptParams;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::ClientNotification;
use codex_app_server_protocol::FeedbackUploadParams;
use codex_app_server_protocol::GetAuthStatusParams;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::InterruptConversationParams;
use codex_app_server_protocol::ListConversationsParams;
use codex_app_server_protocol::ListModelsParams;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::NewConversationParams;
use codex_app_server_protocol::RemoveConversationListenerParams;
use codex_app_server_protocol::ResumeConversationParams;
@@ -30,7 +31,6 @@ use codex_app_server_protocol::SendUserMessageParams;
use codex_app_server_protocol::SendUserTurnParams;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::SetDefaultModelParams;
use codex_app_server_protocol::UploadFeedbackParams;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCMessage;
@@ -246,7 +246,7 @@ impl McpProcess {
/// Send a `feedback/upload` JSON-RPC request.
pub async fn send_upload_feedback_request(
&mut self,
params: UploadFeedbackParams,
params: FeedbackUploadParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("feedback/upload", params).await
@@ -278,7 +278,7 @@ impl McpProcess {
/// Send a `model/list` JSON-RPC request.
pub async fn send_list_models_request(
&mut self,
params: ListModelsParams,
params: ModelListParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("model/list", params).await

View File

@@ -6,9 +6,9 @@ use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::ListModelsParams;
use codex_app_server_protocol::ListModelsResponse;
use codex_app_server_protocol::Model;
use codex_app_server_protocol::ModelListParams;
use codex_app_server_protocol::ModelListResponse;
use codex_app_server_protocol::ReasoningEffortOption;
use codex_app_server_protocol::RequestId;
use codex_protocol::config_types::ReasoningEffort;
@@ -27,9 +27,10 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_list_models_request(ListModelsParams {
page_size: Some(100),
.send_list_models_request(ModelListParams {
cursor: None,
limit: Some(100),
order: None,
})
.await?;
@@ -39,7 +40,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
)
.await??;
let ListModelsResponse { items, next_cursor } = to_response::<ListModelsResponse>(response)?;
let ModelListResponse { data, next_cursor } = to_response::<ModelListResponse>(response)?;
let expected_models = vec![
Model {
@@ -98,7 +99,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
},
];
assert_eq!(items, expected_models);
assert_eq!(data, expected_models);
assert!(next_cursor.is_none());
Ok(())
}
@@ -111,9 +112,10 @@ async fn list_models_pagination_works() -> Result<()> {
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let first_request = mcp
.send_list_models_request(ListModelsParams {
page_size: Some(1),
.send_list_models_request(ModelListParams {
cursor: None,
limit: Some(1),
order: None,
})
.await?;
@@ -123,19 +125,20 @@ async fn list_models_pagination_works() -> Result<()> {
)
.await??;
let ListModelsResponse {
items: first_items,
let ModelListResponse {
data: first_items,
next_cursor: first_cursor,
} = to_response::<ListModelsResponse>(first_response)?;
} = to_response::<ModelListResponse>(first_response)?;
assert_eq!(first_items.len(), 1);
assert_eq!(first_items[0].id, "gpt-5-codex");
let next_cursor = first_cursor.ok_or_else(|| anyhow!("cursor for second page"))?;
let second_request = mcp
.send_list_models_request(ListModelsParams {
page_size: Some(1),
.send_list_models_request(ModelListParams {
cursor: Some(next_cursor.clone()),
limit: Some(1),
order: None,
})
.await?;
@@ -145,10 +148,10 @@ async fn list_models_pagination_works() -> Result<()> {
)
.await??;
let ListModelsResponse {
items: second_items,
let ModelListResponse {
data: second_items,
next_cursor: second_cursor,
} = to_response::<ListModelsResponse>(second_response)?;
} = to_response::<ModelListResponse>(second_response)?;
assert_eq!(second_items.len(), 1);
assert_eq!(second_items[0].id, "gpt-5");
@@ -164,9 +167,10 @@ async fn list_models_rejects_invalid_cursor() -> Result<()> {
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_list_models_request(ListModelsParams {
page_size: None,
.send_list_models_request(ModelListParams {
cursor: Some("invalid".to_string()),
limit: None,
order: None,
})
.await?;

View File

@@ -7,10 +7,10 @@ use codex_app_server_protocol::GetAccountRateLimitsResponse;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::LoginApiKeyParams;
use codex_app_server_protocol::RateLimitSnapshot;
use codex_app_server_protocol::RateLimitWindow;
use codex_app_server_protocol::RequestId;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_protocol::protocol::RateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::path::Path;