Compare commits

..

13 Commits

Author SHA1 Message Date
rhan-oai
0055796543 Merge branch 'main' into codex/loading 2026-02-27 11:01:07 -08:00
Roy Han
05bc61c428 fix issue with resume 2026-02-27 11:00:32 -08:00
Roy Han
ecaa77e499 reformat line width check 2026-02-27 10:27:22 -08:00
Roy Han
6f1485d89c constants 2026-02-27 10:14:13 -08:00
Roy Han
6b560a46be clippy 2026-02-26 21:26:51 -08:00
Roy Han
83726aebe6 cleanup 2026-02-26 21:14:43 -08:00
Roy Han
dda7973531 cleanup 2026-02-26 21:08:33 -08:00
Roy Han
d927cea570 cleanup 2026-02-26 20:59:07 -08:00
Roy Han
bee23c7917 cleanup 2026-02-26 20:53:36 -08:00
Roy Han
0ed71a0c3b cleanup 2026-02-26 20:50:51 -08:00
Roy Han
e89f442a57 cleanup 2026-02-26 20:45:23 -08:00
Roy Han
311bc6660d cleanup 2026-02-26 20:41:16 -08:00
Roy Han
c800db5cd5 working draft 2026-02-26 19:36:13 -08:00
45 changed files with 492 additions and 2552 deletions

View File

@@ -28,17 +28,14 @@ jobs:
target: x86_64-apple-darwin
# Linux
- os: ubuntu-24.04-arm
target: aarch64-unknown-linux-gnu
- os: ubuntu-24.04
target: x86_64-unknown-linux-gnu
- os: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
- os: ubuntu-24.04
target: x86_64-unknown-linux-musl
# 2026-02-27 Bazel tests have been flaky on arm in CI.
# Disable until we can investigate and stabilize them.
# - os: ubuntu-24.04-arm
# target: aarch64-unknown-linux-musl
# - os: ubuntu-24.04-arm
# target: aarch64-unknown-linux-gnu
# TODO: Enable Windows once we fix the toolchain issues there.
#- os: windows-latest
# target: x86_64-pc-windows-gnullvm

1
codex-rs/Cargo.lock generated
View File

@@ -1900,7 +1900,6 @@ version = "0.0.0"
dependencies = [
"anyhow",
"clap",
"codex-utils-absolute-path",
"multimap",
"pretty_assertions",
"serde",

View File

@@ -1422,32 +1422,6 @@
],
"type": "object"
},
"RequestId": {
"anyOf": [
{
"type": "string"
},
{
"format": "int64",
"type": "integer"
}
]
},
"ServerRequestResolvedNotification": {
"properties": {
"requestId": {
"$ref": "#/definitions/RequestId"
},
"threadId": {
"type": "string"
}
},
"required": [
"requestId",
"threadId"
],
"type": "object"
},
"SessionSource": {
"oneOf": [
{
@@ -3448,26 +3422,6 @@
"title": "Item/fileChange/outputDeltaNotification",
"type": "object"
},
{
"properties": {
"method": {
"enum": [
"serverRequest/resolved"
],
"title": "ServerRequest/resolvedNotificationMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/ServerRequestResolvedNotification"
}
},
"required": [
"method",
"params"
],
"title": "ServerRequest/resolvedNotification",
"type": "object"
},
{
"properties": {
"method": {

View File

@@ -357,7 +357,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -382,7 +382,7 @@
"description": "NEW APIs",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -406,7 +406,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -430,7 +430,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -454,7 +454,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -478,7 +478,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -502,7 +502,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -526,7 +526,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -550,7 +550,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -574,7 +574,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -598,7 +598,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -622,7 +622,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -646,7 +646,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -670,7 +670,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -694,7 +694,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -718,7 +718,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -742,7 +742,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -766,7 +766,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -790,7 +790,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -814,7 +814,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -838,7 +838,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -862,7 +862,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -886,7 +886,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -910,7 +910,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -934,7 +934,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -958,7 +958,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -981,7 +981,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1005,7 +1005,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1029,7 +1029,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1053,7 +1053,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1077,7 +1077,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1100,7 +1100,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1123,7 +1123,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1148,7 +1148,7 @@
"description": "Execute a command (argv vector) under the server's sandbox.",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1172,7 +1172,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1196,7 +1196,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1220,7 +1220,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1244,7 +1244,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1268,7 +1268,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1292,7 +1292,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1315,7 +1315,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -1339,7 +1339,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -3065,7 +3065,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"message": {
"type": "string"
@@ -4943,7 +4943,7 @@
"$ref": "#/definitions/JSONRPCErrorError"
},
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
}
},
"required": [
@@ -5011,7 +5011,7 @@
"description": "A request that expects a response.",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"type": "string"
@@ -5030,7 +5030,7 @@
"description": "A successful (non-error) response to a request.",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"result": true
},
@@ -5544,7 +5544,6 @@
"type": "object"
},
"RequestId": {
"$schema": "http://json-schema.org/draft-07/schema#",
"anyOf": [
{
"type": "string"
@@ -5554,7 +5553,7 @@
"type": "integer"
}
],
"title": "RequestId"
"description": "ID of a request, which can be either a string or an integer."
},
"RequestUserInputQuestion": {
"properties": {
@@ -6195,26 +6194,6 @@
"title": "Item/fileChange/outputDeltaNotification",
"type": "object"
},
{
"properties": {
"method": {
"enum": [
"serverRequest/resolved"
],
"title": "ServerRequest/resolvedNotificationMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/v2/ServerRequestResolvedNotification"
}
},
"required": [
"method",
"params"
],
"title": "ServerRequest/resolvedNotification",
"type": "object"
},
{
"properties": {
"method": {
@@ -6668,7 +6647,7 @@
"description": "NEW APIs Sent when approval is requested for a specific command execution. This request is used for Turns started via turn/start.",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -6693,7 +6672,7 @@
"description": "Sent when approval is requested for a specific file change. This request is used for Turns started via turn/start.",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -6718,7 +6697,7 @@
"description": "EXPERIMENTAL - Request input from the user for a tool call.",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -6743,7 +6722,7 @@
"description": "Execute a dynamic tool call on the client.",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -6767,7 +6746,7 @@
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -6792,7 +6771,7 @@
"description": "DEPRECATED APIs below Request to approve a patch. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -6817,7 +6796,7 @@
"description": "Request to exec a command. This request is used for Turns started via the legacy APIs (i.e. SendUserTurn, SendUserMessage).",
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
@@ -11131,17 +11110,6 @@
],
"type": "object"
},
"RequestId": {
"anyOf": [
{
"type": "string"
},
{
"format": "int64",
"type": "integer"
}
]
},
"ResidencyRequirement": {
"enum": [
"us"
@@ -11881,23 +11849,6 @@
},
"type": "object"
},
"ServerRequestResolvedNotification": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"requestId": {
"$ref": "#/definitions/v2/RequestId"
},
"threadId": {
"type": "string"
}
},
"required": [
"requestId",
"threadId"
],
"title": "ServerRequestResolvedNotification",
"type": "object"
},
"SessionSource": {
"oneOf": [
{

View File

@@ -1,30 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"RequestId": {
"anyOf": [
{
"type": "string"
},
{
"format": "int64",
"type": "integer"
}
]
}
},
"properties": {
"requestId": {
"$ref": "#/definitions/RequestId"
},
"threadId": {
"type": "string"
}
},
"required": [
"requestId",
"threadId"
],
"title": "ServerRequestResolvedNotification",
"type": "object"
}

View File

@@ -27,7 +27,6 @@ import type { RawResponseItemCompletedNotification } from "./v2/RawResponseItemC
import type { ReasoningSummaryPartAddedNotification } from "./v2/ReasoningSummaryPartAddedNotification";
import type { ReasoningSummaryTextDeltaNotification } from "./v2/ReasoningSummaryTextDeltaNotification";
import type { ReasoningTextDeltaNotification } from "./v2/ReasoningTextDeltaNotification";
import type { ServerRequestResolvedNotification } from "./v2/ServerRequestResolvedNotification";
import type { TerminalInteractionNotification } from "./v2/TerminalInteractionNotification";
import type { ThreadArchivedNotification } from "./v2/ThreadArchivedNotification";
import type { ThreadClosedNotification } from "./v2/ThreadClosedNotification";
@@ -51,4 +50,4 @@ import type { WindowsWorldWritableWarningNotification } from "./v2/WindowsWorldW
/**
* Notification sent from the server to the client.
*/
export type ServerNotification = { "method": "error", "params": ErrorNotification } | { "method": "thread/started", "params": ThreadStartedNotification } | { "method": "thread/status/changed", "params": ThreadStatusChangedNotification } | { "method": "thread/archived", "params": ThreadArchivedNotification } | { "method": "thread/unarchived", "params": ThreadUnarchivedNotification } | { "method": "thread/closed", "params": ThreadClosedNotification } | { "method": "thread/name/updated", "params": ThreadNameUpdatedNotification } | { "method": "thread/tokenUsage/updated", "params": ThreadTokenUsageUpdatedNotification } | { "method": "turn/started", "params": TurnStartedNotification } | { "method": "turn/completed", "params": TurnCompletedNotification } | { "method": "turn/diff/updated", "params": TurnDiffUpdatedNotification } | { "method": "turn/plan/updated", "params": TurnPlanUpdatedNotification } | { "method": "item/started", "params": ItemStartedNotification } | { "method": "item/completed", "params": ItemCompletedNotification } | { "method": "rawResponseItem/completed", "params": RawResponseItemCompletedNotification } | { "method": "item/agentMessage/delta", "params": AgentMessageDeltaNotification } | { "method": "item/plan/delta", "params": PlanDeltaNotification } | { "method": "item/commandExecution/outputDelta", "params": CommandExecutionOutputDeltaNotification } | { "method": "item/commandExecution/terminalInteraction", "params": TerminalInteractionNotification } | { "method": "item/fileChange/outputDelta", "params": FileChangeOutputDeltaNotification } | { "method": "serverRequest/resolved", "params": ServerRequestResolvedNotification } | { "method": "item/mcpToolCall/progress", "params": McpToolCallProgressNotification } | { "method": "mcpServer/oauthLogin/completed", "params": McpServerOauthLoginCompletedNotification } | { "method": "account/updated", "params": AccountUpdatedNotification } | { "method": "account/rateLimits/updated", "params": 
AccountRateLimitsUpdatedNotification } | { "method": "app/list/updated", "params": AppListUpdatedNotification } | { "method": "item/reasoning/summaryTextDelta", "params": ReasoningSummaryTextDeltaNotification } | { "method": "item/reasoning/summaryPartAdded", "params": ReasoningSummaryPartAddedNotification } | { "method": "item/reasoning/textDelta", "params": ReasoningTextDeltaNotification } | { "method": "thread/compacted", "params": ContextCompactedNotification } | { "method": "model/rerouted", "params": ModelReroutedNotification } | { "method": "deprecationNotice", "params": DeprecationNoticeNotification } | { "method": "configWarning", "params": ConfigWarningNotification } | { "method": "fuzzyFileSearch/sessionUpdated", "params": FuzzyFileSearchSessionUpdatedNotification } | { "method": "fuzzyFileSearch/sessionCompleted", "params": FuzzyFileSearchSessionCompletedNotification } | { "method": "thread/realtime/started", "params": ThreadRealtimeStartedNotification } | { "method": "thread/realtime/itemAdded", "params": ThreadRealtimeItemAddedNotification } | { "method": "thread/realtime/outputAudio/delta", "params": ThreadRealtimeOutputAudioDeltaNotification } | { "method": "thread/realtime/error", "params": ThreadRealtimeErrorNotification } | { "method": "thread/realtime/closed", "params": ThreadRealtimeClosedNotification } | { "method": "windows/worldWritableWarning", "params": WindowsWorldWritableWarningNotification } | { "method": "windowsSandbox/setupCompleted", "params": WindowsSandboxSetupCompletedNotification } | { "method": "account/login/completed", "params": AccountLoginCompletedNotification } | { "method": "authStatusChange", "params": AuthStatusChangeNotification } | { "method": "loginChatGptComplete", "params": LoginChatGptCompleteNotification } | { "method": "sessionConfigured", "params": SessionConfiguredNotification };
export type ServerNotification = { "method": "error", "params": ErrorNotification } | { "method": "thread/started", "params": ThreadStartedNotification } | { "method": "thread/status/changed", "params": ThreadStatusChangedNotification } | { "method": "thread/archived", "params": ThreadArchivedNotification } | { "method": "thread/unarchived", "params": ThreadUnarchivedNotification } | { "method": "thread/closed", "params": ThreadClosedNotification } | { "method": "thread/name/updated", "params": ThreadNameUpdatedNotification } | { "method": "thread/tokenUsage/updated", "params": ThreadTokenUsageUpdatedNotification } | { "method": "turn/started", "params": TurnStartedNotification } | { "method": "turn/completed", "params": TurnCompletedNotification } | { "method": "turn/diff/updated", "params": TurnDiffUpdatedNotification } | { "method": "turn/plan/updated", "params": TurnPlanUpdatedNotification } | { "method": "item/started", "params": ItemStartedNotification } | { "method": "item/completed", "params": ItemCompletedNotification } | { "method": "rawResponseItem/completed", "params": RawResponseItemCompletedNotification } | { "method": "item/agentMessage/delta", "params": AgentMessageDeltaNotification } | { "method": "item/plan/delta", "params": PlanDeltaNotification } | { "method": "item/commandExecution/outputDelta", "params": CommandExecutionOutputDeltaNotification } | { "method": "item/commandExecution/terminalInteraction", "params": TerminalInteractionNotification } | { "method": "item/fileChange/outputDelta", "params": FileChangeOutputDeltaNotification } | { "method": "item/mcpToolCall/progress", "params": McpToolCallProgressNotification } | { "method": "mcpServer/oauthLogin/completed", "params": McpServerOauthLoginCompletedNotification } | { "method": "account/updated", "params": AccountUpdatedNotification } | { "method": "account/rateLimits/updated", "params": AccountRateLimitsUpdatedNotification } | { "method": "app/list/updated", "params": 
AppListUpdatedNotification } | { "method": "item/reasoning/summaryTextDelta", "params": ReasoningSummaryTextDeltaNotification } | { "method": "item/reasoning/summaryPartAdded", "params": ReasoningSummaryPartAddedNotification } | { "method": "item/reasoning/textDelta", "params": ReasoningTextDeltaNotification } | { "method": "thread/compacted", "params": ContextCompactedNotification } | { "method": "model/rerouted", "params": ModelReroutedNotification } | { "method": "deprecationNotice", "params": DeprecationNoticeNotification } | { "method": "configWarning", "params": ConfigWarningNotification } | { "method": "fuzzyFileSearch/sessionUpdated", "params": FuzzyFileSearchSessionUpdatedNotification } | { "method": "fuzzyFileSearch/sessionCompleted", "params": FuzzyFileSearchSessionCompletedNotification } | { "method": "thread/realtime/started", "params": ThreadRealtimeStartedNotification } | { "method": "thread/realtime/itemAdded", "params": ThreadRealtimeItemAddedNotification } | { "method": "thread/realtime/outputAudio/delta", "params": ThreadRealtimeOutputAudioDeltaNotification } | { "method": "thread/realtime/error", "params": ThreadRealtimeErrorNotification } | { "method": "thread/realtime/closed", "params": ThreadRealtimeClosedNotification } | { "method": "windows/worldWritableWarning", "params": WindowsWorldWritableWarningNotification } | { "method": "windowsSandbox/setupCompleted", "params": WindowsSandboxSetupCompletedNotification } | { "method": "account/login/completed", "params": AccountLoginCompletedNotification } | { "method": "authStatusChange", "params": AuthStatusChangeNotification } | { "method": "loginChatGptComplete", "params": LoginChatGptCompleteNotification } | { "method": "sessionConfigured", "params": SessionConfiguredNotification };

View File

@@ -1,6 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { RequestId } from "../RequestId";
export type ServerRequestResolvedNotification = { threadId: string, requestId: RequestId, };

View File

@@ -142,7 +142,6 @@ export type { ReviewTarget } from "./ReviewTarget";
export type { SandboxMode } from "./SandboxMode";
export type { SandboxPolicy } from "./SandboxPolicy";
export type { SandboxWorkspaceWrite } from "./SandboxWorkspaceWrite";
export type { ServerRequestResolvedNotification } from "./ServerRequestResolvedNotification";
export type { SessionSource } from "./SessionSource";
export type { SkillDependencies } from "./SkillDependencies";
export type { SkillErrorInfo } from "./SkillErrorInfo";

View File

@@ -8,9 +8,7 @@ use ts_rs::TS;
pub const JSONRPC_VERSION: &str = "2.0";
#[derive(
Debug, Clone, PartialEq, PartialOrd, Ord, Deserialize, Serialize, Hash, Eq, JsonSchema, TS,
)]
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Hash, Eq, JsonSchema, TS)]
#[serde(untagged)]
pub enum RequestId {
String(String),

View File

@@ -548,14 +548,6 @@ macro_rules! server_request_definitions {
)*
}
impl ServerRequest {
pub fn id(&self) -> &RequestId {
match self {
$(Self::$variant { request_id, .. } => request_id,)*
}
}
}
#[derive(Debug, Clone, PartialEq, JsonSchema)]
#[allow(clippy::large_enum_variant)]
pub enum ServerRequestPayload {
@@ -846,7 +838,6 @@ server_notification_definitions! {
CommandExecutionOutputDelta => "item/commandExecution/outputDelta" (v2::CommandExecutionOutputDeltaNotification),
TerminalInteraction => "item/commandExecution/terminalInteraction" (v2::TerminalInteractionNotification),
FileChangeOutputDelta => "item/fileChange/outputDelta" (v2::FileChangeOutputDeltaNotification),
ServerRequestResolved => "serverRequest/resolved" (v2::ServerRequestResolvedNotification),
McpToolCallProgress => "item/mcpToolCall/progress" (v2::McpToolCallProgressNotification),
McpServerOauthLoginCompleted => "mcpServer/oauthLogin/completed" (v2::McpServerOauthLoginCompletedNotification),
AccountUpdated => "account/updated" (v2::AccountUpdatedNotification),
@@ -1115,7 +1106,6 @@ mod tests {
);
let payload = ServerRequestPayload::ExecCommandApproval(params);
assert_eq!(request.id(), &RequestId::Integer(7));
assert_eq!(payload.request_with_id(RequestId::Integer(7)), request);
Ok(())
}

View File

@@ -22,7 +22,6 @@ use codex_protocol::models::MessagePhase;
use codex_protocol::protocol::AgentReasoningEvent;
use codex_protocol::protocol::AgentReasoningRawContentEvent;
use codex_protocol::protocol::AgentStatus;
use codex_protocol::protocol::ApplyPatchApprovalRequestEvent;
use codex_protocol::protocol::CompactedItem;
use codex_protocol::protocol::ContextCompactedEvent;
use codex_protocol::protocol::DynamicToolCallResponseEvent;
@@ -127,9 +126,6 @@ impl ThreadHistoryBuilder {
EventMsg::WebSearchEnd(payload) => self.handle_web_search_end(payload),
EventMsg::ExecCommandBegin(payload) => self.handle_exec_command_begin(payload),
EventMsg::ExecCommandEnd(payload) => self.handle_exec_command_end(payload),
EventMsg::ApplyPatchApprovalRequest(payload) => {
self.handle_apply_patch_approval_request(payload)
}
EventMsg::PatchApplyBegin(payload) => self.handle_patch_apply_begin(payload),
EventMsg::PatchApplyEnd(payload) => self.handle_patch_apply_end(payload),
EventMsg::DynamicToolCallRequest(payload) => {
@@ -368,19 +364,6 @@ impl ThreadHistoryBuilder {
self.upsert_item_in_turn_id(&payload.turn_id, item);
}
fn handle_apply_patch_approval_request(&mut self, payload: &ApplyPatchApprovalRequestEvent) {
let item = ThreadItem::FileChange {
id: payload.call_id.clone(),
changes: convert_patch_changes(&payload.changes),
status: PatchApplyStatus::InProgress,
};
if payload.turn_id.is_empty() {
self.upsert_item_in_current_turn(item);
} else {
self.upsert_item_in_turn_id(&payload.turn_id, item);
}
}
fn handle_patch_apply_begin(&mut self, payload: &PatchApplyBeginEvent) {
let item = ThreadItem::FileChange {
id: payload.call_id.clone(),
@@ -1097,7 +1080,6 @@ mod tests {
use codex_protocol::protocol::AgentMessageEvent;
use codex_protocol::protocol::AgentReasoningEvent;
use codex_protocol::protocol::AgentReasoningRawContentEvent;
use codex_protocol::protocol::ApplyPatchApprovalRequestEvent;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::CompactedItem;
use codex_protocol::protocol::DynamicToolCallResponseEvent;
@@ -1106,7 +1088,6 @@ mod tests {
use codex_protocol::protocol::ItemStartedEvent;
use codex_protocol::protocol::McpInvocation;
use codex_protocol::protocol::McpToolCallEndEvent;
use codex_protocol::protocol::PatchApplyBeginEvent;
use codex_protocol::protocol::ThreadRolledBackEvent;
use codex_protocol::protocol::TurnAbortReason;
use codex_protocol::protocol::TurnAbortedEvent;
@@ -1999,133 +1980,6 @@ mod tests {
);
}
#[test]
fn patch_apply_begin_updates_active_turn_snapshot_with_file_change() {
let turn_id = "turn-1";
let mut builder = ThreadHistoryBuilder::new();
let events = vec![
EventMsg::TurnStarted(TurnStartedEvent {
turn_id: turn_id.to_string(),
model_context_window: None,
collaboration_mode_kind: Default::default(),
}),
EventMsg::UserMessage(UserMessageEvent {
message: "apply patch".into(),
images: None,
text_elements: Vec::new(),
local_images: Vec::new(),
}),
EventMsg::PatchApplyBegin(PatchApplyBeginEvent {
call_id: "patch-call".into(),
turn_id: turn_id.to_string(),
auto_approved: false,
changes: [(
PathBuf::from("README.md"),
codex_protocol::protocol::FileChange::Add {
content: "hello\n".into(),
},
)]
.into_iter()
.collect(),
}),
];
for event in &events {
builder.handle_event(event);
}
let snapshot = builder
.active_turn_snapshot()
.expect("active turn snapshot");
assert_eq!(snapshot.id, turn_id);
assert_eq!(snapshot.status, TurnStatus::InProgress);
assert_eq!(
snapshot.items,
vec![
ThreadItem::UserMessage {
id: "item-1".into(),
content: vec![UserInput::Text {
text: "apply patch".into(),
text_elements: Vec::new(),
}],
},
ThreadItem::FileChange {
id: "patch-call".into(),
changes: vec![FileUpdateChange {
path: "README.md".into(),
kind: PatchChangeKind::Add,
diff: "hello\n".into(),
}],
status: PatchApplyStatus::InProgress,
},
]
);
}
#[test]
fn apply_patch_approval_request_updates_active_turn_snapshot_with_file_change() {
let turn_id = "turn-1";
let mut builder = ThreadHistoryBuilder::new();
let events = vec![
EventMsg::TurnStarted(TurnStartedEvent {
turn_id: turn_id.to_string(),
model_context_window: None,
collaboration_mode_kind: Default::default(),
}),
EventMsg::UserMessage(UserMessageEvent {
message: "apply patch".into(),
images: None,
text_elements: Vec::new(),
local_images: Vec::new(),
}),
EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
call_id: "patch-call".into(),
turn_id: turn_id.to_string(),
changes: [(
PathBuf::from("README.md"),
codex_protocol::protocol::FileChange::Add {
content: "hello\n".into(),
},
)]
.into_iter()
.collect(),
reason: None,
grant_root: None,
}),
];
for event in &events {
builder.handle_event(event);
}
let snapshot = builder
.active_turn_snapshot()
.expect("active turn snapshot");
assert_eq!(snapshot.id, turn_id);
assert_eq!(snapshot.status, TurnStatus::InProgress);
assert_eq!(
snapshot.items,
vec![
ThreadItem::UserMessage {
id: "item-1".into(),
content: vec![UserInput::Text {
text: "apply patch".into(),
text_elements: Vec::new(),
}],
},
ThreadItem::FileChange {
id: "patch-call".into(),
changes: vec![FileUpdateChange {
path: "README.md".into(),
kind: PatchChangeKind::Add,
diff: "hello\n".into(),
}],
status: PatchApplyStatus::InProgress,
},
]
);
}
#[test]
fn late_turn_complete_does_not_close_active_turn() {
let events = vec![

View File

@@ -1,7 +1,6 @@
use std::collections::HashMap;
use std::path::PathBuf;
use crate::RequestId;
use crate::protocol::common::AuthMode;
use codex_experimental_api_macros::ExperimentalApi;
use codex_protocol::account::PlanType;
@@ -3746,14 +3745,6 @@ pub struct FileChangeOutputDeltaNotification {
pub delta: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ServerRequestResolvedNotification {
pub thread_id: String,
pub request_id: RequestId,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]

View File

@@ -712,8 +712,7 @@ Order of messages:
1. `item/started` — shows the pending `commandExecution` item with `command`, `cwd`, and other fields so you can render the proposed action.
2. `item/commandExecution/requestApproval` (request) — carries the same `itemId`, `threadId`, `turnId`, optionally `approvalId` (for subcommand callbacks), and `reason`. For normal command approvals, it also includes `command`, `cwd`, and `commandActions` for friendly display. When `initialize.params.capabilities.experimentalApi = true`, it may also include experimental `additionalPermissions` describing requested per-command sandbox access; any filesystem paths in that payload are absolute on the wire. For network-only approvals, those command fields may be omitted and `networkApprovalContext` is provided instead. Optional persistence hints may also be included via `proposedExecpolicyAmendment` and `proposedNetworkPolicyAmendments`. Clients can prefer `availableDecisions` when present to render the exact set of choices the server wants to expose, while still falling back to the older heuristics if it is omitted.
3. Client response — for example `{ "decision": "accept" }`, `{ "decision": "acceptForSession" }`, `{ "decision": { "acceptWithExecpolicyAmendment": { "execpolicy_amendment": [...] } } }`, `{ "decision": { "applyNetworkPolicyAmendment": { "network_policy_amendment": { "host": "example.com", "action": "allow" } } } }`, `{ "decision": "decline" }`, or `{ "decision": "cancel" }`.
4. `serverRequest/resolved``{ threadId, requestId }` confirms the pending request has been resolved or cleared, including lifecycle cleanup on turn start/complete/interrupt.
5. `item/completed` — final `commandExecution` item with `status: "completed" | "failed" | "declined"` and execution output. Render this as the authoritative result.
4. `item/completed` — final `commandExecution` item with `status: "completed" | "failed" | "declined"` and execution output. Render this as the authoritative result.
### File change approvals
@@ -722,15 +721,10 @@ Order of messages:
1. `item/started` — emits a `fileChange` item with `changes` (diff chunk summaries) and `status: "inProgress"`. Show the proposed edits and paths to the user.
2. `item/fileChange/requestApproval` (request) — includes `itemId`, `threadId`, `turnId`, and an optional `reason`.
3. Client response — `{ "decision": "accept" }` or `{ "decision": "decline" }`.
4. `serverRequest/resolved``{ threadId, requestId }` confirms the pending request has been resolved or cleared, including lifecycle cleanup on turn start/complete/interrupt.
5. `item/completed` — returns the same `fileChange` item with `status` updated to `completed`, `failed`, or `declined` after the patch attempt. Rely on this to show success/failure and finalize the diff state in your UI.
4. `item/completed` — returns the same `fileChange` item with `status` updated to `completed`, `failed`, or `declined` after the patch attempt. Rely on this to show success/failure and finalize the diff state in your UI.
UI guidance for IDEs: surface an approval dialog as soon as the request arrives. The turn will proceed after the server receives a response to the approval request. The terminal `item/completed` notification will be sent with the appropriate status.
### request_user_input
When the client responds to `item/tool/requestUserInput`, the server emits `serverRequest/resolved` with `{ threadId, requestId }`. If the pending request is cleared by turn start, turn completion, or turn interruption before the client answers, the server emits the same notification for that cleanup.
### Dynamic tool calls (experimental)
`dynamicTools` on `thread/start` and the corresponding `item/tool/call` request/response flow are experimental APIs. To enable them, set `initialize.params.capabilities.experimentalApi = true`.

View File

@@ -6,8 +6,6 @@ use crate::error_code::INTERNAL_ERROR_CODE;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::ClientRequestResult;
use crate::outgoing_message::ThreadScopedOutgoingMessageSender;
use crate::server_request_error::is_turn_transition_server_request_error;
use crate::thread_state::ThreadListenerCommand;
use crate::thread_state::ThreadState;
use crate::thread_state::TurnSummary;
use crate::thread_status::ThreadWatchActiveGuard;
@@ -58,7 +56,6 @@ use codex_app_server_protocol::RawResponseItemCompletedNotification;
use codex_app_server_protocol::ReasoningSummaryPartAddedNotification;
use codex_app_server_protocol::ReasoningSummaryTextDeltaNotification;
use codex_app_server_protocol::ReasoningTextDeltaNotification;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestPayload;
use codex_app_server_protocol::TerminalInteractionNotification;
@@ -135,38 +132,6 @@ struct CommandExecutionCompletionItem {
command_actions: Vec<V2ParsedCommand>,
}
async fn resolve_server_request_on_thread_listener(
thread_state: &Arc<Mutex<ThreadState>>,
request_id: RequestId,
) {
let (completion_tx, completion_rx) = oneshot::channel();
let listener_command_tx = {
let state = thread_state.lock().await;
state.listener_command_tx()
};
let Some(listener_command_tx) = listener_command_tx else {
error!("failed to remove pending client request: thread listener is not running");
return;
};
if listener_command_tx
.send(ThreadListenerCommand::ResolveServerRequest {
request_id,
completion_tx,
})
.is_err()
{
error!(
"failed to remove pending client request: thread listener command channel is closed"
);
return;
}
if let Err(err) = completion_rx.await {
error!("failed to remove pending client request: {err}");
}
}
#[allow(clippy::too_many_arguments)]
pub(crate) async fn apply_bespoke_event_handling(
event: Event,
@@ -186,15 +151,11 @@ pub(crate) async fn apply_bespoke_event_handling(
} = event;
match msg {
EventMsg::TurnStarted(_) => {
// While not technically necessary as it was already done on TurnComplete, be extra cautious and abort any pending server requests.
outgoing.abort_pending_server_requests().await;
thread_watch_manager
.note_turn_started(&conversation_id.to_string())
.await;
}
EventMsg::TurnComplete(_ev) => {
// All per-thread requests are bound to a turn, so abort them.
outgoing.abort_pending_server_requests().await;
let turn_failed = thread_state.lock().await.turn_summary.last_error.is_some();
thread_watch_manager
.note_turn_completed(&conversation_id.to_string(), turn_failed)
@@ -302,7 +263,7 @@ pub(crate) async fn apply_bespoke_event_handling(
reason,
grant_root,
};
let (_pending_request_id, rx) = outgoing
let rx = outgoing
.send_request(ServerRequestPayload::ApplyPatchApproval(params))
.await;
tokio::spawn(async move {
@@ -346,7 +307,7 @@ pub(crate) async fn apply_bespoke_event_handling(
reason,
grant_root,
};
let (pending_request_id, rx) = outgoing
let rx = outgoing
.send_request(ServerRequestPayload::FileChangeRequestApproval(params))
.await;
tokio::spawn(async move {
@@ -355,7 +316,6 @@ pub(crate) async fn apply_bespoke_event_handling(
conversation_id,
item_id,
patch_changes,
pending_request_id,
rx,
conversation,
outgoing,
@@ -402,7 +362,7 @@ pub(crate) async fn apply_bespoke_event_handling(
reason,
parsed_cmd,
};
let (_pending_request_id, rx) = outgoing
let rx = outgoing
.send_request(ServerRequestPayload::ExecCommandApproval(params))
.await;
tokio::spawn(async move {
@@ -475,7 +435,7 @@ pub(crate) async fn apply_bespoke_event_handling(
proposed_network_policy_amendments: proposed_network_policy_amendments_v2,
available_decisions: Some(available_decisions),
};
let (pending_request_id, rx) = outgoing
let rx = outgoing
.send_request(ServerRequestPayload::CommandExecutionRequestApproval(
params,
))
@@ -487,7 +447,6 @@ pub(crate) async fn apply_bespoke_event_handling(
approval_id,
call_id,
completion_item,
pending_request_id,
rx,
conversation,
outgoing,
@@ -530,16 +489,14 @@ pub(crate) async fn apply_bespoke_event_handling(
item_id: request.call_id,
questions,
};
let (pending_request_id, rx) = outgoing
let rx = outgoing
.send_request(ServerRequestPayload::ToolRequestUserInput(params))
.await;
tokio::spawn(async move {
on_request_user_input_response(
event_turn_id,
pending_request_id,
rx,
conversation,
thread_state,
user_input_guard,
)
.await;
@@ -593,7 +550,7 @@ pub(crate) async fn apply_bespoke_event_handling(
tool: tool.clone(),
arguments: arguments.clone(),
};
let (_pending_request_id, rx) = outgoing
let rx = outgoing
.send_request(ServerRequestPayload::DynamicToolCall(params))
.await;
tokio::spawn(async move {
@@ -1179,7 +1136,6 @@ pub(crate) async fn apply_bespoke_event_handling(
// Until we migrate the core to be aware of a first class FileChangeItem
// and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
let item_id = patch_begin_event.call_id.clone();
let changes = convert_patch_changes(&patch_begin_event.changes);
let first_start = {
let mut state = thread_state.lock().await;
@@ -1191,7 +1147,7 @@ pub(crate) async fn apply_bespoke_event_handling(
if first_start {
let item = ThreadItem::FileChange {
id: item_id.clone(),
changes,
changes: convert_patch_changes(&patch_begin_event.changes),
status: PatchApplyStatus::InProgress,
};
let notification = ItemStartedNotification {
@@ -1373,8 +1329,6 @@ pub(crate) async fn apply_bespoke_event_handling(
}
// If this is a TurnAborted, reply to any pending interrupt requests.
EventMsg::TurnAborted(turn_aborted_event) => {
// All per-thread requests are bound to a turn, so abort them.
outgoing.abort_pending_server_requests().await;
let pending = {
let mut state = thread_state.lock().await;
std::mem::take(&mut state.pending_interrupts)
@@ -1771,7 +1725,6 @@ async fn on_patch_approval_response(
let response = receiver.await;
let value = match response {
Ok(Ok(value)) => value,
Ok(Err(err)) if is_turn_transition_server_request_error(&err) => return,
Ok(Err(err)) => {
error!("request failed with client error: {err:?}");
if let Err(submit_err) = codex
@@ -1828,7 +1781,6 @@ async fn on_exec_approval_response(
let response = receiver.await;
let value = match response {
Ok(Ok(value)) => value,
Ok(Err(err)) if is_turn_transition_server_request_error(&err) => return,
Ok(Err(err)) => {
error!("request failed with client error: {err:?}");
return;
@@ -1864,18 +1816,14 @@ async fn on_exec_approval_response(
async fn on_request_user_input_response(
event_turn_id: String,
pending_request_id: RequestId,
receiver: oneshot::Receiver<ClientRequestResult>,
conversation: Arc<CodexThread>,
thread_state: Arc<Mutex<ThreadState>>,
user_input_guard: ThreadWatchActiveGuard,
) {
let response = receiver.await;
resolve_server_request_on_thread_listener(&thread_state, pending_request_id).await;
drop(user_input_guard);
let value = match response {
Ok(Ok(value)) => value,
Ok(Err(err)) if is_turn_transition_server_request_error(&err) => return,
Ok(Err(err)) => {
error!("request failed with client error: {err:?}");
let empty = CoreRequestUserInputResponse {
@@ -1986,7 +1934,6 @@ async fn on_file_change_request_approval_response(
conversation_id: ThreadId,
item_id: String,
changes: Vec<FileUpdateChange>,
pending_request_id: RequestId,
receiver: oneshot::Receiver<ClientRequestResult>,
codex: Arc<CodexThread>,
outgoing: ThreadScopedOutgoingMessageSender,
@@ -1994,7 +1941,6 @@ async fn on_file_change_request_approval_response(
permission_guard: ThreadWatchActiveGuard,
) {
let response = receiver.await;
resolve_server_request_on_thread_listener(&thread_state, pending_request_id).await;
drop(permission_guard);
let (decision, completion_status) = match response {
Ok(Ok(value)) => {
@@ -2012,7 +1958,6 @@ async fn on_file_change_request_approval_response(
// Only short-circuit on declines/cancels/failures.
(decision, completion_status)
}
Ok(Err(err)) if is_turn_transition_server_request_error(&err) => return,
Ok(Err(err)) => {
error!("request failed with client error: {err:?}");
(ReviewDecision::Denied, Some(PatchApplyStatus::Failed))
@@ -2054,7 +1999,6 @@ async fn on_command_execution_request_approval_response(
approval_id: Option<String>,
item_id: String,
completion_item: Option<CommandExecutionCompletionItem>,
pending_request_id: RequestId,
receiver: oneshot::Receiver<ClientRequestResult>,
conversation: Arc<CodexThread>,
outgoing: ThreadScopedOutgoingMessageSender,
@@ -2062,7 +2006,6 @@ async fn on_command_execution_request_approval_response(
permission_guard: ThreadWatchActiveGuard,
) {
let response = receiver.await;
resolve_server_request_on_thread_listener(&thread_state, pending_request_id).await;
drop(permission_guard);
let (decision, completion_status) = match response {
Ok(Ok(value)) => {
@@ -2114,7 +2057,6 @@ async fn on_command_execution_request_approval_response(
};
(decision, completion_status)
}
Ok(Err(err)) if is_turn_transition_server_request_error(&err) => return,
Ok(Err(err)) => {
error!("request failed with client error: {err:?}");
(ReviewDecision::Denied, Some(CommandExecutionStatus::Failed))

View File

@@ -100,7 +100,6 @@ use codex_app_server_protocol::NewConversationResponse;
use codex_app_server_protocol::ProductSurface as ApiProductSurface;
use codex_app_server_protocol::RemoveConversationListenerParams;
use codex_app_server_protocol::RemoveConversationSubscriptionResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ResumeConversationParams;
use codex_app_server_protocol::ResumeConversationResponse;
use codex_app_server_protocol::ReviewDelivery as ApiReviewDelivery;
@@ -113,7 +112,6 @@ use codex_app_server_protocol::SendUserMessageResponse;
use codex_app_server_protocol::SendUserTurnParams;
use codex_app_server_protocol::SendUserTurnResponse;
use codex_app_server_protocol::ServerNotification;
use codex_app_server_protocol::ServerRequestResolvedNotification;
use codex_app_server_protocol::SessionConfiguredNotification;
use codex_app_server_protocol::SetDefaultModelParams;
use codex_app_server_protocol::SetDefaultModelResponse;
@@ -299,12 +297,8 @@ use tracing::info;
use tracing::warn;
use uuid::Uuid;
#[cfg(test)]
use codex_app_server_protocol::ServerRequest;
use crate::filters::compute_source_filters;
use crate::filters::source_kind_matches;
use crate::thread_state::ThreadListenerCommand;
use crate::thread_state::ThreadState;
use crate::thread_state::ThreadStateManager;
@@ -3227,11 +3221,11 @@ impl CodexMessageProcessor {
};
let command = crate::thread_state::ThreadListenerCommand::SendThreadResumeResponse(
Box::new(crate::thread_state::PendingThreadResumeRequest {
crate::thread_state::PendingThreadResumeRequest {
request_id: request_id.clone(),
rollout_path,
config_snapshot,
}),
},
);
if listener_command_tx.send(command).is_err() {
let err = JSONRPCErrorError {
@@ -4850,9 +4844,7 @@ impl CodexMessageProcessor {
async fn finalize_thread_teardown(&mut self, thread_id: ThreadId) {
self.pending_thread_unloads.lock().await.remove(&thread_id);
self.outgoing
.cancel_requests_for_thread(thread_id, None)
.await;
self.outgoing.cancel_requests_for_thread(thread_id).await;
self.thread_state_manager
.remove_thread_state(thread_id)
.await;
@@ -4913,9 +4905,7 @@ impl CodexMessageProcessor {
self.pending_thread_unloads.lock().await.insert(thread_id);
// Any pending app-server -> client requests for this thread can no longer be
// answered; cancel their callbacks before shutdown/unload.
self.outgoing
.cancel_requests_for_thread(thread_id, None)
.await;
self.outgoing.cancel_requests_for_thread(thread_id).await;
self.thread_state_manager
.remove_thread_state(thread_id)
.await;
@@ -6517,15 +6507,21 @@ impl CodexMessageProcessor {
let Some(listener_command) = listener_command else {
break;
};
handle_thread_listener_command(
conversation_id,
codex_home.as_path(),
&thread_state,
&thread_watch_manager,
&outgoing_for_task,
listener_command,
)
.await;
match listener_command {
crate::thread_state::ThreadListenerCommand::SendThreadResumeResponse(
resume_request,
) => {
handle_pending_thread_resume_request(
conversation_id,
codex_home.as_path(),
&thread_state,
&thread_watch_manager,
&outgoing_for_task,
resume_request,
)
.await;
}
}
}
}
}
@@ -6834,37 +6830,6 @@ impl CodexMessageProcessor {
}
}
async fn handle_thread_listener_command(
conversation_id: ThreadId,
codex_home: &Path,
thread_state: &Arc<Mutex<ThreadState>>,
thread_watch_manager: &ThreadWatchManager,
outgoing: &Arc<OutgoingMessageSender>,
listener_command: ThreadListenerCommand,
) {
match listener_command {
ThreadListenerCommand::SendThreadResumeResponse(resume_request) => {
handle_pending_thread_resume_request(
conversation_id,
codex_home,
thread_state,
thread_watch_manager,
outgoing,
*resume_request,
)
.await;
}
ThreadListenerCommand::ResolveServerRequest {
request_id,
completion_tx,
} => {
resolve_pending_server_request(conversation_id, thread_state, outgoing, request_id)
.await;
let _ = completion_tx.send(());
}
}
}
async fn handle_pending_thread_resume_request(
conversation_id: ThreadId,
codex_home: &Path,
@@ -6953,36 +6918,9 @@ async fn handle_pending_thread_resume_request(
reasoning_effort,
};
outgoing.send_response(request_id, response).await;
outgoing
.replay_requests_to_connection_for_thread(connection_id, conversation_id)
.await;
thread_state.lock().await.add_connection(connection_id);
}
async fn resolve_pending_server_request(
conversation_id: ThreadId,
thread_state: &Arc<Mutex<ThreadState>>,
outgoing: &Arc<OutgoingMessageSender>,
request_id: RequestId,
) {
let thread_id = conversation_id.to_string();
let subscribed_connection_ids = thread_state.lock().await.subscribed_connection_ids();
let outgoing = ThreadScopedOutgoingMessageSender::new(
outgoing.clone(),
subscribed_connection_ids,
conversation_id,
);
outgoing
.send_server_notification(ServerNotification::ServerRequestResolved(
ServerRequestResolvedNotification {
thread_id,
request_id,
},
))
.await;
}
async fn load_thread_for_running_resume_response(
conversation_id: ThreadId,
rollout_path: &Path,
@@ -7730,11 +7668,7 @@ pub(crate) fn summary_to_thread(summary: ConversationSummary) -> Thread {
#[cfg(test)]
mod tests {
use super::*;
use crate::outgoing_message::OutgoingEnvelope;
use crate::outgoing_message::OutgoingMessage;
use anyhow::Result;
use codex_app_server_protocol::ServerRequestPayload;
use codex_app_server_protocol::ToolRequestUserInputParams;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SubAgentSource;
use pretty_assertions::assert_eq;
@@ -7928,67 +7862,6 @@ mod tests {
Ok(())
}
#[tokio::test]
async fn aborting_pending_request_clears_pending_state() -> Result<()> {
let thread_id = ThreadId::from_string("bfd12a78-5900-467b-9bc5-d3d35df08191")?;
let thread_state = Arc::new(Mutex::new(ThreadState::default()));
let connection_id = ConnectionId(7);
thread_state.lock().await.add_connection(connection_id);
let (outgoing_tx, mut outgoing_rx) = tokio::sync::mpsc::channel(8);
let outgoing = Arc::new(OutgoingMessageSender::new(outgoing_tx));
let thread_outgoing = ThreadScopedOutgoingMessageSender::new(
outgoing.clone(),
vec![connection_id],
thread_id,
);
let (request_id, client_request_rx) = thread_outgoing
.send_request(ServerRequestPayload::ToolRequestUserInput(
ToolRequestUserInputParams {
thread_id: thread_id.to_string(),
turn_id: "turn-1".to_string(),
item_id: "call-1".to_string(),
questions: vec![],
},
))
.await;
thread_outgoing.abort_pending_server_requests().await;
let request_message = outgoing_rx.recv().await.expect("request should be sent");
let OutgoingEnvelope::ToConnection {
connection_id: request_connection_id,
message:
OutgoingMessage::Request(ServerRequest::ToolRequestUserInput {
request_id: sent_request_id,
..
}),
} = request_message
else {
panic!("expected tool request to be sent to the subscribed connection");
};
assert_eq!(request_connection_id, connection_id);
assert_eq!(sent_request_id, request_id);
let response = client_request_rx
.await
.expect("callback should be resolved");
let error = response.expect_err("request should be aborted during cleanup");
assert_eq!(
error.message,
"client request resolved because the turn state was changed"
);
assert_eq!(error.data, Some(json!({ "reason": "turnTransition" })));
assert!(
outgoing
.pending_requests_for_thread(thread_id)
.await
.is_empty()
);
assert!(outgoing_rx.try_recv().is_err());
Ok(())
}
#[test]
fn summary_from_state_db_metadata_preserves_agent_nickname() -> Result<()> {
let conversation_id = ThreadId::from_string("bfd12a78-5900-467b-9bc5-d3d35df08191")?;

View File

@@ -9,7 +9,6 @@ use tokio::sync::oneshot;
use tracing::error;
use crate::outgoing_message::ClientRequestResult;
use crate::server_request_error::is_turn_transition_server_request_error;
pub(crate) async fn on_call_response(
call_id: String,
@@ -19,7 +18,6 @@ pub(crate) async fn on_call_response(
let response = receiver.await;
let (response, _error) = match response {
Ok(Ok(value)) => decode_response(value),
Ok(Err(err)) if is_turn_transition_server_request_error(&err) => return,
Ok(Err(err)) => {
error!("request failed with client error: {err:?}");
fallback_response("dynamic tool request failed")

View File

@@ -63,7 +63,6 @@ mod fuzzy_file_search;
mod message_processor;
mod models;
mod outgoing_message;
mod server_request_error;
mod thread_state;
mod thread_status;
mod transport;

View File

@@ -17,7 +17,6 @@ use tokio::sync::oneshot;
use tracing::warn;
use crate::error_code::INTERNAL_ERROR_CODE;
use crate::server_request_error::TURN_TRANSITION_PENDING_REQUEST_ERROR_REASON;
#[cfg(test)]
use codex_protocol::account::PlanType;
@@ -63,7 +62,6 @@ pub(crate) struct ThreadScopedOutgoingMessageSender {
struct PendingCallbackEntry {
callback: oneshot::Sender<ClientRequestResult>,
thread_id: Option<ThreadId>,
request: ServerRequest,
}
impl ThreadScopedOutgoingMessageSender {
@@ -82,12 +80,12 @@ impl ThreadScopedOutgoingMessageSender {
pub(crate) async fn send_request(
&self,
payload: ServerRequestPayload,
) -> (RequestId, oneshot::Receiver<ClientRequestResult>) {
) -> oneshot::Receiver<ClientRequestResult> {
self.outgoing
.send_request_to_connections(
Some(self.connection_ids.as_slice()),
.send_request_to_thread_connections(
self.thread_id,
self.connection_ids.as_slice(),
payload,
Some(self.thread_id),
)
.await
}
@@ -101,20 +99,6 @@ impl ThreadScopedOutgoingMessageSender {
.await;
}
pub(crate) async fn abort_pending_server_requests(&self) {
self.outgoing
.cancel_requests_for_thread(
self.thread_id,
Some(JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: "client request resolved because the turn state was changed"
.to_string(),
data: Some(serde_json::json!({ "reason": TURN_TRANSITION_PENDING_REQUEST_ERROR_REASON })),
}),
)
.await
}
pub(crate) async fn send_response<T: Serialize>(
&self,
request_id: ConnectionRequestId,
@@ -145,23 +129,38 @@ impl OutgoingMessageSender {
&self,
request: ServerRequestPayload,
) -> (RequestId, oneshot::Receiver<ClientRequestResult>) {
self.send_request_to_connections(None, request, None).await
self.send_request_with_id_to_connections(&[], request, None)
.await
}
async fn send_request_to_thread_connections(
&self,
thread_id: ThreadId,
connection_ids: &[ConnectionId],
request: ServerRequestPayload,
) -> oneshot::Receiver<ClientRequestResult> {
if connection_ids.is_empty() {
let (_tx, rx) = oneshot::channel();
return rx;
}
let (_request_id, receiver) = self
.send_request_with_id_to_connections(connection_ids, request, Some(thread_id))
.await;
receiver
}
fn next_request_id(&self) -> RequestId {
RequestId::Integer(self.next_server_request_id.fetch_add(1, Ordering::Relaxed))
}
async fn send_request_to_connections(
async fn send_request_with_id_to_connections(
&self,
connection_ids: Option<&[ConnectionId]>,
connection_ids: &[ConnectionId],
request: ServerRequestPayload,
thread_id: Option<ThreadId>,
) -> (RequestId, oneshot::Receiver<ClientRequestResult>) {
let id = self.next_request_id();
let outgoing_message_id = id.clone();
let request = request.request_with_id(outgoing_message_id.clone());
let (tx_approve, rx_approve) = oneshot::channel();
{
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
@@ -170,39 +169,36 @@ impl OutgoingMessageSender {
PendingCallbackEntry {
callback: tx_approve,
thread_id,
request: request.clone(),
},
);
}
let outgoing_message = OutgoingMessage::Request(request);
let send_result = match connection_ids {
None => {
self.sender
.send(OutgoingEnvelope::Broadcast {
message: outgoing_message,
let outgoing_message =
OutgoingMessage::Request(request.request_with_id(outgoing_message_id.clone()));
let send_result = if connection_ids.is_empty() {
self.sender
.send(OutgoingEnvelope::Broadcast {
message: outgoing_message,
})
.await
} else {
let mut send_error = None;
for connection_id in connection_ids {
if let Err(err) = self
.sender
.send(OutgoingEnvelope::ToConnection {
connection_id: *connection_id,
message: outgoing_message.clone(),
})
.await
{
send_error = Some(err);
break;
}
}
Some(connection_ids) => {
let mut send_error = None;
for connection_id in connection_ids {
if let Err(err) = self
.sender
.send(OutgoingEnvelope::ToConnection {
connection_id: *connection_id,
message: outgoing_message.clone(),
})
.await
{
send_error = Some(err);
break;
}
}
match send_error {
Some(err) => Err(err),
None => Ok(()),
}
match send_error {
Some(err) => Err(err),
None => Ok(()),
}
};
@@ -214,28 +210,11 @@ impl OutgoingMessageSender {
(outgoing_message_id, rx_approve)
}
pub(crate) async fn replay_requests_to_connection_for_thread(
&self,
connection_id: ConnectionId,
thread_id: ThreadId,
) {
let requests = self.pending_requests_for_thread(thread_id).await;
for request in requests {
if let Err(err) = self
.sender
.send(OutgoingEnvelope::ToConnection {
connection_id,
message: OutgoingMessage::Request(request),
})
.await
{
warn!("failed to resend request to client: {err:?}");
}
}
}
pub(crate) async fn notify_client_response(&self, id: RequestId, result: Result) {
let entry = self.take_request_callback(&id).await;
let entry = {
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
request_id_to_callback.remove_entry(&id)
};
match entry {
Some((id, entry)) => {
@@ -250,7 +229,10 @@ impl OutgoingMessageSender {
}
pub(crate) async fn notify_client_error(&self, id: RequestId, error: JSONRPCErrorError) {
let entry = self.take_request_callback(&id).await;
let entry = {
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
request_id_to_callback.remove_entry(&id)
};
match entry {
Some((id, entry)) => {
@@ -266,62 +248,23 @@ impl OutgoingMessageSender {
}
pub(crate) async fn cancel_request(&self, id: &RequestId) -> bool {
self.take_request_callback(id).await.is_some()
let entry = {
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
request_id_to_callback.remove_entry(id)
};
entry.is_some()
}
async fn take_request_callback(
&self,
id: &RequestId,
) -> Option<(RequestId, PendingCallbackEntry)> {
pub(crate) async fn cancel_requests_for_thread(&self, thread_id: ThreadId) {
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
request_id_to_callback.remove_entry(id)
}
pub(crate) async fn pending_requests_for_thread(
&self,
thread_id: ThreadId,
) -> Vec<ServerRequest> {
let request_id_to_callback = self.request_id_to_callback.lock().await;
let mut requests = request_id_to_callback
let request_ids = request_id_to_callback
.iter()
.filter_map(|(_, entry)| {
(entry.thread_id == Some(thread_id)).then_some(entry.request.clone())
.filter_map(|(request_id, entry)| {
(entry.thread_id == Some(thread_id)).then_some(request_id.clone())
})
.collect::<Vec<_>>();
requests.sort_by(|left, right| left.id().cmp(right.id()));
requests
}
pub(crate) async fn cancel_requests_for_thread(
&self,
thread_id: ThreadId,
error: Option<JSONRPCErrorError>,
) {
let entries = {
let mut request_id_to_callback = self.request_id_to_callback.lock().await;
let request_ids = request_id_to_callback
.iter()
.filter_map(|(request_id, entry)| {
(entry.thread_id == Some(thread_id)).then_some(request_id.clone())
})
.collect::<Vec<_>>();
let mut entries = Vec::with_capacity(request_ids.len());
for request_id in request_ids {
if let Some(entry) = request_id_to_callback.remove(&request_id) {
entries.push(entry);
}
}
entries
};
if let Some(error) = error {
for entry in entries {
if let Err(err) = entry.callback.send(Err(error.clone())) {
let request_id = entry.request.id();
warn!("could not notify callback for {request_id:?} due to: {err:?}",);
}
}
for request_id in request_ids {
request_id_to_callback.remove(&request_id);
}
}
@@ -498,18 +441,14 @@ mod tests {
use codex_app_server_protocol::ApplyPatchApprovalParams;
use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::ConfigWarningNotification;
use codex_app_server_protocol::DynamicToolCallParams;
use codex_app_server_protocol::FileChangeRequestApprovalParams;
use codex_app_server_protocol::LoginChatGptCompleteNotification;
use codex_app_server_protocol::ModelRerouteReason;
use codex_app_server_protocol::ModelReroutedNotification;
use codex_app_server_protocol::RateLimitSnapshot;
use codex_app_server_protocol::RateLimitWindow;
use codex_app_server_protocol::ToolRequestUserInputParams;
use codex_protocol::ThreadId;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::sync::Arc;
use tokio::time::timeout;
use uuid::Uuid;
@@ -784,121 +723,4 @@ mod tests {
.expect("waiter should receive a callback");
assert_eq!(result, Err(error));
}
#[tokio::test]
async fn pending_requests_for_thread_returns_thread_requests_in_request_id_order() {
let (tx, _rx) = mpsc::channel::<OutgoingEnvelope>(8);
let outgoing = Arc::new(OutgoingMessageSender::new(tx));
let thread_id = ThreadId::new();
let thread_outgoing = ThreadScopedOutgoingMessageSender::new(
outgoing.clone(),
vec![ConnectionId(1)],
thread_id,
);
let (dynamic_tool_request_id, _dynamic_tool_waiter) = thread_outgoing
.send_request(ServerRequestPayload::DynamicToolCall(
DynamicToolCallParams {
thread_id: thread_id.to_string(),
turn_id: "turn-1".to_string(),
call_id: "call-0".to_string(),
tool: "tool".to_string(),
arguments: json!({}),
},
))
.await;
let (first_request_id, _first_waiter) = thread_outgoing
.send_request(ServerRequestPayload::ToolRequestUserInput(
ToolRequestUserInputParams {
thread_id: thread_id.to_string(),
turn_id: "turn-1".to_string(),
item_id: "call-1".to_string(),
questions: vec![],
},
))
.await;
let (second_request_id, _second_waiter) = thread_outgoing
.send_request(ServerRequestPayload::FileChangeRequestApproval(
FileChangeRequestApprovalParams {
thread_id: thread_id.to_string(),
turn_id: "turn-1".to_string(),
item_id: "call-2".to_string(),
reason: None,
grant_root: None,
},
))
.await;
let pending_requests = outgoing.pending_requests_for_thread(thread_id).await;
assert_eq!(
pending_requests
.iter()
.map(ServerRequest::id)
.collect::<Vec<_>>(),
vec![
&dynamic_tool_request_id,
&first_request_id,
&second_request_id
]
);
}
#[tokio::test]
async fn cancel_requests_for_thread_cancels_all_thread_requests() {
let (tx, _rx) = mpsc::channel::<OutgoingEnvelope>(8);
let outgoing = Arc::new(OutgoingMessageSender::new(tx));
let thread_id = ThreadId::new();
let thread_outgoing = ThreadScopedOutgoingMessageSender::new(
outgoing.clone(),
vec![ConnectionId(1)],
thread_id,
);
let (_dynamic_tool_request_id, dynamic_tool_waiter) = thread_outgoing
.send_request(ServerRequestPayload::DynamicToolCall(
DynamicToolCallParams {
thread_id: thread_id.to_string(),
turn_id: "turn-1".to_string(),
call_id: "call-0".to_string(),
tool: "tool".to_string(),
arguments: json!({}),
},
))
.await;
let (_request_id, user_input_waiter) = thread_outgoing
.send_request(ServerRequestPayload::ToolRequestUserInput(
ToolRequestUserInputParams {
thread_id: thread_id.to_string(),
turn_id: "turn-1".to_string(),
item_id: "call-1".to_string(),
questions: vec![],
},
))
.await;
let error = JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: "tracked request cancelled".to_string(),
data: None,
};
outgoing
.cancel_requests_for_thread(thread_id, Some(error.clone()))
.await;
let dynamic_tool_result = timeout(Duration::from_secs(1), dynamic_tool_waiter)
.await
.expect("dynamic tool waiter should resolve")
.expect("dynamic tool waiter should receive a callback");
let user_input_result = timeout(Duration::from_secs(1), user_input_waiter)
.await
.expect("user input waiter should resolve")
.expect("user input waiter should receive a callback");
assert_eq!(dynamic_tool_result, Err(error.clone()));
assert_eq!(user_input_result, Err(error));
assert!(
outgoing
.pending_requests_for_thread(thread_id)
.await
.is_empty()
);
}
}

View File

@@ -1,42 +0,0 @@
use codex_app_server_protocol::JSONRPCErrorError;
pub(crate) const TURN_TRANSITION_PENDING_REQUEST_ERROR_REASON: &str = "turnTransition";
pub(crate) fn is_turn_transition_server_request_error(error: &JSONRPCErrorError) -> bool {
error
.data
.as_ref()
.and_then(|data| data.get("reason"))
.and_then(serde_json::Value::as_str)
== Some(TURN_TRANSITION_PENDING_REQUEST_ERROR_REASON)
}
#[cfg(test)]
mod tests {
use super::is_turn_transition_server_request_error;
use codex_app_server_protocol::JSONRPCErrorError;
use pretty_assertions::assert_eq;
use serde_json::json;
#[test]
fn turn_transition_error_is_detected() {
let error = JSONRPCErrorError {
code: -1,
message: "client request resolved because the turn state was changed".to_string(),
data: Some(json!({ "reason": "turnTransition" })),
};
assert_eq!(is_turn_transition_server_request_error(&error), true);
}
#[test]
fn unrelated_error_is_not_detected() {
let error = JSONRPCErrorError {
code: -1,
message: "boom".to_string(),
data: Some(json!({ "reason": "other" })),
};
assert_eq!(is_turn_transition_server_request_error(&error), false);
}
}

View File

@@ -1,6 +1,5 @@
use crate::outgoing_message::ConnectionId;
use crate::outgoing_message::ConnectionRequestId;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadHistoryBuilder;
use codex_app_server_protocol::Turn;
use codex_app_server_protocol::TurnError;
@@ -29,16 +28,8 @@ pub(crate) struct PendingThreadResumeRequest {
pub(crate) config_snapshot: ThreadConfigSnapshot,
}
// ThreadListenerCommand is used to perform operations in the context of the thread listener, for serialization purposes.
pub(crate) enum ThreadListenerCommand {
// SendThreadResumeResponse is used to resume an already running thread by sending the thread's history to the client and atomically subscribing for new updates.
SendThreadResumeResponse(Box<PendingThreadResumeRequest>),
// ResolveServerRequest is used to notify the client that the request has been resolved.
// It is executed in the thread listener's context to ensure that the resolved notification is ordered with regard to the request itself.
ResolveServerRequest {
request_id: RequestId,
completion_tx: oneshot::Sender<()>,
},
SendThreadResumeResponse(PendingThreadResumeRequest),
}
/// Per-conversation accumulation of the latest states e.g. error message while a turn runs.

View File

@@ -4,11 +4,9 @@ use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_responses_server_sequence;
use app_test_support::create_request_user_input_sse_response;
use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ServerRequestResolvedNotification;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnStartParams;
@@ -88,7 +86,6 @@ async fn request_user_input_round_trip() -> Result<()> {
assert_eq!(params.turn_id, turn.id);
assert_eq!(params.item_id, "call1");
assert_eq!(params.questions.len(), 1);
let resolved_request_id = request_id.clone();
mcp.send_response(
request_id,
@@ -99,31 +96,17 @@ async fn request_user_input_round_trip() -> Result<()> {
}),
)
.await?;
let mut saw_resolved = false;
loop {
let message = timeout(DEFAULT_READ_TIMEOUT, mcp.read_next_message()).await??;
let JSONRPCMessage::Notification(notification) = message else {
continue;
};
match notification.method.as_str() {
"serverRequest/resolved" => {
let resolved: ServerRequestResolvedNotification = serde_json::from_value(
notification
.params
.clone()
.expect("serverRequest/resolved params"),
)?;
assert_eq!(resolved.thread_id, thread.id);
assert_eq!(resolved.request_id, resolved_request_id);
saw_resolved = true;
}
"turn/completed" => {
assert!(saw_resolved, "serverRequest/resolved should arrive first");
break;
}
_ => {}
}
}
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("codex/event/task_complete"),
)
.await??;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("turn/completed"),
)
.await??;
Ok(())
}

View File

@@ -1,28 +1,13 @@
use anyhow::Context;
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::create_apply_patch_sse_response;
use app_test_support::create_fake_rollout_with_text_elements;
use app_test_support::create_final_assistant_message_sse_response;
use app_test_support::create_mock_responses_server_repeating_assistant;
use app_test_support::create_mock_responses_server_sequence;
use app_test_support::create_shell_command_sse_response;
use app_test_support::rollout_path;
use app_test_support::to_response;
use chrono::Utc;
use codex_app_server_protocol::AskForApproval;
use codex_app_server_protocol::CommandExecutionApprovalDecision;
use codex_app_server_protocol::CommandExecutionRequestApprovalResponse;
use codex_app_server_protocol::FileChangeApprovalDecision;
use codex_app_server_protocol::FileChangeRequestApprovalResponse;
use codex_app_server_protocol::ItemStartedNotification;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::PatchApplyStatus;
use codex_app_server_protocol::PatchChangeKind;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::SessionSource;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadResumeParams;
@@ -30,7 +15,6 @@ use codex_app_server_protocol::ThreadResumeResponse;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStatus;
use codex_app_server_protocol::ThreadStatusChangedNotification;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::TurnStatus;
@@ -293,7 +277,7 @@ async fn thread_resume_keeps_in_flight_turn_streaming() -> Result<()> {
.await??;
timeout(
DEFAULT_READ_TIMEOUT,
wait_for_thread_status_active(&mut primary, &thread.id),
primary.read_stream_until_notification_message("turn/started"),
)
.await??;
@@ -400,7 +384,7 @@ async fn thread_resume_rejects_history_when_thread_is_running() -> Result<()> {
to_response::<TurnStartResponse>(running_turn_resp)?;
timeout(
DEFAULT_READ_TIMEOUT,
wait_for_thread_status_active(&mut primary, &thread_id),
primary.read_stream_until_notification_message("turn/started"),
)
.await??;
@@ -516,7 +500,7 @@ async fn thread_resume_rejects_mismatched_path_when_thread_is_running() -> Resul
to_response::<TurnStartResponse>(running_turn_resp)?;
timeout(
DEFAULT_READ_TIMEOUT,
wait_for_thread_status_active(&mut primary, &thread_id),
primary.read_stream_until_notification_message("turn/started"),
)
.await??;
@@ -619,7 +603,7 @@ async fn thread_resume_rejoins_running_thread_even_with_override_mismatch() -> R
.await??;
timeout(
DEFAULT_READ_TIMEOUT,
wait_for_thread_status_active(&mut primary, &thread.id),
primary.read_stream_until_notification_message("turn/started"),
)
.await??;
@@ -655,306 +639,6 @@ async fn thread_resume_rejoins_running_thread_even_with_override_mismatch() -> R
Ok(())
}
/// A pending `CommandExecutionRequestApproval` server request must be replayed
/// verbatim when a client resumes the thread mid-turn, and the replayed
/// request must still be answerable so the turn can run to completion.
#[tokio::test]
async fn thread_resume_replays_pending_command_execution_request_approval() -> Result<()> {
    // Scripted model turns: one to seed history, one that triggers a shell
    // command (and therefore an approval request), and a final reply once the
    // command is approved.
    let responses = vec![
        create_final_assistant_message_sse_response("seeded")?,
        create_shell_command_sse_response(
            vec![
                "python3".to_string(),
                "-c".to_string(),
                "print(42)".to_string(),
            ],
            None,
            Some(5000),
            "call-1",
        )?,
        create_final_assistant_message_sse_response("done")?,
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;
    let mut primary = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, primary.initialize()).await??;
    // Start a thread and complete a first turn so the thread has history.
    let start_id = primary
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
    let seed_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "seed history".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(seed_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    primary.clear_message_buffer();
    // Second turn runs with UnlessTrusted so the scripted shell command blocks
    // on a CommandExecutionRequestApproval server request.
    let running_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "run command".to_string(),
                text_elements: Vec::new(),
            }],
            approval_policy: Some(AskForApproval::UnlessTrusted),
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(running_turn_id)),
    )
    .await??;
    let original_request = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::CommandExecutionRequestApproval { .. } = &original_request else {
        panic!("expected CommandExecutionRequestApproval request, got {original_request:?}");
    };
    // Resume the thread while the approval request is still pending.
    let resume_id = primary
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id.clone(),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse {
        thread: resumed_thread,
        ..
    } = to_response::<ThreadResumeResponse>(resume_resp)?;
    assert_eq!(resumed_thread.id, thread.id);
    // The resumed history must still report the in-flight turn.
    assert!(
        resumed_thread
            .turns
            .iter()
            .any(|turn| matches!(turn.status, TurnStatus::InProgress))
    );
    // The pending approval request is replayed identically after resume.
    let replayed_request = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_request_message(),
    )
    .await??;
    pretty_assertions::assert_eq!(replayed_request, original_request);
    let ServerRequest::CommandExecutionRequestApproval { request_id, .. } = replayed_request else {
        panic!("expected CommandExecutionRequestApproval request");
    };
    // Answering the replayed request unblocks the turn to completion.
    primary
        .send_response(
            request_id,
            serde_json::to_value(CommandExecutionRequestApprovalResponse {
                decision: CommandExecutionApprovalDecision::Accept,
            })?,
        )
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    Ok(())
}
/// A pending `FileChangeRequestApproval` server request must be replayed
/// verbatim when a client resumes the thread mid-turn, and answering the
/// replayed request must let the patch apply and the turn complete.
#[tokio::test]
async fn thread_resume_replays_pending_file_change_request_approval() -> Result<()> {
    let tmp = TempDir::new()?;
    let codex_home = tmp.path().join("codex_home");
    std::fs::create_dir(&codex_home)?;
    let workspace = tmp.path().join("workspace");
    std::fs::create_dir(&workspace)?;
    // Patch the model will ask to apply; its approval is what gets replayed.
    let patch = r#"*** Begin Patch
*** Add File: README.md
+new line
*** End Patch
"#;
    let responses = vec![
        create_final_assistant_message_sse_response("seeded")?,
        create_apply_patch_sse_response(patch, "patch-call")?,
        create_final_assistant_message_sse_response("done")?,
    ];
    let server = create_mock_responses_server_sequence(responses).await;
    create_config_toml(&codex_home, &server.uri())?;
    let mut primary = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, primary.initialize()).await??;
    // Start a thread rooted in the scratch workspace and seed some history.
    let start_id = primary
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            cwd: Some(workspace.to_string_lossy().into_owned()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
    let seed_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "seed history".to_string(),
                text_elements: Vec::new(),
            }],
            cwd: Some(workspace.clone()),
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(seed_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    primary.clear_message_buffer();
    // Second turn uses UnlessTrusted so the scripted patch blocks on approval.
    let running_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "apply patch".to_string(),
                text_elements: Vec::new(),
            }],
            cwd: Some(workspace.clone()),
            approval_policy: Some(AskForApproval::UnlessTrusted),
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(running_turn_id)),
    )
    .await??;
    // Wait for the FileChange item/started notification, skipping other items.
    let original_started = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let notification = primary
                .read_stream_until_notification_message("item/started")
                .await?;
            let started: ItemStartedNotification =
                serde_json::from_value(notification.params.clone().expect("item/started params"))?;
            if let ThreadItem::FileChange { .. } = started.item {
                return Ok::<ThreadItem, anyhow::Error>(started.item);
            }
        }
    })
    .await??;
    let expected_readme_path = workspace.join("README.md");
    let expected_file_change = ThreadItem::FileChange {
        id: "patch-call".to_string(),
        changes: vec![codex_app_server_protocol::FileUpdateChange {
            path: expected_readme_path.to_string_lossy().into_owned(),
            kind: PatchChangeKind::Add,
            diff: "new line\n".to_string(),
        }],
        status: PatchApplyStatus::InProgress,
    };
    assert_eq!(original_started, expected_file_change);
    let original_request = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::FileChangeRequestApproval { .. } = &original_request else {
        panic!("expected FileChangeRequestApproval request, got {original_request:?}");
    };
    primary.clear_message_buffer();
    // Resume the thread while the approval request is still pending.
    let resume_id = primary
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id.clone(),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse {
        thread: resumed_thread,
        ..
    } = to_response::<ThreadResumeResponse>(resume_resp)?;
    assert_eq!(resumed_thread.id, thread.id);
    // The resumed history must still report the in-flight turn.
    assert!(
        resumed_thread
            .turns
            .iter()
            .any(|turn| matches!(turn.status, TurnStatus::InProgress))
    );
    // The pending approval request is replayed identically after resume.
    let replayed_request = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_request_message(),
    )
    .await??;
    assert_eq!(replayed_request, original_request);
    let ServerRequest::FileChangeRequestApproval { request_id, .. } = replayed_request else {
        panic!("expected FileChangeRequestApproval request");
    };
    // Approving the replayed request lets the patch apply and the turn finish.
    primary
        .send_response(
            request_id,
            serde_json::to_value(FileChangeRequestApprovalResponse {
                decision: FileChangeApprovalDecision::Accept,
            })?,
        )
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    Ok(())
}
#[tokio::test]
async fn thread_resume_with_overrides_defers_updated_at_until_turn_start() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
@@ -1419,30 +1103,6 @@ required = true
)
}
/// Reads `thread/status/changed` notifications from `mcp` until one reports
/// `thread_id` as `Active` with no active flags, and returns that
/// notification. Notifications for other threads or statuses are skipped.
async fn wait_for_thread_status_active(
    mcp: &mut McpProcess,
    thread_id: &str,
) -> Result<ThreadStatusChangedNotification> {
    loop {
        let notification: JSONRPCNotification = mcp
            .read_stream_until_notification_message("thread/status/changed")
            .await?;
        let params = notification
            .params
            .context("thread/status/changed params must be present")?;
        let parsed: ThreadStatusChangedNotification = serde_json::from_value(params)?;
        let is_active = parsed.status
            == (ThreadStatus::Active {
                active_flags: Vec::new(),
            });
        if parsed.thread_id == thread_id && is_active {
            return Ok(parsed);
        }
    }
}
#[allow(dead_code)]
fn set_rollout_mtime(path: &Path, updated_at_rfc3339: &str) -> Result<()> {
let parsed = chrono::DateTime::parse_from_rfc3339(updated_at_rfc3339)?.with_timezone(&Utc);

View File

@@ -8,8 +8,6 @@ use app_test_support::to_response;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ServerRequestResolvedNotification;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnCompletedNotification;
@@ -50,7 +48,7 @@ async fn turn_interrupt_aborts_running_turn() -> Result<()> {
"call_sleep",
)?])
.await;
create_config_toml(&codex_home, &server.uri(), "never")?;
create_config_toml(&codex_home, &server.uri())?;
let mut mcp = McpProcess::new(&codex_home).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
@@ -122,134 +120,15 @@ async fn turn_interrupt_aborts_running_turn() -> Result<()> {
Ok(())
}
/// Interrupting a turn that is blocked on a command approval must resolve the
/// pending server request: a `serverRequest/resolved` notification for that
/// request id arrives, followed by `turn/completed` with status Interrupted.
#[tokio::test]
async fn turn_interrupt_resolves_pending_command_approval_request() -> Result<()> {
    // Long-running sleep command so the turn is still blocked when we
    // interrupt; platform-specific spelling.
    #[cfg(target_os = "windows")]
    let shell_command = vec![
        "powershell".to_string(),
        "-Command".to_string(),
        "Start-Sleep -Seconds 10".to_string(),
    ];
    #[cfg(not(target_os = "windows"))]
    let shell_command = vec!["sleep".to_string(), "10".to_string()];
    let tmp = TempDir::new()?;
    let codex_home = tmp.path().join("codex_home");
    std::fs::create_dir(&codex_home)?;
    let working_directory = tmp.path().join("workdir");
    std::fs::create_dir(&working_directory)?;
    let server = create_mock_responses_server_sequence(vec![create_shell_command_sse_response(
        shell_command.clone(),
        Some(&working_directory),
        Some(10_000),
        "call_sleep_approval",
    )?])
    .await;
    // "untrusted" approval policy forces the command to request approval.
    create_config_toml(&codex_home, &server.uri(), "untrusted")?;
    let mut mcp = McpProcess::new(&codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;
    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![V2UserInput::Text {
                text: "run sleep".to_string(),
                text_elements: Vec::new(),
            }],
            cwd: Some(working_directory),
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let TurnStartResponse { turn } = to_response::<TurnStartResponse>(turn_resp)?;
    // The scripted command produces an approval request we deliberately leave
    // unanswered.
    let request = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_request_message(),
    )
    .await??;
    let ServerRequest::CommandExecutionRequestApproval { request_id, params } = request else {
        panic!("expected CommandExecutionRequestApproval request");
    };
    assert_eq!(params.item_id, "call_sleep_approval");
    assert_eq!(params.thread_id, thread.id);
    assert_eq!(params.turn_id, turn.id);
    // Interrupt the turn while the approval request is still pending.
    let interrupt_id = mcp
        .send_turn_interrupt_request(TurnInterruptParams {
            thread_id: thread.id.clone(),
            turn_id: turn.id.clone(),
        })
        .await?;
    let interrupt_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(interrupt_id)),
    )
    .await??;
    let _resp: TurnInterruptResponse = to_response::<TurnInterruptResponse>(interrupt_resp)?;
    // The pending approval must be resolved for exactly the request we saw.
    let resolved_notification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("serverRequest/resolved"),
    )
    .await??;
    let resolved: ServerRequestResolvedNotification = serde_json::from_value(
        resolved_notification
            .params
            .clone()
            .expect("serverRequest/resolved params must be present"),
    )?;
    assert_eq!(resolved.thread_id, thread.id);
    assert_eq!(resolved.request_id, request_id);
    // The turn then completes with Interrupted status.
    let completed_notif: JSONRPCNotification = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    let completed: TurnCompletedNotification = serde_json::from_value(
        completed_notif
            .params
            .expect("turn/completed params must be present"),
    )?;
    assert_eq!(completed.thread_id, thread.id);
    assert_eq!(completed.turn.status, TurnStatus::Interrupted);
    Ok(())
}
// Helper to create a config.toml pointing at the mock model server.
fn create_config_toml(
codex_home: &std::path::Path,
server_uri: &str,
approval_policy: &str,
) -> std::io::Result<()> {
fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
let config_toml = codex_home.join("config.toml");
std::fs::write(
config_toml,
format!(
r#"
model = "mock-model"
approval_policy = "{approval_policy}"
approval_policy = "never"
sandbox_mode = "danger-full-access"
model_provider = "mock_provider"

View File

@@ -22,14 +22,12 @@ use codex_app_server_protocol::FileChangeRequestApprovalResponse;
use codex_app_server_protocol::ItemCompletedNotification;
use codex_app_server_protocol::ItemStartedNotification;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCMessage;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::PatchApplyStatus;
use codex_app_server_protocol::PatchChangeKind;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ServerRequest;
use codex_app_server_protocol::ServerRequestResolvedNotification;
use codex_app_server_protocol::TextElement;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadStartParams;
@@ -1073,7 +1071,6 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
panic!("expected CommandExecutionRequestApproval request");
};
assert_eq!(params.item_id, "call1");
let resolved_request_id = request_id.clone();
// Approve and wait for task completion
mcp.send_response(
@@ -1083,31 +1080,16 @@ async fn turn_start_exec_approval_toggle_v2() -> Result<()> {
})?,
)
.await?;
let mut saw_resolved = false;
loop {
let message = timeout(DEFAULT_READ_TIMEOUT, mcp.read_next_message()).await??;
let JSONRPCMessage::Notification(notification) = message else {
continue;
};
match notification.method.as_str() {
"serverRequest/resolved" => {
let resolved: ServerRequestResolvedNotification = serde_json::from_value(
notification
.params
.clone()
.expect("serverRequest/resolved params"),
)?;
assert_eq!(resolved.thread_id, thread.id);
assert_eq!(resolved.request_id, resolved_request_id);
saw_resolved = true;
}
"turn/completed" => {
assert!(saw_resolved, "serverRequest/resolved should arrive first");
break;
}
_ => {}
}
}
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("codex/event/task_complete"),
)
.await??;
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("turn/completed"),
)
.await??;
// Second turn with approval_policy=never should not elicit approval
let second_turn_id = mcp
@@ -1545,7 +1527,6 @@ async fn turn_start_file_change_approval_v2() -> Result<()> {
assert_eq!(params.item_id, "patch-call");
assert_eq!(params.thread_id, thread.id);
assert_eq!(params.turn_id, turn.id);
let resolved_request_id = request_id.clone();
let expected_readme_path = workspace.join("README.md");
let expected_readme_path = expected_readme_path.to_string_lossy().into_owned();
pretty_assertions::assert_eq!(
@@ -1564,49 +1545,18 @@ async fn turn_start_file_change_approval_v2() -> Result<()> {
})?,
)
.await?;
let mut saw_resolved = false;
let mut output_delta: Option<FileChangeOutputDeltaNotification> = None;
let mut completed_file_change: Option<ThreadItem> = None;
while !(output_delta.is_some() && completed_file_change.is_some()) {
let message = timeout(DEFAULT_READ_TIMEOUT, mcp.read_next_message()).await??;
let JSONRPCMessage::Notification(notification) = message else {
continue;
};
match notification.method.as_str() {
"serverRequest/resolved" => {
let resolved: ServerRequestResolvedNotification = serde_json::from_value(
notification
.params
.clone()
.expect("serverRequest/resolved params"),
)?;
assert_eq!(resolved.thread_id, thread.id);
assert_eq!(resolved.request_id, resolved_request_id);
saw_resolved = true;
}
"item/fileChange/outputDelta" => {
assert!(saw_resolved, "serverRequest/resolved should arrive first");
let notification: FileChangeOutputDeltaNotification = serde_json::from_value(
notification
.params
.clone()
.expect("item/fileChange/outputDelta params"),
)?;
output_delta = Some(notification);
}
"item/completed" => {
let completed: ItemCompletedNotification = serde_json::from_value(
notification.params.clone().expect("item/completed params"),
)?;
if let ThreadItem::FileChange { .. } = completed.item {
assert!(saw_resolved, "serverRequest/resolved should arrive first");
completed_file_change = Some(completed.item);
}
}
_ => {}
}
}
let output_delta = output_delta.expect("file change output delta should be observed");
let output_delta_notif = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("item/fileChange/outputDelta"),
)
.await??;
let output_delta: FileChangeOutputDeltaNotification = serde_json::from_value(
output_delta_notif
.params
.clone()
.expect("item/fileChange/outputDelta params"),
)?;
assert_eq!(output_delta.thread_id, thread.id);
assert_eq!(output_delta.turn_id, turn.id);
assert_eq!(output_delta.item_id, "patch-call");
@@ -1616,23 +1566,38 @@ async fn turn_start_file_change_approval_v2() -> Result<()> {
output_delta.delta
);
let completed_file_change =
completed_file_change.expect("file change completion should be observed");
let completed_file_change = timeout(DEFAULT_READ_TIMEOUT, async {
loop {
let completed_notif = mcp
.read_stream_until_notification_message("item/completed")
.await?;
let completed: ItemCompletedNotification = serde_json::from_value(
completed_notif
.params
.clone()
.expect("item/completed params"),
)?;
if let ThreadItem::FileChange { .. } = completed.item {
return Ok::<ThreadItem, anyhow::Error>(completed.item);
}
}
})
.await??;
let ThreadItem::FileChange { ref id, status, .. } = completed_file_change else {
unreachable!("loop ensures we break on file change items");
};
assert_eq!(id, "patch-call");
assert_eq!(status, PatchApplyStatus::Completed);
let readme_contents = std::fs::read_to_string(expected_readme_path)?;
assert_eq!(readme_contents, "new line\n");
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_notification_message("codex/event/task_complete"),
)
.await??;
let readme_contents = std::fs::read_to_string(expected_readme_path)?;
assert_eq!(readme_contents, "new line\n");
Ok(())
}

View File

@@ -1147,7 +1147,6 @@ mod tests {
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["rm"]),
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
}],
}

View File

@@ -1390,7 +1390,6 @@ prefix_rules = [
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["rm"]),
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
}],
}
@@ -1416,7 +1415,6 @@ prefix_rules = [
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "status"]),
decision: Decision::Prompt,
resolved_program: None,
justification: None,
}],
}
@@ -1428,7 +1426,6 @@ prefix_rules = [
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["hg", "status"]),
decision: Decision::Prompt,
resolved_program: None,
justification: None,
}],
}
@@ -1512,7 +1509,6 @@ prefix_rules = []
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: vec!["rm".to_string()],
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
}],
}
@@ -1551,7 +1547,6 @@ prefix_rules = []
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: vec!["rm".to_string()],
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
}],
}
@@ -1566,7 +1561,6 @@ prefix_rules = []
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: vec!["git".to_string(), "push".to_string()],
decision: Decision::Prompt,
resolved_program: None,
justification: None,
}],
}

View File

@@ -472,7 +472,17 @@ pub async fn load_exec_policy(config_stack: &ConfigLayerStack) -> Result<Policy,
return Ok(policy);
};
Ok(policy.merge_overlay(requirements_policy.as_ref()))
let mut combined_rules = policy.rules().clone();
for (program, rules) in requirements_policy.as_ref().rules().iter_all() {
for rule in rules {
combined_rules.insert(program.clone(), rule.clone());
}
}
let mut combined_network_rules = policy.network_rules().to_vec();
combined_network_rules.extend(requirements_policy.as_ref().network_rules().iter().cloned());
Ok(Policy::from_parts(combined_rules, combined_network_rules))
}
/// If a command is not matched by any execpolicy rule, derive a [`Decision`].
@@ -817,7 +827,6 @@ mod tests {
use pretty_assertions::assert_eq;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use tempfile::tempdir;
use toml::Value as TomlValue;
@@ -837,22 +846,6 @@ mod tests {
.expect("ConfigLayerStack")
}
/// Builds a platform-appropriate absolute path string from `segments`,
/// rooted at `C:\` on Windows and `/` elsewhere.
fn host_absolute_path(segments: &[&str]) -> String {
    let root = if cfg!(windows) { r"C:\" } else { "/" };
    let path = segments
        .iter()
        .fold(PathBuf::from(root), |mut acc, segment| {
            acc.push(segment);
            acc
        });
    path.to_string_lossy().into_owned()
}
/// Escapes `value` for embedding inside a double-quoted Starlark string
/// literal: backslashes and double quotes get a leading backslash.
fn starlark_string(value: &str) -> String {
    let mut escaped = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}
#[tokio::test]
async fn returns_empty_policy_when_no_policy_files_exist() {
let temp_dir = tempdir().expect("create temp dir");
@@ -956,7 +949,6 @@ mod tests {
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: vec!["rm".to_string()],
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
}],
},
@@ -999,59 +991,6 @@ mod tests {
Ok(())
}
/// Merging a requirements exec-policy overlay must not drop host executables
/// declared in on-disk policy files: after loading, the `git` host executable
/// from `host.rules` is still present alongside the overlay's network rule.
#[tokio::test]
async fn preserves_host_executables_when_requirements_overlay_is_present() -> anyhow::Result<()>
{
    let temp_dir = tempdir()?;
    let policy_dir = temp_dir.path().join(RULES_DIR_NAME);
    fs::create_dir_all(&policy_dir)?;
    // Platform-appropriate absolute path, escaped for the Starlark literal.
    let git_path = host_absolute_path(&["usr", "bin", "git"]);
    let git_path_literal = starlark_string(&git_path);
    fs::write(
        policy_dir.join("host.rules"),
        format!(
            r#"
host_executable(name = "git", paths = ["{git_path_literal}"])
"#
        ),
    )?;
    // Requirements overlay containing only a network rule — no host
    // executables of its own.
    let mut requirements_exec_policy = Policy::empty();
    requirements_exec_policy.add_network_rule(
        "blocked.example.com",
        codex_execpolicy::NetworkRuleProtocol::Https,
        Decision::Forbidden,
        None,
    )?;
    let requirements = ConfigRequirements {
        exec_policy: Some(codex_config::Sourced::new(
            codex_config::RequirementsExecPolicy::new(requirements_exec_policy),
            codex_config::RequirementSource::Unknown,
        )),
        ..ConfigRequirements::default()
    };
    let dot_codex_folder = AbsolutePathBuf::from_absolute_path(temp_dir.path())?;
    let layer = ConfigLayerEntry::new(
        ConfigLayerSource::Project { dot_codex_folder },
        TomlValue::Table(Default::default()),
    );
    let config_stack =
        ConfigLayerStack::new(vec![layer], requirements, ConfigRequirementsToml::default())?;
    let policy = load_exec_policy(&config_stack).await?;
    // The host executable from disk must survive the overlay merge.
    assert_eq!(
        policy
            .host_executables()
            .get("git")
            .expect("missing git host executable")
            .as_ref(),
        [AbsolutePathBuf::try_from(git_path)?]
    );
    Ok(())
}
#[tokio::test]
async fn ignores_policies_outside_policy_dir() {
let temp_dir = tempdir().expect("create temp dir");
@@ -1167,7 +1106,6 @@ host_executable(name = "git", paths = ["{git_path_literal}"])
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: vec!["rm".to_string()],
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
}],
},
@@ -1179,7 +1117,6 @@ host_executable(name = "git", paths = ["{git_path_literal}"])
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: vec!["ls".to_string()],
decision: Decision::Prompt,
resolved_program: None,
justification: None,
}],
},
@@ -2046,7 +1983,6 @@ prefix_rule(
let matched_rules_prompt = vec![RuleMatch::PrefixRuleMatch {
matched_prefix: vec!["cargo".to_string()],
decision: Decision::Prompt,
resolved_program: None,
justification: None,
}];
assert_eq!(
@@ -2060,7 +1996,6 @@ prefix_rule(
let matched_rules_allow = vec![RuleMatch::PrefixRuleMatch {
matched_prefix: vec!["cargo".to_string()],
decision: Decision::Allow,
resolved_program: None,
justification: None,
}];
assert_eq!(
@@ -2074,7 +2009,6 @@ prefix_rule(
let matched_rules_forbidden = vec![RuleMatch::PrefixRuleMatch {
matched_prefix: vec!["cargo".to_string()],
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
}];
assert_eq!(

View File

@@ -19,7 +19,6 @@ workspace = true
[dependencies]
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
codex-utils-absolute-path = { workspace = true }
multimap = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }

View File

@@ -2,12 +2,11 @@
## Overview
- Policy engine and CLI built around `prefix_rule(pattern=[...], decision?, justification?, match?, not_match?)` plus `host_executable(name=..., paths=[...])`.
- This release covers the prefix-rule subset of the execpolicy language plus host executable metadata; a richer language will follow.
- Policy engine and CLI built around `prefix_rule(pattern=[...], decision?, justification?, match?, not_match?)`.
- This release covers the prefix-rule subset of the execpolicy language; a richer language will follow.
- Tokens are matched in order; any `pattern` element may be a list to denote alternatives. `decision` defaults to `allow`; valid values: `allow`, `prompt`, `forbidden`.
- `justification` is an optional human-readable rationale for why a rule exists. It can be provided for any `decision` and may be surfaced in different contexts (for example, in approval prompts or rejection messages). When `decision = "forbidden"` is used, include a recommended alternative in the `justification`, when appropriate (e.g., ``"Use `jj` instead of `git`."``).
- `match` / `not_match` supply example invocations that are validated at load time (think of them as unit tests); examples can be token arrays or strings (strings are tokenized with `shlex`).
- Prefer token arrays when the exact tokens matter, such as Windows absolute paths (`C:\...\git.exe`) or shell-specific quoting; string examples are shorthand for simple cases like `git status`.
- The CLI always prints the JSON serialization of the evaluation result.
- The legacy rule matcher lives in `codex-execpolicy-legacy`.
@@ -20,31 +19,11 @@ prefix_rule(
pattern = ["cmd", ["alt1", "alt2"]], # ordered tokens; list entries denote alternatives
decision = "prompt", # allow | prompt | forbidden; defaults to allow
justification = "explain why this rule exists",
match = [["cmd", "alt1"], "cmd alt2"], # token arrays are the exact, lossless form
not_match = [["cmd", "oops"], "cmd alt3"], # strings are convenient shlex shorthand
match = [["cmd", "alt1"], "cmd alt2"], # examples that must match this rule
not_match = [["cmd", "oops"], "cmd alt3"], # examples that must not match this rule
)
```
- Host executable metadata can optionally constrain which absolute paths may
resolve through basename rules:
```starlark
host_executable(
name = "git",
paths = [
"/opt/homebrew/bin/git",
"/usr/bin/git",
],
)
```
- Matching semantics:
- execpolicy always tries exact first-token matches first.
- With host-executable resolution disabled, `/usr/bin/git status` only matches a rule whose first token is `/usr/bin/git`.
- With host-executable resolution enabled, if no exact rule matches, execpolicy may fall back from `/usr/bin/git` to basename rules for `git`.
- If `host_executable(name="git", ...)` exists, basename fallback is only allowed for listed absolute paths.
- If no `host_executable()` entry exists for a basename, basename fallback is allowed.
## CLI
- From the Codex CLI, run `codex execpolicy check` subcommand with one or more policy files (for example `src/default.rules`) to check a command:
@@ -53,15 +32,6 @@ host_executable(
codex execpolicy check --rules path/to/policy.rules git status
```
- To opt into basename fallback for absolute program paths, pass `--resolve-host-executables`:
```bash
codex execpolicy check \
--rules path/to/policy.rules \
--resolve-host-executables \
/usr/bin/git status
```
- Pass multiple `--rules` flags to merge rules, evaluated in the order provided, and use `--pretty` for formatted JSON.
- You can also run the standalone dev binary directly during development:
@@ -82,7 +52,6 @@ cargo run -p codex-execpolicy -- check --rules path/to/policy.rules git status
"prefixRuleMatch": {
"matchedPrefix": ["<token>", "..."],
"decision": "allow|prompt|forbidden",
"resolvedProgram": "/absolute/path/to/program",
"justification": "..."
}
}
@@ -93,7 +62,6 @@ cargo run -p codex-execpolicy -- check --rules path/to/policy.rules git status
- When no rules match, `matchedRules` is an empty array and `decision` is omitted.
- `matchedRules` lists every rule whose prefix matched the command; `matchedPrefix` is the exact prefix that matched.
- `resolvedProgram` is omitted unless an absolute executable path matched via basename fallback.
- The effective `decision` is the strictest severity across all matches (`forbidden` > `prompt` > `allow`).
Note: `execpolicy` commands are still in preview. The API may have breaking changes in the future.

View File

@@ -38,47 +38,16 @@ pub enum Error {
ExampleDidNotMatch {
rules: Vec<String>,
examples: Vec<String>,
location: Option<ErrorLocation>,
},
#[error("expected example to not match rule `{rule}`: {example}")]
ExampleDidMatch {
rule: String,
example: String,
location: Option<ErrorLocation>,
},
ExampleDidMatch { rule: String, example: String },
#[error("starlark error: {0}")]
Starlark(StarlarkError),
}
impl Error {
pub fn with_location(self, location: ErrorLocation) -> Self {
match self {
Error::ExampleDidNotMatch {
rules,
examples,
location: None,
} => Error::ExampleDidNotMatch {
rules,
examples,
location: Some(location),
},
Error::ExampleDidMatch {
rule,
example,
location: None,
} => Error::ExampleDidMatch {
rule,
example,
location: Some(location),
},
other => other,
}
}
pub fn location(&self) -> Option<ErrorLocation> {
match self {
Error::ExampleDidNotMatch { location, .. }
| Error::ExampleDidMatch { location, .. } => location.clone(),
Error::Starlark(err) => err.span().map(|span| {
let resolved = span.resolve_span();
ErrorLocation {

View File

@@ -7,7 +7,6 @@ use clap::Parser;
use serde::Serialize;
use crate::Decision;
use crate::MatchOptions;
use crate::Policy;
use crate::PolicyParser;
use crate::RuleMatch;
@@ -23,11 +22,6 @@ pub struct ExecPolicyCheckCommand {
#[arg(long)]
pub pretty: bool,
/// Resolve absolute program paths against basename rules, gated by any
/// `host_executable()` definitions in the loaded policy files.
#[arg(long)]
pub resolve_host_executables: bool,
/// Command tokens to check against the policy.
#[arg(
value_name = "COMMAND",
@@ -42,13 +36,7 @@ impl ExecPolicyCheckCommand {
/// Load the policies for this command, evaluate the command, and render JSON output.
pub fn run(&self) -> Result<()> {
let policy = load_policies(&self.rules)?;
let matched_rules = policy.matches_for_command_with_options(
&self.command,
None,
&MatchOptions {
resolve_host_executables: self.resolve_host_executables,
},
);
let matched_rules = policy.matches_for_command(&self.command, None);
let json = format_matches_json(&matched_rules, self.pretty)?;
println!("{json}");

View File

@@ -1,29 +0,0 @@
use std::path::Path;
#[cfg(windows)]
const WINDOWS_EXECUTABLE_SUFFIXES: [&str; 4] = [".exe", ".cmd", ".bat", ".com"];
pub(crate) fn executable_lookup_key(raw: &str) -> String {
#[cfg(windows)]
{
let raw = raw.to_ascii_lowercase();
for suffix in WINDOWS_EXECUTABLE_SUFFIXES {
if raw.ends_with(suffix) {
let stripped_len = raw.len() - suffix.len();
return raw[..stripped_len].to_string();
}
}
raw
}
#[cfg(not(windows))]
{
raw.to_string()
}
}
pub(crate) fn executable_path_lookup_key(path: &Path) -> Option<String> {
path.file_name()
.and_then(|name| name.to_str())
.map(executable_lookup_key)
}

View File

@@ -2,7 +2,6 @@ pub mod amend;
pub mod decision;
pub mod error;
pub mod execpolicycheck;
mod executable_name;
pub mod parser;
pub mod policy;
pub mod rule;
@@ -19,7 +18,6 @@ pub use error::TextRange;
pub use execpolicycheck::ExecPolicyCheckCommand;
pub use parser::PolicyParser;
pub use policy::Evaluation;
pub use policy::MatchOptions;
pub use policy::Policy;
pub use rule::NetworkRuleProtocol;
pub use rule::Rule;

View File

@@ -1,8 +1,6 @@
use codex_utils_absolute_path::AbsolutePathBuf;
use multimap::MultiMap;
use shlex;
use starlark::any::ProvidesStaticType;
use starlark::codemap::FileSpan;
use starlark::environment::GlobalsBuilder;
use starlark::environment::Module;
use starlark::eval::Evaluator;
@@ -15,18 +13,11 @@ use starlark::values::list::UnpackList;
use starlark::values::none::NoneType;
use std::cell::RefCell;
use std::cell::RefMut;
use std::collections::HashMap;
use std::path::Path;
use std::sync::Arc;
use crate::decision::Decision;
use crate::error::Error;
use crate::error::ErrorLocation;
use crate::error::Result;
use crate::error::TextPosition;
use crate::error::TextRange;
use crate::executable_name::executable_lookup_key;
use crate::executable_name::executable_path_lookup_key;
use crate::rule::NetworkRule;
use crate::rule::NetworkRuleProtocol;
use crate::rule::PatternToken;
@@ -56,7 +47,6 @@ impl PolicyParser {
/// Parses a policy, tagging parser errors with `policy_identifier` so failures include the
/// identifier alongside line numbers.
pub fn parse(&mut self, policy_identifier: &str, policy_file_contents: &str) -> Result<()> {
let pending_validation_count = self.builder.borrow().pending_example_validations.len();
let mut dialect = Dialect::Extended.clone();
dialect.enable_f_strings = true;
let ast = AstModule::parse(
@@ -72,9 +62,6 @@ impl PolicyParser {
eval.extra = Some(&self.builder);
eval.eval_module(ast, &globals).map_err(Error::Starlark)?;
}
self.builder
.borrow()
.validate_pending_examples_from(pending_validation_count)?;
Ok(())
}
@@ -87,8 +74,6 @@ impl PolicyParser {
struct PolicyBuilder {
rules_by_program: MultiMap<String, RuleRef>,
network_rules: Vec<NetworkRule>,
host_executables_by_name: HashMap<String, Arc<[AbsolutePathBuf]>>,
pending_example_validations: Vec<PendingExampleValidation>,
}
impl PolicyBuilder {
@@ -96,8 +81,6 @@ impl PolicyBuilder {
Self {
rules_by_program: MultiMap::new(),
network_rules: Vec::new(),
host_executables_by_name: HashMap::new(),
pending_example_validations: Vec::new(),
}
}
@@ -110,64 +93,11 @@ impl PolicyBuilder {
self.network_rules.push(rule);
}
fn add_host_executable(&mut self, name: String, paths: Vec<AbsolutePathBuf>) {
self.host_executables_by_name.insert(name, paths.into());
}
fn add_pending_example_validation(
&mut self,
rules: Vec<RuleRef>,
matches: Vec<Vec<String>>,
not_matches: Vec<Vec<String>>,
location: Option<ErrorLocation>,
) {
self.pending_example_validations
.push(PendingExampleValidation {
rules,
matches,
not_matches,
location,
});
}
fn validate_pending_examples_from(&self, start: usize) -> Result<()> {
for validation in &self.pending_example_validations[start..] {
let mut rules_by_program = MultiMap::new();
for rule in &validation.rules {
rules_by_program.insert(rule.program().to_string(), rule.clone());
}
let policy = crate::policy::Policy::from_parts(
rules_by_program,
Vec::new(),
self.host_executables_by_name.clone(),
);
validate_not_match_examples(&policy, &validation.rules, &validation.not_matches)
.map_err(|error| attach_validation_location(error, validation.location.clone()))?;
validate_match_examples(&policy, &validation.rules, &validation.matches)
.map_err(|error| attach_validation_location(error, validation.location.clone()))?;
}
Ok(())
}
fn build(self) -> crate::policy::Policy {
crate::policy::Policy::from_parts(
self.rules_by_program,
self.network_rules,
self.host_executables_by_name,
)
crate::policy::Policy::from_parts(self.rules_by_program, self.network_rules)
}
}
#[derive(Debug)]
struct PendingExampleValidation {
rules: Vec<RuleRef>,
matches: Vec<Vec<String>>,
not_matches: Vec<Vec<String>>,
location: Option<ErrorLocation>,
}
fn parse_pattern<'v>(pattern: UnpackList<Value<'v>>) -> Result<Vec<PatternToken>> {
let tokens: Vec<PatternToken> = pattern
.items
@@ -220,36 +150,6 @@ fn parse_examples<'v>(examples: UnpackList<Value<'v>>) -> Result<Vec<Vec<String>
examples.items.into_iter().map(parse_example).collect()
}
fn parse_literal_absolute_path(raw: &str) -> Result<AbsolutePathBuf> {
if !Path::new(raw).is_absolute() {
return Err(Error::InvalidRule(format!(
"host_executable paths must be absolute (got {raw})"
)));
}
AbsolutePathBuf::try_from(raw.to_string())
.map_err(|error| Error::InvalidRule(format!("invalid absolute path `{raw}`: {error}")))
}
fn validate_host_executable_name(name: &str) -> Result<()> {
if name.is_empty() {
return Err(Error::InvalidRule(
"host_executable name cannot be empty".to_string(),
));
}
let path = Path::new(name);
if path.components().count() != 1
|| path.file_name().and_then(|value| value.to_str()) != Some(name)
{
return Err(Error::InvalidRule(format!(
"host_executable name must be a bare executable name (got {name})"
)));
}
Ok(())
}
fn parse_network_rule_decision(raw: &str) -> Result<Decision> {
match raw {
"deny" => Ok(Decision::Forbidden),
@@ -257,30 +157,6 @@ fn parse_network_rule_decision(raw: &str) -> Result<Decision> {
}
}
fn error_location_from_file_span(span: FileSpan) -> ErrorLocation {
let resolved = span.resolve_span();
ErrorLocation {
path: span.filename().to_string(),
range: TextRange {
start: TextPosition {
line: resolved.begin.line + 1,
column: resolved.begin.column + 1,
},
end: TextPosition {
line: resolved.end.line + 1,
column: resolved.end.column + 1,
},
},
}
}
fn attach_validation_location(error: Error, location: Option<ErrorLocation>) -> Error {
match location {
Some(location) => error.with_location(location),
None => error,
}
}
fn parse_example<'v>(value: Value<'v>) -> Result<Vec<String>> {
if let Some(raw) = value.unpack_str() {
parse_string_example(raw)
@@ -375,9 +251,6 @@ fn policy_builtins(builder: &mut GlobalsBuilder) {
.map(parse_examples)
.transpose()?
.unwrap_or_default();
let location = eval
.call_stack_top_location()
.map(error_location_from_file_span);
let mut builder = policy_builder(eval);
@@ -402,7 +275,9 @@ fn policy_builtins(builder: &mut GlobalsBuilder) {
})
.collect();
builder.add_pending_example_validation(rules.clone(), matches, not_matches, location);
validate_not_match_examples(&rules, &not_matches)?;
validate_match_examples(&rules, &matches)?;
rules.into_iter().for_each(|rule| builder.add_rule(rule));
Ok(NoneType)
}
@@ -433,41 +308,4 @@ fn policy_builtins(builder: &mut GlobalsBuilder) {
});
Ok(NoneType)
}
fn host_executable<'v>(
name: &'v str,
paths: UnpackList<Value<'v>>,
eval: &mut Evaluator<'v, '_, '_>,
) -> anyhow::Result<NoneType> {
validate_host_executable_name(name)?;
let mut parsed_paths = Vec::new();
for value in paths.items {
let raw = value.unpack_str().ok_or_else(|| {
Error::InvalidRule(format!(
"host_executable paths must be strings (got {})",
value.get_type()
))
})?;
let path = parse_literal_absolute_path(raw)?;
let Some(path_name) = executable_path_lookup_key(path.as_path()) else {
return Err(Error::InvalidRule(format!(
"host_executable path `{raw}` must have basename `{name}`"
))
.into());
};
if path_name != executable_lookup_key(name) {
return Err(Error::InvalidRule(format!(
"host_executable path `{raw}` must have basename `{name}`"
))
.into());
}
if !parsed_paths.iter().any(|existing| existing == &path) {
parsed_paths.push(path);
}
}
policy_builder(eval).add_host_executable(executable_lookup_key(name), parsed_paths);
Ok(NoneType)
}
}

View File

@@ -1,7 +1,6 @@
use crate::decision::Decision;
use crate::error::Error;
use crate::error::Result;
use crate::executable_name::executable_path_lookup_key;
use crate::rule::NetworkRule;
use crate::rule::NetworkRuleProtocol;
use crate::rule::PatternToken;
@@ -10,41 +9,31 @@ use crate::rule::PrefixRule;
use crate::rule::RuleMatch;
use crate::rule::RuleRef;
use crate::rule::normalize_network_rule_host;
use codex_utils_absolute_path::AbsolutePathBuf;
use multimap::MultiMap;
use serde::Deserialize;
use serde::Serialize;
use std::collections::HashMap;
use std::sync::Arc;
type HeuristicsFallback<'a> = Option<&'a dyn Fn(&[String]) -> Decision>;
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct MatchOptions {
pub resolve_host_executables: bool,
}
#[derive(Clone, Debug)]
pub struct Policy {
rules_by_program: MultiMap<String, RuleRef>,
network_rules: Vec<NetworkRule>,
host_executables_by_name: HashMap<String, Arc<[AbsolutePathBuf]>>,
}
impl Policy {
pub fn new(rules_by_program: MultiMap<String, RuleRef>) -> Self {
Self::from_parts(rules_by_program, Vec::new(), HashMap::new())
Self::from_parts(rules_by_program, Vec::new())
}
pub fn from_parts(
rules_by_program: MultiMap<String, RuleRef>,
network_rules: Vec<NetworkRule>,
host_executables_by_name: HashMap<String, Arc<[AbsolutePathBuf]>>,
) -> Self {
Self {
rules_by_program,
network_rules,
host_executables_by_name,
}
}
@@ -60,10 +49,6 @@ impl Policy {
&self.network_rules
}
pub fn host_executables(&self) -> &HashMap<String, Arc<[AbsolutePathBuf]>> {
&self.host_executables_by_name
}
pub fn get_allowed_prefixes(&self) -> Vec<Vec<String>> {
let mut prefixes = Vec::new();
@@ -134,36 +119,6 @@ impl Policy {
Ok(())
}
pub fn set_host_executable_paths(&mut self, name: String, paths: Vec<AbsolutePathBuf>) {
self.host_executables_by_name.insert(name, paths.into());
}
pub fn merge_overlay(&self, overlay: &Policy) -> Policy {
let mut combined_rules = self.rules_by_program.clone();
for (program, rules) in overlay.rules_by_program.iter_all() {
for rule in rules {
combined_rules.insert(program.clone(), rule.clone());
}
}
let mut combined_network_rules = self.network_rules.clone();
combined_network_rules.extend(overlay.network_rules.iter().cloned());
let mut host_executables_by_name = self.host_executables_by_name.clone();
host_executables_by_name.extend(
overlay
.host_executables_by_name
.iter()
.map(|(name, paths)| (name.clone(), paths.clone())),
);
Policy::from_parts(
combined_rules,
combined_network_rules,
host_executables_by_name,
)
}
pub fn compiled_network_domains(&self) -> (Vec<String>, Vec<String>) {
let mut allowed = Vec::new();
let mut denied = Vec::new();
@@ -189,25 +144,7 @@ impl Policy {
where
F: Fn(&[String]) -> Decision,
{
let matched_rules = self.matches_for_command_with_options(
cmd,
Some(heuristics_fallback),
&MatchOptions::default(),
);
Evaluation::from_matches(matched_rules)
}
pub fn check_with_options<F>(
&self,
cmd: &[String],
heuristics_fallback: &F,
options: &MatchOptions,
) -> Evaluation
where
F: Fn(&[String]) -> Decision,
{
let matched_rules =
self.matches_for_command_with_options(cmd, Some(heuristics_fallback), options);
let matched_rules = self.matches_for_command(cmd, Some(heuristics_fallback));
Evaluation::from_matches(matched_rules)
}
@@ -217,20 +154,6 @@ impl Policy {
commands: Commands,
heuristics_fallback: &F,
) -> Evaluation
where
Commands: IntoIterator,
Commands::Item: AsRef<[String]>,
F: Fn(&[String]) -> Decision,
{
self.check_multiple_with_options(commands, heuristics_fallback, &MatchOptions::default())
}
pub fn check_multiple_with_options<Commands, F>(
&self,
commands: Commands,
heuristics_fallback: &F,
options: &MatchOptions,
) -> Evaluation
where
Commands: IntoIterator,
Commands::Item: AsRef<[String]>,
@@ -239,11 +162,7 @@ impl Policy {
let matched_rules: Vec<RuleMatch> = commands
.into_iter()
.flat_map(|command| {
self.matches_for_command_with_options(
command.as_ref(),
Some(heuristics_fallback),
options,
)
self.matches_for_command(command.as_ref(), Some(heuristics_fallback))
})
.collect();
@@ -262,25 +181,14 @@ impl Policy {
cmd: &[String],
heuristics_fallback: HeuristicsFallback<'_>,
) -> Vec<RuleMatch> {
self.matches_for_command_with_options(cmd, heuristics_fallback, &MatchOptions::default())
}
pub fn matches_for_command_with_options(
&self,
cmd: &[String],
heuristics_fallback: HeuristicsFallback<'_>,
options: &MatchOptions,
) -> Vec<RuleMatch> {
let matched_rules = self
.match_exact_rules(cmd)
.filter(|matched_rules| !matched_rules.is_empty())
.or_else(|| {
options
.resolve_host_executables
.then(|| self.match_host_executable_rules(cmd))
.filter(|matched_rules| !matched_rules.is_empty())
})
.unwrap_or_default();
let matched_rules: Vec<RuleMatch> = match cmd.first() {
Some(first) => self
.rules_by_program
.get_vec(first)
.map(|rules| rules.iter().filter_map(|rule| rule.matches(cmd)).collect())
.unwrap_or_default(),
None => Vec::new(),
};
if matched_rules.is_empty()
&& let Some(heuristics_fallback) = heuristics_fallback
@@ -293,45 +201,6 @@ impl Policy {
matched_rules
}
}
fn match_exact_rules(&self, cmd: &[String]) -> Option<Vec<RuleMatch>> {
let first = cmd.first()?;
Some(
self.rules_by_program
.get_vec(first)
.map(|rules| rules.iter().filter_map(|rule| rule.matches(cmd)).collect())
.unwrap_or_default(),
)
}
fn match_host_executable_rules(&self, cmd: &[String]) -> Vec<RuleMatch> {
let Some(first) = cmd.first() else {
return Vec::new();
};
let Ok(program) = AbsolutePathBuf::try_from(first.clone()) else {
return Vec::new();
};
let Some(basename) = executable_path_lookup_key(program.as_path()) else {
return Vec::new();
};
let Some(rules) = self.rules_by_program.get_vec(&basename) else {
return Vec::new();
};
if let Some(paths) = self.host_executables_by_name.get(&basename)
&& !paths.iter().any(|path| path == &program)
{
return Vec::new();
}
let basename_command = std::iter::once(basename)
.chain(cmd.iter().skip(1).cloned())
.collect::<Vec<_>>();
rules
.iter()
.filter_map(|rule| rule.matches(&basename_command))
.map(|rule_match| rule_match.with_resolved_program(&program))
.collect()
}
}
fn upsert_domain(entries: &mut Vec<String>, host: &str) {

View File

@@ -1,9 +1,6 @@
use crate::decision::Decision;
use crate::error::Error;
use crate::error::Result;
use crate::policy::MatchOptions;
use crate::policy::Policy;
use codex_utils_absolute_path::AbsolutePathBuf;
use serde::Deserialize;
use serde::Serialize;
use shlex::try_join;
@@ -66,8 +63,6 @@ pub enum RuleMatch {
#[serde(rename = "matchedPrefix")]
matched_prefix: Vec<String>,
decision: Decision,
#[serde(rename = "resolvedProgram", skip_serializing_if = "Option::is_none")]
resolved_program: Option<AbsolutePathBuf>,
/// Optional rationale for why this rule exists.
///
/// This can be supplied for any decision and may be surfaced in different contexts
@@ -88,23 +83,6 @@ impl RuleMatch {
Self::HeuristicsRuleMatch { decision, .. } => *decision,
}
}
pub fn with_resolved_program(self, resolved_program: &AbsolutePathBuf) -> Self {
match self {
Self::PrefixRuleMatch {
matched_prefix,
decision,
justification,
..
} => Self::PrefixRuleMatch {
matched_prefix,
decision,
resolved_program: Some(resolved_program.clone()),
justification,
},
other => other,
}
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -232,7 +210,6 @@ impl Rule for PrefixRule {
.map(|matched_prefix| RuleMatch::PrefixRuleMatch {
matched_prefix,
decision: self.decision,
resolved_program: None,
justification: self.justification.clone(),
})
}
@@ -243,21 +220,11 @@ impl Rule for PrefixRule {
}
/// Count how many rules match each provided example and error if any example is unmatched.
pub(crate) fn validate_match_examples(
policy: &Policy,
rules: &[RuleRef],
matches: &[Vec<String>],
) -> Result<()> {
pub(crate) fn validate_match_examples(rules: &[RuleRef], matches: &[Vec<String>]) -> Result<()> {
let mut unmatched_examples = Vec::new();
let options = MatchOptions {
resolve_host_executables: true,
};
for example in matches {
if !policy
.matches_for_command_with_options(example, None, &options)
.is_empty()
{
if rules.iter().any(|rule| rule.matches(example).is_some()) {
continue;
}
@@ -273,31 +240,21 @@ pub(crate) fn validate_match_examples(
Err(Error::ExampleDidNotMatch {
rules: rules.iter().map(|rule| format!("{rule:?}")).collect(),
examples: unmatched_examples,
location: None,
})
}
}
/// Ensure that no rule matches any provided negative example.
pub(crate) fn validate_not_match_examples(
policy: &Policy,
_rules: &[RuleRef],
rules: &[RuleRef],
not_matches: &[Vec<String>],
) -> Result<()> {
let options = MatchOptions {
resolve_host_executables: true,
};
for example in not_matches {
if let Some(rule) = policy
.matches_for_command_with_options(example, None, &options)
.first()
{
if let Some(rule) = rules.iter().find(|rule| rule.matches(example).is_some()) {
return Err(Error::ExampleDidMatch {
rule: format!("{rule:?}"),
example: try_join(example.iter().map(String::as_str))
.unwrap_or_else(|_| "unable to render example".to_string()),
location: None,
});
}
}

View File

@@ -1,6 +1,5 @@
use std::any::Any;
use std::fs;
use std::path::PathBuf;
use std::sync::Arc;
use anyhow::Context;
@@ -8,7 +7,6 @@ use anyhow::Result;
use codex_execpolicy::Decision;
use codex_execpolicy::Error;
use codex_execpolicy::Evaluation;
use codex_execpolicy::MatchOptions;
use codex_execpolicy::NetworkRuleProtocol;
use codex_execpolicy::Policy;
use codex_execpolicy::PolicyParser;
@@ -18,7 +16,6 @@ use codex_execpolicy::blocking_append_allow_prefix_rule;
use codex_execpolicy::rule::PatternToken;
use codex_execpolicy::rule::PrefixPattern;
use codex_execpolicy::rule::PrefixRule;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
@@ -34,35 +31,6 @@ fn prompt_all(_: &[String]) -> Decision {
Decision::Prompt
}
fn absolute_path(path: &str) -> AbsolutePathBuf {
AbsolutePathBuf::try_from(path.to_string())
.unwrap_or_else(|error| panic!("expected absolute path `{path}`: {error}"))
}
fn host_absolute_path(segments: &[&str]) -> String {
let mut path = if cfg!(windows) {
PathBuf::from(r"C:\")
} else {
PathBuf::from("/")
};
for segment in segments {
path.push(segment);
}
path.to_string_lossy().into_owned()
}
fn host_executable_name(name: &str) -> String {
if cfg!(windows) {
format!("{name}.exe")
} else {
name.to_string()
}
}
fn starlark_string(value: &str) -> String {
value.replace('\\', "\\\\").replace('"', "\\\"")
}
#[derive(Clone, Debug, Eq, PartialEq)]
enum RuleSnapshot {
Prefix(PrefixRule),
@@ -157,7 +125,6 @@ prefix_rule(
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "status"]),
decision: Decision::Allow,
resolved_program: None,
justification: None,
}],
},
@@ -189,7 +156,6 @@ prefix_rule(
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["rm"]),
decision: Decision::Forbidden,
resolved_program: None,
justification: Some("destructive command".to_string()),
}],
},
@@ -218,7 +184,6 @@ prefix_rule(
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["ls"]),
decision: Decision::Allow,
resolved_program: None,
justification: Some("safe and commonly used".to_string()),
}],
},
@@ -271,7 +236,6 @@ fn add_prefix_rule_extends_policy() -> Result<()> {
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["ls", "-l"]),
decision: Decision::Prompt,
resolved_program: None,
justification: None,
}],
},
@@ -341,7 +305,6 @@ prefix_rule(
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git"]),
decision: Decision::Prompt,
resolved_program: None,
justification: None,
}],
},
@@ -356,13 +319,11 @@ prefix_rule(
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git"]),
decision: Decision::Prompt,
resolved_program: None,
justification: None,
},
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "commit"]),
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
},
],
@@ -420,7 +381,6 @@ prefix_rule(
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["bash", "-c"]),
decision: Decision::Allow,
resolved_program: None,
justification: None,
}],
},
@@ -434,7 +394,6 @@ prefix_rule(
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["sh", "-l"]),
decision: Decision::Allow,
resolved_program: None,
justification: None,
}],
},
@@ -481,7 +440,6 @@ prefix_rule(
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["npm", "i", "--legacy-peer-deps"]),
decision: Decision::Allow,
resolved_program: None,
justification: None,
}],
},
@@ -498,7 +456,6 @@ prefix_rule(
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["npm", "install", "--no-save"]),
decision: Decision::Allow,
resolved_program: None,
justification: None,
}],
},
@@ -529,7 +486,6 @@ prefix_rule(
matched_rules: vec![RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "status"]),
decision: Decision::Allow,
resolved_program: None,
justification: None,
}],
},
@@ -577,13 +533,11 @@ prefix_rule(
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git"]),
decision: Decision::Prompt,
resolved_program: None,
justification: None,
},
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "commit"]),
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
},
],
@@ -622,19 +576,16 @@ prefix_rule(
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git"]),
decision: Decision::Prompt,
resolved_program: None,
justification: None,
},
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git"]),
decision: Decision::Prompt,
resolved_program: None,
justification: None,
},
RuleMatch::PrefixRuleMatch {
matched_prefix: tokens(&["git", "commit"]),
decision: Decision::Forbidden,
resolved_program: None,
justification: None,
},
],
@@ -661,303 +612,3 @@ fn heuristics_match_is_returned_when_no_policy_matches() {
evaluation
);
}
#[test]
fn parses_host_executable_paths() -> Result<()> {
let homebrew_git = host_absolute_path(&["opt", "homebrew", "bin", "git"]);
let usr_git = host_absolute_path(&["usr", "bin", "git"]);
let homebrew_git_literal = starlark_string(&homebrew_git);
let usr_git_literal = starlark_string(&usr_git);
let policy_src = format!(
r#"
host_executable(
name = "git",
paths = [
"{homebrew_git_literal}",
"{usr_git_literal}",
"{usr_git_literal}",
],
)
"#
);
let mut parser = PolicyParser::new();
parser.parse("test.rules", &policy_src)?;
let policy = parser.build();
assert_eq!(
policy
.host_executables()
.get("git")
.expect("missing git host executable")
.as_ref(),
[absolute_path(&homebrew_git), absolute_path(&usr_git)]
);
Ok(())
}
#[test]
fn host_executable_rejects_non_absolute_path() {
let policy_src = r#"
host_executable(name = "git", paths = ["git"])
"#;
let mut parser = PolicyParser::new();
let err = parser
.parse("test.rules", policy_src)
.expect_err("expected parse error");
assert!(
err.to_string()
.contains("host_executable paths must be absolute")
);
}
#[test]
fn host_executable_rejects_name_with_path_separator() {
let git_path = host_absolute_path(&["usr", "bin", "git"]);
let git_path_literal = starlark_string(&git_path);
let policy_src =
format!(r#"host_executable(name = "{git_path_literal}", paths = ["{git_path_literal}"])"#);
let mut parser = PolicyParser::new();
let err = parser
.parse("test.rules", &policy_src)
.expect_err("expected parse error");
assert!(
err.to_string()
.contains("host_executable name must be a bare executable name")
);
}
#[test]
fn host_executable_rejects_path_with_wrong_basename() {
let rg_path = host_absolute_path(&["usr", "bin", "rg"]);
let rg_path_literal = starlark_string(&rg_path);
let policy_src = format!(r#"host_executable(name = "git", paths = ["{rg_path_literal}"])"#);
let mut parser = PolicyParser::new();
let err = parser
.parse("test.rules", &policy_src)
.expect_err("expected parse error");
assert!(err.to_string().contains("must have basename `git`"));
}
#[test]
fn host_executable_last_definition_wins() -> Result<()> {
let usr_git = host_absolute_path(&["usr", "bin", "git"]);
let homebrew_git = host_absolute_path(&["opt", "homebrew", "bin", "git"]);
let usr_git_literal = starlark_string(&usr_git);
let homebrew_git_literal = starlark_string(&homebrew_git);
let mut parser = PolicyParser::new();
parser.parse(
"shared.rules",
&format!(r#"host_executable(name = "git", paths = ["{usr_git_literal}"])"#),
)?;
parser.parse(
"user.rules",
&format!(r#"host_executable(name = "git", paths = ["{homebrew_git_literal}"])"#),
)?;
let policy = parser.build();
assert_eq!(
policy
.host_executables()
.get("git")
.expect("missing git host executable")
.as_ref(),
[absolute_path(&homebrew_git)]
);
Ok(())
}
#[test]
fn host_executable_resolution_uses_basename_rule_when_allowed() -> Result<()> {
    // Invoking an allow-listed binary by absolute path should match the
    // basename-keyed prefix rule, and the match should record which
    // program the path resolved to.
    let git_name = host_executable_name("git");
    let git_path = host_absolute_path(&["usr", "bin", &git_name]);
    let git_path_literal = starlark_string(&git_path);

    let mut parser = PolicyParser::new();
    parser.parse(
        "test.rules",
        &format!(
            r#"
prefix_rule(pattern = ["git", "status"], decision = "prompt")
host_executable(name = "git", paths = ["{git_path_literal}"])
"#
        ),
    )?;
    let policy = parser.build();

    let options = MatchOptions {
        resolve_host_executables: true,
    };
    let evaluation =
        policy.check_with_options(&[git_path.clone(), "status".to_string()], &allow_all, &options);

    let expected = Evaluation {
        decision: Decision::Prompt,
        matched_rules: vec![RuleMatch::PrefixRuleMatch {
            matched_prefix: tokens(&["git", "status"]),
            decision: Decision::Prompt,
            resolved_program: Some(absolute_path(&git_path)),
            justification: None,
        }],
    };
    assert_eq!(evaluation, expected);
    Ok(())
}
#[test]
fn prefix_rule_examples_honor_host_executable_resolution() -> Result<()> {
    // The `match` / `not_match` examples on a prefix_rule are validated at
    // parse time: the allow-listed absolute path must satisfy the pattern
    // while the non-allow-listed path must not. Success here is simply a
    // clean parse.
    let git_name = host_executable_name("git");
    let listed_git = host_absolute_path(&["usr", "bin", &git_name]);
    let unlisted_git = host_absolute_path(&["opt", "homebrew", "bin", &git_name]);
    let allowed_git_literal = starlark_string(&listed_git);
    let other_git_literal = starlark_string(&unlisted_git);
    let source = format!(
        r#"
prefix_rule(
    pattern = ["git", "status"],
    match = [["{allowed_git_literal}", "status"]],
    not_match = [["{other_git_literal}", "status"]],
)
host_executable(name = "git", paths = ["{allowed_git_literal}"])
"#
    );
    let mut parser = PolicyParser::new();
    parser.parse("test.rules", &source)?;
    Ok(())
}
#[test]
fn host_executable_resolution_respects_explicit_empty_allowlist() -> Result<()> {
    // `paths = []` explicitly maps the basename to no absolute paths, so
    // the prefix rule never fires and the heuristics decide instead.
    let source = r#"
prefix_rule(pattern = ["git"], decision = "prompt")
host_executable(name = "git", paths = [])
"#;
    let mut parser = PolicyParser::new();
    parser.parse("test.rules", source)?;
    let policy = parser.build();

    let git_path = host_absolute_path(&["usr", "bin", "git"]);
    let command = vec![git_path, "status".to_string()];
    let evaluation = policy.check_with_options(
        &command,
        &allow_all,
        &MatchOptions {
            resolve_host_executables: true,
        },
    );
    let expected = Evaluation {
        decision: Decision::Allow,
        matched_rules: vec![RuleMatch::HeuristicsRuleMatch {
            command: command.clone(),
            decision: Decision::Allow,
        }],
    };
    assert_eq!(evaluation, expected);
    Ok(())
}
#[test]
fn host_executable_resolution_ignores_path_not_in_allowlist() -> Result<()> {
    // A git binary whose absolute path is not in the declared allowlist
    // must not resolve to the `git` basename rule; the heuristics handle
    // the command instead.
    let listed_git = host_absolute_path(&["usr", "bin", "git"]);
    let unlisted_git = host_absolute_path(&["opt", "homebrew", "bin", "git"]);
    let allowed_git_literal = starlark_string(&listed_git);
    let source = format!(
        r#"
prefix_rule(pattern = ["git"], decision = "prompt")
host_executable(name = "git", paths = ["{allowed_git_literal}"])
"#
    );
    let mut parser = PolicyParser::new();
    parser.parse("test.rules", &source)?;
    let policy = parser.build();

    let command = vec![unlisted_git, "status".to_string()];
    let evaluation = policy.check_with_options(
        &command,
        &allow_all,
        &MatchOptions {
            resolve_host_executables: true,
        },
    );
    let expected = Evaluation {
        decision: Decision::Allow,
        matched_rules: vec![RuleMatch::HeuristicsRuleMatch {
            command: command.clone(),
            decision: Decision::Allow,
        }],
    };
    assert_eq!(evaluation, expected);
    Ok(())
}
#[test]
fn host_executable_resolution_falls_back_without_mapping() -> Result<()> {
    // With no host_executable declaration at all, an absolute invocation
    // still matches the basename rule — the match records the path it was
    // resolved from.
    let source = r#"
prefix_rule(pattern = ["git"], decision = "prompt")
"#;
    let mut parser = PolicyParser::new();
    parser.parse("test.rules", source)?;
    let policy = parser.build();

    let git_path = host_absolute_path(&["usr", "bin", "git"]);
    let options = MatchOptions {
        resolve_host_executables: true,
    };
    let evaluation =
        policy.check_with_options(&[git_path.clone(), "status".to_string()], &allow_all, &options);

    let expected = Evaluation {
        decision: Decision::Prompt,
        matched_rules: vec![RuleMatch::PrefixRuleMatch {
            matched_prefix: tokens(&["git"]),
            decision: Decision::Prompt,
            resolved_program: Some(absolute_path(&git_path)),
            justification: None,
        }],
    };
    assert_eq!(evaluation, expected);
    Ok(())
}
#[test]
fn host_executable_resolution_does_not_override_exact_match() -> Result<()> {
    // A rule keyed on the literal absolute path wins over the basename
    // rule, and no resolved program is recorded for the exact match.
    let git_path = host_absolute_path(&["usr", "bin", "git"]);
    let git_path_literal = starlark_string(&git_path);
    let source = format!(
        r#"
prefix_rule(pattern = ["{git_path_literal}"], decision = "allow")
prefix_rule(pattern = ["git"], decision = "prompt")
host_executable(name = "git", paths = ["{git_path_literal}"])
"#
    );
    let mut parser = PolicyParser::new();
    parser.parse("test.rules", &source)?;
    let policy = parser.build();

    let options = MatchOptions {
        resolve_host_executables: true,
    };
    let evaluation =
        policy.check_with_options(&[git_path.clone(), "status".to_string()], &allow_all, &options);

    let expected = Evaluation {
        decision: Decision::Allow,
        matched_rules: vec![RuleMatch::PrefixRuleMatch {
            matched_prefix: vec![git_path],
            decision: Decision::Allow,
            resolved_program: None,
            justification: None,
        }],
    };
    assert_eq!(evaluation, expected);
    Ok(())
}

View File

@@ -11,6 +11,7 @@ use crate::bottom_pane::SelectionItem;
use crate::bottom_pane::SelectionViewParams;
use crate::bottom_pane::popup_consts::standard_popup_hint_line;
use crate::chatwidget::ChatWidget;
use crate::chatwidget::DEFAULT_MODEL_DISPLAY_NAME;
use crate::chatwidget::ExternalEditorState;
use crate::cwd_prompt::CwdPromptAction;
use crate::diff_render::DiffSummary;
@@ -81,6 +82,8 @@ use color_eyre::eyre::WrapErr;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyEventKind;
use ratatui::style::Modifier;
use ratatui::style::Style;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::widgets::Paragraph;
@@ -723,6 +726,41 @@ impl App {
.add_info_message(format!("Opened {url} in your browser."), None);
}
fn insert_history_cell(&mut self, tui: &mut tui::Tui, cell: Arc<dyn HistoryCell>) {
if let Some(Overlay::Transcript(t)) = &mut self.overlay {
t.insert_cell(cell.clone());
tui.frame_requester().schedule_frame();
}
self.transcript_cells.push(cell.clone());
let mut display = cell.display_lines(tui.terminal.last_known_screen_size.width);
if !display.is_empty() {
if !cell.is_stream_continuation() {
if self.has_emitted_history_lines {
display.insert(0, Line::from(""));
} else {
self.has_emitted_history_lines = true;
}
}
if self.overlay.is_some() {
self.deferred_history_lines.extend(display);
} else {
tui.insert_history_lines(display);
}
}
}
fn insert_startup_header(&mut self, tui: &mut tui::Tui) {
let placeholder_style = Style::default().add_modifier(Modifier::DIM | Modifier::ITALIC);
let header = Arc::new(history_cell::SessionHeaderHistoryCell::new_with_style(
DEFAULT_MODEL_DISPLAY_NAME.to_string(),
placeholder_style,
None,
self.config.cwd.clone(),
CODEX_CLI_VERSION,
)) as Arc<dyn HistoryCell>;
self.insert_history_cell(tui, header);
}
fn clear_ui_header_lines_with_version(
&self,
width: u16,
@@ -1199,6 +1237,7 @@ impl App {
};
self.chat_widget = ChatWidget::new(init, self.server.clone());
self.reset_thread_event_state();
self.insert_startup_header(tui);
if let Some(summary) = summary {
let mut lines: Vec<Line<'static>> = vec![summary.usage_line.clone().into()];
if let Some(command) = summary.resume_command {
@@ -1311,7 +1350,8 @@ impl App {
use tokio_stream::StreamExt;
let (app_event_tx, mut app_event_rx) = unbounded_channel();
let app_event_tx = AppEventSender::new(app_event_tx);
emit_project_config_warnings(&app_event_tx, &config);
let should_insert_startup_header =
matches!(&session_selection, SessionSelection::StartFresh);
tui.set_notification_method(config.tui_notification_method);
let harness_overrides =
@@ -1493,6 +1533,7 @@ impl App {
let file_search = FileSearchManager::new(config.cwd.clone(), app_event_tx.clone());
#[cfg(not(debug_assertions))]
let upgrade_version = crate::updates::get_upgrade_version(&config);
emit_project_config_warnings(&app_event_tx, &config);
let mut app = Self {
server: thread_manager.clone(),
@@ -1531,6 +1572,10 @@ impl App {
pending_primary_events: VecDeque::new(),
};
if should_insert_startup_header {
app.insert_startup_header(tui);
}
// On startup, if Agent mode (workspace-write) or ReadOnly is active, warn about world-writable dirs on Windows.
#[cfg(target_os = "windows")]
{
@@ -1883,29 +1928,7 @@ impl App {
}
AppEvent::InsertHistoryCell(cell) => {
let cell: Arc<dyn HistoryCell> = cell.into();
if let Some(Overlay::Transcript(t)) = &mut self.overlay {
t.insert_cell(cell.clone());
tui.frame_requester().schedule_frame();
}
self.transcript_cells.push(cell.clone());
let mut display = cell.display_lines(tui.terminal.last_known_screen_size.width);
if !display.is_empty() {
// Only insert a separating blank line for new cells that are not
// part of an ongoing stream. Streaming continuations should not
// accrue extra blank lines between chunks.
if !cell.is_stream_continuation() {
if self.has_emitted_history_lines {
display.insert(0, Line::from(""));
} else {
self.has_emitted_history_lines = true;
}
}
if self.overlay.is_some() {
self.deferred_history_lines.extend(display);
} else {
tui.insert_history_lines(display);
}
}
self.insert_history_cell(tui, cell);
}
AppEvent::ApplyThreadRollback { num_turns } => {
if self.apply_non_pending_thread_rollback(num_turns) {
@@ -3064,6 +3087,55 @@ impl App {
// thread, so unrelated shutdowns cannot consume this marker.
self.pending_shutdown_exit_thread_id = None;
}
if let EventMsg::SessionConfigured(session) = &event.msg
&& let Some(loading_header_idx) = self.transcript_cells.iter().rposition(|cell| {
matches!(
cell.as_ref()
.as_any()
.downcast_ref::<history_cell::SessionHeaderHistoryCell>(),
Some(startup_header) if startup_header.is_loading_placeholder()
)
})
{
let cell = Arc::new(history_cell::SessionHeaderHistoryCell::new(
session.model.clone(),
session.reasoning_effort,
session.cwd.clone(),
CODEX_CLI_VERSION,
)) as Arc<dyn HistoryCell>;
self.transcript_cells[loading_header_idx] = cell.clone();
if matches!(&self.overlay, Some(Overlay::Transcript(_))) {
self.overlay = Some(Overlay::new_transcript(self.transcript_cells.clone()));
tui.frame_requester().schedule_frame();
}
if loading_header_idx == 0 {
let display = cell.display_lines(tui.terminal.last_known_screen_size.width);
tui.replace_top_visible_history_lines(display)?;
} else {
self.clear_terminal_ui(tui, false)?;
self.deferred_history_lines.clear();
let transcript_cells = self.transcript_cells.clone();
for transcript_cell in transcript_cells {
let mut display =
transcript_cell.display_lines(tui.terminal.last_known_screen_size.width);
if !display.is_empty() {
if !transcript_cell.is_stream_continuation() {
if self.has_emitted_history_lines {
display.insert(0, Line::from(""));
} else {
self.has_emitted_history_lines = true;
}
}
if self.overlay.is_some() {
self.deferred_history_lines.extend(display);
} else {
tui.insert_history_lines(display);
}
}
}
}
}
self.handle_codex_event_now(event);
if self.backtrack_render_pending {
tui.frame_requester().schedule_frame();
@@ -3408,8 +3480,8 @@ mod tests {
use crate::file_search::FileSearchManager;
use crate::history_cell::AgentMessageCell;
use crate::history_cell::HistoryCell;
use crate::history_cell::SessionHeaderHistoryCell;
use crate::history_cell::UserHistoryCell;
use crate::history_cell::new_session_info;
use codex_core::CodexAuth;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
@@ -3843,7 +3915,7 @@ mod tests {
true,
)) as Arc<dyn HistoryCell>
};
let make_header = |is_first| -> Arc<dyn HistoryCell> {
let make_header = |_is_first| -> Arc<dyn HistoryCell> {
let event = SessionConfiguredEvent {
session_id: ThreadId::new(),
forked_from_id: None,
@@ -3860,12 +3932,11 @@ mod tests {
network_proxy: None,
rollout_path: Some(PathBuf::new()),
};
Arc::new(new_session_info(
app.chat_widget.config_ref(),
app.chat_widget.current_model(),
event,
is_first,
None,
Arc::new(SessionHeaderHistoryCell::new(
event.model,
event.reasoning_effort,
event.cwd,
crate::version::CODEX_CLI_VERSION,
)) as Arc<dyn HistoryCell>
};
@@ -4340,7 +4411,7 @@ mod tests {
)) as Arc<dyn HistoryCell>
};
let make_header = |is_first| {
let make_header = |_is_first| {
let event = SessionConfiguredEvent {
session_id: ThreadId::new(),
forked_from_id: None,
@@ -4357,12 +4428,11 @@ mod tests {
network_proxy: None,
rollout_path: Some(PathBuf::new()),
};
Arc::new(new_session_info(
app.chat_widget.config_ref(),
app.chat_widget.current_model(),
event,
is_first,
None,
Arc::new(SessionHeaderHistoryCell::new(
event.model,
event.reasoning_effort,
event.cwd,
crate::version::CODEX_CLI_VERSION,
)) as Arc<dyn HistoryCell>
};

View File

@@ -29,7 +29,7 @@ use std::sync::Arc;
use crate::app::App;
use crate::app_event::AppEvent;
use crate::history_cell::SessionInfoCell;
use crate::history_cell::SessionHeaderHistoryCell;
use crate::history_cell::UserHistoryCell;
use crate::pager_overlay::Overlay;
use crate::tui;
@@ -639,7 +639,7 @@ fn nth_user_position(
fn user_positions_iter(
cells: &[Arc<dyn crate::history_cell::HistoryCell>],
) -> impl Iterator<Item = usize> + '_ {
let session_start_type = TypeId::of::<SessionInfoCell>();
let session_start_type = TypeId::of::<SessionHeaderHistoryCell>();
let user_type = TypeId::of::<UserHistoryCell>();
let type_of = |cell: &Arc<dyn crate::history_cell::HistoryCell>| cell.as_any().type_id();

View File

@@ -146,8 +146,6 @@ use rand::Rng;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::style::Color;
use ratatui::style::Modifier;
use ratatui::style::Style;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::widgets::Paragraph;
@@ -157,7 +155,7 @@ use tokio::task::JoinHandle;
use tracing::debug;
use tracing::warn;
const DEFAULT_MODEL_DISPLAY_NAME: &str = "loading";
pub(crate) const DEFAULT_MODEL_DISPLAY_NAME: &str = "loading";
const PLAN_IMPLEMENTATION_TITLE: &str = "Implement this plan?";
const PLAN_IMPLEMENTATION_YES: &str = "Yes, implement this plan";
const PLAN_IMPLEMENTATION_NO: &str = "No, stay in Plan mode";
@@ -257,8 +255,6 @@ mod agent;
use self::agent::spawn_agent;
use self::agent::spawn_agent_from_existing;
pub(crate) use self::agent::spawn_op_forwarder;
mod session_header;
use self::session_header::SessionHeader;
mod skills;
use self::skills::collect_tool_mentions;
use self::skills::find_app_mentions;
@@ -547,7 +543,6 @@ pub(crate) struct ChatWidget {
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
otel_manager: OtelManager,
session_header: SessionHeader,
initial_user_message: Option<UserMessage>,
token_info: Option<TokenUsageInfo>,
rate_limit_snapshots_by_limit_id: BTreeMap<String, RateLimitSnapshotDisplay>,
@@ -1136,7 +1131,6 @@ impl ChatWidget {
self.last_copyable_output = None;
let forked_from_id = event.forked_from_id;
let model_for_header = event.model.clone();
self.session_header.set_model(&model_for_header);
self.current_collaboration_mode = self.current_collaboration_mode.with_updates(
Some(model_for_header.clone()),
Some(event.reasoning_effort),
@@ -1144,16 +1138,17 @@ impl ChatWidget {
);
self.refresh_model_display();
self.sync_personality_command_enabled();
let session_info_cell = history_cell::new_session_info(
if let Some(session_info_body) = history_cell::new_session_info_body(
&self.config,
&model_for_header,
event,
&event,
self.show_welcome_banner,
self.auth_manager
.auth_cached()
.and_then(|auth| auth.account_plan_type()),
);
self.apply_session_info_cell(session_info_cell);
) {
self.add_boxed_history(session_info_body);
}
if let Some(messages) = initial_messages {
self.replay_initial_messages(messages);
@@ -2777,7 +2772,7 @@ impl ChatWidget {
.and_then(|mask| mask.model.clone())
.unwrap_or_else(|| model_for_header.clone());
let fallback_default = Settings {
model: header_model.clone(),
model: header_model,
reasoning_effort: None,
developer_instructions: None,
};
@@ -2787,7 +2782,7 @@ impl ChatWidget {
settings: fallback_default,
};
let active_cell = Some(Self::placeholder_session_header_cell(&config));
let active_cell = None;
let current_cwd = Some(config.cwd.clone());
let queued_message_edit_binding =
@@ -2816,7 +2811,6 @@ impl ChatWidget {
auth_manager,
models_manager,
otel_manager,
session_header: SessionHeader::new(header_model),
initial_user_message,
token_info: None,
rate_limit_snapshots_by_limit_id: BTreeMap::new(),
@@ -2954,7 +2948,7 @@ impl ChatWidget {
.and_then(|mask| mask.model.clone())
.unwrap_or_else(|| model_for_header.clone());
let fallback_default = Settings {
model: header_model.clone(),
model: header_model,
reasoning_effort: None,
developer_instructions: None,
};
@@ -2964,7 +2958,7 @@ impl ChatWidget {
settings: fallback_default,
};
let active_cell = Some(Self::placeholder_session_header_cell(&config));
let active_cell = None;
let current_cwd = Some(config.cwd.clone());
let queued_message_edit_binding =
@@ -2993,7 +2987,6 @@ impl ChatWidget {
auth_manager,
models_manager,
otel_manager,
session_header: SessionHeader::new(header_model),
initial_user_message,
token_info: None,
rate_limit_snapshots_by_limit_id: BTreeMap::new(),
@@ -3111,6 +3104,7 @@ impl ChatWidget {
let header_model = model
.clone()
.unwrap_or_else(|| session_configured.model.clone());
let header_reasoning_effort = session_configured.reasoning_effort;
let active_collaboration_mask =
Self::initial_collaboration_mask(&config, models_manager.as_ref(), model_override);
let header_model = active_collaboration_mask
@@ -3159,7 +3153,6 @@ impl ChatWidget {
auth_manager,
models_manager,
otel_manager,
session_header: SessionHeader::new(header_model),
initial_user_message,
token_info: None,
rate_limit_snapshots_by_limit_id: BTreeMap::new(),
@@ -3253,6 +3246,17 @@ impl ChatWidget {
);
widget.update_collaboration_mode_indicator();
widget
.app_event_tx
.send(AppEvent::InsertHistoryCell(Box::new(
history_cell::SessionHeaderHistoryCell::new(
header_model,
header_reasoning_effort,
widget.config.cwd.clone(),
CODEX_CLI_VERSION,
),
)));
widget
}
@@ -3962,15 +3966,7 @@ impl ChatWidget {
}
fn add_boxed_history(&mut self, cell: Box<dyn HistoryCell>) {
// Keep the placeholder session header as the active cell until real session info arrives,
// so we can merge headers instead of committing a duplicate box to history.
let keep_placeholder_header_active = !self.is_session_configured()
&& self
.active_cell
.as_ref()
.is_some_and(|c| c.as_any().is::<history_cell::SessionHeaderHistoryCell>());
if !keep_placeholder_header_active && !cell.display_lines(u16::MAX).is_empty() {
if cell.has_visible_display_lines() {
// Only break exec grouping if the cell renders visible lines.
self.flush_active_cell();
self.needs_final_message_separator = true;
@@ -6958,8 +6954,6 @@ impl ChatWidget {
}
fn refresh_model_display(&mut self) {
let effective = self.effective_collaboration_mode();
self.session_header.set_model(effective.model());
// Keep composer paste affordances aligned with the currently effective model.
self.sync_image_paste_enabled();
}
@@ -7090,46 +7084,6 @@ impl ChatWidget {
}
}
/// Build a placeholder header cell while the session is configuring.
fn placeholder_session_header_cell(config: &Config) -> Box<dyn HistoryCell> {
let placeholder_style = Style::default().add_modifier(Modifier::DIM | Modifier::ITALIC);
Box::new(history_cell::SessionHeaderHistoryCell::new_with_style(
DEFAULT_MODEL_DISPLAY_NAME.to_string(),
placeholder_style,
None,
config.cwd.clone(),
CODEX_CLI_VERSION,
))
}
/// Merge the real session info cell with any placeholder header to avoid double boxes.
fn apply_session_info_cell(&mut self, cell: history_cell::SessionInfoCell) {
let mut session_info_cell = Some(Box::new(cell) as Box<dyn HistoryCell>);
let merged_header = if let Some(active) = self.active_cell.take() {
if active
.as_any()
.is::<history_cell::SessionHeaderHistoryCell>()
{
// Reuse the existing placeholder header to avoid rendering two boxes.
if let Some(cell) = session_info_cell.take() {
self.active_cell = Some(cell);
}
true
} else {
self.active_cell = Some(active);
false
}
} else {
false
};
self.flush_active_cell();
if !merged_header && let Some(cell) = session_info_cell {
self.add_boxed_history(cell);
}
}
pub(crate) fn add_info_message(&mut self, message: String, hint: Option<String>) {
self.add_to_history(history_cell::new_info_event(message, hint));
self.request_redraw();

View File

@@ -1,16 +0,0 @@
pub(crate) struct SessionHeader {
model: String,
}
impl SessionHeader {
pub(crate) fn new(model: String) -> Self {
Self { model }
}
/// Updates the header's model text.
pub(crate) fn set_model(&mut self, model: &str) {
if self.model != model {
self.model = model.to_string();
}
}
}

View File

@@ -1668,7 +1668,6 @@ async fn make_chatwidget_manual(
auth_manager,
models_manager,
otel_manager,
session_header: SessionHeader::new(resolved_model.clone()),
initial_user_message: None,
token_info: None,
rate_limit_snapshots_by_limit_id: BTreeMap::new(),

View File

@@ -10,6 +10,7 @@
//! bumps the active-cell revision tracked by `ChatWidget`, so the cache key changes whenever the
//! rendered transcript output can change.
use crate::chatwidget::DEFAULT_MODEL_DISPLAY_NAME;
use crate::diff_render::create_diff_summary;
use crate::diff_render::display_path_for;
use crate::exec_cell::CommandOutput;
@@ -95,6 +96,11 @@ pub(crate) trait HistoryCell: std::fmt::Debug + Send + Sync + Any {
/// Returns the logical lines for the main chat viewport.
fn display_lines(&self, width: u16) -> Vec<Line<'static>>;
/// Returns whether this cell renders any visible display lines when width is unconstrained.
fn has_visible_display_lines(&self) -> bool {
!self.display_lines(u16::MAX).is_empty()
}
/// Returns the number of viewport rows needed to render this cell.
///
/// The default delegates to `Paragraph::line_count` with
@@ -1019,43 +1025,30 @@ impl HistoryCell for TooltipHistoryCell {
}
}
#[derive(Debug)]
pub struct SessionInfoCell(CompositeHistoryCell);
pub(crate) fn new_session_info_body(
config: &Config,
requested_model: &str,
event: &SessionConfiguredEvent,
is_first_event: bool,
auth_plan: Option<PlanType>,
) -> Option<Box<dyn HistoryCell>> {
let parts = session_info_body_parts(config, requested_model, event, is_first_event, auth_plan);
impl HistoryCell for SessionInfoCell {
fn display_lines(&self, width: u16) -> Vec<Line<'static>> {
self.0.display_lines(width)
}
fn desired_height(&self, width: u16) -> u16 {
self.0.desired_height(width)
}
fn transcript_lines(&self, width: u16) -> Vec<Line<'static>> {
self.0.transcript_lines(width)
match parts.len() {
0 => None,
1 => parts.into_iter().next(),
_ => Some(Box::new(CompositeHistoryCell::new(parts))),
}
}
pub(crate) fn new_session_info(
fn session_info_body_parts(
config: &Config,
requested_model: &str,
event: SessionConfiguredEvent,
event: &SessionConfiguredEvent,
is_first_event: bool,
auth_plan: Option<PlanType>,
) -> SessionInfoCell {
let SessionConfiguredEvent {
model,
reasoning_effort,
..
} = event;
// Header box rendered as history (so it appears at the very top)
let header = SessionHeaderHistoryCell::new(
model.clone(),
reasoning_effort,
config.cwd.clone(),
CODEX_CLI_VERSION,
);
let mut parts: Vec<Box<dyn HistoryCell>> = vec![Box::new(header)];
) -> Vec<Box<dyn HistoryCell>> {
let mut parts: Vec<Box<dyn HistoryCell>> = Vec::new();
if is_first_event {
// Help lines below the header (new copy and list)
@@ -1098,17 +1091,17 @@ pub(crate) fn new_session_info(
{
parts.push(Box::new(tooltips));
}
if requested_model != model {
if requested_model != event.model {
let lines = vec![
"model changed:".magenta().bold().into(),
format!("requested: {requested_model}").into(),
format!("used: {model}").into(),
format!("used: {}", event.model).into(),
];
parts.push(Box::new(PlainHistoryCell { lines }));
}
}
SessionInfoCell(CompositeHistoryCell { parts })
parts
}
pub(crate) fn new_user_prompt(
@@ -1203,6 +1196,10 @@ impl SessionHeaderHistoryCell {
ReasoningEffortConfig::None => "none",
})
}
pub(crate) fn is_loading_placeholder(&self) -> bool {
self.model == DEFAULT_MODEL_DISPLAY_NAME
}
}
impl HistoryCell for SessionHeaderHistoryCell {

View File

@@ -126,43 +126,7 @@ where
for line in wrapped {
queue!(writer, Print("\r\n"))?;
// URL lines can be wider than the terminal and will
// character-wrap onto continuation rows. Pre-clear those rows
// so stale content from a previously longer line is erased.
let physical_rows = line.width().max(1).div_ceil(wrap_width);
if physical_rows > 1 {
queue!(writer, SavePosition)?;
for _ in 1..physical_rows {
queue!(writer, MoveDown(1), MoveToColumn(0))?;
queue!(writer, Clear(ClearType::UntilNewLine))?;
}
queue!(writer, RestorePosition)?;
}
queue!(
writer,
SetColors(Colors::new(
line.style
.fg
.map(std::convert::Into::into)
.unwrap_or(CColor::Reset),
line.style
.bg
.map(std::convert::Into::into)
.unwrap_or(CColor::Reset)
))
)?;
queue!(writer, Clear(ClearType::UntilNewLine))?;
// Merge line-level style into each span so that ANSI colors reflect
// line styles (e.g., blockquotes with green fg).
let merged_spans: Vec<Span> = line
.spans
.iter()
.map(|s| Span {
style: s.style.patch(line.style),
content: s.content.clone(),
})
.collect();
write_spans(writer, merged_spans.iter())?;
write_line(writer, &line, wrap_width)?;
}
queue!(writer, ResetScrollRegion)?;
@@ -181,6 +145,36 @@ where
Ok(())
}
pub fn replace_top_visible_history_lines<B>(
terminal: &mut crate::custom_terminal::Terminal<B>,
lines: Vec<Line>,
) -> io::Result<()>
where
B: Backend + Write,
{
if lines.is_empty() {
return Ok(());
}
let top = terminal
.viewport_area
.top()
.saturating_sub(terminal.visible_history_rows());
let wrap_width = terminal.viewport_area.width.max(1) as usize;
let last_cursor_pos = terminal.last_known_cursor_pos;
let writer = terminal.backend_mut();
for (index, line) in lines.iter().enumerate() {
let y = top.saturating_add(index as u16);
queue!(writer, MoveTo(0, y))?;
write_line(writer, line, wrap_width)?;
}
queue!(writer, MoveTo(last_cursor_pos.x, last_cursor_pos.y))?;
std::io::Write::flush(writer)?;
Ok(())
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SetScrollRegion(pub std::ops::Range<u16>);
@@ -329,6 +323,43 @@ where
)
}
fn write_line(mut writer: &mut impl Write, line: &Line<'_>, wrap_width: usize) -> io::Result<()> {
// URL lines can be wider than the terminal and will character-wrap onto continuation rows.
// Pre-clear those rows so stale content from a previously longer line is erased.
let physical_rows = line.width().max(1).div_ceil(wrap_width);
if physical_rows > 1 {
queue!(writer, SavePosition)?;
for _ in 1..physical_rows {
queue!(writer, MoveDown(1), MoveToColumn(0))?;
queue!(writer, Clear(ClearType::UntilNewLine))?;
}
queue!(writer, RestorePosition)?;
}
queue!(
writer,
SetColors(Colors::new(
line.style
.fg
.map(std::convert::Into::into)
.unwrap_or(CColor::Reset),
line.style
.bg
.map(std::convert::Into::into)
.unwrap_or(CColor::Reset)
))
)?;
queue!(writer, Clear(ClearType::UntilNewLine))?;
let merged_spans: Vec<Span> = line
.spans
.iter()
.map(|s| Span {
style: s.style.patch(line.style),
content: s.content.clone(),
})
.collect();
write_spans(&mut writer, merged_spans.iter())
}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -445,6 +445,16 @@ impl Tui {
self.frame_requester().schedule_frame();
}
pub fn replace_top_visible_history_lines(&mut self, lines: Vec<Line<'static>>) -> Result<()> {
let line_count = lines.len();
if self.terminal.visible_history_rows() >= line_count as u16 {
crate::insert_history::replace_top_visible_history_lines(&mut self.terminal, lines)?;
} else if self.pending_history_lines.len() >= line_count {
self.pending_history_lines.splice(0..line_count, lines);
}
Ok(())
}
pub fn clear_pending_history_lines(&mut self) {
self.pending_history_lines.clear();
}