Compare commits

..

2 Commits

Author SHA1 Message Date
Joe Gershenson
c37ce40560 Polish pending transcript queue 2026-04-21 01:55:18 -07:00
Joe Gershenson
a514bf1683 Fix interrupt transcript ordering 2026-04-21 01:52:30 -07:00
60 changed files with 379 additions and 6184 deletions

View File

@@ -65,10 +65,6 @@ common:ci --verbose_failures
common:ci --build_metadata=REPO_URL=https://github.com/openai/codex.git
common:ci --build_metadata=ROLE=CI
common:ci --build_metadata=VISIBILITY=PUBLIC
# rules_rust derives debug level from Bazel toolchain/compilation-mode settings,
# not Cargo profiles. Keep CI Rust actions explicit and lean.
common:ci --@rules_rust//rust/settings:extra_rustc_flag=-Cdebuginfo=0
common:ci --@rules_rust//rust/settings:extra_exec_rustc_flag=-Cdebuginfo=0
# Disable disk cache in CI since we have a remote one and aren't using persistent workers.
common:ci --disk_cache=

86
MODULE.bazel.lock generated

File diff suppressed because one or more lines are too long

1216
codex-rs/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -24,7 +24,6 @@ members = [
"collaboration-mode-templates",
"connectors",
"config",
"device-key",
"shell-command",
"shell-escalation",
"skills",
@@ -245,7 +244,6 @@ env_logger = "0.11.9"
eventsource-stream = "0.2.3"
futures = { version = "0.3", default-features = false }
gethostname = "1.1.0"
gix = { version = "0.81.0", default-features = false, features = ["sha1"] }
glob = "0.3"
globset = "0.4"
hmac = "0.12.1"
@@ -284,7 +282,6 @@ os_info = "3.12.0"
owo-colors = "4.3.0"
path-absolutize = "3.1.1"
pathdiff = "0.2"
p256 = "0.13.2"
portable-pty = "0.9.0"
predicates = "3"
pretty_assertions = "1.4.1"
@@ -433,11 +430,6 @@ ignored = [
"codex-v8-poc",
]
[profile.dev]
# Keep line tables/backtraces while avoiding expensive full variable debug info
# across local dev builds.
debug = 1
[profile.dev-small]
inherits = "dev"
opt-level = 0

View File

@@ -521,200 +521,6 @@
}
]
},
"DeviceKeyCreateParams": {
"description": "Create a controller-local device key with a random key id.",
"properties": {
"accountUserId": {
"type": "string"
},
"clientId": {
"type": "string"
},
"protectionPolicy": {
"anyOf": [
{
"$ref": "#/definitions/DeviceKeyProtectionPolicy"
},
{
"type": "null"
}
],
"description": "Defaults to `hardware_only` when omitted."
}
},
"required": [
"accountUserId",
"clientId"
],
"type": "object"
},
"DeviceKeyProtectionPolicy": {
"description": "Protection policy for creating or loading a controller-local device key.",
"enum": [
"hardware_only",
"allow_os_protected_nonextractable"
],
"type": "string"
},
"DeviceKeyPublicParams": {
"description": "Fetch a controller-local device key public key by id.",
"properties": {
"keyId": {
"type": "string"
}
},
"required": [
"keyId"
],
"type": "object"
},
"DeviceKeySignParams": {
"description": "Sign an accepted structured payload with a controller-local device key.",
"properties": {
"keyId": {
"type": "string"
},
"payload": {
"$ref": "#/definitions/DeviceKeySignPayload"
}
},
"required": [
"keyId",
"payload"
],
"type": "object"
},
"DeviceKeySignPayload": {
"description": "Structured payloads accepted by `device/key/sign`.",
"oneOf": [
{
"description": "Payload bound to one remote-control controller websocket `/client` connection challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientConnectionAudience"
},
"clientId": {
"type": "string"
},
"nonce": {
"type": "string"
},
"scopes": {
"description": "Must contain exactly `remote_control_controller_websocket`.",
"items": {
"type": "string"
},
"type": "array"
},
"sessionId": {
"description": "Backend-issued websocket session id that this proof authorizes.",
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "Websocket route path that this proof authorizes.",
"type": "string"
},
"tokenExpiresAt": {
"description": "Remote-control token expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"tokenSha256Base64url": {
"description": "SHA-256 of the controller-scoped remote-control token, encoded as unpadded base64url.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientConnection"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"clientId",
"nonce",
"scopes",
"sessionId",
"targetOrigin",
"targetPath",
"tokenExpiresAt",
"tokenSha256Base64url",
"type"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayload",
"type": "object"
},
{
"description": "Payload bound to a remote-control client `/client/enroll` ownership challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientEnrollmentAudience"
},
"challengeExpiresAt": {
"description": "Enrollment challenge expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"challengeId": {
"description": "Backend-issued enrollment challenge id that this proof authorizes.",
"type": "string"
},
"clientId": {
"type": "string"
},
"deviceIdentitySha256Base64url": {
"description": "SHA-256 of the requested device identity operation, encoded as unpadded base64url.",
"type": "string"
},
"nonce": {
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "HTTP route path that this proof authorizes.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientEnrollment"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"challengeExpiresAt",
"challengeId",
"clientId",
"deviceIdentitySha256Base64url",
"nonce",
"targetOrigin",
"targetPath",
"type"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayload",
"type": "object"
}
]
},
"DynamicToolSpec": {
"properties": {
"deferLoading": {
@@ -1941,20 +1747,6 @@
}
]
},
"RemoteControlClientConnectionAudience": {
"description": "Audience for a remote-control client connection device-key proof.",
"enum": [
"remote_control_client_websocket"
],
"type": "string"
},
"RemoteControlClientEnrollmentAudience": {
"description": "Audience for a remote-control client enrollment device-key proof.",
"enum": [
"remote_control_client_enrollment"
],
"type": "string"
},
"RequestId": {
"anyOf": [
{
@@ -4568,78 +4360,6 @@
"title": "App/listRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"device/key/create"
],
"title": "Device/key/createRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/DeviceKeyCreateParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/createRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"device/key/public"
],
"title": "Device/key/publicRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/DeviceKeyPublicParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/publicRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"device/key/sign"
],
"title": "Device/key/signRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/DeviceKeySignParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/signRequest",
"type": "object"
},
{
"properties": {
"id": {

View File

@@ -30,4 +30,4 @@
],
"title": "DynamicToolCallParams",
"type": "object"
}
}

View File

@@ -737,78 +737,6 @@
"title": "App/listRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
},
"method": {
"enum": [
"device/key/create"
],
"title": "Device/key/createRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/v2/DeviceKeyCreateParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/createRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
},
"method": {
"enum": [
"device/key/public"
],
"title": "Device/key/publicRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/v2/DeviceKeyPublicParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/publicRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/v2/RequestId"
},
"method": {
"enum": [
"device/key/sign"
],
"title": "Device/key/signRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/v2/DeviceKeySignParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/signRequest",
"type": "object"
},
{
"properties": {
"id": {
@@ -7396,300 +7324,6 @@
"title": "DeprecationNoticeNotification",
"type": "object"
},
"DeviceKeyAlgorithm": {
"description": "Device-key algorithm reported at enrollment and signing boundaries.",
"enum": [
"ecdsa_p256_sha256"
],
"type": "string"
},
"DeviceKeyCreateParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Create a controller-local device key with a random key id.",
"properties": {
"accountUserId": {
"type": "string"
},
"clientId": {
"type": "string"
},
"protectionPolicy": {
"anyOf": [
{
"$ref": "#/definitions/v2/DeviceKeyProtectionPolicy"
},
{
"type": "null"
}
],
"description": "Defaults to `hardware_only` when omitted."
}
},
"required": [
"accountUserId",
"clientId"
],
"title": "DeviceKeyCreateParams",
"type": "object"
},
"DeviceKeyCreateResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Device-key metadata and public key returned by create/public APIs.",
"properties": {
"algorithm": {
"$ref": "#/definitions/v2/DeviceKeyAlgorithm"
},
"keyId": {
"type": "string"
},
"protectionClass": {
"$ref": "#/definitions/v2/DeviceKeyProtectionClass"
},
"publicKeySpkiDerBase64": {
"description": "SubjectPublicKeyInfo DER encoded as base64.",
"type": "string"
}
},
"required": [
"algorithm",
"keyId",
"protectionClass",
"publicKeySpkiDerBase64"
],
"title": "DeviceKeyCreateResponse",
"type": "object"
},
"DeviceKeyProtectionClass": {
"description": "Platform protection class for a controller-local device key.",
"enum": [
"hardware_secure_enclave",
"hardware_tpm",
"os_protected_nonextractable"
],
"type": "string"
},
"DeviceKeyProtectionPolicy": {
"description": "Protection policy for creating or loading a controller-local device key.",
"enum": [
"hardware_only",
"allow_os_protected_nonextractable"
],
"type": "string"
},
"DeviceKeyPublicParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Fetch a controller-local device key public key by id.",
"properties": {
"keyId": {
"type": "string"
}
},
"required": [
"keyId"
],
"title": "DeviceKeyPublicParams",
"type": "object"
},
"DeviceKeyPublicResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Device-key public metadata returned by `device/key/public`.",
"properties": {
"algorithm": {
"$ref": "#/definitions/v2/DeviceKeyAlgorithm"
},
"keyId": {
"type": "string"
},
"protectionClass": {
"$ref": "#/definitions/v2/DeviceKeyProtectionClass"
},
"publicKeySpkiDerBase64": {
"description": "SubjectPublicKeyInfo DER encoded as base64.",
"type": "string"
}
},
"required": [
"algorithm",
"keyId",
"protectionClass",
"publicKeySpkiDerBase64"
],
"title": "DeviceKeyPublicResponse",
"type": "object"
},
"DeviceKeySignParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Sign an accepted structured payload with a controller-local device key.",
"properties": {
"keyId": {
"type": "string"
},
"payload": {
"$ref": "#/definitions/v2/DeviceKeySignPayload"
}
},
"required": [
"keyId",
"payload"
],
"title": "DeviceKeySignParams",
"type": "object"
},
"DeviceKeySignPayload": {
"description": "Structured payloads accepted by `device/key/sign`.",
"oneOf": [
{
"description": "Payload bound to one remote-control controller websocket `/client` connection challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/v2/RemoteControlClientConnectionAudience"
},
"clientId": {
"type": "string"
},
"nonce": {
"type": "string"
},
"scopes": {
"description": "Must contain exactly `remote_control_controller_websocket`.",
"items": {
"type": "string"
},
"type": "array"
},
"sessionId": {
"description": "Backend-issued websocket session id that this proof authorizes.",
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "Websocket route path that this proof authorizes.",
"type": "string"
},
"tokenExpiresAt": {
"description": "Remote-control token expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"tokenSha256Base64url": {
"description": "SHA-256 of the controller-scoped remote-control token, encoded as unpadded base64url.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientConnection"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"clientId",
"nonce",
"scopes",
"sessionId",
"targetOrigin",
"targetPath",
"tokenExpiresAt",
"tokenSha256Base64url",
"type"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayload",
"type": "object"
},
{
"description": "Payload bound to a remote-control client `/client/enroll` ownership challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/v2/RemoteControlClientEnrollmentAudience"
},
"challengeExpiresAt": {
"description": "Enrollment challenge expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"challengeId": {
"description": "Backend-issued enrollment challenge id that this proof authorizes.",
"type": "string"
},
"clientId": {
"type": "string"
},
"deviceIdentitySha256Base64url": {
"description": "SHA-256 of the requested device identity operation, encoded as unpadded base64url.",
"type": "string"
},
"nonce": {
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "HTTP route path that this proof authorizes.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientEnrollment"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"challengeExpiresAt",
"challengeId",
"clientId",
"deviceIdentitySha256Base64url",
"nonce",
"targetOrigin",
"targetPath",
"type"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayload",
"type": "object"
}
]
},
"DeviceKeySignResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "ASN.1 DER signature returned by `device/key/sign`.",
"properties": {
"algorithm": {
"$ref": "#/definitions/v2/DeviceKeyAlgorithm"
},
"signatureDerBase64": {
"description": "ECDSA signature DER encoded as base64.",
"type": "string"
},
"signedPayloadBase64": {
"description": "Exact bytes signed by the device key, encoded as base64. Verifiers must verify this byte string directly and must not reserialize `payload`.",
"type": "string"
}
},
"required": [
"algorithm",
"signatureDerBase64",
"signedPayloadBase64"
],
"title": "DeviceKeySignResponse",
"type": "object"
},
"DynamicToolCallOutputContentItem": {
"oneOf": [
{
@@ -12008,20 +11642,6 @@
"title": "ReasoningTextDeltaNotification",
"type": "object"
},
"RemoteControlClientConnectionAudience": {
"description": "Audience for a remote-control client connection device-key proof.",
"enum": [
"remote_control_client_websocket"
],
"type": "string"
},
"RemoteControlClientEnrollmentAudience": {
"description": "Audience for a remote-control client enrollment device-key proof.",
"enum": [
"remote_control_client_enrollment"
],
"type": "string"
},
"RequestId": {
"anyOf": [
{
@@ -14479,8 +14099,8 @@
},
"namespace": {
"type": [
"string",
"null"
"null",
"string"
]
},
"status": {
@@ -16992,4 +16612,4 @@
},
"title": "CodexAppServerProtocol",
"type": "object"
}
}

View File

@@ -1418,78 +1418,6 @@
"title": "App/listRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"device/key/create"
],
"title": "Device/key/createRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/DeviceKeyCreateParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/createRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"device/key/public"
],
"title": "Device/key/publicRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/DeviceKeyPublicParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/publicRequest",
"type": "object"
},
{
"properties": {
"id": {
"$ref": "#/definitions/RequestId"
},
"method": {
"enum": [
"device/key/sign"
],
"title": "Device/key/signRequestMethod",
"type": "string"
},
"params": {
"$ref": "#/definitions/DeviceKeySignParams"
}
},
"required": [
"id",
"method",
"params"
],
"title": "Device/key/signRequest",
"type": "object"
},
{
"properties": {
"id": {
@@ -4007,300 +3935,6 @@
"title": "DeprecationNoticeNotification",
"type": "object"
},
"DeviceKeyAlgorithm": {
"description": "Device-key algorithm reported at enrollment and signing boundaries.",
"enum": [
"ecdsa_p256_sha256"
],
"type": "string"
},
"DeviceKeyCreateParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Create a controller-local device key with a random key id.",
"properties": {
"accountUserId": {
"type": "string"
},
"clientId": {
"type": "string"
},
"protectionPolicy": {
"anyOf": [
{
"$ref": "#/definitions/DeviceKeyProtectionPolicy"
},
{
"type": "null"
}
],
"description": "Defaults to `hardware_only` when omitted."
}
},
"required": [
"accountUserId",
"clientId"
],
"title": "DeviceKeyCreateParams",
"type": "object"
},
"DeviceKeyCreateResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Device-key metadata and public key returned by create/public APIs.",
"properties": {
"algorithm": {
"$ref": "#/definitions/DeviceKeyAlgorithm"
},
"keyId": {
"type": "string"
},
"protectionClass": {
"$ref": "#/definitions/DeviceKeyProtectionClass"
},
"publicKeySpkiDerBase64": {
"description": "SubjectPublicKeyInfo DER encoded as base64.",
"type": "string"
}
},
"required": [
"algorithm",
"keyId",
"protectionClass",
"publicKeySpkiDerBase64"
],
"title": "DeviceKeyCreateResponse",
"type": "object"
},
"DeviceKeyProtectionClass": {
"description": "Platform protection class for a controller-local device key.",
"enum": [
"hardware_secure_enclave",
"hardware_tpm",
"os_protected_nonextractable"
],
"type": "string"
},
"DeviceKeyProtectionPolicy": {
"description": "Protection policy for creating or loading a controller-local device key.",
"enum": [
"hardware_only",
"allow_os_protected_nonextractable"
],
"type": "string"
},
"DeviceKeyPublicParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Fetch a controller-local device key public key by id.",
"properties": {
"keyId": {
"type": "string"
}
},
"required": [
"keyId"
],
"title": "DeviceKeyPublicParams",
"type": "object"
},
"DeviceKeyPublicResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Device-key public metadata returned by `device/key/public`.",
"properties": {
"algorithm": {
"$ref": "#/definitions/DeviceKeyAlgorithm"
},
"keyId": {
"type": "string"
},
"protectionClass": {
"$ref": "#/definitions/DeviceKeyProtectionClass"
},
"publicKeySpkiDerBase64": {
"description": "SubjectPublicKeyInfo DER encoded as base64.",
"type": "string"
}
},
"required": [
"algorithm",
"keyId",
"protectionClass",
"publicKeySpkiDerBase64"
],
"title": "DeviceKeyPublicResponse",
"type": "object"
},
"DeviceKeySignParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Sign an accepted structured payload with a controller-local device key.",
"properties": {
"keyId": {
"type": "string"
},
"payload": {
"$ref": "#/definitions/DeviceKeySignPayload"
}
},
"required": [
"keyId",
"payload"
],
"title": "DeviceKeySignParams",
"type": "object"
},
"DeviceKeySignPayload": {
"description": "Structured payloads accepted by `device/key/sign`.",
"oneOf": [
{
"description": "Payload bound to one remote-control controller websocket `/client` connection challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientConnectionAudience"
},
"clientId": {
"type": "string"
},
"nonce": {
"type": "string"
},
"scopes": {
"description": "Must contain exactly `remote_control_controller_websocket`.",
"items": {
"type": "string"
},
"type": "array"
},
"sessionId": {
"description": "Backend-issued websocket session id that this proof authorizes.",
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "Websocket route path that this proof authorizes.",
"type": "string"
},
"tokenExpiresAt": {
"description": "Remote-control token expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"tokenSha256Base64url": {
"description": "SHA-256 of the controller-scoped remote-control token, encoded as unpadded base64url.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientConnection"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"clientId",
"nonce",
"scopes",
"sessionId",
"targetOrigin",
"targetPath",
"tokenExpiresAt",
"tokenSha256Base64url",
"type"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayload",
"type": "object"
},
{
"description": "Payload bound to a remote-control client `/client/enroll` ownership challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientEnrollmentAudience"
},
"challengeExpiresAt": {
"description": "Enrollment challenge expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"challengeId": {
"description": "Backend-issued enrollment challenge id that this proof authorizes.",
"type": "string"
},
"clientId": {
"type": "string"
},
"deviceIdentitySha256Base64url": {
"description": "SHA-256 of the requested device identity operation, encoded as unpadded base64url.",
"type": "string"
},
"nonce": {
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "HTTP route path that this proof authorizes.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientEnrollment"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"challengeExpiresAt",
"challengeId",
"clientId",
"deviceIdentitySha256Base64url",
"nonce",
"targetOrigin",
"targetPath",
"type"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayload",
"type": "object"
}
]
},
"DeviceKeySignResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "ASN.1 DER signature returned by `device/key/sign`.",
"properties": {
"algorithm": {
"$ref": "#/definitions/DeviceKeyAlgorithm"
},
"signatureDerBase64": {
"description": "ECDSA signature DER encoded as base64.",
"type": "string"
},
"signedPayloadBase64": {
"description": "Exact bytes signed by the device key, encoded as base64. Verifiers must verify this byte string directly and must not reserialize `payload`.",
"type": "string"
}
},
"required": [
"algorithm",
"signatureDerBase64",
"signedPayloadBase64"
],
"title": "DeviceKeySignResponse",
"type": "object"
},
"DynamicToolCallOutputContentItem": {
"oneOf": [
{
@@ -8774,20 +8408,6 @@
"title": "ReasoningTextDeltaNotification",
"type": "object"
},
"RemoteControlClientConnectionAudience": {
"description": "Audience for a remote-control client connection device-key proof.",
"enum": [
"remote_control_client_websocket"
],
"type": "string"
},
"RemoteControlClientEnrollmentAudience": {
"description": "Audience for a remote-control client enrollment device-key proof.",
"enum": [
"remote_control_client_enrollment"
],
"type": "string"
},
"RequestId": {
"anyOf": [
{

View File

@@ -1,39 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeyProtectionPolicy": {
"description": "Protection policy for creating or loading a controller-local device key.",
"enum": [
"hardware_only",
"allow_os_protected_nonextractable"
],
"type": "string"
}
},
"description": "Create a controller-local device key with a random key id.",
"properties": {
"accountUserId": {
"type": "string"
},
"clientId": {
"type": "string"
},
"protectionPolicy": {
"anyOf": [
{
"$ref": "#/definitions/DeviceKeyProtectionPolicy"
},
{
"type": "null"
}
],
"description": "Defaults to `hardware_only` when omitted."
}
},
"required": [
"accountUserId",
"clientId"
],
"title": "DeviceKeyCreateParams",
"type": "object"
}

View File

@@ -1,45 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeyAlgorithm": {
"description": "Device-key algorithm reported at enrollment and signing boundaries.",
"enum": [
"ecdsa_p256_sha256"
],
"type": "string"
},
"DeviceKeyProtectionClass": {
"description": "Platform protection class for a controller-local device key.",
"enum": [
"hardware_secure_enclave",
"hardware_tpm",
"os_protected_nonextractable"
],
"type": "string"
}
},
"description": "Device-key metadata and public key returned by create/public APIs.",
"properties": {
"algorithm": {
"$ref": "#/definitions/DeviceKeyAlgorithm"
},
"keyId": {
"type": "string"
},
"protectionClass": {
"$ref": "#/definitions/DeviceKeyProtectionClass"
},
"publicKeySpkiDerBase64": {
"description": "SubjectPublicKeyInfo DER encoded as base64.",
"type": "string"
}
},
"required": [
"algorithm",
"keyId",
"protectionClass",
"publicKeySpkiDerBase64"
],
"title": "DeviceKeyCreateResponse",
"type": "object"
}

View File

@@ -1,14 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Fetch a controller-local device key public key by id.",
"properties": {
"keyId": {
"type": "string"
}
},
"required": [
"keyId"
],
"title": "DeviceKeyPublicParams",
"type": "object"
}

View File

@@ -1,45 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeyAlgorithm": {
"description": "Device-key algorithm reported at enrollment and signing boundaries.",
"enum": [
"ecdsa_p256_sha256"
],
"type": "string"
},
"DeviceKeyProtectionClass": {
"description": "Platform protection class for a controller-local device key.",
"enum": [
"hardware_secure_enclave",
"hardware_tpm",
"os_protected_nonextractable"
],
"type": "string"
}
},
"description": "Device-key public metadata returned by `device/key/public`.",
"properties": {
"algorithm": {
"$ref": "#/definitions/DeviceKeyAlgorithm"
},
"keyId": {
"type": "string"
},
"protectionClass": {
"$ref": "#/definitions/DeviceKeyProtectionClass"
},
"publicKeySpkiDerBase64": {
"description": "SubjectPublicKeyInfo DER encoded as base64.",
"type": "string"
}
},
"required": [
"algorithm",
"keyId",
"protectionClass",
"publicKeySpkiDerBase64"
],
"title": "DeviceKeyPublicResponse",
"type": "object"
}

View File

@@ -1,165 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeySignPayload": {
"description": "Structured payloads accepted by `device/key/sign`.",
"oneOf": [
{
"description": "Payload bound to one remote-control controller websocket `/client` connection challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientConnectionAudience"
},
"clientId": {
"type": "string"
},
"nonce": {
"type": "string"
},
"scopes": {
"description": "Must contain exactly `remote_control_controller_websocket`.",
"items": {
"type": "string"
},
"type": "array"
},
"sessionId": {
"description": "Backend-issued websocket session id that this proof authorizes.",
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "Websocket route path that this proof authorizes.",
"type": "string"
},
"tokenExpiresAt": {
"description": "Remote-control token expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"tokenSha256Base64url": {
"description": "SHA-256 of the controller-scoped remote-control token, encoded as unpadded base64url.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientConnection"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"clientId",
"nonce",
"scopes",
"sessionId",
"targetOrigin",
"targetPath",
"tokenExpiresAt",
"tokenSha256Base64url",
"type"
],
"title": "RemoteControlClientConnectionDeviceKeySignPayload",
"type": "object"
},
{
"description": "Payload bound to a remote-control client `/client/enroll` ownership challenge.",
"properties": {
"accountUserId": {
"type": "string"
},
"audience": {
"$ref": "#/definitions/RemoteControlClientEnrollmentAudience"
},
"challengeExpiresAt": {
"description": "Enrollment challenge expiration as Unix seconds.",
"format": "int64",
"type": "integer"
},
"challengeId": {
"description": "Backend-issued enrollment challenge id that this proof authorizes.",
"type": "string"
},
"clientId": {
"type": "string"
},
"deviceIdentitySha256Base64url": {
"description": "SHA-256 of the requested device identity operation, encoded as unpadded base64url.",
"type": "string"
},
"nonce": {
"type": "string"
},
"targetOrigin": {
"description": "Origin of the backend endpoint that issued the challenge and will verify this proof.",
"type": "string"
},
"targetPath": {
"description": "HTTP route path that this proof authorizes.",
"type": "string"
},
"type": {
"enum": [
"remoteControlClientEnrollment"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayloadType",
"type": "string"
}
},
"required": [
"accountUserId",
"audience",
"challengeExpiresAt",
"challengeId",
"clientId",
"deviceIdentitySha256Base64url",
"nonce",
"targetOrigin",
"targetPath",
"type"
],
"title": "RemoteControlClientEnrollmentDeviceKeySignPayload",
"type": "object"
}
]
},
"RemoteControlClientConnectionAudience": {
"description": "Audience for a remote-control client connection device-key proof.",
"enum": [
"remote_control_client_websocket"
],
"type": "string"
},
"RemoteControlClientEnrollmentAudience": {
"description": "Audience for a remote-control client enrollment device-key proof.",
"enum": [
"remote_control_client_enrollment"
],
"type": "string"
}
},
"description": "Sign an accepted structured payload with a controller-local device key.",
"properties": {
"keyId": {
"type": "string"
},
"payload": {
"$ref": "#/definitions/DeviceKeySignPayload"
}
},
"required": [
"keyId",
"payload"
],
"title": "DeviceKeySignParams",
"type": "object"
}

View File

@@ -1,33 +0,0 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"DeviceKeyAlgorithm": {
"description": "Device-key algorithm reported at enrollment and signing boundaries.",
"enum": [
"ecdsa_p256_sha256"
],
"type": "string"
}
},
"description": "ASN.1 DER signature returned by `device/key/sign`.",
"properties": {
"algorithm": {
"$ref": "#/definitions/DeviceKeyAlgorithm"
},
"signatureDerBase64": {
"description": "ECDSA signature DER encoded as base64.",
"type": "string"
},
"signedPayloadBase64": {
"description": "Exact bytes signed by the device key, encoded as base64. Verifiers must verify this byte string directly and must not reserialize `payload`.",
"type": "string"
}
},
"required": [
"algorithm",
"signatureDerBase64",
"signedPayloadBase64"
],
"title": "DeviceKeySignResponse",
"type": "object"
}

File diff suppressed because one or more lines are too long

View File

@@ -1,8 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Device-key algorithm reported at enrollment and signing boundaries.
*/
export type DeviceKeyAlgorithm = "ecdsa_p256_sha256";

View File

@@ -1,13 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DeviceKeyProtectionPolicy } from "./DeviceKeyProtectionPolicy";
/**
* Create a controller-local device key with a random key id.
*/
export type DeviceKeyCreateParams = {
/**
* Defaults to `hardware_only` when omitted.
*/
protectionPolicy?: DeviceKeyProtectionPolicy | null, accountUserId: string, clientId: string, };

View File

@@ -1,14 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DeviceKeyAlgorithm } from "./DeviceKeyAlgorithm";
import type { DeviceKeyProtectionClass } from "./DeviceKeyProtectionClass";
/**
* Device-key metadata and public key returned by create/public APIs.
*/
export type DeviceKeyCreateResponse = { keyId: string,
/**
* SubjectPublicKeyInfo DER encoded as base64.
*/
publicKeySpkiDerBase64: string, algorithm: DeviceKeyAlgorithm, protectionClass: DeviceKeyProtectionClass, };

View File

@@ -1,8 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Platform protection class for a controller-local device key.
*/
export type DeviceKeyProtectionClass = "hardware_secure_enclave" | "hardware_tpm" | "os_protected_nonextractable";

View File

@@ -1,8 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Protection policy for creating or loading a controller-local device key.
*/
export type DeviceKeyProtectionPolicy = "hardware_only" | "allow_os_protected_nonextractable";

View File

@@ -1,8 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Fetch a controller-local device key public key by id.
*/
export type DeviceKeyPublicParams = { keyId: string, };

View File

@@ -1,14 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DeviceKeyAlgorithm } from "./DeviceKeyAlgorithm";
import type { DeviceKeyProtectionClass } from "./DeviceKeyProtectionClass";
/**
* Device-key public metadata returned by `device/key/public`.
*/
export type DeviceKeyPublicResponse = { keyId: string,
/**
* SubjectPublicKeyInfo DER encoded as base64.
*/
publicKeySpkiDerBase64: string, algorithm: DeviceKeyAlgorithm, protectionClass: DeviceKeyProtectionClass, };

View File

@@ -1,9 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DeviceKeySignPayload } from "./DeviceKeySignPayload";
/**
* Sign an accepted structured payload with a controller-local device key.
*/
export type DeviceKeySignParams = { keyId: string, payload: DeviceKeySignPayload, };

View File

@@ -1,54 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { RemoteControlClientConnectionAudience } from "./RemoteControlClientConnectionAudience";
import type { RemoteControlClientEnrollmentAudience } from "./RemoteControlClientEnrollmentAudience";
/**
* Structured payloads accepted by `device/key/sign`.
*/
export type DeviceKeySignPayload = { "type": "remoteControlClientConnection", nonce: string, audience: RemoteControlClientConnectionAudience,
/**
* Backend-issued websocket session id that this proof authorizes.
*/
sessionId: string,
/**
* Origin of the backend endpoint that issued the challenge and will verify this proof.
*/
targetOrigin: string,
/**
* Websocket route path that this proof authorizes.
*/
targetPath: string, accountUserId: string, clientId: string,
/**
* Remote-control token expiration as Unix seconds.
*/
tokenExpiresAt: number,
/**
* SHA-256 of the controller-scoped remote-control token, encoded as unpadded base64url.
*/
tokenSha256Base64url: string,
/**
* Must contain exactly `remote_control_controller_websocket`.
*/
scopes: Array<string>, } | { "type": "remoteControlClientEnrollment", nonce: string, audience: RemoteControlClientEnrollmentAudience,
/**
* Backend-issued enrollment challenge id that this proof authorizes.
*/
challengeId: string,
/**
* Origin of the backend endpoint that issued the challenge and will verify this proof.
*/
targetOrigin: string,
/**
* HTTP route path that this proof authorizes.
*/
targetPath: string, accountUserId: string, clientId: string,
/**
* SHA-256 of the requested device identity operation, encoded as unpadded base64url.
*/
deviceIdentitySha256Base64url: string,
/**
* Enrollment challenge expiration as Unix seconds.
*/
challengeExpiresAt: number, };

View File

@@ -1,18 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DeviceKeyAlgorithm } from "./DeviceKeyAlgorithm";
/**
* ASN.1 DER signature returned by `device/key/sign`.
*/
export type DeviceKeySignResponse = {
/**
* ECDSA signature DER encoded as base64.
*/
signatureDerBase64: string,
/**
* Exact bytes signed by the device key, encoded as base64. Verifiers must verify this byte
* string directly and must not reserialize `payload`.
*/
signedPayloadBase64: string, algorithm: DeviceKeyAlgorithm, };

View File

@@ -1,8 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Audience for a remote-control client connection device-key proof.
*/
export type RemoteControlClientConnectionAudience = "remote_control_client_websocket";

View File

@@ -1,8 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
/**
* Audience for a remote-control client enrollment device-key proof.
*/
export type RemoteControlClientEnrollmentAudience = "remote_control_client_enrollment";

View File

@@ -74,16 +74,6 @@ export type { ConfigWriteResponse } from "./ConfigWriteResponse";
export type { ContextCompactedNotification } from "./ContextCompactedNotification";
export type { CreditsSnapshot } from "./CreditsSnapshot";
export type { DeprecationNoticeNotification } from "./DeprecationNoticeNotification";
export type { DeviceKeyAlgorithm } from "./DeviceKeyAlgorithm";
export type { DeviceKeyCreateParams } from "./DeviceKeyCreateParams";
export type { DeviceKeyCreateResponse } from "./DeviceKeyCreateResponse";
export type { DeviceKeyProtectionClass } from "./DeviceKeyProtectionClass";
export type { DeviceKeyProtectionPolicy } from "./DeviceKeyProtectionPolicy";
export type { DeviceKeyPublicParams } from "./DeviceKeyPublicParams";
export type { DeviceKeyPublicResponse } from "./DeviceKeyPublicResponse";
export type { DeviceKeySignParams } from "./DeviceKeySignParams";
export type { DeviceKeySignPayload } from "./DeviceKeySignPayload";
export type { DeviceKeySignResponse } from "./DeviceKeySignResponse";
export type { DynamicToolCallOutputContentItem } from "./DynamicToolCallOutputContentItem";
export type { DynamicToolCallParams } from "./DynamicToolCallParams";
export type { DynamicToolCallResponse } from "./DynamicToolCallResponse";
@@ -269,8 +259,6 @@ export type { ReasoningEffortOption } from "./ReasoningEffortOption";
export type { ReasoningSummaryPartAddedNotification } from "./ReasoningSummaryPartAddedNotification";
export type { ReasoningSummaryTextDeltaNotification } from "./ReasoningSummaryTextDeltaNotification";
export type { ReasoningTextDeltaNotification } from "./ReasoningTextDeltaNotification";
export type { RemoteControlClientConnectionAudience } from "./RemoteControlClientConnectionAudience";
export type { RemoteControlClientEnrollmentAudience } from "./RemoteControlClientEnrollmentAudience";
export type { RequestPermissionProfile } from "./RequestPermissionProfile";
export type { ResidencyRequirement } from "./ResidencyRequirement";
export type { ReviewDelivery } from "./ReviewDelivery";

View File

@@ -360,18 +360,6 @@ client_request_definitions! {
params: v2::AppsListParams,
response: v2::AppsListResponse,
},
DeviceKeyCreate => "device/key/create" {
params: v2::DeviceKeyCreateParams,
response: v2::DeviceKeyCreateResponse,
},
DeviceKeyPublic => "device/key/public" {
params: v2::DeviceKeyPublicParams,
response: v2::DeviceKeyPublicResponse,
},
DeviceKeySign => "device/key/sign" {
params: v2::DeviceKeySignParams,
response: v2::DeviceKeySignResponse,
},
FsReadFile => "fs/readFile" {
params: v2::FsReadFileParams,
response: v2::FsReadFileResponse,

View File

@@ -2494,164 +2494,6 @@ pub struct FeedbackUploadResponse {
pub thread_id: String,
}
/// Device-key algorithm reported at enrollment and signing boundaries.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(rename_all = "snake_case", export_to = "v2/")]
pub enum DeviceKeyAlgorithm {
EcdsaP256Sha256,
}
/// Platform protection class for a controller-local device key.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(rename_all = "snake_case", export_to = "v2/")]
pub enum DeviceKeyProtectionClass {
HardwareSecureEnclave,
HardwareTpm,
OsProtectedNonextractable,
}
/// Protection policy for creating or loading a controller-local device key.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(rename_all = "snake_case", export_to = "v2/")]
pub enum DeviceKeyProtectionPolicy {
HardwareOnly,
AllowOsProtectedNonextractable,
}
/// Create a controller-local device key with a random key id.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct DeviceKeyCreateParams {
/// Defaults to `hardware_only` when omitted.
#[ts(optional = nullable)]
pub protection_policy: Option<DeviceKeyProtectionPolicy>,
pub account_user_id: String,
pub client_id: String,
}
/// Device-key metadata and public key returned by create/public APIs.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct DeviceKeyCreateResponse {
pub key_id: String,
/// SubjectPublicKeyInfo DER encoded as base64.
pub public_key_spki_der_base64: String,
pub algorithm: DeviceKeyAlgorithm,
pub protection_class: DeviceKeyProtectionClass,
}
/// Fetch a controller-local device key public key by id.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct DeviceKeyPublicParams {
pub key_id: String,
}
/// Device-key public metadata returned by `device/key/public`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct DeviceKeyPublicResponse {
pub key_id: String,
/// SubjectPublicKeyInfo DER encoded as base64.
pub public_key_spki_der_base64: String,
pub algorithm: DeviceKeyAlgorithm,
pub protection_class: DeviceKeyProtectionClass,
}
/// Audience for a remote-control client connection device-key proof.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(rename_all = "snake_case", export_to = "v2/")]
pub enum RemoteControlClientConnectionAudience {
RemoteControlClientWebsocket,
}
/// Audience for a remote-control client enrollment device-key proof.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "snake_case")]
#[ts(rename_all = "snake_case", export_to = "v2/")]
pub enum RemoteControlClientEnrollmentAudience {
RemoteControlClientEnrollment,
}
/// Structured payloads accepted by `device/key/sign`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(tag = "type", rename_all = "camelCase")]
#[ts(tag = "type", export_to = "v2/")]
pub enum DeviceKeySignPayload {
/// Payload bound to one remote-control controller websocket `/client` connection challenge.
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
RemoteControlClientConnection {
nonce: String,
audience: RemoteControlClientConnectionAudience,
/// Backend-issued websocket session id that this proof authorizes.
session_id: String,
/// Origin of the backend endpoint that issued the challenge and will verify this proof.
target_origin: String,
/// Websocket route path that this proof authorizes.
target_path: String,
account_user_id: String,
client_id: String,
/// Remote-control token expiration as Unix seconds.
#[ts(type = "number")]
token_expires_at: i64,
/// SHA-256 of the controller-scoped remote-control token, encoded as unpadded base64url.
token_sha256_base64url: String,
/// Must contain exactly `remote_control_controller_websocket`.
scopes: Vec<String>,
},
/// Payload bound to a remote-control client `/client/enroll` ownership challenge.
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
RemoteControlClientEnrollment {
nonce: String,
audience: RemoteControlClientEnrollmentAudience,
/// Backend-issued enrollment challenge id that this proof authorizes.
challenge_id: String,
/// Origin of the backend endpoint that issued the challenge and will verify this proof.
target_origin: String,
/// HTTP route path that this proof authorizes.
target_path: String,
account_user_id: String,
client_id: String,
/// SHA-256 of the requested device identity operation, encoded as unpadded base64url.
device_identity_sha256_base64url: String,
/// Enrollment challenge expiration as Unix seconds.
#[ts(type = "number")]
challenge_expires_at: i64,
},
}
/// Sign an accepted structured payload with a controller-local device key.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct DeviceKeySignParams {
pub key_id: String,
pub payload: DeviceKeySignPayload,
}
/// ASN.1 DER signature returned by `device/key/sign`.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct DeviceKeySignResponse {
/// ECDSA signature DER encoded as base64.
pub signature_der_base64: String,
/// Exact bytes signed by the device key, encoded as base64. Verifiers must verify this byte
/// string directly and must not reserialize `payload`.
pub signed_payload_base64: String,
pub algorithm: DeviceKeyAlgorithm,
}
/// Read a file from the host filesystem.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
@@ -7544,181 +7386,6 @@ mod tests {
assert_eq!(decoded, params);
}
#[test]
fn device_key_create_params_round_trip_uses_protection_policy() {
let params = DeviceKeyCreateParams {
protection_policy: None,
account_user_id: "account-user-1".to_string(),
client_id: "cli_123".to_string(),
};
let value = serde_json::to_value(&params).expect("serialize device/key/create params");
assert_eq!(
value,
json!({
"accountUserId": "account-user-1",
"clientId": "cli_123",
"protectionPolicy": null,
})
);
let decoded = serde_json::from_value::<DeviceKeyCreateParams>(value)
.expect("deserialize device/key/create params");
assert_eq!(decoded, params);
let params = DeviceKeyCreateParams {
protection_policy: Some(DeviceKeyProtectionPolicy::AllowOsProtectedNonextractable),
account_user_id: "account-user-1".to_string(),
client_id: "cli_123".to_string(),
};
let value = serde_json::to_value(&params)
.expect("serialize device/key/create params with protection policy");
assert_eq!(
value,
json!({
"accountUserId": "account-user-1",
"clientId": "cli_123",
"protectionPolicy": "allow_os_protected_nonextractable",
})
);
}
#[test]
fn device_key_create_response_round_trips_protection_class() {
let response = DeviceKeyCreateResponse {
key_id: "dk_123".to_string(),
public_key_spki_der_base64: "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE".to_string(),
algorithm: DeviceKeyAlgorithm::EcdsaP256Sha256,
protection_class: DeviceKeyProtectionClass::OsProtectedNonextractable,
};
let value = serde_json::to_value(&response).expect("serialize device/key/create response");
assert_eq!(
value,
json!({
"keyId": "dk_123",
"publicKeySpkiDerBase64": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE",
"algorithm": "ecdsa_p256_sha256",
"protectionClass": "os_protected_nonextractable",
})
);
let decoded = serde_json::from_value::<DeviceKeyCreateResponse>(value)
.expect("deserialize device/key/create response");
assert_eq!(decoded, response);
}
#[test]
fn device_key_sign_params_round_trip_uses_accepted_payload_enum() {
let params = DeviceKeySignParams {
key_id: "dk_123".to_string(),
payload: DeviceKeySignPayload::RemoteControlClientConnection {
nonce: "nonce-1".to_string(),
audience: RemoteControlClientConnectionAudience::RemoteControlClientWebsocket,
session_id: "wssess_123".to_string(),
target_origin: "https://chatgpt.com".to_string(),
target_path: "/api/codex/remote/control/client".to_string(),
account_user_id: "account-user-1".to_string(),
client_id: "cli_123".to_string(),
token_sha256_base64url: "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU".to_string(),
token_expires_at: 1_700_000_000,
scopes: vec!["remote_control_controller_websocket".to_string()],
},
};
let value = serde_json::to_value(&params).expect("serialize device/key/sign params");
assert_eq!(
value,
json!({
"keyId": "dk_123",
"payload": {
"type": "remoteControlClientConnection",
"nonce": "nonce-1",
"audience": "remote_control_client_websocket",
"sessionId": "wssess_123",
"targetOrigin": "https://chatgpt.com",
"targetPath": "/api/codex/remote/control/client",
"accountUserId": "account-user-1",
"clientId": "cli_123",
"tokenSha256Base64url": "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU",
"tokenExpiresAt": 1_700_000_000,
"scopes": ["remote_control_controller_websocket"],
},
})
);
let decoded = serde_json::from_value::<DeviceKeySignParams>(value)
.expect("deserialize device/key/sign params");
assert_eq!(decoded, params);
}
#[test]
fn device_key_sign_params_round_trip_uses_enrollment_payload() {
let params = DeviceKeySignParams {
key_id: "dk_123".to_string(),
payload: DeviceKeySignPayload::RemoteControlClientEnrollment {
nonce: "nonce-1".to_string(),
audience: RemoteControlClientEnrollmentAudience::RemoteControlClientEnrollment,
challenge_id: "rch_123".to_string(),
target_origin: "https://chatgpt.com".to_string(),
target_path: "/wham/remote/control/client/enroll".to_string(),
account_user_id: "account-user-1".to_string(),
client_id: "cli_123".to_string(),
device_identity_sha256_base64url: "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU"
.to_string(),
challenge_expires_at: 1_700_000_000,
},
};
let value = serde_json::to_value(&params)
.expect("serialize device/key/sign params with enrollment payload");
assert_eq!(
value,
json!({
"keyId": "dk_123",
"payload": {
"type": "remoteControlClientEnrollment",
"nonce": "nonce-1",
"audience": "remote_control_client_enrollment",
"challengeId": "rch_123",
"targetOrigin": "https://chatgpt.com",
"targetPath": "/wham/remote/control/client/enroll",
"accountUserId": "account-user-1",
"clientId": "cli_123",
"deviceIdentitySha256Base64url": "47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU",
"challengeExpiresAt": 1_700_000_000,
},
})
);
let decoded = serde_json::from_value::<DeviceKeySignParams>(value)
.expect("deserialize device/key/sign params with enrollment payload");
assert_eq!(decoded, params);
}
#[test]
fn device_key_sign_response_returns_signed_payload_bytes() {
let response = DeviceKeySignResponse {
signature_der_base64: "MEUCIQD".to_string(),
signed_payload_base64: "eyJkb21haW4iOiJjb2RleA".to_string(),
algorithm: DeviceKeyAlgorithm::EcdsaP256Sha256,
};
let value = serde_json::to_value(&response).expect("serialize device/key/sign response");
assert_eq!(
value,
json!({
"signatureDerBase64": "MEUCIQD",
"signedPayloadBase64": "eyJkb21haW4iOiJjb2RleA",
"algorithm": "ecdsa_p256_sha256",
})
);
let decoded = serde_json::from_value::<DeviceKeySignResponse>(value)
.expect("deserialize device/key/sign response");
assert_eq!(decoded, response);
}
#[test]
fn fs_create_directory_params_round_trip_with_default_recursive() {
let params = FsCreateDirectoryParams {

View File

@@ -1186,11 +1186,6 @@ impl CodexMessageProcessor {
self.command_exec_terminate(to_connection_request_id(request_id), params)
.await;
}
ClientRequest::DeviceKeyCreate { .. }
| ClientRequest::DeviceKeyPublic { .. }
| ClientRequest::DeviceKeySign { .. } => {
warn!("Device key request reached CodexMessageProcessor unexpectedly");
}
ClientRequest::ConfigRead { .. }
| ClientRequest::ConfigValueWrite { .. }
| ClientRequest::ConfigBatchWrite { .. }
@@ -6046,19 +6041,17 @@ impl CodexMessageProcessor {
params: McpServerToolCallParams,
) {
let outgoing = Arc::clone(&self.outgoing);
let thread_id = params.thread_id.clone();
let (_, thread) = match self.load_thread(&thread_id).await {
let (_, thread) = match self.load_thread(&params.thread_id).await {
Ok(thread) => thread,
Err(error) => {
self.outgoing.send_error(request_id, error).await;
return;
}
};
let meta = with_mcp_tool_call_thread_id_meta(params.meta, &thread_id);
tokio::spawn(async move {
let result = thread
.call_mcp_tool(&params.server, &params.tool, params.arguments, meta)
.call_mcp_tool(&params.server, &params.tool, params.arguments, params.meta)
.await;
match result {
Ok(result) => {
@@ -9898,32 +9891,6 @@ fn thread_store_archive_error(operation: &str, err: ThreadStoreError) -> JSONRPC
}
}
const MCP_TOOL_THREAD_ID_META_KEY: &str = "threadId";
fn with_mcp_tool_call_thread_id_meta(
meta: Option<serde_json::Value>,
thread_id: &str,
) -> Option<serde_json::Value> {
match meta {
Some(serde_json::Value::Object(mut map)) => {
map.insert(
MCP_TOOL_THREAD_ID_META_KEY.to_string(),
serde_json::Value::String(thread_id.to_string()),
);
Some(serde_json::Value::Object(map))
}
None => {
let mut map = serde_json::Map::new();
map.insert(
MCP_TOOL_THREAD_ID_META_KEY.to_string(),
serde_json::Value::String(thread_id.to_string()),
);
Some(serde_json::Value::Object(map))
}
other => other,
}
}
fn summary_from_stored_thread(
thread: StoredThread,
fallback_provider: &str,

View File

@@ -1 +0,0 @@
pub(crate) mod external_agent_config;

View File

@@ -1,8 +1,3 @@
use crate::config::external_agent_config::ExternalAgentConfigDetectOptions;
use crate::config::external_agent_config::ExternalAgentConfigMigrationItem as CoreMigrationItem;
use crate::config::external_agent_config::ExternalAgentConfigMigrationItemType as CoreMigrationItemType;
use crate::config::external_agent_config::ExternalAgentConfigService;
use crate::config::external_agent_config::PendingPluginImport;
use crate::error_code::INTERNAL_ERROR_CODE;
use codex_app_server_protocol::ExternalAgentConfigDetectParams;
use codex_app_server_protocol::ExternalAgentConfigDetectResponse;
@@ -12,6 +7,11 @@ use codex_app_server_protocol::ExternalAgentConfigMigrationItemType;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_app_server_protocol::MigrationDetails;
use codex_app_server_protocol::PluginsMigration;
use codex_core::external_agent_config::ExternalAgentConfigDetectOptions;
use codex_core::external_agent_config::ExternalAgentConfigMigrationItem as CoreMigrationItem;
use codex_core::external_agent_config::ExternalAgentConfigMigrationItemType as CoreMigrationItemType;
use codex_core::external_agent_config::ExternalAgentConfigService;
use codex_core::external_agent_config::PendingPluginImport;
use std::io;
use std::path::PathBuf;
@@ -108,12 +108,12 @@ impl ExternalAgentConfigApi {
description: migration_item.description,
cwd: migration_item.cwd,
details: migration_item.details.map(|details| {
crate::config::external_agent_config::MigrationDetails {
codex_core::external_agent_config::MigrationDetails {
plugins: details
.plugins
.into_iter()
.map(|plugin| {
crate::config::external_agent_config::PluginsMigration {
codex_core::external_agent_config::PluginsMigration {
marketplace_name: plugin.marketplace_name,
plugin_names: plugin.plugin_names,
}

View File

@@ -70,7 +70,6 @@ mod app_server_tracing;
mod bespoke_event_handling;
mod codex_message_processor;
mod command_exec;
mod config;
mod config_api;
mod dynamic_tools;
mod error_code;

View File

@@ -83,11 +83,10 @@ url = "{mcp_server_url}/mcp"
)
.await??;
let ThreadStartResponse { thread, .. } = to_response(thread_start_resp)?;
let thread_id = thread.id.clone();
let tool_call_request_id = mcp
.send_mcp_server_tool_call_request(McpServerToolCallParams {
thread_id: thread_id.clone(),
thread_id: thread.id,
server: TEST_SERVER_NAME.to_string(),
tool: TEST_TOOL_NAME.to_string(),
arguments: Some(json!({
@@ -115,7 +114,6 @@ url = "{mcp_server_url}/mcp"
response.structured_content,
Some(json!({
"echoed": "hello from app",
"threadId": thread_id,
}))
);
assert_eq!(response.is_error, Some(false));
@@ -205,7 +203,7 @@ impl ServerHandler for ToolAppsMcpServer {
async fn call_tool(
&self,
request: CallToolRequestParams,
context: RequestContext<RoleServer>,
_context: RequestContext<RoleServer>,
) -> Result<CallToolResult, rmcp::ErrorData> {
assert_eq!(request.name.as_ref(), TEST_TOOL_NAME);
let message = request
@@ -214,19 +212,12 @@ impl ServerHandler for ToolAppsMcpServer {
.and_then(|arguments| arguments.get("message"))
.and_then(|value| value.as_str())
.unwrap_or_default();
let thread_id = context
.meta
.0
.get("threadId")
.and_then(|value| value.as_str())
.unwrap_or_default();
let mut meta = Meta::new();
meta.0.insert("calledBy".to_string(), json!("mcp-app"));
let mut result = CallToolResult::structured(json!({
"echoed": message,
"threadId": thread_id,
}));
result.content = vec![Content::text(format!("echo: {message}"))];
result.meta = Some(meta);

View File

@@ -104,6 +104,7 @@ sha2 = { workspace = true }
shlex = { workspace = true }
similar = { workspace = true }
tempfile = { workspace = true }
test-log = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = [
"io-std",
@@ -165,7 +166,6 @@ opentelemetry_sdk = { workspace = true, features = [
] }
serial_test = { workspace = true }
tempfile = { workspace = true }
test-log = { workspace = true }
tracing-opentelemetry = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-test = { workspace = true, features = ["no-env-filter"] }

View File

@@ -1,18 +1,18 @@
use codex_config::types::PluginConfig;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::plugins::MarketplaceAddRequest;
use codex_core::plugins::PluginId;
use codex_core::plugins::PluginInstallRequest;
use codex_core::plugins::PluginsManager;
use codex_core::plugins::add_marketplace;
use codex_core::plugins::is_local_marketplace_source;
use crate::config::Config;
use crate::config::ConfigBuilder;
use crate::plugins::MarketplaceAddRequest;
use crate::plugins::PluginId;
use crate::plugins::PluginInstallRequest;
use crate::plugins::PluginsManager;
use crate::plugins::add_marketplace;
use crate::plugins::configured_plugins_from_stack;
use crate::plugins::find_marketplace_manifest_path;
use crate::plugins::is_local_marketplace_source;
use crate::plugins::parse_marketplace_source;
use codex_core_plugins::marketplace::MarketplacePluginInstallPolicy;
use codex_core_plugins::marketplace::find_marketplace_manifest_path;
use codex_protocol::protocol::Product;
use serde_json::Value as JsonValue;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::ffi::OsString;
use std::fs;
@@ -29,13 +29,13 @@ const EXTERNAL_OFFICIAL_MARKETPLACE_NAME: &str = "claude-plugins-official";
const EXTERNAL_OFFICIAL_MARKETPLACE_SOURCE: &str = "anthropics/claude-plugins-official";
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ExternalAgentConfigDetectOptions {
pub struct ExternalAgentConfigDetectOptions {
pub include_home: bool,
pub cwds: Option<Vec<PathBuf>>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ExternalAgentConfigMigrationItemType {
pub enum ExternalAgentConfigMigrationItemType {
Config,
Skills,
AgentsMd,
@@ -44,24 +44,24 @@ pub(crate) enum ExternalAgentConfigMigrationItemType {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct PluginsMigration {
pub struct PluginsMigration {
pub marketplace_name: String,
pub plugin_names: Vec<String>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct MigrationDetails {
pub struct MigrationDetails {
pub plugins: Vec<PluginsMigration>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct PendingPluginImport {
pub struct PendingPluginImport {
pub cwd: Option<PathBuf>,
pub details: MigrationDetails,
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub(crate) struct PluginImportOutcome {
pub struct PluginImportOutcome {
pub succeeded_marketplaces: Vec<String>,
pub succeeded_plugin_ids: Vec<String>,
pub failed_marketplaces: Vec<String>,
@@ -69,7 +69,7 @@ pub(crate) struct PluginImportOutcome {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ExternalAgentConfigMigrationItem {
pub struct ExternalAgentConfigMigrationItem {
pub item_type: ExternalAgentConfigMigrationItemType,
pub description: String,
pub cwd: Option<PathBuf>,
@@ -77,13 +77,13 @@ pub(crate) struct ExternalAgentConfigMigrationItem {
}
#[derive(Clone)]
pub(crate) struct ExternalAgentConfigService {
pub struct ExternalAgentConfigService {
codex_home: PathBuf,
external_agent_home: PathBuf,
}
impl ExternalAgentConfigService {
pub(crate) fn new(codex_home: PathBuf) -> Self {
pub fn new(codex_home: PathBuf) -> Self {
let external_agent_home = default_external_agent_home();
Self {
codex_home,
@@ -99,7 +99,7 @@ impl ExternalAgentConfigService {
}
}
pub(crate) async fn detect(
pub async fn detect(
&self,
params: ExternalAgentConfigDetectOptions,
) -> io::Result<Vec<ExternalAgentConfigMigrationItem>> {
@@ -119,7 +119,7 @@ impl ExternalAgentConfigService {
Ok(items)
}
pub(crate) async fn import(
pub async fn import(
&self,
migration_items: Vec<ExternalAgentConfigMigrationItem>,
) -> io::Result<Vec<PendingPluginImport>> {
@@ -297,21 +297,10 @@ impl ExternalAgentConfigService {
.await
{
Ok(config) => {
let configured_plugin_ids = config
.config_layer_stack
.get_user_layer()
.and_then(|user_layer| user_layer.config.get("plugins"))
.and_then(|plugins| {
match plugins.clone().try_into::<HashMap<String, PluginConfig>>() {
Ok(plugins) => Some(plugins),
Err(err) => {
tracing::warn!("invalid plugins config: {err}");
None
}
}
})
.map(|plugins| plugins.into_keys().collect::<HashSet<_>>())
.unwrap_or_default();
let configured_plugin_ids =
configured_plugins_from_stack(&config.config_layer_stack)
.into_keys()
.collect::<HashSet<_>>();
let configured_marketplace_plugins = configured_marketplace_plugins(
&config,
&PluginsManager::new(self.codex_home.clone()),
@@ -421,7 +410,7 @@ impl ExternalAgentConfigService {
Ok((local_details, remote_details))
}
pub(crate) async fn import_plugins(
pub async fn import_plugins(
&self,
cwd: Option<&Path>,
details: Option<MigrationDetails>,
@@ -649,7 +638,7 @@ fn extract_plugin_migration_details(
let loadable_marketplaces = collect_marketplace_import_sources(settings, source_root)
.into_iter()
.filter_map(|(marketplace_name, source)| {
is_local_marketplace_source(&source.source, source.ref_name)
parse_marketplace_source(&source.source, source.ref_name)
.ok()
.map(|_| marketplace_name)
})

View File

@@ -17,6 +17,7 @@ use codex_hooks::UserPromptSubmitRequest;
use codex_otel::HOOK_RUN_DURATION_METRIC;
use codex_otel::HOOK_RUN_METRIC;
use codex_protocol::items::TurnItem;
use codex_protocol::items::UserMessageItem;
use codex_protocol::models::DeveloperInstructions;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
@@ -34,6 +35,7 @@ use serde_json::Value;
use crate::event_mapping::parse_turn_item;
use crate::session::session::Session;
use crate::session::turn_context::TurnContext;
use crate::state::PendingTurnInput;
use crate::tools::sandboxing::PermissionRequestPayload;
pub(crate) struct HookRuntimeOutcome {
@@ -46,6 +48,12 @@ pub(crate) enum PendingInputHookDisposition {
Blocked { additional_contexts: Vec<String> },
}
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) enum PendingInputRecordOutcome {
Recorded,
Blocked,
}
pub(crate) enum PendingInputRecord {
UserMessage {
content: Vec<UserInput>,
@@ -268,6 +276,38 @@ pub(crate) async fn inspect_pending_input(
}
}
pub(crate) async fn inspect_pending_turn_input(
sess: &Arc<Session>,
turn_context: &Arc<TurnContext>,
pending_input_item: PendingTurnInput,
) -> PendingInputHookDisposition {
match pending_input_item {
PendingTurnInput::UserInput(input) => {
let response_item: ResponseItem = ResponseInputItem::from(input.clone()).into();
let user_prompt_submit_outcome = run_user_prompt_submit_hooks(
sess,
turn_context,
UserMessageItem::new(&input).message(),
)
.await;
if user_prompt_submit_outcome.should_stop {
PendingInputHookDisposition::Blocked {
additional_contexts: user_prompt_submit_outcome.additional_contexts,
}
} else {
PendingInputHookDisposition::Accepted(Box::new(PendingInputRecord::UserMessage {
content: input,
response_item,
additional_contexts: user_prompt_submit_outcome.additional_contexts,
}))
}
}
PendingTurnInput::ResponseInputItem(input) => {
inspect_pending_input(sess, turn_context, input).await
}
}
}
pub(crate) async fn record_pending_input(
sess: &Arc<Session>,
turn_context: &Arc<TurnContext>,
@@ -294,6 +334,25 @@ pub(crate) async fn record_pending_input(
}
}
pub(crate) async fn record_pending_turn_input(
sess: &Arc<Session>,
turn_context: &Arc<TurnContext>,
pending_input_item: PendingTurnInput,
) -> PendingInputRecordOutcome {
match inspect_pending_turn_input(sess, turn_context, pending_input_item).await {
PendingInputHookDisposition::Accepted(pending_input) => {
record_pending_input(sess, turn_context, *pending_input).await;
PendingInputRecordOutcome::Recorded
}
PendingInputHookDisposition::Blocked {
additional_contexts,
} => {
record_additional_contexts(sess, turn_context, additional_contexts).await;
PendingInputRecordOutcome::Blocked
}
}
}
async fn run_context_injecting_hook<Fut, Outcome>(
sess: &Arc<Session>,
turn_context: &Arc<TurnContext>,

View File

@@ -32,6 +32,7 @@ mod context_manager;
pub mod exec;
pub mod exec_env;
mod exec_policy;
pub mod external_agent_config;
pub mod file_watcher;
mod flags;
#[cfg(test)]

View File

@@ -477,8 +477,6 @@ async fn execute_mcp_tool_call(
metadata.and_then(|metadata| metadata.openai_file_input_params.as_deref()),
)
.await?;
let request_meta =
with_mcp_tool_call_thread_id_meta(request_meta, &sess.conversation_id.to_string());
let request_meta =
augment_mcp_tool_request_meta_with_sandbox_state(sess, turn_context, server, request_meta)
.await
@@ -662,7 +660,6 @@ pub(crate) struct McpToolApprovalMetadata {
const MCP_TOOL_CODEX_APPS_META_KEY: &str = "_codex_apps";
const MCP_TOOL_OPENAI_OUTPUT_TEMPLATE_META_KEY: &str = "openai/outputTemplate";
const MCP_TOOL_UI_RESOURCE_URI_META_KEY: &str = "ui/resourceUri";
const MCP_TOOL_THREAD_ID_META_KEY: &str = "threadId";
fn custom_mcp_tool_approval_mode(
turn_context: &TurnContext,
@@ -717,30 +714,6 @@ fn build_mcp_tool_call_request_meta(
(!request_meta.is_empty()).then_some(serde_json::Value::Object(request_meta))
}
fn with_mcp_tool_call_thread_id_meta(
meta: Option<serde_json::Value>,
thread_id: &str,
) -> Option<serde_json::Value> {
match meta {
Some(serde_json::Value::Object(mut map)) => {
map.insert(
MCP_TOOL_THREAD_ID_META_KEY.to_string(),
serde_json::Value::String(thread_id.to_string()),
);
Some(serde_json::Value::Object(map))
}
None => {
let mut map = serde_json::Map::new();
map.insert(
MCP_TOOL_THREAD_ID_META_KEY.to_string(),
serde_json::Value::String(thread_id.to_string()),
);
Some(serde_json::Value::Object(map))
}
other => other,
}
}
#[derive(Clone, Copy)]
struct McpToolApprovalPromptOptions {
allow_session_remember: bool,

View File

@@ -651,35 +651,6 @@ async fn codex_apps_tool_call_request_meta_includes_turn_metadata_and_codex_apps
);
}
#[test]
fn mcp_tool_call_thread_id_meta_is_added_to_request_meta() {
assert_eq!(
with_mcp_tool_call_thread_id_meta(
Some(serde_json::json!({
"source": "test-client",
"threadId": "stale-thread",
})),
"thread-live",
),
Some(serde_json::json!({
"source": "test-client",
"threadId": "thread-live",
}))
);
assert_eq!(
with_mcp_tool_call_thread_id_meta(/*meta*/ None, "thread-live"),
Some(serde_json::json!({
"threadId": "thread-live",
}))
);
assert_eq!(
with_mcp_tool_call_thread_id_meta(Some(serde_json::json!("invalid-meta")), "thread-live"),
Some(serde_json::json!("invalid-meta"))
);
}
#[test]
fn accepted_elicitation_content_converts_to_request_user_input_response() {
let response = request_user_input_response_from_elicitation_content(Some(serde_json::json!(

View File

@@ -56,7 +56,7 @@ pub async fn add_marketplace(
.map_err(|err| MarketplaceAddError::Internal(format!("failed to add marketplace: {err}")))?
}
pub fn is_local_marketplace_source(
pub(crate) fn is_local_marketplace_source(
source: &str,
explicit_ref: Option<String>,
) -> Result<bool, MarketplaceAddError> {

View File

@@ -25,6 +25,7 @@ pub use codex_plugin::validate_plugin_segment;
pub type LoadedPlugin = codex_plugin::LoadedPlugin<McpServerConfig>;
pub type PluginLoadOutcome = codex_plugin::PluginLoadOutcome<McpServerConfig>;
pub(crate) use codex_core_plugins::marketplace::find_marketplace_manifest_path;
pub(crate) use discoverable::list_tool_suggest_discoverable_plugins;
pub(crate) use injection::build_plugin_injections;
pub use installed_marketplaces::INSTALLED_MARKETPLACES_DIR;
@@ -45,11 +46,13 @@ pub use manager::PluginRemoteSyncError;
pub use manager::PluginUninstallError;
pub use manager::PluginsManager;
pub use manager::RemotePluginSyncResult;
pub(crate) use manager::configured_plugins_from_stack;
pub use marketplace_add::MarketplaceAddError;
pub use marketplace_add::MarketplaceAddOutcome;
pub use marketplace_add::MarketplaceAddRequest;
pub use marketplace_add::add_marketplace;
pub use marketplace_add::is_local_marketplace_source;
pub(crate) use marketplace_add::is_local_marketplace_source;
pub(crate) use marketplace_add::parse_marketplace_source;
pub use marketplace_remove::MarketplaceRemoveError;
pub use marketplace_remove::MarketplaceRemoveOutcome;
pub use marketplace_remove::MarketplaceRemoveRequest;

View File

@@ -252,6 +252,8 @@ use crate::agents_md::AgentsMdManager;
use crate::context::UserInstructions;
use crate::exec_policy::ExecPolicyUpdateError;
use crate::guardian::GuardianReviewSessionManager;
use crate::hook_runtime::PendingInputRecordOutcome;
use crate::hook_runtime::record_pending_turn_input;
use crate::mcp::McpManager;
use crate::memories;
use crate::network_policy_decision::execpolicy_network_rule_amendment;
@@ -269,6 +271,7 @@ use crate::skills_watcher::SkillsWatcher;
use crate::skills_watcher::SkillsWatcherEvent;
use crate::state::ActiveTurn;
use crate::state::MailboxDeliveryPhase;
use crate::state::PendingTurnInput;
use crate::state::SessionServices;
use crate::state::SessionState;
#[cfg(test)]
@@ -2888,7 +2891,7 @@ impl Session {
}
let mut turn_state = active_turn.turn_state.lock().await;
turn_state.push_pending_input(input.into());
turn_state.push_pending_input(input);
turn_state.accept_mailbox_delivery_for_current_turn();
Ok(active_turn_id.clone())
}
@@ -2975,12 +2978,18 @@ impl Session {
clippy::await_holding_invalid_type,
reason = "active turn checks and turn state updates must remain atomic"
)]
#[cfg(test)]
pub async fn prepend_pending_input(&self, input: Vec<ResponseInputItem>) -> Result<(), ()> {
let mut active = self.active_turn.lock().await;
match active.as_mut() {
Some(at) => {
let mut ts = at.turn_state.lock().await;
ts.prepend_pending_input(input);
ts.prepend_pending_input(
input
.into_iter()
.map(PendingTurnInput::ResponseInputItem)
.collect(),
);
Ok(())
}
None => Err(()),
@@ -2991,7 +3000,7 @@ impl Session {
clippy::await_holding_invalid_type,
reason = "active turn checks and turn state updates must remain atomic"
)]
pub async fn get_pending_input(&self) -> Vec<ResponseInputItem> {
pub(crate) async fn get_pending_turn_input(&self) -> Vec<PendingTurnInput> {
let (pending_input, accepts_mailbox_delivery) = {
let mut active = self.active_turn.lock().await;
match active.as_mut() {
@@ -3013,7 +3022,7 @@ impl Session {
mailbox_rx
.drain()
.into_iter()
.map(|mail| mail.to_response_input_item())
.map(|mail| PendingTurnInput::ResponseInputItem(mail.to_response_input_item()))
.collect::<Vec<_>>()
};
if pending_input.is_empty() {
@@ -3027,6 +3036,73 @@ impl Session {
}
}
#[cfg(test)]
pub async fn get_pending_input(&self) -> Vec<ResponseInputItem> {
self.get_pending_turn_input()
.await
.into_iter()
.map(ResponseInputItem::from)
.collect()
}
async fn fill_pending_input_from_mailbox_if_empty(
&self,
turn_state: &Arc<Mutex<crate::state::TurnState>>,
) {
let should_drain_mailbox = {
let ts = turn_state.lock().await;
!ts.has_pending_input() && ts.accepts_mailbox_delivery_for_current_turn()
};
if !should_drain_mailbox {
return;
}
let mailbox_items = {
let mut mailbox_rx = self.mailbox_rx.lock().await;
mailbox_rx
.drain()
.into_iter()
.map(|mail| PendingTurnInput::ResponseInputItem(mail.to_response_input_item()))
.collect::<Vec<_>>()
};
if mailbox_items.is_empty() {
return;
}
let mut ts = turn_state.lock().await;
for item in mailbox_items {
ts.push_pending_input(item);
}
}
#[expect(
clippy::await_holding_invalid_type,
reason = "pending transcript input must stay queued until hooks and history writes finish"
)]
pub(crate) async fn record_next_pending_turn_input_from_state(
self: &Arc<Self>,
turn_context: &Arc<crate::session::turn_context::TurnContext>,
turn_state: &Arc<Mutex<crate::state::TurnState>>,
) -> Option<PendingInputRecordOutcome> {
self.fill_pending_input_from_mailbox_if_empty(turn_state)
.await;
let mut ts = turn_state.lock().await;
let pending_input = ts.front_pending_input()?;
let outcome = record_pending_turn_input(self, turn_context, pending_input).await;
let _ = ts.pop_front_pending_input();
Some(outcome)
}
pub(crate) async fn record_next_pending_turn_input(
self: &Arc<Self>,
turn_context: &Arc<crate::session::turn_context::TurnContext>,
) -> Option<PendingInputRecordOutcome> {
let turn_state = self.turn_state_for_sub_id(&turn_context.sub_id).await?;
self.record_next_pending_turn_input_from_state(turn_context, &turn_state)
.await
}
/// Queue response items to be injected into the next active turn created for this session.
#[cfg(test)]
pub(crate) async fn queue_response_items_for_next_turn(&self, items: Vec<ResponseInputItem>) {

View File

@@ -310,7 +310,10 @@ async fn interrupting_regular_turn_waiting_on_startup_prewarm_emits_turn_aborted
.await;
sess.spawn_task(
Arc::clone(&tc),
Vec::new(),
vec![UserInput::Text {
text: "first prompt".to_string(),
text_elements: Vec::new(),
}],
crate::tasks::RegularTask::new(),
)
.await;
@@ -326,23 +329,51 @@ async fn interrupting_regular_turn_waiting_on_startup_prewarm_emits_turn_aborted
sess.abort_all_tasks(TurnAbortReason::Interrupted).await;
let second = tokio::time::timeout(std::time::Duration::from_secs(2), rx.recv())
.await
.expect("expected turn aborted event")
.expect("channel open");
let EventMsg::TurnAborted(TurnAbortedEvent {
let aborted = loop {
let event = tokio::time::timeout(std::time::Duration::from_secs(2), rx.recv())
.await
.expect("expected turn aborted event")
.expect("channel open");
if let EventMsg::TurnAborted(event) = event.msg {
break event;
}
};
let TurnAbortedEvent {
turn_id,
reason,
completed_at,
duration_ms,
}) = second.msg
else {
panic!("expected turn aborted event");
};
} = aborted;
assert_eq!(turn_id, Some(tc.sub_id.clone()));
assert_eq!(reason, TurnAbortReason::Interrupted);
assert!(completed_at.is_some());
assert!(duration_ms.is_some());
let history = sess.clone_history().await;
let expected_prompt = user_message("first prompt");
let prompt_idx = history
.raw_items()
.iter()
.position(|item| item == &expected_prompt);
let aborted_idx = history.raw_items().iter().position(|item| {
let ResponseItem::Message { role, content, .. } = item else {
return false;
};
role == "user"
&& content.iter().any(|content_item| {
let ContentItem::InputText { text } = content_item else {
return false;
};
TurnAborted::matches_text(text)
})
});
let (Some(prompt_idx), Some(aborted_idx)) = (prompt_idx, aborted_idx) else {
panic!(
"expected prompt and interrupted-turn marker in history: {:?}",
history.raw_items()
);
};
assert!(prompt_idx < aborted_idx);
}
fn test_model_client_session() -> crate::client::ModelClientSession {

View File

@@ -18,13 +18,9 @@ use crate::compact_remote::run_inline_remote_auto_compact_task;
use crate::connectors;
use crate::context::ContextualUserFragment;
use crate::feedback_tags;
use crate::hook_runtime::PendingInputHookDisposition;
use crate::hook_runtime::PendingInputRecordOutcome;
use crate::hook_runtime::emit_hook_completed_events;
use crate::hook_runtime::inspect_pending_input;
use crate::hook_runtime::record_additional_contexts;
use crate::hook_runtime::record_pending_input;
use crate::hook_runtime::run_pending_session_start_hooks;
use crate::hook_runtime::run_user_prompt_submit_hooks;
use crate::injection::ToolMentionKind;
use crate::injection::app_id_from_path;
use crate::injection::tool_kind_for_path;
@@ -75,7 +71,6 @@ use codex_protocol::error::CodexErr;
use codex_protocol::error::Result as CodexResult;
use codex_protocol::items::PlanItem;
use codex_protocol::items::TurnItem;
use codex_protocol::items::UserMessageItem;
use codex_protocol::items::build_hook_prompt_message;
use codex_protocol::models::BaseInstructions;
use codex_protocol::models::ContentItem;
@@ -300,30 +295,12 @@ pub(crate) async fn run_turn(
if run_pending_session_start_hooks(&sess, &turn_context).await {
return None;
}
let additional_contexts = if input.is_empty() {
Vec::new()
} else {
let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input.clone());
let response_item: ResponseItem = initial_input_for_turn.clone().into();
let user_prompt_submit_outcome = run_user_prompt_submit_hooks(
&sess,
&turn_context,
UserMessageItem::new(&input).message(),
)
.await;
if user_prompt_submit_outcome.should_stop {
record_additional_contexts(
&sess,
&turn_context,
user_prompt_submit_outcome.additional_contexts,
)
.await;
return None;
}
sess.record_user_prompt_and_emit_turn_item(turn_context.as_ref(), &input, response_item)
.await;
user_prompt_submit_outcome.additional_contexts
};
if !input.is_empty()
&& sess.record_next_pending_turn_input(&turn_context).await
!= Some(PendingInputRecordOutcome::Recorded)
{
return None;
}
sess.services
.analytics_events_client
.track_app_mentioned(tracking.clone(), mentioned_app_invocations);
@@ -334,7 +311,6 @@ pub(crate) async fn run_turn(
}
sess.merge_connector_selection(explicitly_enabled_connectors.clone())
.await;
record_additional_contexts(&sess, &turn_context, additional_contexts).await;
if !input.is_empty() {
// Track the previous-turn baseline from the regular user-turn path only so
// standalone tasks (compact/shell/review/undo) cannot suppress future
@@ -403,7 +379,7 @@ pub(crate) async fn run_turn(
};
// Pending input is drained into history before building the next model request.
// However, we defer that drain until after sampling in two cases:
// 1. At the start of a turn, so the fresh user prompt in `input` gets sampled first.
// 1. At the start of a turn, so work queued behind the fresh prompt is sampled later.
// 2. After auto-compact, when model/tool continuation needs to resume before any steer.
let mut can_drain_pending_input = input.is_empty();
@@ -412,35 +388,18 @@ pub(crate) async fn run_turn(
break;
}
// Note that pending_input would be something like a message the user
// submitted through the UI while the model was running. Though the UI
// may support this, the model might not.
let pending_input = if can_drain_pending_input {
sess.get_pending_input().await
} else {
Vec::new()
};
let mut blocked_pending_input = false;
let mut blocked_pending_input_contexts = Vec::new();
let mut requeued_pending_input = false;
let mut accepted_pending_input = Vec::new();
if !pending_input.is_empty() {
let mut pending_input_iter = pending_input.into_iter();
while let Some(pending_input_item) = pending_input_iter.next() {
match inspect_pending_input(&sess, &turn_context, pending_input_item).await {
PendingInputHookDisposition::Accepted(pending_input) => {
accepted_pending_input.push(*pending_input);
let mut has_accepted_pending_input = false;
if can_drain_pending_input {
// Note that pending input would be something like a message the user
// submitted through the UI while the model was running. Though the UI
// may support this, the model might not.
while let Some(outcome) = sess.record_next_pending_turn_input(&turn_context).await {
match outcome {
PendingInputRecordOutcome::Recorded => {
has_accepted_pending_input = true;
}
PendingInputHookDisposition::Blocked {
additional_contexts,
} => {
let remaining_pending_input = pending_input_iter.collect::<Vec<_>>();
if !remaining_pending_input.is_empty() {
let _ = sess.prepend_pending_input(remaining_pending_input).await;
requeued_pending_input = true;
}
blocked_pending_input_contexts = additional_contexts;
PendingInputRecordOutcome::Blocked => {
blocked_pending_input = true;
break;
}
@@ -448,14 +407,8 @@ pub(crate) async fn run_turn(
}
}
let has_accepted_pending_input = !accepted_pending_input.is_empty();
for pending_input in accepted_pending_input {
record_pending_input(&sess, &turn_context, pending_input).await;
}
record_additional_contexts(&sess, &turn_context, blocked_pending_input_contexts).await;
if blocked_pending_input && !has_accepted_pending_input {
if requeued_pending_input {
if sess.has_pending_input().await {
continue;
}
break;

View File

@@ -6,6 +6,7 @@ pub(crate) use service::SessionServices;
pub(crate) use session::SessionState;
pub(crate) use turn::ActiveTurn;
pub(crate) use turn::MailboxDeliveryPhase;
pub(crate) use turn::PendingTurnInput;
pub(crate) use turn::RunningTask;
pub(crate) use turn::TaskKind;
pub(crate) use turn::TurnState;

View File

@@ -13,6 +13,7 @@ use codex_protocol::dynamic_tools::DynamicToolResponse;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::request_permissions::RequestPermissionsResponse;
use codex_protocol::request_user_input::RequestUserInputResponse;
use codex_protocol::user_input::UserInput;
use codex_rmcp_client::ElicitationResponse;
use rmcp::model::RequestId;
use tokio::sync::oneshot;
@@ -77,6 +78,38 @@ pub(crate) struct RunningTask {
pub(crate) _timer: Option<codex_otel::Timer>,
}
/// Input queued for ordered transcript recording during an active turn.
///
/// User prompts keep the original `UserInput` so client-visible turn items can
/// preserve UI-only spans such as `text_elements`; model-only response input
/// items can stay in their serialized Responses API form.
#[derive(Clone, Debug, PartialEq)]
pub(crate) enum PendingTurnInput {
UserInput(Vec<UserInput>),
ResponseInputItem(ResponseInputItem),
}
impl From<PendingTurnInput> for ResponseInputItem {
fn from(value: PendingTurnInput) -> Self {
match value {
PendingTurnInput::UserInput(input) => input.into(),
PendingTurnInput::ResponseInputItem(input) => input,
}
}
}
impl From<Vec<UserInput>> for PendingTurnInput {
fn from(value: Vec<UserInput>) -> Self {
Self::UserInput(value)
}
}
impl From<ResponseInputItem> for PendingTurnInput {
fn from(value: ResponseInputItem) -> Self {
Self::ResponseInputItem(value)
}
}
impl ActiveTurn {
pub(crate) fn add_task(&mut self, task: RunningTask) {
let sub_id = task.turn_context.sub_id.clone();
@@ -101,7 +134,7 @@ pub(crate) struct TurnState {
pending_user_input: HashMap<String, oneshot::Sender<RequestUserInputResponse>>,
pending_elicitations: HashMap<(String, RequestId), oneshot::Sender<ElicitationResponse>>,
pending_dynamic_tools: HashMap<String, oneshot::Sender<DynamicToolResponse>>,
pending_input: Vec<ResponseInputItem>,
pending_input: Vec<PendingTurnInput>,
mailbox_delivery_phase: MailboxDeliveryPhase,
granted_permissions: Option<PermissionProfile>,
pub(crate) tool_calls: u64,
@@ -198,11 +231,12 @@ impl TurnState {
self.pending_dynamic_tools.remove(key)
}
pub(crate) fn push_pending_input(&mut self, input: ResponseInputItem) {
self.pending_input.push(input);
pub(crate) fn push_pending_input(&mut self, input: impl Into<PendingTurnInput>) {
self.pending_input.push(input.into());
}
pub(crate) fn prepend_pending_input(&mut self, mut input: Vec<ResponseInputItem>) {
#[cfg(test)]
pub(crate) fn prepend_pending_input(&mut self, mut input: Vec<PendingTurnInput>) {
if input.is_empty() {
return;
}
@@ -211,7 +245,19 @@ impl TurnState {
self.pending_input = input;
}
pub(crate) fn take_pending_input(&mut self) -> Vec<ResponseInputItem> {
pub(crate) fn front_pending_input(&self) -> Option<PendingTurnInput> {
self.pending_input.first().cloned()
}
pub(crate) fn pop_front_pending_input(&mut self) -> Option<PendingTurnInput> {
if self.pending_input.is_empty() {
None
} else {
Some(self.pending_input.remove(0))
}
}
pub(crate) fn take_pending_input(&mut self) -> Vec<PendingTurnInput> {
if self.pending_input.is_empty() {
Vec::with_capacity(0)
} else {

View File

@@ -20,13 +20,11 @@ use tracing::trace;
use tracing::warn;
use crate::context::ContextualUserFragment;
use crate::hook_runtime::PendingInputHookDisposition;
use crate::hook_runtime::inspect_pending_input;
use crate::hook_runtime::record_additional_contexts;
use crate::hook_runtime::record_pending_input;
use crate::hook_runtime::record_pending_turn_input;
use crate::session::session::Session;
use crate::session::turn_context::TurnContext;
use crate::state::ActiveTurn;
use crate::state::PendingTurnInput;
use crate::state::RunningTask;
use crate::state::TaskKind;
use codex_analytics::TurnTokenUsageFact;
@@ -38,7 +36,6 @@ use codex_otel::TURN_MEMORY_METRIC;
use codex_otel::TURN_NETWORK_PROXY_METRIC;
use codex_otel::TURN_TOKEN_USAGE_METRIC;
use codex_otel::TURN_TOOL_CALL_METRIC;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::RolloutItem;
@@ -149,6 +146,11 @@ pub(crate) trait SessionTask: Send + Sync + 'static {
/// Returns the tracing name for a spawned task span.
fn span_name(&self) -> &'static str;
/// Whether the submitted user input should be queued for ordered transcript recording.
fn queues_initial_input(&self) -> bool {
false
}
/// Executes the task until completion or cancellation.
///
/// Implementations typically stream protocol events using `session` and
@@ -186,6 +188,8 @@ pub(crate) trait AnySessionTask: Send + Sync + 'static {
fn span_name(&self) -> &'static str;
fn queues_initial_input(&self) -> bool;
fn run(
self: Arc<Self>,
session: Arc<SessionTaskContext>,
@@ -213,6 +217,10 @@ where
SessionTask::span_name(self)
}
fn queues_initial_input(&self) -> bool {
SessionTask::queues_initial_input(self)
}
fn run(
self: Arc<Self>,
session: Arc<SessionTaskContext>,
@@ -259,6 +267,7 @@ impl Session {
let task: Arc<dyn AnySessionTask> = Arc::new(task);
let task_kind = task.kind();
let span_name = task.span_name();
let queue_initial_input = task.queues_initial_input() && !input.is_empty();
let started_at = Instant::now();
turn_context
.turn_timing_state
@@ -270,7 +279,7 @@ impl Session {
let done = Arc::new(Notify::new());
let queued_response_items = self.take_queued_response_items_for_next_turn().await;
let mailbox_items = self.get_pending_input().await;
let mailbox_items = self.get_pending_turn_input().await;
let turn_state = {
let mut active = self.active_turn.lock().await;
let turn = active.get_or_insert_with(ActiveTurn::default);
@@ -280,6 +289,9 @@ impl Session {
{
let mut turn_state = turn_state.lock().await;
turn_state.token_usage_at_turn_start = token_usage_at_turn_start;
if queue_initial_input {
turn_state.push_pending_input(PendingTurnInput::UserInput(input.clone()));
}
for item in queued_response_items {
turn_state.push_pending_input(item);
}
@@ -398,8 +410,10 @@ impl Session {
pub async fn abort_all_tasks(self: &Arc<Self>, reason: TurnAbortReason) {
if let Some(mut active_turn) = self.take_active_turn().await {
let turn_state = Arc::clone(&active_turn.turn_state);
for task in active_turn.drain_tasks() {
self.handle_task_abort(task, reason.clone()).await;
self.handle_task_abort(task, reason.clone(), Arc::clone(&turn_state))
.await;
}
// Let interrupted tasks observe cancellation before dropping pending approvals, or an
// in-flight approval wait can surface as a model-visible rejection before TurnAborted.
@@ -419,7 +433,7 @@ impl Session {
.turn_metadata_state
.cancel_git_enrichment_task();
let mut pending_input = Vec::<ResponseInputItem>::new();
let mut pending_input = Vec::<PendingTurnInput>::new();
let mut should_clear_active_turn = false;
let mut token_usage_at_turn_start = None;
let mut turn_had_memory_citation = false;
@@ -448,16 +462,7 @@ impl Session {
}
if !pending_input.is_empty() {
for pending_input_item in pending_input {
match inspect_pending_input(self, &turn_context, pending_input_item).await {
PendingInputHookDisposition::Accepted(pending_input) => {
record_pending_input(self, &turn_context, *pending_input).await;
}
PendingInputHookDisposition::Blocked {
additional_contexts,
} => {
record_additional_contexts(self, &turn_context, additional_contexts).await;
}
}
record_pending_turn_input(self, &turn_context, pending_input_item).await;
}
}
// Emit token usage metrics.
@@ -594,7 +599,12 @@ impl Session {
}
}
async fn handle_task_abort(self: &Arc<Self>, task: RunningTask, reason: TurnAbortReason) {
async fn handle_task_abort(
self: &Arc<Self>,
task: RunningTask,
reason: TurnAbortReason,
turn_state: Arc<tokio::sync::Mutex<crate::state::TurnState>>,
) {
let sub_id = task.turn_context.sub_id.clone();
if task.cancellation_token.is_cancelled() {
return;
@@ -624,6 +634,11 @@ impl Session {
if reason == TurnAbortReason::Interrupted {
self.cleanup_after_interrupt(&task.turn_context).await;
while self
.record_next_pending_turn_input_from_state(&task.turn_context, &turn_state)
.await
.is_some()
{}
let marker = interrupted_turn_history_marker();
self.record_into_history(std::slice::from_ref(&marker), task.turn_context.as_ref())

View File

@@ -33,6 +33,10 @@ impl SessionTask for RegularTask {
"session_task.turn"
}
fn queues_initial_input(&self) -> bool {
true
}
async fn run(
self: Arc<Self>,
session: Arc<SessionTaskContext>,

View File

@@ -1,6 +0,0 @@
load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "device-key",
crate_name = "codex_device_key",
)

View File

@@ -1,27 +0,0 @@
[package]
name = "codex-device-key"
version.workspace = true
edition.workspace = true
license.workspace = true
[lints]
workspace = true
[dependencies]
base64 = { workspace = true }
p256 = { workspace = true, features = ["ecdsa", "pkcs8"] }
rand = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
url = { workspace = true }
[target.'cfg(windows)'.dependencies]
sha2 = { workspace = true }
windows-sys = { version = "0.52", features = [
"Win32_Foundation",
"Win32_Security_Cryptography",
] }
[dev-dependencies]
pretty_assertions = { workspace = true }

File diff suppressed because it is too large Load Diff

View File

@@ -1,68 +0,0 @@
use crate::DeviceKeyProvider;
use std::sync::Arc;
#[cfg(windows)]
mod windows;
#[cfg(windows)]
pub(crate) fn default_provider() -> Arc<dyn DeviceKeyProvider> {
Arc::new(windows::WindowsDeviceKeyProvider)
}
#[cfg(not(windows))]
pub(crate) fn default_provider() -> Arc<dyn DeviceKeyProvider> {
Arc::new(unsupported::UnsupportedDeviceKeyProvider)
}
#[cfg(not(windows))]
mod unsupported {
use crate::DeviceKeyBinding;
use crate::DeviceKeyError;
use crate::DeviceKeyInfo;
use crate::DeviceKeyProtectionClass;
use crate::DeviceKeyProvider;
use crate::ProviderCreateRequest;
use crate::ProviderSignature;
#[derive(Debug)]
pub(crate) struct UnsupportedDeviceKeyProvider;
impl DeviceKeyProvider for UnsupportedDeviceKeyProvider {
fn create(
&self,
request: ProviderCreateRequest<'_>,
) -> Result<DeviceKeyInfo, DeviceKeyError> {
let _ = request.key_id_for(DeviceKeyProtectionClass::HardwareTpm);
let _ = request
.protection_policy
.allows(DeviceKeyProtectionClass::HardwareTpm);
let _ = request.binding;
Err(DeviceKeyError::HardwareBackedKeysUnavailable)
}
fn get_public(
&self,
_key_id: &str,
_protection_class: DeviceKeyProtectionClass,
) -> Result<DeviceKeyInfo, DeviceKeyError> {
Err(DeviceKeyError::KeyNotFound)
}
fn binding(
&self,
_key_id: &str,
_protection_class: DeviceKeyProtectionClass,
) -> Result<DeviceKeyBinding, DeviceKeyError> {
Err(DeviceKeyError::KeyNotFound)
}
fn sign(
&self,
_key_id: &str,
_protection_class: DeviceKeyProtectionClass,
_payload: &[u8],
) -> Result<ProviderSignature, DeviceKeyError> {
Err(DeviceKeyError::KeyNotFound)
}
}
}

View File

@@ -1,416 +0,0 @@
use crate::DeviceKeyAlgorithm;
use crate::DeviceKeyBinding;
use crate::DeviceKeyError;
use crate::DeviceKeyInfo;
use crate::DeviceKeyProtectionClass;
use crate::DeviceKeyProvider;
use crate::ProviderCreateRequest;
use crate::ProviderSignature;
use crate::sec1_public_key_to_spki_der;
use p256::ecdsa::Signature;
use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
use sha2::Sha256;
use std::fs;
use std::mem::size_of;
use std::path::PathBuf;
use std::ptr;
use windows_sys::Win32::Foundation::NTE_BAD_KEYSET;
use windows_sys::Win32::Foundation::NTE_EXISTS;
use windows_sys::Win32::Security::Cryptography::BCRYPT_ECCKEY_BLOB;
use windows_sys::Win32::Security::Cryptography::BCRYPT_ECCPUBLIC_BLOB;
use windows_sys::Win32::Security::Cryptography::BCRYPT_ECDSA_PUBLIC_P256_MAGIC;
use windows_sys::Win32::Security::Cryptography::MS_PLATFORM_CRYPTO_PROVIDER;
use windows_sys::Win32::Security::Cryptography::NCRYPT_ECDSA_P256_ALGORITHM;
use windows_sys::Win32::Security::Cryptography::NCRYPT_HANDLE;
use windows_sys::Win32::Security::Cryptography::NCRYPT_KEY_HANDLE;
use windows_sys::Win32::Security::Cryptography::NCRYPT_PROV_HANDLE;
use windows_sys::Win32::Security::Cryptography::NCRYPT_SILENT_FLAG;
use windows_sys::Win32::Security::Cryptography::NCryptCreatePersistedKey;
use windows_sys::Win32::Security::Cryptography::NCryptExportKey;
use windows_sys::Win32::Security::Cryptography::NCryptFinalizeKey;
use windows_sys::Win32::Security::Cryptography::NCryptFreeObject;
use windows_sys::Win32::Security::Cryptography::NCryptOpenKey;
use windows_sys::Win32::Security::Cryptography::NCryptOpenStorageProvider;
use windows_sys::Win32::Security::Cryptography::NCryptSignHash;
use windows_sys::core::HRESULT;
#[derive(Debug)]
pub(crate) struct WindowsDeviceKeyProvider;
impl DeviceKeyProvider for WindowsDeviceKeyProvider {
fn create(&self, request: ProviderCreateRequest<'_>) -> Result<DeviceKeyInfo, DeviceKeyError> {
if !request
.protection_policy
.allows(DeviceKeyProtectionClass::HardwareTpm)
{
return Err(DeviceKeyError::DegradedProtectionNotAllowed {
available: DeviceKeyProtectionClass::HardwareTpm,
});
}
let key_id = request.key_id_for(DeviceKeyProtectionClass::HardwareTpm);
let provider = open_platform_provider()?;
let name = key_name(&key_id);
if let Some(key) = open_key(&provider, &name)? {
let info = key_info(&key_id, &key)?;
store_binding(&key_id, request.binding)?;
return Ok(info);
}
let key = create_or_open_key(&provider, &name)?;
let info = key_info(&key_id, &key)?;
store_binding(&key_id, request.binding)?;
Ok(info)
}
fn get_public(
&self,
key_id: &str,
protection_class: DeviceKeyProtectionClass,
) -> Result<DeviceKeyInfo, DeviceKeyError> {
require_hardware_tpm(protection_class)?;
let provider = open_platform_provider()?;
let key = open_key(&provider, &key_name(key_id))?.ok_or(DeviceKeyError::KeyNotFound)?;
key_info(key_id, &key)
}
fn binding(
&self,
key_id: &str,
protection_class: DeviceKeyProtectionClass,
) -> Result<DeviceKeyBinding, DeviceKeyError> {
require_hardware_tpm(protection_class)?;
load_binding(key_id)
}
fn sign(
&self,
key_id: &str,
protection_class: DeviceKeyProtectionClass,
payload: &[u8],
) -> Result<ProviderSignature, DeviceKeyError> {
require_hardware_tpm(protection_class)?;
let provider = open_platform_provider()?;
let key = open_key(&provider, &key_name(key_id))?.ok_or(DeviceKeyError::KeyNotFound)?;
let digest = Sha256::digest(payload);
let signature = sign_hash(&key, &digest)?;
let signature = Signature::from_slice(&signature)
.map_err(|err| DeviceKeyError::Crypto(err.to_string()))?;
Ok(ProviderSignature {
signature_der: signature.to_der().as_bytes().to_vec(),
algorithm: DeviceKeyAlgorithm::EcdsaP256Sha256,
})
}
}
/// Rejects any protection class other than `HardwareTpm` with `KeyNotFound`,
/// since this provider never stores keys under other classes.
fn require_hardware_tpm(protection_class: DeviceKeyProtectionClass) -> Result<(), DeviceKeyError> {
    match protection_class {
        DeviceKeyProtectionClass::HardwareTpm => Ok(()),
        _ => Err(DeviceKeyError::KeyNotFound),
    }
}
/// RAII wrapper that frees the NCrypt provider handle on drop.
#[derive(Debug)]
struct ProviderHandle(NCRYPT_PROV_HANDLE);
impl Drop for ProviderHandle {
    fn drop(&mut self) {
        // NCryptFreeObject accepts any NCrypt handle; the return value is
        // ignored because drop cannot propagate errors.
        unsafe {
            NCryptFreeObject(self.0 as NCRYPT_HANDLE);
        }
    }
}
/// RAII wrapper that frees the NCrypt key handle on drop.
#[derive(Debug)]
struct KeyHandle(NCRYPT_KEY_HANDLE);
impl Drop for KeyHandle {
    fn drop(&mut self) {
        // Same free call as ProviderHandle; errors are unreportable in drop.
        unsafe {
            NCryptFreeObject(self.0 as NCRYPT_HANDLE);
        }
    }
}
/// Opens the Microsoft Platform Crypto Provider (TPM-backed key storage).
///
/// Any NCrypt failure here is reported as `HardwareBackedKeysUnavailable`,
/// since the provider is unusable when the platform provider cannot open.
fn open_platform_provider() -> Result<ProviderHandle, DeviceKeyError> {
    let mut provider = 0;
    let status = unsafe {
        NCryptOpenStorageProvider(
            &mut provider,
            MS_PLATFORM_CRYPTO_PROVIDER,
            /*dwflags*/ 0,
        )
    };
    if status != 0 {
        return Err(DeviceKeyError::HardwareBackedKeysUnavailable);
    }
    Ok(ProviderHandle(provider))
}
/// Opens the persisted key `name` under `provider`.
///
/// Returns `Ok(None)` when the key does not exist (`NTE_BAD_KEYSET`); any
/// other NCrypt failure is surfaced as a `Platform` error.
fn open_key(provider: &ProviderHandle, name: &[u16]) -> Result<Option<KeyHandle>, DeviceKeyError> {
    let mut key = 0;
    let status = unsafe {
        NCryptOpenKey(
            provider.0,
            &mut key,
            name.as_ptr(),
            /*dwlegacykeyspec*/ 0,
            NCRYPT_SILENT_FLAG,
        )
    };
    if status == NTE_BAD_KEYSET {
        return Ok(None);
    }
    if status != 0 {
        return Err(DeviceKeyError::Platform(format_hresult(
            "NCryptOpenKey",
            status,
        )));
    }
    Ok(Some(KeyHandle(key)))
}
/// Creates the persisted key, or opens it when another creator won the race.
fn create_or_open_key(
    provider: &ProviderHandle,
    name: &[u16],
) -> Result<KeyHandle, DeviceKeyError> {
    match create_key(provider, name) {
        Ok(key) => Ok(key),
        Err(KeyCreationError::Failed(err)) => Err(err),
        Err(KeyCreationError::AlreadyExists) => {
            // Lost the creation race: open the key the other creator persisted.
            match open_key(provider, name)? {
                Some(key) => Ok(key),
                None => Err(DeviceKeyError::KeyNotFound),
            }
        }
    }
}
/// Outcome of a failed `create_key` call.
enum KeyCreationError {
    /// The key already exists (`NTE_EXISTS`); the caller may open it instead.
    AlreadyExists,
    /// Any other NCrypt failure, already mapped to a `DeviceKeyError`.
    Failed(DeviceKeyError),
}
/// Creates and finalizes a new persisted ECDSA P-256 key named `name`.
///
/// Maps `NTE_EXISTS` to `KeyCreationError::AlreadyExists` so callers can fall
/// back to opening the concurrently created key.
fn create_key(provider: &ProviderHandle, name: &[u16]) -> Result<KeyHandle, KeyCreationError> {
    let mut key = 0;
    let status = unsafe {
        NCryptCreatePersistedKey(
            provider.0,
            &mut key,
            NCRYPT_ECDSA_P256_ALGORITHM,
            name.as_ptr(),
            /*dwlegacykeyspec*/ 0,
            NCRYPT_SILENT_FLAG,
        )
    };
    if status == NTE_EXISTS {
        return Err(KeyCreationError::AlreadyExists);
    }
    if status != 0 {
        return Err(KeyCreationError::Failed(DeviceKeyError::Platform(
            format_hresult("NCryptCreatePersistedKey", status),
        )));
    }
    // Wrap the raw handle first so it is freed even if finalization fails.
    let key = KeyHandle(key);
    let status = unsafe { NCryptFinalizeKey(key.0, NCRYPT_SILENT_FLAG) };
    if status != 0 {
        return Err(KeyCreationError::Failed(DeviceKeyError::Platform(
            format_hresult("NCryptFinalizeKey", status),
        )));
    }
    Ok(key)
}
/// Assembles the public `DeviceKeyInfo` for `key_id` from the NCrypt handle.
fn key_info(key_id: &str, key: &KeyHandle) -> Result<DeviceKeyInfo, DeviceKeyError> {
    Ok(DeviceKeyInfo {
        key_id: key_id.to_string(),
        public_key_spki_der: export_public_key_spki_der(key)?,
        algorithm: DeviceKeyAlgorithm::EcdsaP256Sha256,
        protection_class: DeviceKeyProtectionClass::HardwareTpm,
    })
}
/// Exports the key's public half and re-encodes it as SPKI DER.
///
/// The `BCRYPT_ECCPUBLIC_BLOB` layout is a `BCRYPT_ECCKEY_BLOB` header
/// followed by the X and Y coordinates (`cbKey` bytes each). This validates
/// the header magic and total length, rebuilds the uncompressed SEC1 point
/// (`0x04 || X || Y`), and defers DER encoding to `sec1_public_key_to_spki_der`.
fn export_public_key_spki_der(key: &KeyHandle) -> Result<Vec<u8>, DeviceKeyError> {
    let blob = ncrypt_export_key(key, BCRYPT_ECCPUBLIC_BLOB)?;
    let header_len = size_of::<BCRYPT_ECCKEY_BLOB>();
    if blob.len() < header_len {
        return Err(DeviceKeyError::Platform(
            "NCryptExportKey returned a truncated ECC public key header".to_string(),
        ));
    }
    // SAFETY: the length check above guarantees `header_len` readable bytes;
    // read_unaligned tolerates the blob buffer's arbitrary alignment.
    let header = unsafe { ptr::read_unaligned(blob.as_ptr() as *const BCRYPT_ECCKEY_BLOB) };
    if header.dwMagic != BCRYPT_ECDSA_PUBLIC_P256_MAGIC {
        return Err(DeviceKeyError::Platform(format!(
            "NCryptExportKey returned unsupported ECC public key magic {}",
            header.dwMagic
        )));
    }
    let coordinate_len =
        usize::try_from(header.cbKey).map_err(|err| DeviceKeyError::Platform(err.to_string()))?;
    let expected_len = header_len + coordinate_len * 2;
    if blob.len() != expected_len {
        return Err(DeviceKeyError::Platform(format!(
            "NCryptExportKey returned ECC public key length {}, expected {expected_len}",
            blob.len()
        )));
    }
    // Uncompressed SEC1 point: 0x04 tag followed by X || Y.
    let mut sec1 = Vec::with_capacity(1 + coordinate_len * 2);
    sec1.push(0x04);
    sec1.extend_from_slice(&blob[header_len..]);
    sec1_public_key_to_spki_der(&sec1)
}
/// Signs `digest` with NCrypt, returning the provider's raw signature bytes.
///
/// Uses the standard two-call pattern: the first `NCryptSignHash` call (null
/// output buffer) queries the required length, the second fills the buffer.
fn sign_hash(key: &KeyHandle, digest: &[u8]) -> Result<Vec<u8>, DeviceKeyError> {
    let mut len = 0;
    let status = unsafe {
        NCryptSignHash(
            key.0,
            ptr::null(),
            digest.as_ptr(),
            digest.len() as u32,
            ptr::null_mut(),
            /*cbsignature*/ 0,
            &mut len,
            NCRYPT_SILENT_FLAG,
        )
    };
    if status != 0 {
        return Err(DeviceKeyError::Platform(format_hresult(
            "NCryptSignHash",
            status,
        )));
    }
    let mut signature = vec![0; len as usize];
    let status = unsafe {
        NCryptSignHash(
            key.0,
            ptr::null(),
            digest.as_ptr(),
            digest.len() as u32,
            signature.as_mut_ptr(),
            signature.len() as u32,
            &mut len,
            NCRYPT_SILENT_FLAG,
        )
    };
    if status != 0 {
        return Err(DeviceKeyError::Platform(format_hresult(
            "NCryptSignHash",
            status,
        )));
    }
    // The second call reports the final length, which may be shorter.
    signature.truncate(len as usize);
    Ok(signature)
}
/// Exports the key in the requested `blob_type`, returning the raw blob bytes.
///
/// Same two-call convention as `sign_hash`: the first `NCryptExportKey` call
/// (null output) reports the required size, the second writes the blob.
fn ncrypt_export_key(key: &KeyHandle, blob_type: *const u16) -> Result<Vec<u8>, DeviceKeyError> {
    let mut len = 0;
    let status = unsafe {
        NCryptExportKey(
            key.0,
            /*hexportkey*/ 0,
            blob_type,
            ptr::null(),
            ptr::null_mut(),
            /*cboutput*/ 0,
            &mut len,
            NCRYPT_SILENT_FLAG,
        )
    };
    if status != 0 {
        return Err(DeviceKeyError::Platform(format_hresult(
            "NCryptExportKey",
            status,
        )));
    }
    let mut blob = vec![0; len as usize];
    let status = unsafe {
        NCryptExportKey(
            key.0,
            /*hexportkey*/ 0,
            blob_type,
            ptr::null(),
            blob.as_mut_ptr(),
            blob.len() as u32,
            &mut len,
            NCRYPT_SILENT_FLAG,
        )
    };
    if status != 0 {
        return Err(DeviceKeyError::Platform(format_hresult(
            "NCryptExportKey",
            status,
        )));
    }
    // Trim to the final length reported by the second call.
    blob.truncate(len as usize);
    Ok(blob)
}
/// Builds the NUL-terminated UTF-16 NCrypt key name for `key_id`
/// (`CodexDeviceKey.<key_id>`).
fn key_name(key_id: &str) -> Vec<u16> {
    let name = format!("CodexDeviceKey.{key_id}");
    let mut wide: Vec<u16> = name.encode_utf16().collect();
    wide.push(0);
    wide
}
/// On-disk JSON schema for a key's account binding; mirrors `DeviceKeyBinding`.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct StoredBinding {
    account_user_id: String,
    client_id: String,
}
/// Persists the key's account binding as JSON under the binding path.
fn store_binding(key_id: &str, binding: &DeviceKeyBinding) -> Result<(), DeviceKeyError> {
    let path = binding_path(key_id)?;
    match path.parent() {
        Some(parent) => {
            fs::create_dir_all(parent).map_err(|err| DeviceKeyError::Platform(err.to_string()))?;
        }
        None => {
            return Err(DeviceKeyError::Platform(
                "binding path has no parent".to_string(),
            ));
        }
    }
    let stored = StoredBinding {
        account_user_id: binding.account_user_id.clone(),
        client_id: binding.client_id.clone(),
    };
    let bytes =
        serde_json::to_vec(&stored).map_err(|err| DeviceKeyError::Platform(err.to_string()))?;
    fs::write(&path, bytes).map_err(|err| DeviceKeyError::Platform(err.to_string()))
}
/// Loads the stored account binding for `key_id`.
///
/// A missing binding file maps to `KeyNotFound`; other I/O or JSON failures
/// become `Platform` errors.
fn load_binding(key_id: &str) -> Result<DeviceKeyBinding, DeviceKeyError> {
    let path = binding_path(key_id)?;
    let bytes = match fs::read(&path) {
        Ok(bytes) => bytes,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
            return Err(DeviceKeyError::KeyNotFound);
        }
        Err(err) => return Err(DeviceKeyError::Platform(err.to_string())),
    };
    let stored: StoredBinding =
        serde_json::from_slice(&bytes).map_err(|err| DeviceKeyError::Platform(err.to_string()))?;
    Ok(DeviceKeyBinding {
        account_user_id: stored.account_user_id,
        client_id: stored.client_id,
    })
}
/// Resolves the per-key binding file path under `%LOCALAPPDATA%` (or
/// `%APPDATA%` as a fallback).
fn binding_path(key_id: &str) -> Result<PathBuf, DeviceKeyError> {
    let Some(data_dir) = std::env::var_os("LOCALAPPDATA").or_else(|| std::env::var_os("APPDATA"))
    else {
        return Err(DeviceKeyError::Platform(
            "LOCALAPPDATA and APPDATA are not set".to_string(),
        ));
    };
    let mut path = PathBuf::from(data_dir);
    path.push("OpenAI");
    path.push("Codex");
    path.push("device-keys");
    path.push("windows");
    path.push(format!("{key_id}.binding.json"));
    Ok(path)
}
/// Formats an NCrypt failure as `"<function> failed with HRESULT 0x…"`.
fn format_hresult(function: &str, status: HRESULT) -> String {
    let code = status as u32;
    format!("{function} failed with HRESULT 0x{code:08x}")
}

View File

@@ -9,18 +9,14 @@ readme = "README.md"
workspace = true
[dependencies]
anyhow = { workspace = true }
chrono = { workspace = true }
codex-exec-server = { workspace = true }
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }
futures = { workspace = true, features = ["alloc"] }
gix = { workspace = true }
once_cell = { workspace = true }
regex = "1"
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
similar = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["macros", "process", "rt", "time"] }

View File

@@ -1,11 +1,7 @@
# codex-git-utils
Helpers for interacting with git, including patch application and worktree
snapshot utilities. The crate also exposes a lightweight baseline API for
internal directories that use git only as a resettable diff mechanism:
`reset_git_repository` replaces `root/.git` with a fresh one-commit baseline,
and `diff_since_latest_init` returns structured file changes plus a unified
diff from that baseline to the current directory contents.
snapshot utilities.
```rust,no_run
use std::path::Path;

View File

@@ -1,645 +0,0 @@
use anyhow::Context;
use gix::hash::ObjectId;
use gix::objs::Tree;
use gix::objs::tree::Entry;
use gix::objs::tree::EntryKind;
use gix::objs::tree::EntryMode;
use similar::TextDiff;
use std::collections::BTreeMap;
use std::ffi::OsStr;
use std::fs;
use std::path::Path;
use std::path::PathBuf;
use tokio::task;
const BASELINE_COMMIT_MESSAGE: &str =
"Initialize Codex git baseline\n\nCo-authored-by: Codex <noreply@openai.com>";
/// File-level change status between a git baseline and the current directory.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum GitBaselineChangeStatus {
    Added,
    Modified,
    Deleted,
}
impl GitBaselineChangeStatus {
    /// Returns the short git-style status label for this change.
    pub fn label(self) -> &'static str {
        // Matches the single-letter codes git uses in `--name-status` output.
        match self {
            GitBaselineChangeStatus::Added => "A",
            GitBaselineChangeStatus::Modified => "M",
            GitBaselineChangeStatus::Deleted => "D",
        }
    }
}
/// One changed file between a git baseline and the current directory.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct GitBaselineChange {
    pub status: GitBaselineChangeStatus,
    /// Baseline-relative path using `/` separators.
    pub path: String,
}
/// Structured diff from the latest git baseline reset to the current directory.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct GitBaselineDiff {
    /// Per-file statuses, sorted by path.
    pub changes: Vec<GitBaselineChange>,
    /// Concatenated git-style unified diff covering all changes.
    pub unified_diff: String,
}
impl GitBaselineDiff {
    /// True when at least one file changed since the baseline.
    pub fn has_changes(&self) -> bool {
        !self.changes.is_empty()
    }
}
/// Content identity (blob oid) plus entry mode for one tracked file.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct GitBaselineFileEntry {
    oid: ObjectId,
    mode: EntryMode,
}
/// Replaces any existing `.git` metadata in `root` with a fresh one-commit baseline.
///
/// This is intentionally destructive for `root/.git`. It is meant for internal directories where
/// git is used only as a baseline/diff implementation detail, not for user repositories.
pub async fn reset_git_repository(root: &Path) -> anyhow::Result<()> {
    let root = root.to_path_buf();
    // Git and filesystem work is blocking; keep it off the async runtime.
    task::spawn_blocking(move || {
        fs::create_dir_all(&root)
            .with_context(|| format!("create git baseline root {}", root.display()))?;
        remove_git_metadata(&root)?;
        let repo = gix::init(&root).with_context(|| format!("init git repo {}", root.display()))?;
        commit_current_tree(&repo, BASELINE_COMMIT_MESSAGE)?;
        anyhow::Ok(())
    })
    .await?
}
/// Returns the diff between the latest baseline reset and the current directory contents.
pub async fn diff_since_latest_init(root: &Path) -> anyhow::Result<GitBaselineDiff> {
    let root = root.to_path_buf();
    task::spawn_blocking(move || {
        let repo = gix::open(&root).with_context(|| format!("open git repo {}", root.display()))?;
        // Compare the baseline commit's tree against a fresh scan of the worktree.
        let head_entries = head_file_entries(&repo)?;
        let current_entries = current_file_entries(&repo, &root)?;
        let changes = diff_entries(&head_entries, &current_entries);
        let unified_diff =
            render_unified_diff(&repo, &root, &head_entries, &current_entries, &changes)?;
        Ok(GitBaselineDiff {
            changes,
            unified_diff,
        })
    })
    .await?
}
/// Deletes `root/.git` whether it is a directory, file, or symlink; a missing
/// path is treated as success.
fn remove_git_metadata(root: &Path) -> anyhow::Result<()> {
    let git_path = root.join(".git");
    let metadata = match fs::symlink_metadata(&git_path) {
        Ok(metadata) => metadata,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(()),
        Err(err) => return Err(err).with_context(|| format!("stat {}", git_path.display())),
    };
    let file_type = metadata.file_type();
    // A symlinked `.git` is removed as a file so the link target is untouched.
    let removal = if file_type.is_dir() && !file_type.is_symlink() {
        fs::remove_dir_all(&git_path)
    } else {
        fs::remove_file(&git_path)
    };
    removal.with_context(|| format!("remove {}", git_path.display()))
}
/// Writes the worktree as a tree object and commits it to `HEAD` with no parents.
fn commit_current_tree(repo: &gix::Repository, message: &str) -> anyhow::Result<()> {
    let root = repo
        .workdir()
        .context("git baseline repo must have a worktree")?;
    let tree_id = write_tree(repo, root)?;
    let signature = codex_signature();
    let mut time = gix::date::parse::TimeBuf::default();
    let signature_ref = signature.to_ref(&mut time);
    // Empty parent list: each baseline commit starts history over.
    repo.commit_as(
        signature_ref,
        signature_ref,
        "HEAD",
        message,
        tree_id,
        Vec::<ObjectId>::new(),
    )
    .context("commit git baseline repo")?;
    Ok(())
}
/// Fixed "Codex" author/committer identity stamped with the current UTC time.
fn codex_signature() -> gix::actor::Signature {
    gix::actor::Signature {
        name: "Codex".into(),
        email: "noreply@openai.com".into(),
        time: gix::date::Time {
            seconds: chrono::Utc::now().timestamp(),
            offset: 0,
        },
    }
}
/// Recursively writes `dir` as a git tree object and returns its id.
///
/// Skips `.git` and empty subtrees. Regular files become blobs (executable
/// bit derived from filesystem permissions via `file_mode`); symlinks become
/// link entries whose blob content is the target path bytes. Other file types
/// (sockets, fifos, …) are silently ignored.
fn write_tree(repo: &gix::Repository, dir: &Path) -> anyhow::Result<ObjectId> {
    let mut entries = Vec::new();
    for entry in fs::read_dir(dir).with_context(|| format!("read {}", dir.display()))? {
        let entry = entry?;
        let path = entry.path();
        let file_name = entry.file_name();
        if file_name == OsStr::new(".git") {
            continue;
        }
        let file_type = entry.file_type()?;
        if file_type.is_dir() {
            let oid = write_tree(repo, &path)?;
            let tree = repo
                .find_tree(oid)
                .with_context(|| format!("load tree {}", path.display()))?;
            // Git cannot represent empty directories; drop empty subtrees.
            if tree.decode()?.entries.is_empty() {
                continue;
            }
            entries.push(Entry {
                mode: EntryKind::Tree.into(),
                filename: os_str_to_bstring(&file_name),
                oid,
            });
        } else if file_type.is_file() {
            let bytes = fs::read(&path).with_context(|| format!("read {}", path.display()))?;
            let oid = repo
                .write_blob(bytes)
                .with_context(|| format!("write blob {}", path.display()))?
                .detach();
            entries.push(Entry {
                mode: file_mode(&path, EntryKind::Blob)?,
                filename: os_str_to_bstring(&file_name),
                oid,
            });
        } else if file_type.is_symlink() {
            let target =
                fs::read_link(&path).with_context(|| format!("read symlink {}", path.display()))?;
            let oid = repo
                .write_blob(path_to_bytes(&target))
                .with_context(|| format!("write symlink blob {}", path.display()))?
                .detach();
            entries.push(Entry {
                mode: EntryKind::Link.into(),
                filename: os_str_to_bstring(&file_name),
                oid,
            });
        }
    }
    // Tree entries must be stored in git's canonical sort order.
    entries.sort();
    repo.write_object(&Tree { entries })
        .context("write tree object")
        .map(gix::Id::detach)
}
/// Flattens the HEAD commit's tree into `path -> (oid, mode)` entries.
fn head_file_entries(
    repo: &gix::Repository,
) -> anyhow::Result<BTreeMap<String, GitBaselineFileEntry>> {
    let tree_id = repo.head_tree_id().context("load HEAD tree id")?;
    let tree = repo.find_tree(tree_id.detach()).context("load HEAD tree")?;
    let mut entries = BTreeMap::new();
    collect_tree_entries(repo, tree, PathBuf::new(), &mut entries)?;
    Ok(entries)
}
/// Recursively walks `tree`, recording every non-tree entry under its
/// slash-joined path relative to the repository root.
fn collect_tree_entries(
    repo: &gix::Repository,
    tree: gix::Tree<'_>,
    prefix: PathBuf,
    entries: &mut BTreeMap<String, GitBaselineFileEntry>,
) -> anyhow::Result<()> {
    for entry in tree.iter() {
        let entry = entry?;
        let file_name = bstr_to_path(entry.inner.filename);
        let path = prefix.join(file_name);
        if entry.inner.mode.is_tree() {
            // Descend into subtrees; only leaf entries are recorded.
            let tree = repo
                .find_tree(entry.inner.oid.to_owned())
                .context("load child tree")?;
            collect_tree_entries(repo, tree, path, entries)?;
        } else {
            entries.insert(
                path_to_slash_string(&path),
                GitBaselineFileEntry {
                    oid: entry.inner.oid.to_owned(),
                    mode: entry.inner.mode,
                },
            );
        }
    }
    Ok(())
}
/// Scans the worktree at `root` into `path -> (oid, mode)` entries.
fn current_file_entries(
    repo: &gix::Repository,
    root: &Path,
) -> anyhow::Result<BTreeMap<String, GitBaselineFileEntry>> {
    let mut entries = BTreeMap::new();
    collect_current_entries(repo, root, root, &mut entries)?;
    Ok(entries)
}
/// Recursively walks the worktree, hashing each file/symlink into `entries`
/// without writing any git objects (oids come from `blob_oid`, which only
/// computes the hash).
fn collect_current_entries(
    repo: &gix::Repository,
    root: &Path,
    dir: &Path,
    entries: &mut BTreeMap<String, GitBaselineFileEntry>,
) -> anyhow::Result<()> {
    for entry in fs::read_dir(dir).with_context(|| format!("read {}", dir.display()))? {
        let entry = entry?;
        let path = entry.path();
        if path.file_name() == Some(OsStr::new(".git")) {
            continue;
        }
        let file_type = entry.file_type()?;
        if file_type.is_dir() {
            collect_current_entries(repo, root, &path, entries)?;
        } else if file_type.is_file() {
            let bytes = fs::read(&path).with_context(|| format!("read {}", path.display()))?;
            entries.insert(
                relative_slash_path(root, &path)?,
                GitBaselineFileEntry {
                    oid: blob_oid(repo, &bytes)?,
                    mode: file_mode(&path, EntryKind::Blob)?,
                },
            );
        } else if file_type.is_symlink() {
            // Symlinks are hashed by their target path bytes, mirroring git.
            let target =
                fs::read_link(&path).with_context(|| format!("read symlink {}", path.display()))?;
            entries.insert(
                relative_slash_path(root, &path)?,
                GitBaselineFileEntry {
                    oid: blob_oid(repo, &path_to_bytes(&target))?,
                    mode: EntryKind::Link.into(),
                },
            );
        }
    }
    Ok(())
}
/// Computes the blob id `bytes` would have, without storing an object.
fn blob_oid(repo: &gix::Repository, bytes: &[u8]) -> anyhow::Result<ObjectId> {
    gix::objs::compute_hash(repo.object_hash(), gix::objs::Kind::Blob, bytes)
        .context("compute git baseline blob oid")
}
/// Computes added/modified/deleted statuses between the baseline (`head`)
/// entries and the `current` worktree entries, sorted by path.
fn diff_entries(
    head: &BTreeMap<String, GitBaselineFileEntry>,
    current: &BTreeMap<String, GitBaselineFileEntry>,
) -> Vec<GitBaselineChange> {
    let added_or_modified = current.iter().filter_map(|(path, entry)| {
        let status = match head.get(path) {
            None => GitBaselineChangeStatus::Added,
            // Entry equality covers both oid (content) and mode changes.
            Some(head_entry) if head_entry != entry => GitBaselineChangeStatus::Modified,
            Some(_) => return None,
        };
        Some(GitBaselineChange {
            status,
            path: path.clone(),
        })
    });
    let deleted = head
        .keys()
        .filter(|path| !current.contains_key(*path))
        .map(|path| GitBaselineChange {
            status: GitBaselineChangeStatus::Deleted,
            path: path.clone(),
        });
    let mut changes: Vec<GitBaselineChange> = added_or_modified.chain(deleted).collect();
    changes.sort_by(|left, right| left.path.cmp(&right.path));
    changes
}
/// Concatenates the per-file unified diff sections for `changes`, in order.
fn render_unified_diff(
    repo: &gix::Repository,
    root: &Path,
    head_entries: &BTreeMap<String, GitBaselineFileEntry>,
    current_entries: &BTreeMap<String, GitBaselineFileEntry>,
    changes: &[GitBaselineChange],
) -> anyhow::Result<String> {
    let mut sections = Vec::with_capacity(changes.len());
    for change in changes {
        let section = render_change_diff(repo, root, head_entries, current_entries, change)?;
        sections.push(section);
    }
    Ok(sections.concat())
}
/// Renders one change as a git-style diff section: `diff --git` header,
/// mode lines (new/deleted/changed mode), and a unified text diff.
///
/// Content is read lossily as UTF-8, so binary files diff as replaced text
/// rather than as a git "Binary files differ" marker.
fn render_change_diff(
    repo: &gix::Repository,
    root: &Path,
    head_entries: &BTreeMap<String, GitBaselineFileEntry>,
    current_entries: &BTreeMap<String, GitBaselineFileEntry>,
    change: &GitBaselineChange,
) -> anyhow::Result<String> {
    let old_entry = head_entries.get(&change.path);
    let new_entry = current_entries.get(&change.path);
    let old_bytes = old_entry
        .map(|entry| read_head_blob(repo, entry))
        .transpose()
        .with_context(|| format!("read HEAD content for {}", change.path))?;
    let new_bytes = new_entry
        .map(|_| read_current_file_bytes(root, &change.path))
        .transpose()
        .with_context(|| format!("read current content for {}", change.path))?;
    let old_text = String::from_utf8_lossy(old_bytes.as_deref().unwrap_or_default());
    let new_text = String::from_utf8_lossy(new_bytes.as_deref().unwrap_or_default());
    // Missing sides use git's `/dev/null` convention in the hunk header.
    let old_header = if old_bytes.is_some() {
        format!("a/{}", change.path)
    } else {
        "/dev/null".to_string()
    };
    let new_header = if new_bytes.is_some() {
        format!("b/{}", change.path)
    } else {
        "/dev/null".to_string()
    };
    let mut section = format!("diff --git a/{0} b/{0}\n", change.path);
    match (old_entry, new_entry) {
        (None, Some(entry)) => {
            section.push_str(&format!("new file mode {}\n", mode_label(entry.mode)));
        }
        (Some(entry), None) => {
            section.push_str(&format!("deleted file mode {}\n", mode_label(entry.mode)));
        }
        (Some(old), Some(new)) if old.mode != new.mode => {
            section.push_str(&format!(
                "old mode {}\nnew mode {}\n",
                mode_label(old.mode),
                mode_label(new.mode)
            ));
        }
        (Some(_), Some(_)) => {}
        // A change with neither side present cannot occur via diff_entries.
        (None, None) => return Ok(String::new()),
    }
    let diff = TextDiff::from_lines(&old_text, &new_text)
        .unified_diff()
        .context_radius(3)
        .header(&old_header, &new_header)
        .to_string();
    section.push_str(&diff);
    // Keep sections newline-terminated so concatenation stays well-formed.
    if !section.ends_with('\n') {
        section.push('\n');
    }
    Ok(section)
}
/// Loads a baseline file's bytes from the repository by blob oid.
fn read_head_blob(repo: &gix::Repository, entry: &GitBaselineFileEntry) -> anyhow::Result<Vec<u8>> {
    let mut blob = repo.find_blob(entry.oid)?;
    Ok(blob.take_data())
}
/// Reads the on-disk bytes for `relative_path` under `root`; a symlink yields
/// its target path bytes (matching how symlinks are hashed elsewhere).
fn read_current_file_bytes(root: &Path, relative_path: &str) -> anyhow::Result<Vec<u8>> {
    let path = root.join(relative_path);
    let metadata =
        fs::symlink_metadata(&path).with_context(|| format!("stat {}", path.display()))?;
    if !metadata.file_type().is_symlink() {
        return fs::read(&path).with_context(|| format!("read {}", path.display()));
    }
    let target =
        fs::read_link(&path).with_context(|| format!("read symlink {}", path.display()))?;
    Ok(path_to_bytes(&target))
}
/// Renders an entry mode as the octal string git uses in diff mode lines.
fn mode_label(mode: EntryMode) -> &'static str {
    match mode.kind() {
        EntryKind::Blob => "100644",
        EntryKind::BlobExecutable => "100755",
        EntryKind::Link => "120000",
        EntryKind::Tree => "040000",
        EntryKind::Commit => "160000",
    }
}
/// Returns `BlobExecutable` when any execute bit is set, otherwise `default`.
#[cfg(unix)]
fn file_mode(path: &Path, default: EntryKind) -> anyhow::Result<EntryMode> {
    use std::os::unix::fs::PermissionsExt;
    let mode = fs::metadata(path)?.permissions().mode();
    Ok(if mode & 0o111 == 0 {
        default.into()
    } else {
        EntryKind::BlobExecutable.into()
    })
}
/// Non-unix platforms have no execute bit; always use `default`.
#[cfg(not(unix))]
fn file_mode(_path: &Path, default: EntryKind) -> anyhow::Result<EntryMode> {
    Ok(default.into())
}
/// Converts an `OsStr` file name to git's byte-string form (lossless on unix).
#[cfg(unix)]
fn os_str_to_bstring(value: &OsStr) -> gix::bstr::BString {
    use std::os::unix::ffi::OsStrExt;
    value.as_bytes().into()
}
/// Lossy UTF-8 fallback for platforms without byte-level `OsStr` access.
#[cfg(not(unix))]
fn os_str_to_bstring(value: &OsStr) -> gix::bstr::BString {
    value.to_string_lossy().as_bytes().into()
}
/// Raw path bytes (lossless on unix, lossy UTF-8 elsewhere).
#[cfg(unix)]
fn path_to_bytes(path: &Path) -> Vec<u8> {
    use std::os::unix::ffi::OsStrExt;
    path.as_os_str().as_bytes().to_vec()
}
#[cfg(not(unix))]
fn path_to_bytes(path: &Path) -> Vec<u8> {
    path.to_string_lossy().as_bytes().to_vec()
}
/// Inverse of `path_to_bytes`: rebuilds a `PathBuf` from git's byte string.
fn bstr_to_path(value: &gix::bstr::BStr) -> PathBuf {
    #[cfg(unix)]
    {
        use std::os::unix::ffi::OsStrExt;
        PathBuf::from(OsStr::from_bytes(value))
    }
    #[cfg(not(unix))]
    {
        PathBuf::from(value.to_string())
    }
}
fn relative_slash_path(root: &Path, path: &Path) -> anyhow::Result<String> {
path.strip_prefix(root)
.with_context(|| format!("strip {} from {}", root.display(), path.display()))
.map(path_to_slash_string)
}
/// Joins the path's components with `/` into a platform-independent string.
fn path_to_slash_string(path: &Path) -> String {
    let mut parts = Vec::new();
    for component in path.components() {
        parts.push(component.as_os_str().to_string_lossy().into_owned());
    }
    parts.join("/")
}
// Lifecycle tests for the baseline reset/diff helpers; each runs in a temp dir.
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use std::fs;
    use tempfile::TempDir;
    // A fresh baseline must report zero changes against itself.
    #[tokio::test]
    async fn reset_creates_fresh_baseline() {
        let home = TempDir::new().expect("tempdir");
        let root = home.path().join("repo");
        fs::create_dir_all(&root).expect("create root");
        fs::write(root.join("MEMORY.md"), "baseline").expect("write memory");
        reset_git_repository(&root).await.expect("reset repo");
        assert!(root.join(".git").is_dir());
        let diff = diff_since_latest_init(&root).await.expect("diff");
        assert!(!diff.has_changes());
        assert_eq!(diff.unified_diff, "");
    }
    #[tokio::test]
    async fn diff_reports_added_modified_and_deleted_files() {
        let home = TempDir::new().expect("tempdir");
        let root = home.path().join("repo");
        fs::create_dir_all(root.join("rollout_summaries")).expect("create rollout summaries");
        fs::write(root.join("MEMORY.md"), "old").expect("write memory");
        fs::write(
            root.join("rollout_summaries/deleted.md"),
            "thread_id: 00000000-0000-4000-8000-000000000001\nimportant stale evidence\n",
        )
        .expect("write rollout summary");
        reset_git_repository(&root).await.expect("reset repo");
        fs::write(root.join("MEMORY.md"), "new").expect("update memory");
        fs::write(root.join("memory_summary.md"), "summary").expect("write summary");
        fs::remove_file(root.join("rollout_summaries/deleted.md")).expect("delete summary");
        let diff = diff_since_latest_init(&root).await.expect("diff");
        assert_eq!(
            diff.changes,
            vec![
                GitBaselineChange {
                    status: GitBaselineChangeStatus::Modified,
                    path: "MEMORY.md".to_string(),
                },
                GitBaselineChange {
                    status: GitBaselineChangeStatus::Added,
                    path: "memory_summary.md".to_string(),
                },
                GitBaselineChange {
                    status: GitBaselineChangeStatus::Deleted,
                    path: "rollout_summaries/deleted.md".to_string(),
                },
            ]
        );
        assert!(
            diff.unified_diff
                .contains("diff --git a/MEMORY.md b/MEMORY.md")
        );
        assert!(diff.unified_diff.contains("-old"));
        assert!(diff.unified_diff.contains("+new"));
        assert!(
            diff.unified_diff
                .contains("diff --git a/memory_summary.md b/memory_summary.md")
        );
        assert!(diff.unified_diff.contains("+summary"));
        assert!(
            diff.unified_diff.contains(
                "diff --git a/rollout_summaries/deleted.md b/rollout_summaries/deleted.md"
            )
        );
        assert!(diff.unified_diff.contains("deleted file mode 100644"));
        assert!(
            diff.unified_diff
                .contains("-thread_id: 00000000-0000-4000-8000-000000000001")
        );
        assert!(diff.unified_diff.contains("-important stale evidence"));
    }
    // Re-resetting must produce a single parentless commit, not append history.
    #[tokio::test]
    async fn reset_drops_previous_history() {
        let home = TempDir::new().expect("tempdir");
        let root = home.path().join("repo");
        fs::create_dir_all(&root).expect("create root");
        fs::write(root.join("MEMORY.md"), "old").expect("write memory");
        reset_git_repository(&root).await.expect("reset repo");
        fs::write(root.join("MEMORY.md"), "new").expect("update memory");
        reset_git_repository(&root).await.expect("reset repo again");
        let repo = gix::open(&root).expect("open repo");
        let head = repo.head_id().expect("head").detach();
        let commit = repo.find_commit(head).expect("find head commit");
        assert_eq!(commit.parent_ids().count(), 0);
        let diff = diff_since_latest_init(&root).await.expect("diff");
        assert!(!diff.has_changes());
    }
    #[tokio::test]
    async fn status_scan_does_not_write_added_file_blobs() {
        let home = TempDir::new().expect("tempdir");
        let root = home.path().join("repo");
        fs::create_dir_all(&root).expect("create root");
        reset_git_repository(&root).await.expect("reset repo");
        let added_content = b"new uncommitted memory";
        fs::write(root.join("MEMORY.md"), added_content).expect("write memory");
        let diff = diff_since_latest_init(&root).await.expect("diff");
        assert!(diff.has_changes());
        let repo = gix::open(&root).expect("open repo");
        let added_oid = blob_oid(&repo, added_content).expect("compute added oid");
        assert!(
            repo.find_blob(added_oid).is_err(),
            "status scans should hash current files without writing loose git objects"
        );
    }
    #[cfg(unix)]
    #[tokio::test]
    async fn reports_executable_bit_changes_as_modified() {
        use std::os::unix::fs::PermissionsExt;
        let home = TempDir::new().expect("tempdir");
        let root = home.path().join("repo");
        fs::create_dir_all(&root).expect("create root");
        let path = root.join("MEMORY.md");
        fs::write(&path, "same content").expect("write memory");
        reset_git_repository(&root).await.expect("reset repo");
        let mut permissions = fs::metadata(&path).expect("stat memory").permissions();
        permissions.set_mode(permissions.mode() | 0o111);
        fs::set_permissions(&path, permissions).expect("chmod memory");
        let diff = diff_since_latest_init(&root).await.expect("diff");
        assert_eq!(
            diff.changes,
            vec![GitBaselineChange {
                status: GitBaselineChangeStatus::Modified,
                path: "MEMORY.md".to_string(),
            }]
        );
        assert!(diff.unified_diff.contains("old mode 100644"));
        assert!(diff.unified_diff.contains("new mode 100755"));
    }
}

View File

@@ -57,114 +57,10 @@ mod job_control;
/// Target frame interval for UI redraw scheduling.
pub(crate) const TARGET_FRAME_INTERVAL: Duration = frame_rate_limiter::MIN_FRAME_INTERVAL;
const DISABLE_KEYBOARD_ENHANCEMENT_ENV_VAR: &str = "CODEX_TUI_DISABLE_KEYBOARD_ENHANCEMENT";
/// A type alias for the terminal type used in this application
pub type Terminal = CustomTerminal<CrosstermBackend<Stdout>>;
/// Decides whether keyboard-enhancement flags should be skipped, combining the
/// `CODEX_TUI_DISABLE_KEYBOARD_ENHANCEMENT` env override with WSL/VS Code detection.
fn keyboard_enhancement_disabled() -> bool {
    let disable_env = std::env::var(DISABLE_KEYBOARD_ENHANCEMENT_ENV_VAR).ok();
    let is_wsl = running_in_wsl();
    // Only probe for VS Code when already in WSL — the probe may shell out to
    // the Windows side (see `read_windows_term_program`).
    let is_vscode_terminal = is_wsl && running_in_vscode_terminal();
    keyboard_enhancement_disabled_for(disable_env.as_deref(), is_wsl, is_vscode_terminal)
}
/// Resolves the final "disable keyboard enhancement" decision.
///
/// An explicit env override always wins; otherwise enhancement is disabled
/// only for a VS Code terminal running inside WSL. VS Code running a WSL
/// shell can hide TERM_PROGRAM from the Linux process environment, so
/// `running_in_vscode_terminal` also probes the Windows-side environment
/// through WSL interop.
fn keyboard_enhancement_disabled_for(
    disable_env: Option<&str>,
    is_wsl: bool,
    is_vscode_terminal: bool,
) -> bool {
    match parse_bool_env(disable_env) {
        Some(disabled) => disabled,
        None => is_wsl && is_vscode_terminal,
    }
}
/// Parses a boolean-ish env value: `1`/`true`/`yes` → `Some(true)`,
/// `0`/`false`/`no` → `Some(false)` (case-insensitive, trimmed); anything
/// else — including `None` — yields `None`.
fn parse_bool_env(value: Option<&str>) -> Option<bool> {
    let value = value?.trim();
    if value == "1" || value.eq_ignore_ascii_case("true") || value.eq_ignore_ascii_case("yes") {
        Some(true)
    } else if value == "0"
        || value.eq_ignore_ascii_case("false")
        || value.eq_ignore_ascii_case("no")
    {
        Some(false)
    } else {
        None
    }
}
/// True when the process appears to be running inside WSL; always false on
/// non-Linux builds.
fn running_in_wsl() -> bool {
    #[cfg(target_os = "linux")]
    {
        crate::clipboard_paste::is_probably_wsl()
    }
    #[cfg(not(target_os = "linux"))]
    {
        false
    }
}
/// Detects a VS Code terminal from the Linux-side TERM_PROGRAM, falling back
/// to the Windows-side value obtained via WSL interop.
fn running_in_vscode_terminal() -> bool {
    vscode_terminal_detected(
        std::env::var("TERM_PROGRAM").ok().as_deref(),
        windows_term_program().as_deref(),
    )
}
fn vscode_terminal_detected(
linux_term_program: Option<&str>,
windows_term_program: Option<&str>,
) -> bool {
term_program_is_vscode(linux_term_program) || term_program_is_vscode(windows_term_program)
}
/// Case-insensitive check for a TERM_PROGRAM value of `vscode`.
fn term_program_is_vscode(value: Option<&str>) -> bool {
    match value {
        Some(value) => value.eq_ignore_ascii_case("vscode"),
        None => false,
    }
}
/// Returns the Windows-side TERM_PROGRAM value (Linux builds only), cached
/// for the process lifetime because the lookup shells out to `cmd.exe`.
fn windows_term_program() -> Option<String> {
    #[cfg(target_os = "linux")]
    {
        static WINDOWS_TERM_PROGRAM: std::sync::OnceLock<Option<String>> =
            std::sync::OnceLock::new();
        WINDOWS_TERM_PROGRAM
            .get_or_init(read_windows_term_program)
            .clone()
    }
    #[cfg(not(target_os = "linux"))]
    {
        None
    }
}
/// Queries the Windows environment for TERM_PROGRAM via WSL interop.
///
/// Runs `cmd.exe /d /s /c set TERM_PROGRAM` and parses the first
/// `TERM_PROGRAM=...` line; returns `None` when the command fails, the
/// variable is unset, or the value is empty.
#[cfg(target_os = "linux")]
fn read_windows_term_program() -> Option<String> {
    let output = std::process::Command::new("cmd.exe")
        .args(["/d", "/s", "/c", "set TERM_PROGRAM"])
        .stdin(std::process::Stdio::null())
        .stderr(std::process::Stdio::null())
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    String::from_utf8_lossy(&output.stdout)
        .lines()
        .find_map(|line| {
            // `cmd.exe` output uses CRLF line endings; strip the trailing CR.
            line.trim_end_matches('\r')
                .strip_prefix("TERM_PROGRAM=")
                .map(str::to_string)
        })
        .filter(|value| !value.trim().is_empty())
}
fn should_emit_notification(condition: NotificationCondition, terminal_focused: bool) -> bool {
match condition {
NotificationCondition::Unfocused => !terminal_focused,
@@ -174,10 +70,7 @@ fn should_emit_notification(condition: NotificationCondition, terminal_focused:
#[cfg(test)]
mod tests {
use super::keyboard_enhancement_disabled_for;
use super::parse_bool_env;
use super::should_emit_notification;
use super::vscode_terminal_detected;
use codex_config::types::NotificationCondition;
#[test]
@@ -203,68 +96,6 @@ mod tests {
/*terminal_focused*/ false
));
}
#[test]
fn keyboard_enhancement_env_flag_parses_common_values() {
assert_eq!(parse_bool_env(Some("1")), Some(true));
assert_eq!(parse_bool_env(Some("true")), Some(true));
assert_eq!(parse_bool_env(Some("YES")), Some(true));
assert_eq!(parse_bool_env(Some("0")), Some(false));
assert_eq!(parse_bool_env(Some("false")), Some(false));
assert_eq!(parse_bool_env(Some("NO")), Some(false));
assert_eq!(parse_bool_env(Some("unexpected")), None);
assert_eq!(parse_bool_env(/*value*/ None), None);
}
#[test]
fn keyboard_enhancement_auto_disables_for_vscode_in_wsl() {
    // With no env override, WSL plus a VS Code terminal together trigger the
    // automatic disable.
    let disabled = keyboard_enhancement_disabled_for(
        /*disable_env*/ None,
        /*is_wsl*/ true,
        /*is_vscode_terminal*/ true,
    );
    assert!(disabled);
}
#[test]
fn keyboard_enhancement_auto_disable_requires_wsl_and_vscode() {
    // Either signal alone — WSL without VS Code, or VS Code without WSL — is
    // not enough to disable the enhancement flags.
    for (is_wsl, is_vscode_terminal) in [(true, false), (false, true)] {
        assert!(!keyboard_enhancement_disabled_for(
            /*disable_env*/ None,
            is_wsl,
            is_vscode_terminal,
        ));
    }
}
#[test]
fn keyboard_enhancement_env_flag_overrides_auto_detection() {
    // An explicit "0" wins over the WSL + VS Code auto-disable heuristic.
    let disabled_despite_opt_out = keyboard_enhancement_disabled_for(
        Some("0"),
        /*is_wsl*/ true,
        /*is_vscode_terminal*/ true,
    );
    // An explicit "1" disables even when auto-detection would not.
    let disabled_by_opt_in = keyboard_enhancement_disabled_for(
        Some("1"),
        /*is_wsl*/ false,
        /*is_vscode_terminal*/ false,
    );
    assert!(!disabled_despite_opt_out, "explicit 0 must keep enhancements on");
    assert!(disabled_by_opt_in, "explicit 1 must turn enhancements off");
}
#[test]
fn vscode_terminal_detection_uses_linux_and_windows_term_program() {
    // (linux TERM_PROGRAM, Windows-side TERM_PROGRAM) -> expected detection.
    let cases = [
        (Some("vscode"), None, true),
        (None, Some("vscode"), true),
        (None, Some("WindowsTerminal"), false),
        (None, None, false),
    ];
    for (linux_term_program, windows_term_program, expected) in cases {
        assert_eq!(
            vscode_terminal_detected(linux_term_program, windows_term_program),
            expected,
        );
    }
}
}
pub fn set_modes() -> Result<()> {
@@ -277,16 +108,14 @@ pub fn set_modes() -> Result<()> {
// Some terminals (notably legacy Windows consoles) do not support
// keyboard enhancement flags. Attempt to enable them, but continue
// gracefully if unsupported.
if !keyboard_enhancement_disabled() {
let _ = execute!(
stdout(),
PushKeyboardEnhancementFlags(
KeyboardEnhancementFlags::DISAMBIGUATE_ESCAPE_CODES
| KeyboardEnhancementFlags::REPORT_EVENT_TYPES
| KeyboardEnhancementFlags::REPORT_ALTERNATE_KEYS
)
);
}
let _ = execute!(
stdout(),
PushKeyboardEnhancementFlags(
KeyboardEnhancementFlags::DISAMBIGUATE_ESCAPE_CODES
| KeyboardEnhancementFlags::REPORT_EVENT_TYPES
| KeyboardEnhancementFlags::REPORT_ALTERNATE_KEYS
)
);
let _ = execute!(stdout(), EnableFocusChange);
Ok(())
@@ -476,8 +305,7 @@ impl Tui {
// Detect keyboard enhancement support before any EventStream is created so the
// crossterm poller can acquire its lock without contention.
let enhanced_keys_supported =
!keyboard_enhancement_disabled() && supports_keyboard_enhancement().unwrap_or(false);
let enhanced_keys_supported = supports_keyboard_enhancement().unwrap_or(false);
// Cache this to avoid contention with the event reader.
supports_color::on_cached(supports_color::Stream::Stdout);
let _ = crate::terminal_palette::default_colors();