Compare commits

..

9 Commits

Author SHA1 Message Date
Ahmed Ibrahim
8cca259b99 codex: fix CI failure on PR #15029
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 17:35:34 +00:00
Ahmed Ibrahim
08dcd3dc19 Update auth tests to use codex-auth
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:58:35 +00:00
Ahmed Ibrahim
8caf1ddb00 Remove stale auth env var imports
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:36:22 +00:00
Ahmed Ibrahim
21b00f2672 Fix external auth refresh constructor call
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:25:51 +00:00
Ahmed Ibrahim
184fb02a9a Extract codex-auth from codex-core
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:11:40 +00:00
Ahmed Ibrahim
1cf68f940c codex: fix CI failure on PR #15010
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 04:05:05 +00:00
Ahmed Ibrahim
0f406c3de0 codex: address PR review feedback (#15010)
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 03:49:51 +00:00
Ahmed Ibrahim
8b3fc35e0b fix: unblock config loader split CI
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 03:34:46 +00:00
Ahmed Ibrahim
38a28973a8 refactor: move config loader internals into codex-config
Extract config-layer IO and managed requirements loading into codex-config so codex-core keeps a thinner config loader facade.

Co-authored-by: Codex <noreply@openai.com>
2026-03-18 02:30:22 +00:00
340 changed files with 6681 additions and 20626 deletions

View File

@@ -351,7 +351,7 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

View File

@@ -142,7 +142,7 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2
uses: mlugg/setup-zig@v2
with:
version: 0.14.0

60
codex-rs/Cargo.lock generated
View File

@@ -1427,7 +1427,6 @@ dependencies = [
"codex-chatgpt",
"codex-cloud-requirements",
"codex-core",
"codex-environment",
"codex-feedback",
"codex-file-search",
"codex-login",
@@ -1463,6 +1462,7 @@ dependencies = [
"tracing-opentelemetry",
"tracing-subscriber",
"uuid",
"walkdir",
"wiremock",
]
@@ -1570,13 +1570,11 @@ name = "codex-artifacts"
version = "0.0.0"
dependencies = [
"codex-package-manager",
"flate2",
"pretty_assertions",
"reqwest",
"serde",
"serde_json",
"sha2",
"tar",
"tempfile",
"thiserror 2.0.18",
"tokio",
@@ -1600,17 +1598,22 @@ dependencies = [
name = "codex-auth"
version = "0.0.0"
dependencies = [
"anyhow",
"base64 0.22.1",
"codex-api",
"chrono",
"codex-app-server-protocol",
"http 1.4.0",
"maplit",
"codex-keyring-store",
"keyring",
"once_cell",
"pretty_assertions",
"schemars 0.8.22",
"serde",
"serde_json",
"sha2",
"tempfile",
"thiserror 2.0.18",
"toml 0.9.11+spec-1.1.0",
"tokio",
"tracing",
]
[[package]]
@@ -1805,10 +1808,12 @@ name = "codex-config"
version = "0.0.0"
dependencies = [
"anyhow",
"base64 0.22.1",
"codex-app-server-protocol",
"codex-execpolicy",
"codex-protocol",
"codex-utils-absolute-path",
"core-foundation 0.9.4",
"futures",
"multimap",
"pretty_assertions",
@@ -1821,6 +1826,7 @@ dependencies = [
"toml 0.9.11+spec-1.1.0",
"toml_edit 0.24.0+spec-1.1.0",
"tracing",
"windows-sys 0.52.0",
]
[[package]]
@@ -1861,8 +1867,6 @@ dependencies = [
"codex-client",
"codex-config",
"codex-connectors",
"codex-core-auth",
"codex-environment",
"codex-execpolicy",
"codex-file-search",
"codex-git",
@@ -1888,7 +1892,6 @@ dependencies = [
"codex-utils-stream-parser",
"codex-utils-string",
"codex-windows-sandbox",
"core-foundation 0.9.4",
"core_test_support",
"csv",
"ctor 0.6.3",
@@ -1948,34 +1951,11 @@ dependencies = [
"walkdir",
"which",
"wildmatch",
"windows-sys 0.52.0",
"wiremock",
"zip",
"zstd",
]
[[package]]
name = "codex-core-auth"
version = "0.0.0"
dependencies = [
"anyhow",
"base64 0.22.1",
"chrono",
"codex-app-server-protocol",
"codex-auth",
"codex-keyring-store",
"keyring",
"once_cell",
"pretty_assertions",
"schemars 0.8.22",
"serde",
"serde_json",
"sha2",
"tempfile",
"tokio",
"tracing",
]
[[package]]
name = "codex-debug-client"
version = "0.0.0"
@@ -1988,17 +1968,6 @@ dependencies = [
"serde_json",
]
[[package]]
name = "codex-environment"
version = "0.0.0"
dependencies = [
"async-trait",
"codex-utils-absolute-path",
"pretty_assertions",
"tempfile",
"tokio",
]
[[package]]
name = "codex-exec"
version = "0.0.0"
@@ -2202,6 +2171,7 @@ dependencies = [
"base64 0.22.1",
"chrono",
"codex-app-server-protocol",
"codex-auth",
"codex-client",
"codex-core",
"core_test_support",
@@ -2799,6 +2769,7 @@ dependencies = [
"base64 0.22.1",
"codex-utils-cache",
"image",
"tempfile",
"thiserror 2.0.18",
"tokio",
]
@@ -3102,6 +3073,7 @@ dependencies = [
"anyhow",
"assert_cmd",
"base64 0.22.1",
"codex-auth",
"codex-core",
"codex-protocol",
"codex-utils-absolute-path",

View File

@@ -2,6 +2,7 @@
members = [
"backend-client",
"ansi-escape",
"auth",
"async-utils",
"app-server",
"app-server-client",
@@ -18,13 +19,10 @@ members = [
"cli",
"connectors",
"config",
"codex-auth",
"shell-command",
"shell-escalation",
"skills",
"core",
"core/auth",
"environment",
"hooks",
"secrets",
"exec",
@@ -89,7 +87,7 @@ license = "Apache-2.0"
app_test_support = { path = "app-server/tests/common" }
codex-ansi-escape = { path = "ansi-escape" }
codex-api = { path = "codex-api" }
codex-auth = { path = "codex-auth" }
codex-auth = { path = "auth" }
codex-artifacts = { path = "artifacts" }
codex-package-manager = { path = "package-manager" }
codex-app-server = { path = "app-server" }
@@ -107,8 +105,6 @@ codex-cloud-requirements = { path = "cloud-requirements" }
codex-connectors = { path = "connectors" }
codex-config = { path = "config" }
codex-core = { path = "core" }
codex-core-auth = { path = "core/auth" }
codex-environment = { path = "environment" }
codex-exec = { path = "exec" }
codex-execpolicy = { path = "execpolicy" }
codex-experimental-api-macros = { path = "codex-experimental-api-macros" }

View File

@@ -1033,10 +1033,6 @@ mod tests {
for (session_source, expected_source) in [
(SessionSource::Exec, ApiSessionSource::Exec),
(SessionSource::Cli, ApiSessionSource::Cli),
(
SessionSource::Custom("atlas".to_string()),
ApiSessionSource::Custom("atlas".to_string()),
),
] {
let client = start_test_client(session_source).await;
let parsed: ThreadStartResponse = client

View File

@@ -1759,12 +1759,6 @@
"call_id": {
"type": "string"
},
"name": {
"type": [
"string",
"null"
]
},
"output": {
"$ref": "#/definitions/FunctionCallOutputBody"
},
@@ -2894,7 +2888,6 @@
"vscode",
"exec",
"appServer",
"custom",
"subAgent",
"subAgentReview",
"subAgentCompact",

View File

@@ -1136,7 +1136,6 @@
"HookEventName": {
"enum": [
"sessionStart",
"userPromptSubmit",
"stop"
],
"type": "string"
@@ -1455,54 +1454,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -1880,19 +1831,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -2242,17 +2180,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -7882,7 +7882,6 @@
"HookEventName": {
"enum": [
"sessionStart",
"userPromptSubmit",
"stop"
],
"type": "string"
@@ -8600,54 +8599,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/v2/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MergeStrategy": {
"enum": [
"replace",
@@ -10360,12 +10311,6 @@
"call_id": {
"type": "string"
},
"name": {
"type": [
"string",
"null"
]
},
"output": {
"$ref": "#/definitions/v2/FunctionCallOutputBody"
},
@@ -10984,19 +10929,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -11908,17 +11840,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/v2/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{
@@ -13110,7 +13031,6 @@
"vscode",
"exec",
"appServer",
"custom",
"subAgent",
"subAgentReview",
"subAgentCompact",

View File

@@ -4626,7 +4626,6 @@
"HookEventName": {
"enum": [
"sessionStart",
"userPromptSubmit",
"stop"
],
"type": "string"
@@ -5388,54 +5387,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MergeStrategy": {
"enum": [
"replace",
@@ -7148,12 +7099,6 @@
"call_id": {
"type": "string"
},
"name": {
"type": [
"string",
"null"
]
},
"output": {
"$ref": "#/definitions/FunctionCallOutputBody"
},
@@ -8744,19 +8689,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -9668,17 +9600,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{
@@ -10870,7 +10791,6 @@
"vscode",
"exec",
"appServer",
"custom",
"subAgent",
"subAgentReview",
"subAgentCompact",

View File

@@ -4,7 +4,6 @@
"HookEventName": {
"enum": [
"sessionStart",
"userPromptSubmit",
"stop"
],
"type": "string"

View File

@@ -4,7 +4,6 @@
"HookEventName": {
"enum": [
"sessionStart",
"userPromptSubmit",
"stop"
],
"type": "string"

View File

@@ -289,54 +289,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -492,17 +444,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -289,54 +289,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -492,17 +444,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -607,12 +607,6 @@
"call_id": {
"type": "string"
},
"name": {
"type": [
"string",
"null"
]
},
"output": {
"$ref": "#/definitions/FunctionCallOutputBody"
},

View File

@@ -403,54 +403,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -606,17 +558,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -488,54 +488,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -826,19 +778,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -1099,17 +1038,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -14,7 +14,6 @@
"vscode",
"exec",
"appServer",
"custom",
"subAgent",
"subAgentReview",
"subAgentCompact",

View File

@@ -426,54 +426,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -584,19 +536,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -857,17 +796,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -426,54 +426,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -584,19 +536,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -857,17 +796,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -426,54 +426,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -584,19 +536,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -857,17 +796,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -673,12 +673,6 @@
"call_id": {
"type": "string"
},
"name": {
"type": [
"string",
"null"
]
},
"output": {
"$ref": "#/definitions/FunctionCallOutputBody"
},

View File

@@ -488,54 +488,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -826,19 +778,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -1099,17 +1038,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -426,54 +426,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -584,19 +536,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -857,17 +796,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -488,54 +488,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -826,19 +778,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -1099,17 +1038,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -426,54 +426,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -584,19 +536,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -857,17 +796,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -426,54 +426,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -584,19 +536,6 @@
],
"type": "string"
},
{
"additionalProperties": false,
"properties": {
"custom": {
"type": "string"
}
},
"required": [
"custom"
],
"title": "CustomSessionSource",
"type": "object"
},
{
"additionalProperties": false,
"properties": {
@@ -857,17 +796,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -403,54 +403,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -606,17 +558,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -403,54 +403,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -606,17 +558,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -403,54 +403,6 @@
],
"type": "string"
},
"MemoryCitation": {
"properties": {
"entries": {
"items": {
"$ref": "#/definitions/MemoryCitationEntry"
},
"type": "array"
},
"threadIds": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"entries",
"threadIds"
],
"type": "object"
},
"MemoryCitationEntry": {
"properties": {
"lineEnd": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"lineStart": {
"format": "uint32",
"minimum": 0.0,
"type": "integer"
},
"note": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"lineEnd",
"lineStart",
"note",
"path"
],
"type": "object"
},
"MessagePhase": {
"description": "Classifies an assistant message as interim commentary or final answer text.\n\nProviders do not emit this consistently, so callers must treat `None` as \"phase unknown\" and keep compatibility behavior for legacy models.",
"oneOf": [
@@ -606,17 +558,6 @@
"id": {
"type": "string"
},
"memoryCitation": {
"anyOf": [
{
"$ref": "#/definitions/MemoryCitation"
},
{
"type": "null"
}
],
"default": null
},
"phase": {
"anyOf": [
{

View File

@@ -15,4 +15,4 @@ export type ResponseItem = { "type": "message", role: string, content: Array<Con
/**
* Set when using the Responses API.
*/
call_id: string | null, status: LocalShellStatus, action: LocalShellAction, } | { "type": "function_call", name: string, namespace?: string, arguments: string, call_id: string, } | { "type": "tool_search_call", call_id: string | null, status?: string, execution: string, arguments: unknown, } | { "type": "function_call_output", call_id: string, output: FunctionCallOutputBody, } | { "type": "custom_tool_call", status?: string, call_id: string, name: string, input: string, } | { "type": "custom_tool_call_output", call_id: string, name?: string, output: FunctionCallOutputBody, } | { "type": "tool_search_output", call_id: string | null, status: string, execution: string, tools: unknown[], } | { "type": "web_search_call", status?: string, action?: WebSearchAction, } | { "type": "image_generation_call", id: string, status: string, revised_prompt?: string, result: string, } | { "type": "ghost_snapshot", ghost_commit: GhostCommit, } | { "type": "compaction", encrypted_content: string, } | { "type": "other" };
call_id: string | null, status: LocalShellStatus, action: LocalShellAction, } | { "type": "function_call", name: string, namespace?: string, arguments: string, call_id: string, } | { "type": "tool_search_call", call_id: string | null, status?: string, execution: string, arguments: unknown, } | { "type": "function_call_output", call_id: string, output: FunctionCallOutputBody, } | { "type": "custom_tool_call", status?: string, call_id: string, name: string, input: string, } | { "type": "custom_tool_call_output", call_id: string, output: FunctionCallOutputBody, } | { "type": "tool_search_output", call_id: string | null, status: string, execution: string, tools: unknown[], } | { "type": "web_search_call", status?: string, action?: WebSearchAction, } | { "type": "image_generation_call", id: string, status: string, revised_prompt?: string, result: string, } | { "type": "ghost_snapshot", ghost_commit: GhostCommit, } | { "type": "compaction", encrypted_content: string, } | { "type": "other" };

View File

@@ -3,4 +3,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SubAgentSource } from "./SubAgentSource";
export type SessionSource = "cli" | "vscode" | "exec" | "mcp" | { "custom": string } | { "subagent": SubAgentSource } | "unknown";
export type SessionSource = "cli" | "vscode" | "exec" | "mcp" | { "subagent": SubAgentSource } | "unknown";

View File

@@ -2,4 +2,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type HookEventName = "sessionStart" | "userPromptSubmit" | "stop";
export type HookEventName = "sessionStart" | "stop";

View File

@@ -1,6 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { MemoryCitationEntry } from "./MemoryCitationEntry";
export type MemoryCitation = { entries: Array<MemoryCitationEntry>, threadIds: Array<string>, };

View File

@@ -1,5 +0,0 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type MemoryCitationEntry = { path: string, lineStart: number, lineEnd: number, note: string, };

View File

@@ -3,4 +3,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SubAgentSource } from "../SubAgentSource";
export type SessionSource = "cli" | "vscode" | "exec" | "appServer" | { "custom": string } | { "subAgent": SubAgentSource } | "unknown";
export type SessionSource = "cli" | "vscode" | "exec" | "appServer" | { "subAgent": SubAgentSource } | "unknown";

View File

@@ -15,12 +15,11 @@ import type { FileUpdateChange } from "./FileUpdateChange";
import type { McpToolCallError } from "./McpToolCallError";
import type { McpToolCallResult } from "./McpToolCallResult";
import type { McpToolCallStatus } from "./McpToolCallStatus";
import type { MemoryCitation } from "./MemoryCitation";
import type { PatchApplyStatus } from "./PatchApplyStatus";
import type { UserInput } from "./UserInput";
import type { WebSearchAction } from "./WebSearchAction";
export type ThreadItem = { "type": "userMessage", id: string, content: Array<UserInput>, } | { "type": "agentMessage", id: string, text: string, phase: MessagePhase | null, memoryCitation: MemoryCitation | null, } | { "type": "plan", id: string, text: string, } | { "type": "reasoning", id: string, summary: Array<string>, content: Array<string>, } | { "type": "commandExecution", id: string,
export type ThreadItem = { "type": "userMessage", id: string, content: Array<UserInput>, } | { "type": "agentMessage", id: string, text: string, phase: MessagePhase | null, } | { "type": "plan", id: string, text: string, } | { "type": "reasoning", id: string, summary: Array<string>, content: Array<string>, } | { "type": "commandExecution", id: string,
/**
* The command to be executed.
*/

View File

@@ -2,4 +2,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type ThreadSourceKind = "cli" | "vscode" | "exec" | "appServer" | "custom" | "subAgent" | "subAgentReview" | "subAgentCompact" | "subAgentThreadSpawn" | "subAgentOther" | "unknown";
export type ThreadSourceKind = "cli" | "vscode" | "exec" | "appServer" | "subAgent" | "subAgentReview" | "subAgentCompact" | "subAgentThreadSpawn" | "subAgentOther" | "unknown";

View File

@@ -174,8 +174,6 @@ export type { McpToolCallError } from "./McpToolCallError";
export type { McpToolCallProgressNotification } from "./McpToolCallProgressNotification";
export type { McpToolCallResult } from "./McpToolCallResult";
export type { McpToolCallStatus } from "./McpToolCallStatus";
export type { MemoryCitation } from "./MemoryCitation";
export type { MemoryCitationEntry } from "./MemoryCitationEntry";
export type { MergeStrategy } from "./MergeStrategy";
export type { Model } from "./Model";
export type { ModelAvailabilityNux } from "./ModelAvailabilityNux";

View File

@@ -118,11 +118,9 @@ impl ThreadHistoryBuilder {
pub fn handle_event(&mut self, event: &EventMsg) {
match event {
EventMsg::UserMessage(payload) => self.handle_user_message(payload),
EventMsg::AgentMessage(payload) => self.handle_agent_message(
payload.message.clone(),
payload.phase.clone(),
payload.memory_citation.clone().map(Into::into),
),
EventMsg::AgentMessage(payload) => {
self.handle_agent_message(payload.message.clone(), payload.phase.clone())
}
EventMsg::AgentReasoning(payload) => self.handle_agent_reasoning(payload),
EventMsg::AgentReasoningRawContent(payload) => {
self.handle_agent_reasoning_raw_content(payload)
@@ -210,23 +208,15 @@ impl ThreadHistoryBuilder {
self.current_turn = Some(turn);
}
fn handle_agent_message(
&mut self,
text: String,
phase: Option<MessagePhase>,
memory_citation: Option<crate::protocol::v2::MemoryCitation>,
) {
fn handle_agent_message(&mut self, text: String, phase: Option<MessagePhase>) {
if text.is_empty() {
return;
}
let id = self.next_item_id();
self.ensure_turn().items.push(ThreadItem::AgentMessage {
id,
text,
phase,
memory_citation,
});
self.ensure_turn()
.items
.push(ThreadItem::AgentMessage { id, text, phase });
}
fn handle_agent_reasoning(&mut self, payload: &AgentReasoningEvent) {
@@ -1188,7 +1178,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "Hi there".into(),
phase: None,
memory_citation: None,
}),
EventMsg::AgentReasoning(AgentReasoningEvent {
text: "thinking".into(),
@@ -1205,7 +1194,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "Reply two".into(),
phase: None,
memory_citation: None,
}),
];
@@ -1241,7 +1229,6 @@ mod tests {
id: "item-2".into(),
text: "Hi there".into(),
phase: None,
memory_citation: None,
}
);
assert_eq!(
@@ -1273,7 +1260,6 @@ mod tests {
id: "item-5".into(),
text: "Reply two".into(),
phase: None,
memory_citation: None,
}
);
}
@@ -1332,7 +1318,6 @@ mod tests {
let events = vec![EventMsg::AgentMessage(AgentMessageEvent {
message: "Final reply".into(),
phase: Some(CoreMessagePhase::FinalAnswer),
memory_citation: None,
})];
let items = events
@@ -1347,7 +1332,6 @@ mod tests {
id: "item-1".into(),
text: "Final reply".into(),
phase: Some(MessagePhase::FinalAnswer),
memory_citation: None,
}
);
}
@@ -1370,7 +1354,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "interlude".into(),
phase: None,
memory_citation: None,
}),
EventMsg::AgentReasoning(AgentReasoningEvent {
text: "second summary".into(),
@@ -1416,7 +1399,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "Working...".into(),
phase: None,
memory_citation: None,
}),
EventMsg::TurnAborted(TurnAbortedEvent {
turn_id: Some("turn-1".into()),
@@ -1431,7 +1413,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "Second attempt complete.".into(),
phase: None,
memory_citation: None,
}),
];
@@ -1461,7 +1442,6 @@ mod tests {
id: "item-2".into(),
text: "Working...".into(),
phase: None,
memory_citation: None,
}
);
@@ -1484,7 +1464,6 @@ mod tests {
id: "item-4".into(),
text: "Second attempt complete.".into(),
phase: None,
memory_citation: None,
}
);
}
@@ -1501,7 +1480,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "A1".into(),
phase: None,
memory_citation: None,
}),
EventMsg::UserMessage(UserMessageEvent {
message: "Second".into(),
@@ -1512,7 +1490,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "A2".into(),
phase: None,
memory_citation: None,
}),
EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 1 }),
EventMsg::UserMessage(UserMessageEvent {
@@ -1524,7 +1501,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "A3".into(),
phase: None,
memory_citation: None,
}),
];
@@ -1553,7 +1529,6 @@ mod tests {
id: "item-2".into(),
text: "A1".into(),
phase: None,
memory_citation: None,
},
]
);
@@ -1571,7 +1546,6 @@ mod tests {
id: "item-4".into(),
text: "A3".into(),
phase: None,
memory_citation: None,
},
]
);
@@ -1589,7 +1563,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "A1".into(),
phase: None,
memory_citation: None,
}),
EventMsg::UserMessage(UserMessageEvent {
message: "Two".into(),
@@ -1600,7 +1573,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "A2".into(),
phase: None,
memory_citation: None,
}),
EventMsg::ThreadRolledBack(ThreadRolledBackEvent { num_turns: 99 }),
];
@@ -2237,7 +2209,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "still in b".into(),
phase: None,
memory_citation: None,
}),
EventMsg::TurnComplete(TurnCompleteEvent {
turn_id: "turn-b".into(),
@@ -2292,7 +2263,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "still in b".into(),
phase: None,
memory_citation: None,
}),
];
@@ -2527,7 +2497,6 @@ mod tests {
EventMsg::AgentMessage(AgentMessageEvent {
message: "done".into(),
phase: None,
memory_citation: None,
}),
EventMsg::Error(ErrorEvent {
message: "rollback failed".into(),

View File

@@ -30,8 +30,6 @@ use codex_protocol::items::TurnItem as CoreTurnItem;
use codex_protocol::mcp::Resource as McpResource;
use codex_protocol::mcp::ResourceTemplate as McpResourceTemplate;
use codex_protocol::mcp::Tool as McpTool;
use codex_protocol::memory_citation::MemoryCitation as CoreMemoryCitation;
use codex_protocol::memory_citation::MemoryCitationEntry as CoreMemoryCitationEntry;
use codex_protocol::models::FileSystemPermissions as CoreFileSystemPermissions;
use codex_protocol::models::MacOsAutomationPermission as CoreMacOsAutomationPermission;
use codex_protocol::models::MacOsContactsPermission as CoreMacOsContactsPermission;
@@ -345,7 +343,7 @@ v2_enum_from_core!(
v2_enum_from_core!(
pub enum HookEventName from CoreHookEventName {
SessionStart, UserPromptSubmit, Stop
SessionStart, Stop
}
);
@@ -1467,7 +1465,6 @@ pub enum SessionSource {
VsCode,
Exec,
AppServer,
Custom(String),
SubAgent(CoreSubAgentSource),
#[serde(other)]
Unknown,
@@ -1480,7 +1477,6 @@ impl From<CoreSessionSource> for SessionSource {
CoreSessionSource::VSCode => SessionSource::VsCode,
CoreSessionSource::Exec => SessionSource::Exec,
CoreSessionSource::Mcp => SessionSource::AppServer,
CoreSessionSource::Custom(source) => SessionSource::Custom(source),
CoreSessionSource::SubAgent(sub) => SessionSource::SubAgent(sub),
CoreSessionSource::Unknown => SessionSource::Unknown,
}
@@ -1494,7 +1490,6 @@ impl From<SessionSource> for CoreSessionSource {
SessionSource::VsCode => CoreSessionSource::VSCode,
SessionSource::Exec => CoreSessionSource::Exec,
SessionSource::AppServer => CoreSessionSource::Mcp,
SessionSource::Custom(source) => CoreSessionSource::Custom(source),
SessionSource::SubAgent(sub) => CoreSessionSource::SubAgent(sub),
SessionSource::Unknown => CoreSessionSource::Unknown,
}
@@ -2954,7 +2949,6 @@ pub enum ThreadSourceKind {
VsCode,
Exec,
AppServer,
Custom,
SubAgent,
SubAgentReview,
SubAgentCompact,
@@ -3574,44 +3568,6 @@ pub struct Turn {
pub error: Option<TurnError>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct MemoryCitation {
pub entries: Vec<MemoryCitationEntry>,
pub thread_ids: Vec<String>,
}
impl From<CoreMemoryCitation> for MemoryCitation {
fn from(value: CoreMemoryCitation) -> Self {
Self {
entries: value.entries.into_iter().map(Into::into).collect(),
thread_ids: value.rollout_ids,
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct MemoryCitationEntry {
pub path: String,
pub line_start: u32,
pub line_end: u32,
pub note: String,
}
impl From<CoreMemoryCitationEntry> for MemoryCitationEntry {
fn from(value: CoreMemoryCitationEntry) -> Self {
Self {
path: value.path,
line_start: value.line_start,
line_end: value.line_end,
note: value.note,
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, Error)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -4112,8 +4068,6 @@ pub enum ThreadItem {
text: String,
#[serde(default)]
phase: Option<MessagePhase>,
#[serde(default)]
memory_citation: Option<MemoryCitation>,
},
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
@@ -4363,7 +4317,6 @@ impl From<CoreTurnItem> for ThreadItem {
id: agent.id,
text,
phase: agent.phase,
memory_citation: agent.memory_citation.map(Into::into),
}
}
CoreTurnItem::Plan(plan) => ThreadItem::Plan {
@@ -7440,7 +7393,6 @@ mod tests {
},
],
phase: None,
memory_citation: None,
});
assert_eq!(
@@ -7449,7 +7401,6 @@ mod tests {
id: "agent-1".to_string(),
text: "Hello world".to_string(),
phase: None,
memory_citation: None,
}
);
@@ -7459,15 +7410,6 @@ mod tests {
text: "final".to_string(),
}],
phase: Some(MessagePhase::FinalAnswer),
memory_citation: Some(CoreMemoryCitation {
entries: vec![CoreMemoryCitationEntry {
path: "MEMORY.md".to_string(),
line_start: 1,
line_end: 2,
note: "summary".to_string(),
}],
rollout_ids: vec!["rollout-1".to_string()],
}),
});
assert_eq!(
@@ -7476,15 +7418,6 @@ mod tests {
id: "agent-2".to_string(),
text: "final".to_string(),
phase: Some(MessagePhase::FinalAnswer),
memory_citation: Some(MemoryCitation {
entries: vec![MemoryCitationEntry {
path: "MEMORY.md".to_string(),
line_start: 1,
line_end: 2,
note: "summary".to_string(),
}],
thread_ids: vec!["rollout-1".to_string()],
}),
}
);

View File

@@ -32,7 +32,6 @@ axum = { workspace = true, default-features = false, features = [
codex-arg0 = { workspace = true }
codex-cloud-requirements = { workspace = true }
codex-core = { workspace = true }
codex-environment = { workspace = true }
codex-otel = { workspace = true }
codex-shell-command = { workspace = true }
codex-utils-cli = { workspace = true }
@@ -69,6 +68,7 @@ tokio-tungstenite = { workspace = true }
tracing = { workspace = true, features = ["log"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt", "json"] }
uuid = { workspace = true, features = ["serde", "v7"] }
walkdir = { workspace = true }
[dev-dependencies]
app_test_support = { workspace = true }

View File

@@ -29,8 +29,7 @@ Supported transports:
When running with `--listen ws://IP:PORT`, the same listener also serves basic HTTP health probes:
- `GET /readyz` returns `200 OK` once the listener is accepting new connections.
- `GET /healthz` returns `200 OK` when no `Origin` header is present.
- Any request carrying an `Origin` header is rejected with `403 Forbidden`.
- `GET /healthz` currently always returns `200 OK`.
Websocket transport is currently experimental and unsupported. Do not rely on it for production workloads.
@@ -256,7 +255,7 @@ Experimental API: `thread/start`, `thread/resume`, and `thread/fork` accept `per
- `limit` — server defaults to a reasonable page size if unset.
- `sortKey` — `created_at` (default) or `updated_at`.
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.
- `sourceKinds` — restrict results to specific sources; omit or pass `[]` for interactive sessions only (`cli`, `vscode`, and custom product sources).
- `sourceKinds` — restrict results to specific sources; omit or pass `[]` for interactive sessions only (`cli`, `vscode`).
- `archived` — when `true`, list archived threads only. When `false` or `null`, list non-archived threads (default).
- `cwd` — restrict results to threads whose session cwd exactly matches this path.
- `searchTerm` — restrict results to threads whose extracted title contains this substring (case-sensitive).

View File

@@ -220,7 +220,7 @@ use codex_core::mcp::group_tools_by_server;
use codex_core::models_manager::collaboration_mode_presets::CollaborationModesConfig;
use codex_core::parse_cursor;
use codex_core::plugins::MarketplaceError;
use codex_core::plugins::MarketplacePluginSource;
use codex_core::plugins::MarketplacePluginSourceSummary;
use codex_core::plugins::PluginInstallError as CorePluginInstallError;
use codex_core::plugins::PluginInstallRequest;
use codex_core::plugins::PluginReadRequest;
@@ -423,10 +423,7 @@ impl CodexMessageProcessor {
Ok(config) => self
.thread_manager
.plugins_manager()
.maybe_start_curated_repo_sync_for_config(
&config,
&self.thread_manager.session_source(),
),
.maybe_start_curated_repo_sync_for_config(&config),
Err(err) => warn!("failed to load latest config for curated plugin sync: {err:?}"),
}
}
@@ -1946,7 +1943,6 @@ impl CodexMessageProcessor {
config_overrides,
typesafe_overrides,
&cloud_requirements,
&listener_task_context.codex_home,
)
.await
{
@@ -3396,7 +3392,6 @@ impl CodexMessageProcessor {
typesafe_overrides,
history_cwd,
&cloud_requirements,
&self.config.codex_home,
)
.await
{
@@ -3923,7 +3918,6 @@ impl CodexMessageProcessor {
typesafe_overrides,
history_cwd,
&cloud_requirements,
&self.config.codex_home,
)
.await
{
@@ -5305,7 +5299,6 @@ impl CodexMessageProcessor {
force_reload,
per_cwd_extra_user_roots,
} = params;
let session_source = self.thread_manager.session_source();
let cwds = if cwds.is_empty() {
vec![self.config.cwd.clone()]
} else {
@@ -5350,12 +5343,9 @@ impl CodexMessageProcessor {
let extra_roots = extra_roots_by_cwd
.get(&cwd)
.map_or(&[][..], std::vec::Vec::as_slice);
let outcome = codex_core::skills::filter_skill_load_outcome_for_session_source(
skills_manager
.skills_for_cwd_with_extra_user_roots(&cwd, force_reload, extra_roots)
.await,
&session_source,
);
let outcome = skills_manager
.skills_for_cwd_with_extra_user_roots(&cwd, force_reload, extra_roots)
.await;
let errors = errors_to_info(&outcome.errors);
let skills = skills_to_info(&outcome.skills, &outcome.disabled_paths);
data.push(codex_app_server_protocol::SkillsListEntry {
@@ -5371,7 +5361,6 @@ impl CodexMessageProcessor {
async fn plugin_list(&self, request_id: ConnectionRequestId, params: PluginListParams) {
let plugins_manager = self.thread_manager.plugins_manager();
let session_source = self.thread_manager.session_source();
let PluginListParams {
cwds,
force_remote_sync,
@@ -5425,35 +5414,26 @@ impl CodexMessageProcessor {
Ok::<Vec<PluginMarketplaceEntry>, MarketplaceError>(
marketplaces
.into_iter()
.filter_map(|marketplace| {
let plugins = marketplace
.map(|marketplace| PluginMarketplaceEntry {
name: marketplace.name,
path: marketplace.path,
interface: marketplace.interface.map(|interface| MarketplaceInterface {
display_name: interface.display_name,
}),
plugins: marketplace
.plugins
.into_iter()
.filter(|plugin| {
session_source.matches_product_restriction(&plugin.policy.products)
})
.map(|plugin| PluginSummary {
id: plugin.id,
installed: plugin.installed,
enabled: plugin.enabled,
name: plugin.name,
source: marketplace_plugin_source_to_info(plugin.source),
install_policy: plugin.policy.installation.into(),
auth_policy: plugin.policy.authentication.into(),
install_policy: plugin.install_policy.into(),
auth_policy: plugin.auth_policy.into(),
interface: plugin.interface.map(plugin_interface_to_info),
})
.collect::<Vec<_>>();
(!plugins.is_empty()).then_some(PluginMarketplaceEntry {
name: marketplace.name,
path: marketplace.path,
interface: marketplace.interface.map(|interface| {
MarketplaceInterface {
display_name: interface.display_name,
}
}),
plugins,
})
.collect(),
})
.collect(),
)
@@ -5528,11 +5508,6 @@ impl CodexMessageProcessor {
return;
}
};
let session_source = self.thread_manager.session_source();
let plugin_skills = codex_core::skills::filter_skills_for_session_source(
outcome.plugin.skills,
&session_source,
);
let app_summaries =
plugin_app_helpers::load_plugin_app_summaries(&config, &outcome.plugin.apps).await;
let plugin = PluginDetail {
@@ -5544,12 +5519,12 @@ impl CodexMessageProcessor {
source: marketplace_plugin_source_to_info(outcome.plugin.source),
installed: outcome.plugin.installed,
enabled: outcome.plugin.enabled,
install_policy: outcome.plugin.policy.installation.into(),
auth_policy: outcome.plugin.policy.authentication.into(),
install_policy: outcome.plugin.install_policy.into(),
auth_policy: outcome.plugin.auth_policy.into(),
interface: outcome.plugin.interface.map(plugin_interface_to_info),
},
description: outcome.plugin.description,
skills: plugin_skills_to_info(&plugin_skills),
skills: plugin_skills_to_info(&outcome.plugin.skills),
apps: app_summaries,
mcp_servers: outcome.plugin.mcp_server_names,
};
@@ -7041,7 +7016,6 @@ impl CodexMessageProcessor {
},
Some(command_cwd.clone()),
&cloud_requirements,
&config.codex_home,
)
.await;
let setup_result = match derived_config {
@@ -7482,7 +7456,7 @@ fn plugin_skills_to_info(skills: &[codex_core::skills::SkillMetadata]) -> Vec<Sk
}
fn plugin_interface_to_info(
interface: codex_core::plugins::PluginManifestInterface,
interface: codex_core::plugins::PluginManifestInterfaceSummary,
) -> PluginInterface {
PluginInterface {
display_name: interface.display_name,
@@ -7502,9 +7476,9 @@ fn plugin_interface_to_info(
}
}
fn marketplace_plugin_source_to_info(source: MarketplacePluginSource) -> PluginSource {
fn marketplace_plugin_source_to_info(source: MarketplacePluginSourceSummary) -> PluginSource {
match source {
MarketplacePluginSource::Local { path } => PluginSource::Local { path },
MarketplacePluginSourceSummary::Local { path } => PluginSource::Local { path },
}
}
@@ -7636,7 +7610,6 @@ async fn derive_config_from_params(
request_overrides: Option<HashMap<String, serde_json::Value>>,
typesafe_overrides: ConfigOverrides,
cloud_requirements: &CloudRequirementsLoader,
codex_home: &Path,
) -> std::io::Result<Config> {
let merged_cli_overrides = cli_overrides
.iter()
@@ -7650,7 +7623,6 @@ async fn derive_config_from_params(
.collect::<Vec<_>>();
codex_core::config::ConfigBuilder::default()
.codex_home(codex_home.to_path_buf())
.cli_overrides(merged_cli_overrides)
.harness_overrides(typesafe_overrides)
.cloud_requirements(cloud_requirements.clone())
@@ -7664,7 +7636,6 @@ async fn derive_config_for_cwd(
typesafe_overrides: ConfigOverrides,
cwd: Option<PathBuf>,
cloud_requirements: &CloudRequirementsLoader,
codex_home: &Path,
) -> std::io::Result<Config> {
let merged_cli_overrides = cli_overrides
.iter()
@@ -7678,7 +7649,6 @@ async fn derive_config_for_cwd(
.collect::<Vec<_>>();
codex_core::config::ConfigBuilder::default()
.codex_home(codex_home.to_path_buf())
.cli_overrides(merged_cli_overrides)
.harness_overrides(typesafe_overrides)
.fallback_cwd(cwd)

View File

@@ -298,7 +298,6 @@ mod tests {
allowed_web_search_modes: Some(vec![
codex_core::config_loader::WebSearchModeRequirement::Cached,
]),
guardian_developer_instructions: None,
feature_requirements: Some(codex_core::config_loader::FeatureRequirementsToml {
entries: std::collections::BTreeMap::from([
("apps".to_string(), false),
@@ -375,7 +374,6 @@ mod tests {
allowed_approval_policies: None,
allowed_sandbox_modes: None,
allowed_web_search_modes: Some(Vec::new()),
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,

View File

@@ -1,24 +1,17 @@
use codex_app_server_protocol::ThreadSourceKind;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SubAgentSource as CoreSubAgentSource;
fn interactive_source_kinds() -> Vec<ThreadSourceKind> {
vec![
ThreadSourceKind::Cli,
ThreadSourceKind::VsCode,
ThreadSourceKind::Custom,
]
}
pub(crate) fn compute_source_filters(
source_kinds: Option<Vec<ThreadSourceKind>>,
) -> (Vec<CoreSessionSource>, Option<Vec<ThreadSourceKind>>) {
let Some(source_kinds) = source_kinds else {
return (Vec::new(), Some(interactive_source_kinds()));
return (INTERACTIVE_SESSION_SOURCES.to_vec(), None);
};
if source_kinds.is_empty() {
return (Vec::new(), Some(interactive_source_kinds()));
return (INTERACTIVE_SESSION_SOURCES.to_vec(), None);
}
let requires_post_filter = source_kinds.iter().any(|kind| {
@@ -26,7 +19,6 @@ pub(crate) fn compute_source_filters(
kind,
ThreadSourceKind::Exec
| ThreadSourceKind::AppServer
| ThreadSourceKind::Custom
| ThreadSourceKind::SubAgent
| ThreadSourceKind::SubAgentReview
| ThreadSourceKind::SubAgentCompact
@@ -46,7 +38,6 @@ pub(crate) fn compute_source_filters(
ThreadSourceKind::VsCode => Some(CoreSessionSource::VSCode),
ThreadSourceKind::Exec
| ThreadSourceKind::AppServer
| ThreadSourceKind::Custom
| ThreadSourceKind::SubAgent
| ThreadSourceKind::SubAgentReview
| ThreadSourceKind::SubAgentCompact
@@ -65,7 +56,6 @@ pub(crate) fn source_kind_matches(source: &CoreSessionSource, filter: &[ThreadSo
ThreadSourceKind::VsCode => matches!(source, CoreSessionSource::VSCode),
ThreadSourceKind::Exec => matches!(source, CoreSessionSource::Exec),
ThreadSourceKind::AppServer => matches!(source, CoreSessionSource::Mcp),
ThreadSourceKind::Custom => matches!(source, CoreSessionSource::Custom(_)),
ThreadSourceKind::SubAgent => matches!(source, CoreSessionSource::SubAgent(_)),
ThreadSourceKind::SubAgentReview => {
matches!(
@@ -102,16 +92,16 @@ mod tests {
fn compute_source_filters_defaults_to_interactive_sources() {
let (allowed_sources, filter) = compute_source_filters(None);
assert_eq!(allowed_sources, Vec::new());
assert_eq!(filter, Some(interactive_source_kinds()));
assert_eq!(allowed_sources, INTERACTIVE_SESSION_SOURCES.to_vec());
assert_eq!(filter, None);
}
#[test]
fn compute_source_filters_empty_means_interactive_sources() {
let (allowed_sources, filter) = compute_source_filters(Some(Vec::new()));
assert_eq!(allowed_sources, Vec::new());
assert_eq!(filter, Some(interactive_source_kinds()));
assert_eq!(allowed_sources, INTERACTIVE_SESSION_SOURCES.to_vec());
assert_eq!(filter, None);
}
#[test]
@@ -135,15 +125,6 @@ mod tests {
assert_eq!(filter, Some(source_kinds));
}
#[test]
fn compute_source_filters_custom_requires_post_filtering() {
let source_kinds = vec![ThreadSourceKind::Custom];
let (allowed_sources, filter) = compute_source_filters(Some(source_kinds.clone()));
assert_eq!(allowed_sources, Vec::new());
assert_eq!(filter, Some(source_kinds));
}
#[test]
fn source_kind_matches_distinguishes_subagent_variants() {
let parent_thread_id =
@@ -173,12 +154,4 @@ mod tests {
&[ThreadSourceKind::SubAgentReview]
));
}
#[test]
fn source_kind_matches_custom_sources() {
let custom = CoreSessionSource::Custom("atlas".to_string());
assert!(source_kind_matches(&custom, &[ThreadSourceKind::Custom]));
assert!(!source_kind_matches(&custom, &[ThreadSourceKind::Cli]));
}
}

View File

@@ -18,37 +18,23 @@ use codex_app_server_protocol::FsRemoveResponse;
use codex_app_server_protocol::FsWriteFileParams;
use codex_app_server_protocol::FsWriteFileResponse;
use codex_app_server_protocol::JSONRPCErrorError;
use codex_environment::CopyOptions;
use codex_environment::CreateDirectoryOptions;
use codex_environment::Environment;
use codex_environment::ExecutorFileSystem;
use codex_environment::RemoveOptions;
use std::io;
use std::sync::Arc;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
use walkdir::WalkDir;
#[derive(Clone)]
pub(crate) struct FsApi {
file_system: Arc<dyn ExecutorFileSystem>,
}
impl Default for FsApi {
fn default() -> Self {
Self {
file_system: Arc::new(Environment.get_filesystem()),
}
}
}
#[derive(Clone, Default)]
pub(crate) struct FsApi;
impl FsApi {
pub(crate) async fn read_file(
&self,
params: FsReadFileParams,
) -> Result<FsReadFileResponse, JSONRPCErrorError> {
let bytes = self
.file_system
.read_file(&params.path)
.await
.map_err(map_fs_error)?;
let bytes = tokio::fs::read(params.path).await.map_err(map_io_error)?;
Ok(FsReadFileResponse {
data_base64: STANDARD.encode(bytes),
})
@@ -63,10 +49,9 @@ impl FsApi {
"fs/writeFile requires valid base64 dataBase64: {err}"
))
})?;
self.file_system
.write_file(&params.path, bytes)
tokio::fs::write(params.path, bytes)
.await
.map_err(map_fs_error)?;
.map_err(map_io_error)?;
Ok(FsWriteFileResponse {})
}
@@ -74,15 +59,15 @@ impl FsApi {
&self,
params: FsCreateDirectoryParams,
) -> Result<FsCreateDirectoryResponse, JSONRPCErrorError> {
self.file_system
.create_directory(
&params.path,
CreateDirectoryOptions {
recursive: params.recursive.unwrap_or(true),
},
)
.await
.map_err(map_fs_error)?;
if params.recursive.unwrap_or(true) {
tokio::fs::create_dir_all(params.path)
.await
.map_err(map_io_error)?;
} else {
tokio::fs::create_dir(params.path)
.await
.map_err(map_io_error)?;
}
Ok(FsCreateDirectoryResponse {})
}
@@ -90,16 +75,14 @@ impl FsApi {
&self,
params: FsGetMetadataParams,
) -> Result<FsGetMetadataResponse, JSONRPCErrorError> {
let metadata = self
.file_system
.get_metadata(&params.path)
let metadata = tokio::fs::metadata(params.path)
.await
.map_err(map_fs_error)?;
.map_err(map_io_error)?;
Ok(FsGetMetadataResponse {
is_directory: metadata.is_directory,
is_file: metadata.is_file,
created_at_ms: metadata.created_at_ms,
modified_at_ms: metadata.modified_at_ms,
is_directory: metadata.is_dir(),
is_file: metadata.is_file(),
created_at_ms: metadata.created().ok().map_or(0, system_time_to_unix_ms),
modified_at_ms: metadata.modified().ok().map_or(0, system_time_to_unix_ms),
})
}
@@ -107,59 +90,232 @@ impl FsApi {
&self,
params: FsReadDirectoryParams,
) -> Result<FsReadDirectoryResponse, JSONRPCErrorError> {
let entries = self
.file_system
.read_directory(&params.path)
let mut entries = Vec::new();
let mut read_dir = tokio::fs::read_dir(params.path)
.await
.map_err(map_fs_error)?;
Ok(FsReadDirectoryResponse {
entries: entries
.into_iter()
.map(|entry| FsReadDirectoryEntry {
file_name: entry.file_name,
is_directory: entry.is_directory,
is_file: entry.is_file,
})
.collect(),
})
.map_err(map_io_error)?;
while let Some(entry) = read_dir.next_entry().await.map_err(map_io_error)? {
let metadata = tokio::fs::metadata(entry.path())
.await
.map_err(map_io_error)?;
entries.push(FsReadDirectoryEntry {
file_name: entry.file_name().to_string_lossy().into_owned(),
is_directory: metadata.is_dir(),
is_file: metadata.is_file(),
});
}
Ok(FsReadDirectoryResponse { entries })
}
pub(crate) async fn remove(
&self,
params: FsRemoveParams,
) -> Result<FsRemoveResponse, JSONRPCErrorError> {
self.file_system
.remove(
&params.path,
RemoveOptions {
recursive: params.recursive.unwrap_or(true),
force: params.force.unwrap_or(true),
},
)
.await
.map_err(map_fs_error)?;
Ok(FsRemoveResponse {})
let path = params.path.as_path();
let recursive = params.recursive.unwrap_or(true);
let force = params.force.unwrap_or(true);
match tokio::fs::symlink_metadata(path).await {
Ok(metadata) => {
let file_type = metadata.file_type();
if file_type.is_dir() {
if recursive {
tokio::fs::remove_dir_all(path)
.await
.map_err(map_io_error)?;
} else {
tokio::fs::remove_dir(path).await.map_err(map_io_error)?;
}
} else {
tokio::fs::remove_file(path).await.map_err(map_io_error)?;
}
Ok(FsRemoveResponse {})
}
Err(err) if err.kind() == io::ErrorKind::NotFound && force => Ok(FsRemoveResponse {}),
Err(err) => Err(map_io_error(err)),
}
}
pub(crate) async fn copy(
&self,
params: FsCopyParams,
) -> Result<FsCopyResponse, JSONRPCErrorError> {
self.file_system
.copy(
&params.source_path,
&params.destination_path,
CopyOptions {
recursive: params.recursive,
},
)
.await
.map_err(map_fs_error)?;
let FsCopyParams {
source_path,
destination_path,
recursive,
} = params;
tokio::task::spawn_blocking(move || -> Result<(), JSONRPCErrorError> {
let metadata =
std::fs::symlink_metadata(source_path.as_path()).map_err(map_io_error)?;
let file_type = metadata.file_type();
if file_type.is_dir() {
if !recursive {
return Err(invalid_request(
"fs/copy requires recursive: true when sourcePath is a directory",
));
}
if destination_is_same_or_descendant_of_source(
source_path.as_path(),
destination_path.as_path(),
)
.map_err(map_io_error)?
{
return Err(invalid_request(
"fs/copy cannot copy a directory to itself or one of its descendants",
));
}
copy_dir_recursive(source_path.as_path(), destination_path.as_path())
.map_err(map_io_error)?;
return Ok(());
}
if file_type.is_symlink() {
copy_symlink(source_path.as_path(), destination_path.as_path())
.map_err(map_io_error)?;
return Ok(());
}
if file_type.is_file() {
std::fs::copy(source_path.as_path(), destination_path.as_path())
.map_err(map_io_error)?;
return Ok(());
}
Err(invalid_request(
"fs/copy only supports regular files, directories, and symlinks",
))
})
.await
.map_err(map_join_error)??;
Ok(FsCopyResponse {})
}
}
fn invalid_request(message: impl Into<String>) -> JSONRPCErrorError {
/// Recursively mirrors `source` into `target`: directories are recreated,
/// regular files are copied byte-for-byte, and symlinks are recreated as
/// links. Special files (FIFOs, sockets, device nodes) are skipped.
fn copy_dir_recursive(source: &Path, target: &Path) -> io::Result<()> {
    for walk_entry in WalkDir::new(source) {
        // Surface walkdir failures as io::Error, preserving the kind when one
        // is available.
        let walk_entry = walk_entry.map_err(|err| match err.io_error() {
            Some(io_err) => io::Error::new(io_err.kind(), io_err.to_string()),
            None => io::Error::other(err.to_string()),
        })?;
        let entry_path = walk_entry.path();
        // Rebase each entry under `target` via its path relative to `source`.
        let relative = entry_path.strip_prefix(source).map_err(|err| {
            io::Error::other(format!(
                "failed to compute relative path for {} under {}: {err}",
                entry_path.display(),
                source.display()
            ))
        })?;
        let destination = target.join(relative);
        let entry_type = walk_entry.file_type();
        if entry_type.is_dir() {
            std::fs::create_dir_all(&destination)?;
        } else if entry_type.is_file() {
            std::fs::copy(entry_path, &destination)?;
        } else if entry_type.is_symlink() {
            copy_symlink(entry_path, &destination)?;
        }
        // For now ignore special files such as FIFOs, sockets, and device
        // nodes during recursive copies.
    }
    Ok(())
}
/// Returns `true` when `destination` resolves to `source` itself or to a path
/// nested somewhere beneath it.
///
/// `source` must exist (it is canonicalized); `destination` may partially
/// exist — its missing tail is resolved lexically.
fn destination_is_same_or_descendant_of_source(
    source: &Path,
    destination: &Path,
) -> io::Result<bool> {
    let canonical_source = std::fs::canonicalize(source)?;
    let resolved_destination = resolve_copy_destination_path(destination)?;
    Ok(resolved_destination.starts_with(&canonical_source))
}
/// Resolves `path` to an absolute, symlink-free form even when some trailing
/// components do not exist yet: `.`/`..` are normalized lexically, the longest
/// existing ancestor is canonicalized, and the not-yet-existing suffix is then
/// re-appended verbatim.
fn resolve_copy_destination_path(path: &Path) -> io::Result<PathBuf> {
    // Lexically normalize `.` and `..` so the existence walk below operates
    // on a clean component list.
    let mut normalized = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => {}
            Component::ParentDir => {
                normalized.pop();
            }
            other => normalized.push(other.as_os_str()),
        }
    }
    // Peel trailing components off until we reach something that exists.
    let mut missing_tail = Vec::new();
    let mut probe = normalized.as_path();
    while !probe.exists() {
        let Some(name) = probe.file_name() else {
            break;
        };
        missing_tail.push(name.to_os_string());
        let Some(parent) = probe.parent() else {
            break;
        };
        probe = parent;
    }
    // Canonicalize the existing ancestor, then re-attach the missing suffix
    // in original order.
    let mut resolved = std::fs::canonicalize(probe)?;
    resolved.extend(missing_tail.iter().rev());
    Ok(resolved)
}
/// Recreates the symlink at `source` as a new symlink at `target` pointing at
/// the same link target (the link itself is copied, not the file it refers
/// to). Errors with `Unsupported` on platforms without symlink support.
fn copy_symlink(source: &Path, target: &Path) -> io::Result<()> {
    let points_to = std::fs::read_link(source)?;
    #[cfg(unix)]
    {
        std::os::unix::fs::symlink(&points_to, target)
    }
    #[cfg(windows)]
    {
        // Windows distinguishes file and directory symlinks, so inspect the
        // original link before choosing which flavor to create.
        if symlink_points_to_directory(source)? {
            std::os::windows::fs::symlink_dir(&points_to, target)
        } else {
            std::os::windows::fs::symlink_file(&points_to, target)
        }
    }
    #[cfg(not(any(unix, windows)))]
    {
        let _ = points_to;
        let _ = target;
        Err(io::Error::new(
            io::ErrorKind::Unsupported,
            "copying symlinks is unsupported on this platform",
        ))
    }
}
/// Reports whether the symlink at `source` is a directory symlink.
///
/// Uses `symlink_metadata`, which inspects the link itself rather than
/// following it, so this also works for dangling links.
#[cfg(windows)]
fn symlink_points_to_directory(source: &Path) -> io::Result<bool> {
    use std::os::windows::fs::FileTypeExt;
    let link_metadata = std::fs::symlink_metadata(source)?;
    Ok(link_metadata.file_type().is_symlink_dir())
}
/// Converts a `SystemTime` to milliseconds since the Unix epoch.
///
/// Best-effort: pre-epoch times and values that overflow `i64` both collapse
/// to `0` rather than erroring.
fn system_time_to_unix_ms(time: SystemTime) -> i64 {
    match time.duration_since(UNIX_EPOCH) {
        Ok(elapsed) => i64::try_from(elapsed.as_millis()).unwrap_or(0),
        Err(_) => 0,
    }
}
pub(crate) fn invalid_request(message: impl Into<String>) -> JSONRPCErrorError {
JSONRPCErrorError {
code: INVALID_REQUEST_ERROR_CODE,
message: message.into(),
@@ -167,14 +323,43 @@ fn invalid_request(message: impl Into<String>) -> JSONRPCErrorError {
}
}
fn map_fs_error(err: io::Error) -> JSONRPCErrorError {
if err.kind() == io::ErrorKind::InvalidInput {
invalid_request(err.to_string())
} else {
JSONRPCErrorError {
code: INTERNAL_ERROR_CODE,
message: err.to_string(),
data: None,
}
/// Converts a failed `spawn_blocking` join (panic or cancellation) into an
/// internal JSON-RPC error.
fn map_join_error(err: tokio::task::JoinError) -> JSONRPCErrorError {
    let message = format!("filesystem task failed: {err}");
    JSONRPCErrorError {
        code: INTERNAL_ERROR_CODE,
        message,
        data: None,
    }
}
/// Wraps an `io::Error` as an internal JSON-RPC error, preserving its
/// display message.
pub(crate) fn map_io_error(err: io::Error) -> JSONRPCErrorError {
    let message = err.to_string();
    JSONRPCErrorError {
        code: INTERNAL_ERROR_CODE,
        message,
        data: None,
    }
}
#[cfg(all(test, windows))]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    /// `symlink_points_to_directory` must still classify a directory symlink
    /// after its target directory has been deleted (a dangling link).
    #[test]
    fn symlink_points_to_directory_handles_dangling_directory_symlinks() -> io::Result<()> {
        use std::os::windows::fs::symlink_dir;

        let scratch = tempfile::TempDir::new()?;
        let target_dir = scratch.path().join("source");
        let dangling_link = scratch.path().join("source-link");
        std::fs::create_dir(&target_dir)?;
        // Creating symlinks may require elevated privileges on Windows; skip
        // quietly rather than failing the suite in that environment.
        if symlink_dir(&target_dir, &dangling_link).is_err() {
            return Ok(());
        }
        // Delete the target so the link dangles before classification.
        std::fs::remove_dir(&target_dir)?;
        assert_eq!(symlink_points_to_directory(&dangling_link)?, true);
        Ok(())
    }
}

View File

@@ -808,10 +808,6 @@ mod tests {
for (requested_source, expected_source) in [
(SessionSource::Cli, ApiSessionSource::Cli),
(SessionSource::Exec, ApiSessionSource::Exec),
(
SessionSource::Custom("atlas".to_string()),
ApiSessionSource::Custom("atlas".to_string()),
),
] {
let client = start_test_client(requested_source).await;
let response = client

View File

@@ -336,7 +336,6 @@ pub async fn run_main(
loader_overrides,
default_analytics_enabled,
AppServerTransport::Stdio,
SessionSource::VSCode,
)
.await
}
@@ -347,7 +346,6 @@ pub async fn run_main_with_transport(
loader_overrides: LoaderOverrides,
default_analytics_enabled: bool,
transport: AppServerTransport,
session_source: SessionSource,
) -> IoResult<()> {
let (transport_event_tx, mut transport_event_rx) =
mpsc::channel::<TransportEvent>(CHANNEL_CAPACITY);
@@ -479,14 +477,6 @@ pub async fn run_main_with_transport(
range: None,
});
}
if let Some(warning) = codex_core::config::missing_system_bwrap_warning() {
config_warnings.push(ConfigWarningNotification {
summary: warning,
details: None,
path: None,
range: None,
});
}
let feedback = CodexFeedback::new();
@@ -623,7 +613,7 @@ pub async fn run_main_with_transport(
feedback: feedback.clone(),
log_db,
config_warnings,
session_source,
session_source: SessionSource::VSCode,
enable_codex_api_key_env: false,
});
let mut thread_created_rx = processor.thread_created_receiver();

View File

@@ -4,7 +4,6 @@ use codex_app_server::run_main_with_transport;
use codex_arg0::Arg0DispatchPaths;
use codex_arg0::arg0_dispatch_or_else;
use codex_core::config_loader::LoaderOverrides;
use codex_protocol::protocol::SessionSource;
use codex_utils_cli::CliConfigOverrides;
use std::path::PathBuf;
@@ -22,17 +21,6 @@ struct AppServerArgs {
default_value = AppServerTransport::DEFAULT_LISTEN_URL
)]
listen: AppServerTransport,
/// Session source stamped into new threads started by this app-server.
///
/// Known values such as `vscode`, `cli`, `exec`, and `mcp` map to built-in
/// sources. Any other non-empty value is recorded as a custom source.
#[arg(
long = "session-source",
value_name = "SOURCE",
default_value = "vscode"
)]
session_source: String,
}
fn main() -> anyhow::Result<()> {
@@ -44,8 +32,6 @@ fn main() -> anyhow::Result<()> {
..Default::default()
};
let transport = args.listen;
let session_source = SessionSource::from_startup_arg(args.session_source.as_str())
.map_err(|err| anyhow::anyhow!("invalid --session-source: {err}"))?;
run_main_with_transport(
arg0_paths,
@@ -53,7 +39,6 @@ fn main() -> anyhow::Result<()> {
loader_overrides,
/*default_analytics_enabled*/ false,
transport,
session_source,
)
.await?;
Ok(())

View File

@@ -231,7 +231,7 @@ impl MessageProcessor {
// TODO(xl): Move into PluginManager once this no longer depends on config feature gating.
thread_manager
.plugins_manager()
.maybe_start_curated_repo_sync_for_config(&config, &thread_manager.session_source());
.maybe_start_curated_repo_sync_for_config(&config);
let cloud_requirements = Arc::new(RwLock::new(cloud_requirements));
let codex_message_processor = CodexMessageProcessor::new(CodexMessageProcessorArgs {
auth_manager: auth_manager.clone(),
@@ -253,7 +253,7 @@ impl MessageProcessor {
analytics_events_client,
);
let external_agent_config_api = ExternalAgentConfigApi::new(config.codex_home.clone());
let fs_api = FsApi::default();
let fs_api = FsApi;
Self {
outgoing,

View File

@@ -5,19 +5,13 @@ use crate::outgoing_message::OutgoingEnvelope;
use crate::outgoing_message::OutgoingError;
use crate::outgoing_message::OutgoingMessage;
use axum::Router;
use axum::body::Body;
use axum::extract::ConnectInfo;
use axum::extract::State;
use axum::extract::ws::Message as WebSocketMessage;
use axum::extract::ws::WebSocket;
use axum::extract::ws::WebSocketUpgrade;
use axum::http::Request;
use axum::http::StatusCode;
use axum::http::header::ORIGIN;
use axum::middleware;
use axum::middleware::Next;
use axum::response::IntoResponse;
use axum::response::Response;
use axum::routing::any;
use axum::routing::get;
use codex_app_server_protocol::JSONRPCErrorError;
@@ -97,22 +91,6 @@ async fn health_check_handler() -> StatusCode {
StatusCode::OK
}
async fn reject_requests_with_origin_header(
request: Request<Body>,
next: Next,
) -> Result<Response, StatusCode> {
if request.headers().contains_key(ORIGIN) {
warn!(
method = %request.method(),
uri = %request.uri(),
"rejecting websocket listener request with Origin header"
);
Err(StatusCode::FORBIDDEN)
} else {
Ok(next.run(request).await)
}
}
async fn websocket_upgrade_handler(
websocket: WebSocketUpgrade,
ConnectInfo(peer_addr): ConnectInfo<SocketAddr>,
@@ -344,7 +322,6 @@ pub(crate) async fn start_websocket_acceptor(
.route("/readyz", get(health_check_handler))
.route("/healthz", get(health_check_handler))
.fallback(any(websocket_upgrade_handler))
.layer(middleware::from_fn(reject_requests_with_origin_header))
.with_state(WebSocketListenerState {
transport_event_tx,
connection_counter: Arc::new(AtomicU64::new(1)),

View File

@@ -7,11 +7,11 @@ use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use chrono::DateTime;
use chrono::Utc;
use codex_app_server_protocol::AuthMode;
use codex_core::TokenData;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::AuthDotJson;
use codex_core::auth::save_auth;
use codex_core::parse_chatgpt_jwt_claims;
use codex_core::token_data::TokenData;
use codex_core::token_data::parse_chatgpt_jwt_claims;
use serde_json::json;
/// Builder for writing a fake ChatGPT auth.json in tests.

View File

@@ -34,23 +34,21 @@ pub fn write_mock_responses_config_toml(
Some(true) => "requires_openai_auth = true\n".to_string(),
Some(false) | None => String::new(),
};
let provider_name = if matches!(requires_openai_auth, Some(true)) {
"OpenAI"
let provider_block = if model_provider_id == "openai" {
String::new()
} else {
"Mock provider for test"
};
let provider_block = format!(
r#"
[model_providers.{model_provider_id}]
name = "{provider_name}"
format!(
r#"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
supports_websockets = false
{requires_line}
"#
);
)
};
let openai_base_url_line = if model_provider_id == "openai" {
format!("openai_base_url = \"{server_uri}/v1\"\n")
} else {

View File

@@ -95,11 +95,7 @@ pub const DEFAULT_CLIENT_NAME: &str = "codex-app-server-tests";
impl McpProcess {
pub async fn new(codex_home: &Path) -> anyhow::Result<Self> {
Self::new_with_env_and_args(codex_home, &[], &[]).await
}
pub async fn new_with_args(codex_home: &Path, args: &[&str]) -> anyhow::Result<Self> {
Self::new_with_env_and_args(codex_home, &[], args).await
Self::new_with_env(codex_home, &[]).await
}
/// Creates a new MCP process, allowing tests to override or remove
@@ -110,14 +106,6 @@ impl McpProcess {
pub async fn new_with_env(
codex_home: &Path,
env_overrides: &[(&str, Option<&str>)],
) -> anyhow::Result<Self> {
Self::new_with_env_and_args(codex_home, env_overrides, &[]).await
}
pub async fn new_with_env_and_args(
codex_home: &Path,
env_overrides: &[(&str, Option<&str>)],
args: &[&str],
) -> anyhow::Result<Self> {
let program = codex_utils_cargo_bin::cargo_bin("codex-app-server")
.context("should find binary for codex-app-server")?;
@@ -130,7 +118,6 @@ impl McpProcess {
cmd.env("CODEX_HOME", codex_home);
cmd.env("RUST_LOG", "info");
cmd.env_remove(CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR);
cmd.args(args);
for (k, v) in env_overrides {
match v {

View File

@@ -45,6 +45,7 @@ fn preset_to_info(preset: &ModelPreset, priority: i32) -> ModelInfo {
effective_context_window_percent: 95,
experimental_supported_tools: Vec::new(),
input_modalities: default_input_modalities(),
prefer_websockets: false,
used_fallback_model_metadata: false,
supports_search_tool: false,
}

View File

@@ -149,7 +149,7 @@ async fn auto_compaction_remote_emits_started_and_completed_items() -> Result<()
&BTreeMap::default(),
REMOTE_AUTO_COMPACT_LIMIT,
Some(true),
"mock_provider",
"openai",
COMPACT_PROMPT,
)?;
write_chatgpt_auth(

View File

@@ -29,11 +29,7 @@ use tokio::time::timeout;
use tokio_tungstenite::MaybeTlsStream;
use tokio_tungstenite::WebSocketStream;
use tokio_tungstenite::connect_async;
use tokio_tungstenite::tungstenite::Error as WebSocketError;
use tokio_tungstenite::tungstenite::Message as WebSocketMessage;
use tokio_tungstenite::tungstenite::client::IntoClientRequest;
use tokio_tungstenite::tungstenite::http::HeaderValue;
use tokio_tungstenite::tungstenite::http::header::ORIGIN;
pub(super) const DEFAULT_READ_TIMEOUT: Duration = Duration::from_secs(5);
@@ -111,55 +107,6 @@ async fn websocket_transport_serves_health_endpoints_on_same_listener() -> Resul
Ok(())
}
#[tokio::test]
async fn websocket_transport_rejects_requests_with_origin_header() -> Result<()> {
let server = create_mock_responses_server_sequence_unchecked(Vec::new()).await;
let codex_home = TempDir::new()?;
create_config_toml(codex_home.path(), &server.uri(), "never")?;
let (mut process, bind_addr) = spawn_websocket_server(codex_home.path()).await?;
let client = reqwest::Client::new();
let deadline = Instant::now() + Duration::from_secs(10);
let healthz = loop {
match client
.get(format!("http://{bind_addr}/healthz"))
.header(ORIGIN.as_str(), "https://example.com")
.send()
.await
.with_context(|| format!("failed to GET http://{bind_addr}/healthz with Origin header"))
{
Ok(response) => break response,
Err(err) => {
if Instant::now() >= deadline {
bail!("failed to GET http://{bind_addr}/healthz with Origin header: {err}");
}
sleep(Duration::from_millis(50)).await;
}
}
};
assert_eq!(healthz.status(), StatusCode::FORBIDDEN);
let url = format!("ws://{bind_addr}");
let mut request = url.into_client_request()?;
request
.headers_mut()
.insert(ORIGIN, HeaderValue::from_static("https://example.com"));
match connect_async(request).await {
Err(WebSocketError::Http(response)) => {
assert_eq!(response.status(), StatusCode::FORBIDDEN);
}
Ok(_) => bail!("expected websocket handshake with Origin header to be rejected"),
Err(err) => bail!("expected HTTP rejection for Origin header, got {err}"),
}
process
.kill()
.await
.context("failed to stop websocket app-server process")?;
Ok(())
}
pub(super) async fn spawn_websocket_server(codex_home: &Path) -> Result<(Child, SocketAddr)> {
let program = codex_utils_cargo_bin::cargo_bin("codex-app-server")
.context("should find app-server binary")?;

View File

@@ -25,8 +25,6 @@ use codex_app_server_protocol::PluginAuthPolicy;
use codex_app_server_protocol::PluginInstallParams;
use codex_app_server_protocol::PluginInstallResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SkillsListParams;
use codex_app_server_protocol::SkillsListResponse;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
@@ -478,92 +476,6 @@ async fn plugin_install_filters_disallowed_apps_needing_auth() -> Result<()> {
Ok(())
}
#[tokio::test]
async fn plugin_install_filters_product_restricted_plugin_skills() -> Result<()> {
let codex_home = TempDir::new()?;
let repo_root = TempDir::new()?;
write_plugins_enabled_config(codex_home.path())?;
write_plugin_marketplace(
repo_root.path(),
"debug",
"sample-plugin",
"./sample-plugin",
None,
None,
)?;
write_plugin_source(repo_root.path(), "sample-plugin", &[])?;
let plugin_root = repo_root.path().join("sample-plugin");
write_plugin_skill(
&plugin_root,
"all-products",
"Visible to every product",
&[],
)?;
write_plugin_skill(
&plugin_root,
"chatgpt-only",
"Visible to ChatGPT",
&["CHATGPT"],
)?;
write_plugin_skill(&plugin_root, "atlas-only", "Visible to Atlas", &["ATLAS"])?;
let marketplace_path =
AbsolutePathBuf::try_from(repo_root.path().join(".agents/plugins/marketplace.json"))?;
let mut mcp =
McpProcess::new_with_args(codex_home.path(), &["--session-source", "chatgpt"]).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_plugin_install_request(PluginInstallParams {
marketplace_path,
plugin_name: "sample-plugin".to_string(),
force_remote_sync: false,
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let response: PluginInstallResponse = to_response(response)?;
assert_eq!(response.apps_needing_auth, Vec::<AppSummary>::new());
let request_id = mcp
.send_skills_list_request(SkillsListParams {
cwds: vec![codex_home.path().to_path_buf()],
force_reload: true,
per_cwd_extra_user_roots: None,
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let response: SkillsListResponse = to_response(response)?;
let mut skills = response
.data
.into_iter()
.flat_map(|entry| entry.skills.into_iter())
.map(|skill| skill.name)
.filter(|name| name.starts_with("sample-plugin:"))
.collect::<Vec<_>>();
skills.sort_unstable();
assert_eq!(
skills,
vec![
"sample-plugin:all-products".to_string(),
"sample-plugin:chatgpt-only".to_string(),
]
);
Ok(())
}
#[derive(Clone)]
struct AppsServerState {
response: Arc<StdMutex<serde_json::Value>>,
@@ -735,16 +647,6 @@ plugins = true
)
}
fn write_plugins_enabled_config(codex_home: &std::path::Path) -> std::io::Result<()> {
std::fs::write(
codex_home.join("config.toml"),
r#"
[features]
plugins = true
"#,
)
}
fn write_plugin_marketplace(
repo_root: &std::path::Path,
marketplace_name: &str,
@@ -753,24 +655,12 @@ fn write_plugin_marketplace(
install_policy: Option<&str>,
auth_policy: Option<&str>,
) -> std::io::Result<()> {
let policy = if install_policy.is_some() || auth_policy.is_some() {
let installation = install_policy
.map(|installation| format!("\n \"installation\": \"{installation}\""))
.unwrap_or_default();
let separator = if install_policy.is_some() && auth_policy.is_some() {
","
} else {
""
};
let authentication = auth_policy
.map(|authentication| {
format!("{separator}\n \"authentication\": \"{authentication}\"")
})
.unwrap_or_default();
format!(",\n \"policy\": {{{installation}{authentication}\n }}")
} else {
String::new()
};
let install_policy = install_policy
.map(|install_policy| format!(",\n \"installPolicy\": \"{install_policy}\""))
.unwrap_or_default();
let auth_policy = auth_policy
.map(|auth_policy| format!(",\n \"authPolicy\": \"{auth_policy}\""))
.unwrap_or_default();
std::fs::create_dir_all(repo_root.join(".git"))?;
std::fs::create_dir_all(repo_root.join(".agents/plugins"))?;
std::fs::write(
@@ -784,7 +674,7 @@ fn write_plugin_marketplace(
"source": {{
"source": "local",
"path": "{source_path}"
}}{policy}
}}{install_policy}{auth_policy}
}}
]
}}"#
@@ -814,32 +704,3 @@ fn write_plugin_source(
)?;
Ok(())
}
fn write_plugin_skill(
plugin_root: &std::path::Path,
skill_name: &str,
description: &str,
products: &[&str],
) -> Result<()> {
let skill_dir = plugin_root.join("skills").join(skill_name);
std::fs::create_dir_all(&skill_dir)?;
std::fs::write(
skill_dir.join("SKILL.md"),
format!("---\ndescription: {description}\n---\n\n# {skill_name}\n"),
)?;
if !products.is_empty() {
let products = products
.iter()
.map(|product| format!(" - {product}"))
.collect::<Vec<_>>()
.join("\n");
std::fs::create_dir_all(skill_dir.join("agents"))?;
std::fs::write(
skill_dir.join("agents/openai.yaml"),
format!("policy:\n products:\n{products}\n"),
)?;
}
Ok(())
}

View File

@@ -377,179 +377,6 @@ enabled = false
Ok(())
}
#[tokio::test]
async fn plugin_list_filters_plugins_for_custom_session_source_products() -> Result<()> {
let codex_home = TempDir::new()?;
let repo_root = TempDir::new()?;
std::fs::create_dir_all(repo_root.path().join(".git"))?;
std::fs::create_dir_all(repo_root.path().join(".agents/plugins"))?;
std::fs::write(
repo_root.path().join(".agents/plugins/marketplace.json"),
r#"{
"name": "codex-curated",
"plugins": [
{
"name": "all-products",
"source": {
"source": "local",
"path": "./all-products"
}
},
{
"name": "chatgpt-only",
"source": {
"source": "local",
"path": "./chatgpt-only"
},
"policy": {
"installation": "AVAILABLE",
"authentication": "ON_INSTALL",
"products": ["CHATGPT"]
}
},
{
"name": "atlas-only",
"source": {
"source": "local",
"path": "./atlas-only"
},
"policy": {
"installation": "AVAILABLE",
"authentication": "ON_INSTALL",
"products": ["ATLAS"]
}
},
{
"name": "codex-only",
"source": {
"source": "local",
"path": "./codex-only"
},
"policy": {
"installation": "AVAILABLE",
"authentication": "ON_INSTALL",
"products": ["CODEX"]
}
}
]
}"#,
)?;
let mut mcp =
McpProcess::new_with_args(codex_home.path(), &["--session-source", "chatgpt"]).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_plugin_list_request(PluginListParams {
cwds: Some(vec![AbsolutePathBuf::try_from(repo_root.path())?]),
force_remote_sync: false,
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let response: PluginListResponse = to_response(response)?;
let marketplace = response
.marketplaces
.into_iter()
.find(|marketplace| marketplace.name == "codex-curated")
.expect("expected marketplace entry");
assert_eq!(
marketplace
.plugins
.into_iter()
.map(|plugin| plugin.name)
.collect::<Vec<_>>(),
vec!["all-products".to_string(), "chatgpt-only".to_string()]
);
Ok(())
}
#[tokio::test]
async fn plugin_list_defaults_non_custom_session_source_to_codex_products() -> Result<()> {
let codex_home = TempDir::new()?;
let repo_root = TempDir::new()?;
std::fs::create_dir_all(repo_root.path().join(".git"))?;
std::fs::create_dir_all(repo_root.path().join(".agents/plugins"))?;
std::fs::write(
repo_root.path().join(".agents/plugins/marketplace.json"),
r#"{
"name": "codex-curated",
"plugins": [
{
"name": "all-products",
"source": {
"source": "local",
"path": "./all-products"
}
},
{
"name": "chatgpt-only",
"source": {
"source": "local",
"path": "./chatgpt-only"
},
"policy": {
"installation": "AVAILABLE",
"authentication": "ON_INSTALL",
"products": ["CHATGPT"]
}
},
{
"name": "codex-only",
"source": {
"source": "local",
"path": "./codex-only"
},
"policy": {
"installation": "AVAILABLE",
"authentication": "ON_INSTALL",
"products": ["CODEX"]
}
}
]
}"#,
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_plugin_list_request(PluginListParams {
cwds: Some(vec![AbsolutePathBuf::try_from(repo_root.path())?]),
force_remote_sync: false,
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let response: PluginListResponse = to_response(response)?;
let marketplace = response
.marketplaces
.into_iter()
.find(|marketplace| marketplace.name == "codex-curated")
.expect("expected marketplace entry");
assert_eq!(
marketplace
.plugins
.into_iter()
.map(|plugin| plugin.name)
.collect::<Vec<_>>(),
vec!["all-products".to_string(), "codex-only".to_string()]
);
Ok(())
}
#[tokio::test]
async fn plugin_list_returns_plugin_interface_with_absolute_asset_paths() -> Result<()> {
let codex_home = TempDir::new()?;
@@ -569,10 +396,8 @@ async fn plugin_list_returns_plugin_interface_with_absolute_asset_paths() -> Res
"source": "local",
"path": "./plugins/demo-plugin"
},
"policy": {
"installation": "AVAILABLE",
"authentication": "ON_INSTALL"
},
"installPolicy": "AVAILABLE",
"authPolicy": "ON_INSTALL",
"category": "Design"
}
]

View File

@@ -36,10 +36,8 @@ async fn plugin_read_returns_plugin_details_with_bundle_contents() -> Result<()>
"source": "local",
"path": "./plugins/demo-plugin"
},
"policy": {
"installation": "AVAILABLE",
"authentication": "ON_INSTALL"
},
"installPolicy": "AVAILABLE",
"authPolicy": "ON_INSTALL",
"category": "Design"
}
]

View File

@@ -190,7 +190,7 @@ async fn realtime_conversation_streams_v2_notifications() -> Result<()> {
read_notification::<ThreadRealtimeClosedNotification>(&mut mcp, "thread/realtime/closed")
.await?;
assert_eq!(closed.thread_id, output_audio.thread_id);
assert_eq!(closed.reason.as_deref(), Some("error"));
assert_eq!(closed.reason.as_deref(), Some("transport_closed"));
let connections = realtime_server.connections();
assert_eq!(connections.len(), 1);

View File

@@ -450,7 +450,6 @@ async fn thread_resume_and_read_interrupt_incomplete_rollout_turn_when_thread_is
"payload": serde_json::to_value(EventMsg::AgentMessage(AgentMessageEvent {
message: "Still running".to_string(),
phase: None,
memory_citation: None,
}))?,
})
.to_string(),

View File

@@ -19,10 +19,8 @@ which = { workspace = true }
workspace = true
[dev-dependencies]
flate2 = { workspace = true }
pretty_assertions = { workspace = true }
sha2 = { workspace = true }
tar = { workspace = true }
tokio = { workspace = true, features = ["fs", "io-util", "macros", "process", "rt", "rt-multi-thread", "time"] }
wiremock = { workspace = true }
zip = { workspace = true }

View File

@@ -11,11 +11,10 @@ use tokio::fs;
use tokio::io::AsyncReadExt;
use tokio::process::Command;
use tokio::time::timeout;
use url::Url;
const DEFAULT_EXECUTION_TIMEOUT: Duration = Duration::from_secs(30);
/// Executes artifact build commands against a resolved runtime.
/// Executes artifact build and render commands against a resolved runtime.
#[derive(Clone, Debug)]
pub struct ArtifactsClient {
runtime_source: RuntimeSource,
@@ -55,18 +54,7 @@ impl ArtifactsClient {
source,
})?;
let script_path = staging_dir.path().join("artifact-build.mjs");
let build_entrypoint_url =
Url::from_file_path(runtime.build_js_path()).map_err(|()| ArtifactsError::Io {
context: format!(
"failed to convert artifact build entrypoint to a file URL: {}",
runtime.build_js_path().display()
),
source: std::io::Error::new(
std::io::ErrorKind::InvalidInput,
"invalid artifact build entrypoint path",
),
})?;
let wrapped_script = build_wrapped_script(&build_entrypoint_url, &request.source);
let wrapped_script = build_wrapped_script(&request.source);
fs::write(&script_path, wrapped_script)
.await
.map_err(|source| ArtifactsError::Io {
@@ -75,8 +63,44 @@ impl ArtifactsClient {
})?;
let mut command = Command::new(js_runtime.executable_path());
command.arg(&script_path).current_dir(&request.cwd);
command.stdout(Stdio::piped()).stderr(Stdio::piped());
command
.arg(&script_path)
.current_dir(&request.cwd)
.env("CODEX_ARTIFACT_BUILD_ENTRYPOINT", runtime.build_js_path())
.env(
"CODEX_ARTIFACT_RENDER_ENTRYPOINT",
runtime.render_cli_path(),
)
.stdout(Stdio::piped())
.stderr(Stdio::piped());
if js_runtime.requires_electron_run_as_node() {
command.env("ELECTRON_RUN_AS_NODE", "1");
}
for (key, value) in &request.env {
command.env(key, value);
}
run_command(
command,
request.timeout.unwrap_or(DEFAULT_EXECUTION_TIMEOUT),
)
.await
}
/// Executes the artifact render CLI against the configured runtime.
pub async fn execute_render(
&self,
request: ArtifactRenderCommandRequest,
) -> Result<ArtifactCommandOutput, ArtifactsError> {
let runtime = self.resolve_runtime().await?;
let js_runtime = runtime.resolve_js_runtime()?;
let mut command = Command::new(js_runtime.executable_path());
command
.arg(runtime.render_cli_path())
.args(request.target.to_args())
.current_dir(&request.cwd)
.stdout(Stdio::piped())
.stderr(Stdio::piped());
if js_runtime.requires_electron_run_as_node() {
command.env("ELECTRON_RUN_AS_NODE", "1");
}
@@ -108,6 +132,76 @@ pub struct ArtifactBuildRequest {
pub env: BTreeMap<String, String>,
}
/// Request payload for the artifact render CLI.
#[derive(Clone, Debug)]
pub struct ArtifactRenderCommandRequest {
pub cwd: PathBuf,
pub timeout: Option<Duration>,
pub env: BTreeMap<String, String>,
pub target: ArtifactRenderTarget,
}
/// Render targets supported by the packaged artifact runtime.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ArtifactRenderTarget {
Presentation(PresentationRenderTarget),
Spreadsheet(SpreadsheetRenderTarget),
}
impl ArtifactRenderTarget {
/// Converts a render target to the CLI args expected by `render_cli.mjs`.
pub fn to_args(&self) -> Vec<String> {
match self {
Self::Presentation(target) => {
vec![
"pptx".to_string(),
"render".to_string(),
"--in".to_string(),
target.input_path.display().to_string(),
"--slide".to_string(),
target.slide_number.to_string(),
"--out".to_string(),
target.output_path.display().to_string(),
]
}
Self::Spreadsheet(target) => {
let mut args = vec![
"xlsx".to_string(),
"render".to_string(),
"--in".to_string(),
target.input_path.display().to_string(),
"--sheet".to_string(),
target.sheet_name.clone(),
"--out".to_string(),
target.output_path.display().to_string(),
];
if let Some(range) = &target.range {
args.push("--range".to_string());
args.push(range.clone());
}
args
}
}
}
}
/// Presentation render request parameters.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PresentationRenderTarget {
pub input_path: PathBuf,
pub output_path: PathBuf,
pub slide_number: u32,
}
/// Spreadsheet render request parameters.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SpreadsheetRenderTarget {
pub input_path: PathBuf,
pub output_path: PathBuf,
pub sheet_name: String,
pub range: Option<String>,
}
/// Captured stdout, stderr, and exit status from an artifact subprocess.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ArtifactCommandOutput {
@@ -138,28 +232,24 @@ pub enum ArtifactsError {
TimedOut { timeout: Duration },
}
fn build_wrapped_script(build_entrypoint_url: &Url, source: &str) -> String {
let mut wrapped = String::new();
wrapped.push_str("const artifactTool = await import(");
wrapped.push_str(
&serde_json::to_string(build_entrypoint_url.as_str()).unwrap_or_else(|error| {
panic!("artifact build entrypoint URL must serialize: {error}")
}),
);
wrapped.push_str(");\n");
wrapped.push_str(
r#"globalThis.artifactTool = artifactTool;
for (const [name, value] of Object.entries(artifactTool)) {
if (name === "default" || Object.prototype.hasOwnProperty.call(globalThis, name)) {
continue;
}
globalThis[name] = value;
}
"#,
);
wrapped.push_str(source);
wrapped.push('\n');
wrapped
fn build_wrapped_script(source: &str) -> String {
format!(
concat!(
"import {{ pathToFileURL }} from \"node:url\";\n",
"const artifactTool = await import(pathToFileURL(process.env.CODEX_ARTIFACT_BUILD_ENTRYPOINT).href);\n",
"globalThis.artifactTool = artifactTool;\n",
"globalThis.artifacts = artifactTool;\n",
"globalThis.codexArtifacts = artifactTool;\n",
"for (const [name, value] of Object.entries(artifactTool)) {{\n",
" if (name === \"default\" || Object.prototype.hasOwnProperty.call(globalThis, name)) {{\n",
" continue;\n",
" }}\n",
" globalThis[name] = value;\n",
"}}\n\n",
"{}\n"
),
source
)
}
async fn run_command(

View File

@@ -5,8 +5,12 @@ mod tests;
pub use client::ArtifactBuildRequest;
pub use client::ArtifactCommandOutput;
pub use client::ArtifactRenderCommandRequest;
pub use client::ArtifactRenderTarget;
pub use client::ArtifactsClient;
pub use client::ArtifactsError;
pub use client::PresentationRenderTarget;
pub use client::SpreadsheetRenderTarget;
pub use runtime::ArtifactRuntimeError;
pub use runtime::ArtifactRuntimeManager;
pub use runtime::ArtifactRuntimeManagerConfig;
@@ -15,10 +19,13 @@ pub use runtime::ArtifactRuntimeReleaseLocator;
pub use runtime::DEFAULT_CACHE_ROOT_RELATIVE;
pub use runtime::DEFAULT_RELEASE_BASE_URL;
pub use runtime::DEFAULT_RELEASE_TAG_PREFIX;
pub use runtime::ExtractedRuntimeManifest;
pub use runtime::InstalledArtifactRuntime;
pub use runtime::JsRuntime;
pub use runtime::JsRuntimeKind;
pub use runtime::ReleaseManifest;
pub use runtime::RuntimeEntrypoints;
pub use runtime::RuntimePathEntry;
pub use runtime::can_manage_artifact_runtime;
pub use runtime::is_js_runtime_available;
pub use runtime::load_cached_runtime;

View File

@@ -13,8 +13,8 @@ pub enum ArtifactRuntimeError {
#[source]
source: std::io::Error,
},
#[error("invalid package metadata at {path}")]
InvalidPackageMetadata {
#[error("invalid manifest at {path}")]
InvalidManifest {
path: PathBuf,
#[source]
source: serde_json::Error,

View File

@@ -1,17 +1,15 @@
use super::ArtifactRuntimeError;
use super::ArtifactRuntimePlatform;
use super::ExtractedRuntimeManifest;
use super::JsRuntime;
use super::codex_app_runtime_candidates;
use super::resolve_js_runtime_from_candidates;
use super::system_electron_runtime;
use super::system_node_runtime;
use std::collections::BTreeMap;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
const ARTIFACT_TOOL_PACKAGE_NAME: &str = "@oai/artifact-tool";
/// Loads a previously installed runtime from a caller-provided cache root.
pub fn load_cached_runtime(
cache_root: &Path,
@@ -38,7 +36,10 @@ pub struct InstalledArtifactRuntime {
root_dir: PathBuf,
runtime_version: String,
platform: ArtifactRuntimePlatform,
manifest: ExtractedRuntimeManifest,
node_path: PathBuf,
build_js_path: PathBuf,
render_cli_path: PathBuf,
}
impl InstalledArtifactRuntime {
@@ -47,13 +48,19 @@ impl InstalledArtifactRuntime {
root_dir: PathBuf,
runtime_version: String,
platform: ArtifactRuntimePlatform,
manifest: ExtractedRuntimeManifest,
node_path: PathBuf,
build_js_path: PathBuf,
render_cli_path: PathBuf,
) -> Self {
Self {
root_dir,
runtime_version,
platform,
manifest,
node_path,
build_js_path,
render_cli_path,
}
}
@@ -62,16 +69,35 @@ impl InstalledArtifactRuntime {
root_dir: PathBuf,
platform: ArtifactRuntimePlatform,
) -> Result<Self, ArtifactRuntimeError> {
let package_metadata = load_package_metadata(&root_dir)?;
let manifest_path = root_dir.join("manifest.json");
let manifest_bytes =
std::fs::read(&manifest_path).map_err(|source| ArtifactRuntimeError::Io {
context: format!("failed to read {}", manifest_path.display()),
source,
})?;
let manifest = serde_json::from_slice::<ExtractedRuntimeManifest>(&manifest_bytes)
.map_err(|source| ArtifactRuntimeError::InvalidManifest {
path: manifest_path,
source,
})?;
let node_path = resolve_relative_runtime_path(&root_dir, &manifest.node.relative_path)?;
let build_js_path =
resolve_relative_runtime_path(&root_dir, &package_metadata.build_js_relative_path)?;
resolve_relative_runtime_path(&root_dir, &manifest.entrypoints.build_js.relative_path)?;
let render_cli_path = resolve_relative_runtime_path(
&root_dir,
&manifest.entrypoints.render_cli.relative_path,
)?;
verify_required_runtime_path(&build_js_path)?;
verify_required_runtime_path(&render_cli_path)?;
Ok(Self::new(
root_dir,
package_metadata.version,
manifest.runtime_version.clone(),
platform,
manifest,
node_path,
build_js_path,
render_cli_path,
))
}
@@ -80,7 +106,7 @@ impl InstalledArtifactRuntime {
&self.root_dir
}
/// Returns the runtime version recorded in `package.json`.
/// Returns the runtime version recorded in the extracted manifest.
pub fn runtime_version(&self) -> &str {
&self.runtime_version
}
@@ -90,17 +116,33 @@ impl InstalledArtifactRuntime {
self.platform
}
/// Returns the parsed extracted-runtime manifest.
pub fn manifest(&self) -> &ExtractedRuntimeManifest {
&self.manifest
}
/// Returns the bundled Node executable path advertised by the runtime manifest.
pub fn node_path(&self) -> &Path {
&self.node_path
}
/// Returns the artifact build entrypoint path.
pub fn build_js_path(&self) -> &Path {
&self.build_js_path
}
/// Returns the artifact render CLI entrypoint path.
pub fn render_cli_path(&self) -> &Path {
&self.render_cli_path
}
/// Resolves the best executable to use for artifact commands.
///
/// Preference order is a machine Node install, then Electron from the
/// machine or a Codex desktop app bundle.
/// Preference order is the bundled Node path, then a machine Node install,
/// then Electron from the machine or a Codex desktop app bundle.
pub fn resolve_js_runtime(&self) -> Result<JsRuntime, ArtifactRuntimeError> {
resolve_js_runtime_from_candidates(
Some(self.node_path()),
system_node_runtime(),
system_electron_runtime(),
codex_app_runtime_candidates(),
@@ -156,128 +198,3 @@ fn verify_required_runtime_path(path: &Path) -> Result<(), ArtifactRuntimeError>
source: std::io::Error::new(std::io::ErrorKind::NotFound, "missing runtime file"),
})
}
/// Locates the directory that actually contains the runtime package beneath
/// an extraction root: either the root itself, or its single subdirectory.
pub(crate) fn detect_runtime_root(extraction_root: &Path) -> Result<PathBuf, ArtifactRuntimeError> {
    // Fast path: the archive extracted without a wrapping directory.
    if is_runtime_root(extraction_root) {
        return Ok(extraction_root.to_path_buf());
    }

    // Otherwise collect the immediate subdirectories of the extraction root.
    let read_dir = std::fs::read_dir(extraction_root).map_err(|source| ArtifactRuntimeError::Io {
        context: format!("failed to read {}", extraction_root.display()),
        source,
    })?;
    let mut subdirectories = Vec::new();
    for entry in read_dir {
        let entry = entry.map_err(|source| ArtifactRuntimeError::Io {
            context: format!("failed to read entry in {}", extraction_root.display()),
            source,
        })?;
        let candidate = entry.path();
        if candidate.is_dir() {
            subdirectories.push(candidate);
        }
    }

    // Accept a single wrapping directory only when it itself looks like a
    // runtime root; anything else is ambiguous and rejected below.
    if let [only] = subdirectories.as_slice() {
        if is_runtime_root(only) {
            return Ok(only.clone());
        }
    }

    Err(ArtifactRuntimeError::Io {
        context: format!(
            "failed to detect artifact runtime root under {}",
            extraction_root.display()
        ),
        source: std::io::Error::new(
            std::io::ErrorKind::NotFound,
            "missing artifact runtime root",
        ),
    })
}
/// Returns `true` when `root_dir` contains readable package metadata whose
/// advertised build entrypoint resolves to an existing file.
fn is_runtime_root(root_dir: &Path) -> bool {
    let Ok(metadata) = load_package_metadata(root_dir) else {
        return false;
    };
    match resolve_relative_runtime_path(root_dir, &metadata.build_js_relative_path) {
        Ok(build_js_path) => build_js_path.is_file(),
        Err(_) => false,
    }
}
/// Metadata extracted from an artifact runtime's `package.json`.
struct PackageMetadata {
    // The package's `version` field, used as the runtime version.
    version: String,
    // Build entrypoint path relative to the runtime root, with any leading
    // `./` stripped (see `load_package_metadata`).
    build_js_relative_path: String,
}
/// Reads and validates `package.json` at the root of an extracted artifact
/// runtime, returning its version and build entrypoint path.
///
/// # Errors
/// - `ArtifactRuntimeError::Io` when the file cannot be read, the package
///   name is not `ARTIFACT_TOOL_PACKAGE_NAME`, or no `.` export is declared.
/// - `ArtifactRuntimeError::InvalidPackageMetadata` when the JSON does not
///   deserialize into the expected shape.
fn load_package_metadata(root_dir: &Path) -> Result<PackageMetadata, ArtifactRuntimeError> {
    // Minimal view of package.json: only the fields we validate or extract.
    #[derive(serde::Deserialize)]
    struct PackageJson {
        name: String,
        version: String,
        exports: PackageExports,
    }

    // `exports` may be a bare string (single-entrypoint form) or a map of
    // export specifiers to paths; `untagged` tries each shape in order.
    #[derive(serde::Deserialize)]
    #[serde(untagged)]
    enum PackageExports {
        Main(String),
        Map(BTreeMap<String, String>),
    }

    impl PackageExports {
        // Resolves the package's root (".") export, if any.
        fn build_entrypoint(&self) -> Option<&str> {
            match self {
                Self::Main(path) => Some(path),
                Self::Map(exports) => exports.get(".").map(String::as_str),
            }
        }
    }

    let package_json_path = root_dir.join("package.json");
    let package_json_bytes =
        std::fs::read(&package_json_path).map_err(|source| ArtifactRuntimeError::Io {
            context: format!("failed to read {}", package_json_path.display()),
            source,
        })?;
    let package_json =
        serde_json::from_slice::<PackageJson>(&package_json_bytes).map_err(|source| {
            ArtifactRuntimeError::InvalidPackageMetadata {
                path: package_json_path.clone(),
                source,
            }
        })?;
    // Reject archives that are valid npm packages but not the artifact tool.
    if package_json.name != ARTIFACT_TOOL_PACKAGE_NAME {
        return Err(ArtifactRuntimeError::Io {
            context: format!(
                "unsupported artifact runtime package at {}; expected name `{ARTIFACT_TOOL_PACKAGE_NAME}`, got `{}`",
                package_json_path.display(),
                package_json.name
            ),
            source: std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "unsupported package name",
            ),
        });
    }
    let Some(build_js_relative_path) = package_json.exports.build_entrypoint() else {
        return Err(ArtifactRuntimeError::Io {
            context: format!(
                "unsupported artifact runtime package at {}; expected `exports[\".\"]` to point at the JS entrypoint",
                package_json_path.display()
            ),
            source: std::io::Error::new(std::io::ErrorKind::InvalidData, "missing package export"),
        });
    };
    Ok(PackageMetadata {
        version: package_json.version,
        // Normalize `./dist/foo.mjs` to `dist/foo.mjs` so it can be joined
        // onto the runtime root directly.
        build_js_relative_path: build_js_relative_path.trim_start_matches("./").to_string(),
    })
}

View File

@@ -74,6 +74,7 @@ pub fn can_manage_artifact_runtime() -> bool {
pub(crate) fn resolve_machine_js_runtime() -> Option<JsRuntime> {
resolve_js_runtime_from_candidates(
/*preferred_node_path*/ None,
system_node_runtime(),
system_electron_runtime(),
codex_app_runtime_candidates(),
@@ -81,15 +82,20 @@ pub(crate) fn resolve_machine_js_runtime() -> Option<JsRuntime> {
}
pub(crate) fn resolve_js_runtime_from_candidates(
preferred_node_path: Option<&Path>,
node_runtime: Option<JsRuntime>,
electron_runtime: Option<JsRuntime>,
codex_app_candidates: Vec<PathBuf>,
) -> Option<JsRuntime> {
node_runtime.or(electron_runtime).or_else(|| {
codex_app_candidates
.into_iter()
.find_map(|candidate| electron_runtime_from_path(&candidate))
})
preferred_node_path
.and_then(node_runtime_from_path)
.or(node_runtime)
.or(electron_runtime)
.or_else(|| {
codex_app_candidates
.into_iter()
.find_map(|candidate| electron_runtime_from_path(&candidate))
})
}
pub(crate) fn system_node_runtime() -> Option<JsRuntime> {

View File

@@ -2,7 +2,6 @@ use super::ArtifactRuntimeError;
use super::ArtifactRuntimePlatform;
use super::InstalledArtifactRuntime;
use super::ReleaseManifest;
use super::detect_runtime_root;
use codex_package_manager::ManagedPackage;
use codex_package_manager::PackageManager;
use codex_package_manager::PackageManagerConfig;
@@ -80,9 +79,12 @@ impl ArtifactRuntimeReleaseLocator {
/// Returns the default GitHub-release locator for a runtime version.
pub fn default(runtime_version: impl Into<String>) -> Self {
Self::new(
Url::parse(DEFAULT_RELEASE_BASE_URL).unwrap_or_else(|error| {
panic!("hard-coded artifact runtime release base URL must be valid: {error}")
}),
match Url::parse(DEFAULT_RELEASE_BASE_URL) {
Ok(url) => url,
Err(error) => {
panic!("hard-coded artifact runtime release base URL must be valid: {error}")
}
},
runtime_version,
)
}
@@ -248,8 +250,4 @@ impl ManagedPackage for ArtifactRuntimePackage {
) -> Result<Self::Installed, Self::Error> {
InstalledArtifactRuntime::load(root_dir, platform)
}
fn detect_extracted_root(&self, extraction_root: &Path) -> Result<PathBuf, Self::Error> {
detect_runtime_root(extraction_root)
}
}

View File

@@ -13,3 +13,25 @@ pub struct ReleaseManifest {
pub node_version: Option<String>,
pub platforms: BTreeMap<String, PackageReleaseArchive>,
}
/// Manifest shipped inside the extracted runtime payload.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct ExtractedRuntimeManifest {
    /// Version of the manifest schema itself.
    pub schema_version: u32,
    /// Version of the artifact runtime this payload contains.
    pub runtime_version: String,
    /// Location of the bundled Node executable, relative to the runtime root.
    pub node: RuntimePathEntry,
    /// Entrypoints used to build and render artifacts.
    pub entrypoints: RuntimeEntrypoints,
}

/// A relative path entry inside an extracted runtime manifest.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct RuntimePathEntry {
    /// Path relative to the extracted runtime's root directory.
    pub relative_path: String,
}

/// Entrypoints required to build and render artifacts.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct RuntimeEntrypoints {
    /// The artifact build (JS) entrypoint.
    pub build_js: RuntimePathEntry,
    /// The artifact render CLI entrypoint.
    pub render_cli: RuntimePathEntry,
}

View File

@@ -18,10 +18,12 @@ pub use manager::ArtifactRuntimeReleaseLocator;
pub use manager::DEFAULT_CACHE_ROOT_RELATIVE;
pub use manager::DEFAULT_RELEASE_BASE_URL;
pub use manager::DEFAULT_RELEASE_TAG_PREFIX;
pub use manifest::ExtractedRuntimeManifest;
pub use manifest::ReleaseManifest;
pub use manifest::RuntimeEntrypoints;
pub use manifest::RuntimePathEntry;
pub(crate) use installed::default_cached_runtime_root;
pub(crate) use installed::detect_runtime_root;
pub(crate) use js_runtime::codex_app_runtime_candidates;
pub(crate) use js_runtime::resolve_js_runtime_from_candidates;
pub(crate) use js_runtime::system_electron_runtime;

View File

@@ -1,17 +1,24 @@
use crate::ArtifactBuildRequest;
use crate::ArtifactCommandOutput;
use crate::ArtifactRenderCommandRequest;
use crate::ArtifactRenderTarget;
use crate::ArtifactRuntimeManager;
use crate::ArtifactRuntimeManagerConfig;
use crate::ArtifactRuntimePlatform;
use crate::ArtifactRuntimeReleaseLocator;
use crate::ArtifactsClient;
use crate::DEFAULT_CACHE_ROOT_RELATIVE;
use crate::ExtractedRuntimeManifest;
use crate::InstalledArtifactRuntime;
use crate::JsRuntime;
use crate::PresentationRenderTarget;
use crate::ReleaseManifest;
use crate::RuntimeEntrypoints;
use crate::RuntimePathEntry;
use crate::SpreadsheetRenderTarget;
use crate::load_cached_runtime;
use codex_package_manager::ArchiveFormat;
use codex_package_manager::PackageReleaseArchive;
use flate2::Compression;
use flate2::write::GzEncoder;
use pretty_assertions::assert_eq;
use sha2::Digest;
use sha2::Sha256;
@@ -19,9 +26,11 @@ use std::collections::BTreeMap;
use std::fs;
use std::io::Cursor;
use std::io::Write;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
use std::path::PathBuf;
use std::time::Duration;
use tar::Builder as TarBuilder;
use tempfile::TempDir;
use wiremock::Mock;
use wiremock::MockServer;
@@ -62,7 +71,7 @@ fn default_release_locator_uses_openai_codex_github_releases() {
#[test]
fn load_cached_runtime_reads_installed_runtime() {
let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
let runtime_version = "2.5.6";
let runtime_version = "0.1.0";
let platform =
ArtifactRuntimePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
let install_dir = codex_home
@@ -70,7 +79,11 @@ fn load_cached_runtime_reads_installed_runtime() {
.join(DEFAULT_CACHE_ROOT_RELATIVE)
.join(runtime_version)
.join(platform.as_str());
write_installed_runtime(&install_dir, runtime_version);
write_installed_runtime(
&install_dir,
runtime_version,
Some(PathBuf::from("node/bin/node")),
);
let runtime = load_cached_runtime(
&codex_home.path().join(DEFAULT_CACHE_ROOT_RELATIVE),
@@ -80,17 +93,18 @@ fn load_cached_runtime_reads_installed_runtime() {
assert_eq!(runtime.runtime_version(), runtime_version);
assert_eq!(runtime.platform(), platform);
assert!(runtime.node_path().ends_with(Path::new("node/bin/node")));
assert!(
runtime
.build_js_path()
.ends_with(Path::new("dist/artifact_tool.mjs"))
.ends_with(Path::new("artifact-tool/dist/artifact_tool.mjs"))
);
}
#[test]
fn load_cached_runtime_requires_build_entrypoint() {
fn load_cached_runtime_rejects_parent_relative_paths() {
let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
let runtime_version = "2.5.6";
let runtime_version = "0.1.0";
let platform =
ArtifactRuntimePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
let install_dir = codex_home
@@ -98,8 +112,41 @@ fn load_cached_runtime_requires_build_entrypoint() {
.join(DEFAULT_CACHE_ROOT_RELATIVE)
.join(runtime_version)
.join(platform.as_str());
write_installed_runtime(&install_dir, runtime_version);
fs::remove_file(install_dir.join("dist/artifact_tool.mjs"))
write_installed_runtime(
&install_dir,
runtime_version,
Some(PathBuf::from("../node/bin/node")),
);
let error = load_cached_runtime(
&codex_home.path().join(DEFAULT_CACHE_ROOT_RELATIVE),
runtime_version,
)
.unwrap_err();
assert_eq!(
error.to_string(),
"runtime path `../node/bin/node` is invalid"
);
}
#[test]
fn load_cached_runtime_requires_build_entrypoint() {
let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
let runtime_version = "0.1.0";
let platform =
ArtifactRuntimePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
let install_dir = codex_home
.path()
.join(DEFAULT_CACHE_ROOT_RELATIVE)
.join(runtime_version)
.join(platform.as_str());
write_installed_runtime(
&install_dir,
runtime_version,
Some(PathBuf::from("node/bin/node")),
);
fs::remove_file(install_dir.join("artifact-tool/dist/artifact_tool.mjs"))
.unwrap_or_else(|error| panic!("{error}"));
let error = load_cached_runtime(
@@ -112,7 +159,9 @@ fn load_cached_runtime_requires_build_entrypoint() {
error.to_string(),
format!(
"required runtime file is missing: {}",
install_dir.join("dist/artifact_tool.mjs").display()
install_dir
.join("artifact-tool/dist/artifact_tool.mjs")
.display()
)
);
}
@@ -120,7 +169,7 @@ fn load_cached_runtime_requires_build_entrypoint() {
#[tokio::test]
async fn ensure_installed_downloads_and_extracts_zip_runtime() {
let server = MockServer::start().await;
let runtime_version = "2.5.6";
let runtime_version = "0.1.0";
let platform =
ArtifactRuntimePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
let archive_name = format!(
@@ -133,7 +182,7 @@ async fn ensure_installed_downloads_and_extracts_zip_runtime() {
schema_version: 1,
runtime_version: runtime_version.to_string(),
release_tag: format!("artifact-runtime-v{runtime_version}"),
node_version: None,
node_version: Some("22.0.0".to_string()),
platforms: BTreeMap::from([(
platform.as_str().to_string(),
PackageReleaseArchive {
@@ -176,128 +225,28 @@ async fn ensure_installed_downloads_and_extracts_zip_runtime() {
assert_eq!(runtime.runtime_version(), runtime_version);
assert_eq!(runtime.platform(), platform);
assert!(
runtime
.build_js_path()
.ends_with(Path::new("dist/artifact_tool.mjs"))
);
}
#[test]
fn load_cached_runtime_requires_package_export() {
let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
let runtime_version = "2.5.6";
let platform =
ArtifactRuntimePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
let install_dir = codex_home
.path()
.join(DEFAULT_CACHE_ROOT_RELATIVE)
.join(runtime_version)
.join(platform.as_str());
write_installed_runtime(&install_dir, runtime_version);
fs::write(
install_dir.join("package.json"),
serde_json::json!({
"name": "@oai/artifact-tool",
"version": runtime_version,
"type": "module",
})
.to_string(),
)
.unwrap_or_else(|error| panic!("{error}"));
let error = load_cached_runtime(
&codex_home.path().join(DEFAULT_CACHE_ROOT_RELATIVE),
runtime_version,
)
.unwrap_err();
assert!(runtime.node_path().ends_with(Path::new("node/bin/node")));
assert_eq!(
error.to_string(),
format!(
"invalid package metadata at {}",
install_dir.join("package.json").display()
)
);
}
#[tokio::test]
async fn ensure_installed_downloads_and_extracts_tar_gz_runtime() {
let server = MockServer::start().await;
let runtime_version = "2.5.6";
let platform =
ArtifactRuntimePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
let archive_name = format!(
"artifact-runtime-v{runtime_version}-{}.tar.gz",
platform.as_str()
);
let archive_bytes = build_tar_gz_archive(runtime_version);
let archive_sha = format!("{:x}", Sha256::digest(&archive_bytes));
let manifest = ReleaseManifest {
schema_version: 1,
runtime_version: runtime_version.to_string(),
release_tag: format!("artifact-runtime-v{runtime_version}"),
node_version: None,
platforms: BTreeMap::from([(
platform.as_str().to_string(),
PackageReleaseArchive {
archive: archive_name.clone(),
sha256: archive_sha,
format: ArchiveFormat::TarGz,
size_bytes: Some(archive_bytes.len() as u64),
},
)]),
};
Mock::given(method("GET"))
.and(path(format!(
"/artifact-runtime-v{runtime_version}/artifact-runtime-v{runtime_version}-manifest.json"
)))
.respond_with(ResponseTemplate::new(200).set_body_json(&manifest))
.mount(&server)
.await;
Mock::given(method("GET"))
.and(path(format!(
"/artifact-runtime-v{runtime_version}/{archive_name}"
)))
.respond_with(ResponseTemplate::new(200).set_body_bytes(archive_bytes))
.mount(&server)
.await;
let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
let locator = ArtifactRuntimeReleaseLocator::new(
url::Url::parse(&format!("{}/", server.uri())).unwrap_or_else(|error| panic!("{error}")),
runtime_version,
);
let manager = ArtifactRuntimeManager::new(ArtifactRuntimeManagerConfig::new(
codex_home.path().to_path_buf(),
locator,
));
let runtime = manager
.ensure_installed()
.await
.unwrap_or_else(|error| panic!("{error}"));
assert_eq!(runtime.runtime_version(), runtime_version);
assert_eq!(runtime.platform(), platform);
assert!(
runtime
.build_js_path()
.ends_with(Path::new("dist/artifact_tool.mjs"))
runtime.resolve_js_runtime().expect("resolve js runtime"),
JsRuntime::node(runtime.node_path().to_path_buf())
);
}
#[test]
fn load_cached_runtime_uses_custom_cache_root() {
let codex_home = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
let runtime_version = "2.5.6";
let runtime_version = "0.1.0";
let custom_cache_root = codex_home.path().join("runtime-cache");
let platform =
ArtifactRuntimePlatform::detect_current().unwrap_or_else(|error| panic!("{error}"));
let install_dir = custom_cache_root
.join(runtime_version)
.join(platform.as_str());
write_installed_runtime(&install_dir, runtime_version);
write_installed_runtime(
&install_dir,
runtime_version,
Some(PathBuf::from("node/bin/node")),
);
let config = ArtifactRuntimeManagerConfig::with_default_release(
codex_home.path().to_path_buf(),
@@ -316,38 +265,102 @@ fn load_cached_runtime_uses_custom_cache_root() {
#[cfg(unix)]
async fn artifacts_client_execute_build_writes_wrapped_script_and_env() {
let temp = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
let runtime_root = temp.path().join("runtime");
write_installed_runtime(&runtime_root, "2.5.6");
let runtime = crate::InstalledArtifactRuntime::load(
runtime_root,
ArtifactRuntimePlatform::detect_current().unwrap_or_else(|error| panic!("{error}")),
)
.unwrap_or_else(|error| panic!("{error}"));
let output_path = temp.path().join("build-output.txt");
let wrapped_script_path = temp.path().join("wrapped-script.mjs");
let runtime = fake_installed_runtime(temp.path(), &output_path, &wrapped_script_path);
let client = ArtifactsClient::from_installed_runtime(runtime);
let output = client
.execute_build(ArtifactBuildRequest {
source: concat!(
"console.log(typeof artifacts);\n",
"console.log(typeof codexArtifacts);\n",
"console.log(artifactTool.ok);\n",
"console.log(ok);\n",
"console.error('stderr-ok');\n",
"console.log('stdout-ok');\n"
)
.to_string(),
source: "console.log('hello');".to_string(),
cwd: temp.path().to_path_buf(),
timeout: Some(Duration::from_secs(5)),
env: BTreeMap::new(),
env: BTreeMap::from([
(
"CODEX_TEST_OUTPUT".to_string(),
output_path.display().to_string(),
),
("CUSTOM_ENV".to_string(), "custom-value".to_string()),
]),
})
.await
.unwrap_or_else(|error| panic!("{error}"));
assert_success(&output);
assert_eq!(output.stderr.trim(), "stderr-ok");
let command_log = fs::read_to_string(&output_path).unwrap_or_else(|error| panic!("{error}"));
assert!(command_log.contains("arg0="));
assert!(command_log.contains("CODEX_ARTIFACT_BUILD_ENTRYPOINT="));
assert!(command_log.contains("CODEX_ARTIFACT_RENDER_ENTRYPOINT="));
assert!(command_log.contains("CUSTOM_ENV=custom-value"));
let wrapped_script =
fs::read_to_string(wrapped_script_path).unwrap_or_else(|error| panic!("{error}"));
assert!(wrapped_script.contains("globalThis.artifacts = artifactTool;"));
assert!(wrapped_script.contains("globalThis.codexArtifacts = artifactTool;"));
assert!(wrapped_script.contains("console.log('hello');"));
}
// Unix-only: verifies that `execute_render` invokes the render CLI through
// the fake node script with the expected positional arguments, by reading
// back the argv log the fake script writes to CODEX_TEST_OUTPUT.
#[tokio::test]
#[cfg(unix)]
async fn artifacts_client_execute_render_passes_expected_args() {
    let temp = TempDir::new().unwrap_or_else(|error| panic!("{error}"));
    let output_path = temp.path().join("render-output.txt");
    // Render never wraps a script, so this copy target stays unused.
    let wrapped_script_path = temp.path().join("unused-script-copy.mjs");
    let runtime = fake_installed_runtime(temp.path(), &output_path, &wrapped_script_path);
    let client = ArtifactsClient::from_installed_runtime(runtime.clone());
    let render_output = temp.path().join("slide.png");
    let output = client
        .execute_render(ArtifactRenderCommandRequest {
            cwd: temp.path().to_path_buf(),
            timeout: Some(Duration::from_secs(5)),
            env: BTreeMap::from([(
                "CODEX_TEST_OUTPUT".to_string(),
                output_path.display().to_string(),
            )]),
            target: ArtifactRenderTarget::Presentation(PresentationRenderTarget {
                input_path: temp.path().join("deck.pptx"),
                output_path: render_output.clone(),
                slide_number: 3,
            }),
        })
        .await
        .unwrap_or_else(|error| panic!("{error}"));

    assert_success(&output);
    let command_log = fs::read_to_string(&output_path).unwrap_or_else(|error| panic!("{error}"));
    // arg0 is the script path handed to node, i.e. the render CLI itself.
    assert!(command_log.contains(&format!("arg0={}", runtime.render_cli_path().display())));
    assert!(command_log.contains("arg1=pptx"));
    assert!(command_log.contains("arg2=render"));
    // arg3/arg4 are presumably `--in` plus the input path — not asserted here.
    assert!(command_log.contains("arg5=--slide"));
    assert!(command_log.contains("arg6=3"));
    assert!(command_log.contains("arg7=--out"));
    assert!(command_log.contains(&format!("arg8={}", render_output.display())));
}
#[test]
fn spreadsheet_render_target_to_args_includes_optional_range() {
let target = ArtifactRenderTarget::Spreadsheet(SpreadsheetRenderTarget {
input_path: PathBuf::from("/tmp/input.xlsx"),
output_path: PathBuf::from("/tmp/output.png"),
sheet_name: "Summary".to_string(),
range: Some("A1:C8".to_string()),
});
assert_eq!(
output.stdout.lines().collect::<Vec<_>>(),
vec!["undefined", "undefined", "true", "true", "stdout-ok"]
target.to_args(),
vec![
"xlsx".to_string(),
"render".to_string(),
"--in".to_string(),
"/tmp/input.xlsx".to_string(),
"--sheet".to_string(),
"Summary".to_string(),
"--out".to_string(),
"/tmp/output.png".to_string(),
"--range".to_string(),
"A1:C8".to_string(),
]
);
}
@@ -356,26 +369,94 @@ fn assert_success(output: &ArtifactCommandOutput) {
assert_eq!(output.exit_code, Some(0));
}
fn write_installed_runtime(install_dir: &Path, runtime_version: &str) {
fs::create_dir_all(install_dir.join("dist")).unwrap_or_else(|error| panic!("{error}"));
#[cfg(unix)]
fn fake_installed_runtime(
root: &Path,
output_path: &Path,
wrapped_script_path: &Path,
) -> InstalledArtifactRuntime {
let runtime_root = root.join("runtime");
write_installed_runtime(&runtime_root, "0.1.0", Some(PathBuf::from("node/bin/node")));
write_fake_node_script(
&runtime_root.join("node/bin/node"),
output_path,
wrapped_script_path,
);
InstalledArtifactRuntime::load(
runtime_root,
ArtifactRuntimePlatform::detect_current().unwrap_or_else(|error| panic!("{error}")),
)
.unwrap_or_else(|error| panic!("{error}"))
}
fn write_installed_runtime(
install_dir: &Path,
runtime_version: &str,
node_relative: Option<PathBuf>,
) {
fs::create_dir_all(install_dir.join("node/bin")).unwrap_or_else(|error| panic!("{error}"));
fs::create_dir_all(install_dir.join("artifact-tool/dist"))
.unwrap_or_else(|error| panic!("{error}"));
fs::create_dir_all(install_dir.join("granola-render/dist"))
.unwrap_or_else(|error| panic!("{error}"));
let node_relative = node_relative.unwrap_or_else(|| PathBuf::from("node/bin/node"));
fs::write(
install_dir.join("package.json"),
serde_json::json!({
"name": "@oai/artifact-tool",
"version": runtime_version,
"type": "module",
"exports": {
".": "./dist/artifact_tool.mjs",
}
})
.to_string(),
install_dir.join("manifest.json"),
serde_json::json!(sample_extracted_manifest(runtime_version, node_relative)).to_string(),
)
.unwrap_or_else(|error| panic!("{error}"));
fs::write(install_dir.join("node/bin/node"), "#!/bin/sh\n")
.unwrap_or_else(|error| panic!("{error}"));
fs::write(
install_dir.join("dist/artifact_tool.mjs"),
install_dir.join("artifact-tool/dist/artifact_tool.mjs"),
"export const ok = true;\n",
)
.unwrap_or_else(|error| panic!("{error}"));
fs::write(
install_dir.join("granola-render/dist/render_cli.mjs"),
"export const ok = true;\n",
)
.unwrap_or_else(|error| panic!("{error}"));
}
/// Installs a fake `node` executable at `script_path` for tests.
///
/// The generated shell script:
/// - records its first argument (the script node was asked to run) as an
///   `arg0=` line in `output_path` and copies that file to
///   `wrapped_script_path`;
/// - appends each remaining argument as an `argN=` line;
/// - appends the `CODEX_ARTIFACT_BUILD_ENTRYPOINT`,
///   `CODEX_ARTIFACT_RENDER_ENTRYPOINT`, and `CUSTOM_ENV` values;
/// - emits `stdout-ok` on stdout and `stderr-ok` on stderr, then exits 0.
#[cfg(unix)]
fn write_fake_node_script(script_path: &Path, output_path: &Path, wrapped_script_path: &Path) {
    fs::write(
        script_path,
        format!(
            concat!(
                "#!/bin/sh\n",
                // `>` truncates: arg0 starts a fresh log file per invocation.
                "printf 'arg0=%s\\n' \"$1\" > \"{}\"\n",
                "cp \"$1\" \"{}\"\n",
                "shift\n",
                "i=1\n",
                "for arg in \"$@\"; do\n",
                "    printf 'arg%s=%s\\n' \"$i\" \"$arg\" >> \"{}\"\n",
                "    i=$((i + 1))\n",
                "done\n",
                "printf 'CODEX_ARTIFACT_BUILD_ENTRYPOINT=%s\\n' \"$CODEX_ARTIFACT_BUILD_ENTRYPOINT\" >> \"{}\"\n",
                "printf 'CODEX_ARTIFACT_RENDER_ENTRYPOINT=%s\\n' \"$CODEX_ARTIFACT_RENDER_ENTRYPOINT\" >> \"{}\"\n",
                "printf 'CUSTOM_ENV=%s\\n' \"$CUSTOM_ENV\" >> \"{}\"\n",
                "echo stdout-ok\n",
                "echo stderr-ok >&2\n"
            ),
            // Positional `{}` fills, in order: log (arg0), wrapped-script
            // copy target, then the log path for the loop and each env line.
            output_path.display(),
            wrapped_script_path.display(),
            output_path.display(),
            output_path.display(),
            output_path.display(),
            output_path.display(),
        ),
    )
    .unwrap_or_else(|error| panic!("{error}"));
    // Mark the script executable so it can stand in for the node binary.
    #[cfg(unix)]
    {
        let mut permissions = fs::metadata(script_path)
            .unwrap_or_else(|error| panic!("{error}"))
            .permissions();
        permissions.set_mode(0o755);
        fs::set_permissions(script_path, permissions).unwrap_or_else(|error| panic!("{error}"));
    }
}
fn build_zip_archive(runtime_version: &str) -> Vec<u8> {
@@ -383,22 +464,34 @@ fn build_zip_archive(runtime_version: &str) -> Vec<u8> {
{
let mut zip = ZipWriter::new(&mut bytes);
let options = SimpleFileOptions::default();
let package_json = serde_json::json!({
"name": "@oai/artifact-tool",
"version": runtime_version,
"type": "module",
"exports": {
".": "./dist/artifact_tool.mjs",
}
})
.to_string()
.into_bytes();
zip.start_file("artifact-runtime/package.json", options)
let manifest = serde_json::to_vec(&sample_extracted_manifest(
runtime_version,
PathBuf::from("node/bin/node"),
))
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file("artifact-runtime/manifest.json", options)
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(&package_json)
zip.write_all(&manifest)
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file("artifact-runtime/dist/artifact_tool.mjs", options)
zip.start_file(
"artifact-runtime/node/bin/node",
options.unix_permissions(0o755),
)
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(b"#!/bin/sh\n")
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file(
"artifact-runtime/artifact-tool/dist/artifact_tool.mjs",
options,
)
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(b"export const ok = true;\n")
.unwrap_or_else(|error| panic!("{error}"));
zip.start_file(
"artifact-runtime/granola-render/dist/render_cli.mjs",
options,
)
.unwrap_or_else(|error| panic!("{error}"));
zip.write_all(b"export const ok = true;\n")
.unwrap_or_else(|error| panic!("{error}"));
zip.finish().unwrap_or_else(|error| panic!("{error}"));
@@ -406,48 +499,23 @@ fn build_zip_archive(runtime_version: &str) -> Vec<u8> {
bytes.into_inner()
}
fn build_tar_gz_archive(runtime_version: &str) -> Vec<u8> {
let mut bytes = Vec::new();
{
let encoder = GzEncoder::new(&mut bytes, Compression::default());
let mut archive = TarBuilder::new(encoder);
let package_json = serde_json::json!({
"name": "@oai/artifact-tool",
"version": runtime_version,
"type": "module",
"exports": {
".": "./dist/artifact_tool.mjs",
}
})
.to_string()
.into_bytes();
let mut package_header = tar::Header::new_gnu();
package_header.set_mode(0o644);
package_header.set_size(package_json.len() as u64);
package_header.set_cksum();
archive
.append_data(
&mut package_header,
"package/package.json",
package_json.as_slice(),
)
.unwrap_or_else(|error| panic!("{error}"));
let build_js = b"export const ok = true;\n";
let mut build_header = tar::Header::new_gnu();
build_header.set_mode(0o644);
build_header.set_size(build_js.len() as u64);
build_header.set_cksum();
archive
.append_data(
&mut build_header,
"package/dist/artifact_tool.mjs",
&build_js[..],
)
.unwrap_or_else(|error| panic!("{error}"));
archive.finish().unwrap_or_else(|error| panic!("{error}"));
fn sample_extracted_manifest(
runtime_version: &str,
node_relative: PathBuf,
) -> ExtractedRuntimeManifest {
ExtractedRuntimeManifest {
schema_version: 1,
runtime_version: runtime_version.to_string(),
node: RuntimePathEntry {
relative_path: node_relative.display().to_string(),
},
entrypoints: RuntimeEntrypoints {
build_js: RuntimePathEntry {
relative_path: "artifact-tool/dist/artifact_tool.mjs".to_string(),
},
render_cli: RuntimePathEntry {
relative_path: "granola-render/dist/render_cli.mjs".to_string(),
},
},
}
bytes
}

View File

@@ -1,6 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "codex-auth",
name = "auth",
crate_name = "codex_auth",
)

View File

@@ -1,5 +1,5 @@
[package]
name = "codex-core-auth"
name = "codex-auth"
version.workspace = true
edition.workspace = true
license.workspace = true
@@ -8,22 +8,32 @@ license.workspace = true
workspace = true
[dependencies]
base64 = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
codex-app-server-protocol = { workspace = true }
codex-auth = { workspace = true }
codex-keyring-store = { workspace = true }
once_cell = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha2 = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
[target.'cfg(target_os = "linux")'.dependencies]
keyring = { workspace = true, features = ["linux-native-async-persistent"] }
[target.'cfg(target_os = "macos")'.dependencies]
keyring = { workspace = true, features = ["apple-native"] }
[target.'cfg(target_os = "windows")'.dependencies]
keyring = { workspace = true, features = ["windows-native"] }
[target.'cfg(any(target_os = "freebsd", target_os = "openbsd"))'.dependencies]
keyring = { workspace = true, features = ["sync-secret-service"] }
[dev-dependencies]
anyhow = { workspace = true }
base64 = { workspace = true }
keyring = { workspace = true }
pretty_assertions = { workspace = true }
serde = { workspace = true, features = ["derive"] }
tempfile = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
tokio = { workspace = true, features = ["macros", "rt"] }

View File

@@ -0,0 +1,52 @@
use crate::CODEX_API_KEY_ENV_VAR;
use crate::OPENAI_API_KEY_ENV_VAR;
use crate::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
/// Snapshot of auth-related environment state, collected for telemetry.
///
/// Only presence booleans and a bucketed name are recorded — never the
/// values of the environment variables themselves.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct AuthEnvTelemetry {
    // True when OPENAI_API_KEY is set to a non-blank value.
    pub openai_api_key_env_present: bool,
    // True when CODEX_API_KEY is set to a non-blank value.
    pub codex_api_key_env_present: bool,
    // Caller-supplied flag; not derived from the environment here.
    pub codex_api_key_env_enabled: bool,
    // Bucketed to the literal "configured" (never the real key name) when a
    // provider env key is configured; None otherwise.
    pub provider_env_key_name: Option<String>,
    // Presence of the provider's env var, when one is named by the caller.
    pub provider_env_key_present: Option<bool>,
    // True when CODEX_REFRESH_TOKEN_URL_OVERRIDE is set to a non-blank value.
    pub refresh_token_url_override_present: bool,
}
/// Collects [`AuthEnvTelemetry`] from the current process environment.
///
/// `provider_env_key_configured` only controls whether the bucketed name
/// ("configured") is emitted; `provider_env_key`, when given, is the env var
/// name whose presence is probed. The key's value is never recorded.
pub fn collect_auth_env_telemetry(
    provider_env_key_configured: bool,
    provider_env_key: Option<&str>,
    codex_api_key_env_enabled: bool,
) -> AuthEnvTelemetry {
    AuthEnvTelemetry {
        openai_api_key_env_present: env_var_present(OPENAI_API_KEY_ENV_VAR),
        codex_api_key_env_present: env_var_present(CODEX_API_KEY_ENV_VAR),
        codex_api_key_env_enabled,
        // Deliberately bucketed so the real key name cannot leak into telemetry.
        provider_env_key_name: provider_env_key_configured.then(|| "configured".to_string()),
        provider_env_key_present: provider_env_key.map(env_var_present),
        refresh_token_url_override_present: env_var_present(REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR),
    }
}
/// Reports whether `name` is set in the environment to more than whitespace.
///
/// A variable holding non-Unicode bytes still counts as present; an unset
/// variable does not.
fn env_var_present(name: &str) -> bool {
    std::env::var(name)
        .map(|value| !value.trim().is_empty())
        .unwrap_or_else(|err| matches!(err, std::env::VarError::NotUnicode(_)))
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    // Regression test: the provider env key NAME is bucketed to the literal
    // "configured" so the real name (or anything resembling a secret) never
    // appears in telemetry output.
    #[test]
    fn collect_auth_env_telemetry_buckets_provider_env_key_name() {
        let telemetry = collect_auth_env_telemetry(true, Some("sk-should-not-leak"), false);
        assert_eq!(
            telemetry.provider_env_key_name,
            Some("configured".to_string())
        );
    }
}

103
codex-rs/auth/src/lib.rs Normal file
View File

@@ -0,0 +1,103 @@
mod env_telemetry;
pub mod storage;
pub mod token_data;
use std::env;
use std::path::Path;
use codex_app_server_protocol::AuthMode;
pub use env_telemetry::AuthEnvTelemetry;
pub use env_telemetry::collect_auth_env_telemetry;
pub use storage::AuthCredentialsStoreMode;
pub use storage::AuthDotJson;
pub use storage::AuthStorageBackend;
pub use storage::create_auth_storage;
pub use token_data::IdTokenInfo;
pub use token_data::IdTokenInfoError;
pub use token_data::KnownPlan;
pub use token_data::PlanType;
pub use token_data::TokenData;
pub use token_data::parse_chatgpt_jwt_claims;
pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";
pub const CODEX_API_KEY_ENV_VAR: &str = "CODEX_API_KEY";
pub const REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR: &str = "CODEX_REFRESH_TOKEN_URL_OVERRIDE";
/// ChatGPT auth tokens supplied by an external host application, rather than
/// obtained through Codex's own login flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExternalAuthTokens {
    // Bearer access token for API requests.
    pub access_token: String,
    // Workspace/account identifier associated with the token.
    pub chatgpt_account_id: String,
    // Plan type string, when the host knows it (e.g. "pro"); otherwise None.
    pub chatgpt_plan_type: Option<String>,
}
/// Reads `OPENAI_API_KEY` from the environment, trimmed of surrounding
/// whitespace. Returns `None` when the variable is unset, non-Unicode, or
/// blank after trimming.
pub fn read_openai_api_key_from_env() -> Option<String> {
    let raw = env::var(OPENAI_API_KEY_ENV_VAR).ok()?;
    let trimmed = raw.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
/// Reads `CODEX_API_KEY` from the environment, trimmed of surrounding
/// whitespace. Returns `None` when the variable is unset, non-Unicode, or
/// blank after trimming.
pub fn read_codex_api_key_from_env() -> Option<String> {
    let raw = env::var(CODEX_API_KEY_ENV_VAR).ok()?;
    let trimmed = raw.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
/// Deletes stored Codex credentials under `codex_home` using the storage
/// backend selected by `auth_credentials_store_mode`.
///
/// Returns the backend's `delete` result — presumably whether credentials
/// existed to delete; confirm against the backend implementations.
pub fn logout(
    codex_home: &Path,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<bool> {
    let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
    storage.delete()
}
/// Persists an API-key login: writes an `AuthDotJson` with mode
/// [`AuthMode::ApiKey`] and the given key, using the requested storage mode.
///
/// Any previously stored tokens/refresh timestamp are replaced with `None`.
pub fn login_with_api_key(
    codex_home: &Path,
    api_key: &str,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
    let auth_dot_json = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some(api_key.to_string()),
        tokens: None,
        last_refresh: None,
    };
    save_auth(codex_home, &auth_dot_json, auth_credentials_store_mode)
}
/// Stores externally supplied ChatGPT auth tokens for this process.
///
/// Always uses [`AuthCredentialsStoreMode::Ephemeral`] — the credentials live
/// in memory only and are never written to disk or the keyring. Presumably
/// intentional for host-injected tokens; confirm with callers before changing.
pub fn login_with_chatgpt_auth_tokens(
    codex_home: &Path,
    access_token: &str,
    chatgpt_account_id: &str,
    chatgpt_plan_type: Option<&str>,
) -> std::io::Result<()> {
    let auth_dot_json = AuthDotJson::from_external_access_token(
        access_token,
        chatgpt_account_id,
        chatgpt_plan_type,
    )?;
    save_auth(
        codex_home,
        &auth_dot_json,
        AuthCredentialsStoreMode::Ephemeral,
    )
}
/// Saves `auth` to the storage backend selected by
/// `auth_credentials_store_mode`, keyed by `codex_home`.
pub fn save_auth(
    codex_home: &Path,
    auth: &AuthDotJson,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
    let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
    storage.save(auth)
}
/// Loads stored credentials for `codex_home` from the backend selected by
/// `auth_credentials_store_mode`. Returns `Ok(None)` when nothing is stored.
pub fn load_auth_dot_json(
    codex_home: &Path,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<Option<AuthDotJson>> {
    let storage = create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode);
    storage.load()
}

View File

@@ -19,41 +19,32 @@ use std::sync::Arc;
use std::sync::Mutex;
use tracing::warn;
use crate::PlanType;
use crate::TokenData;
use crate::parse_chatgpt_jwt_claims;
use codex_app_server_protocol::AuthMode;
use codex_auth::token_data::PlanType;
use codex_auth::token_data::TokenData;
use codex_auth::token_data::parse_chatgpt_jwt_claims;
use codex_keyring_store::DefaultKeyringStore;
use codex_keyring_store::KeyringStore;
use once_cell::sync::Lazy;
/// Determine where Codex should store CLI auth credentials.
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum AuthCredentialsStoreMode {
#[default]
/// Persist credentials in CODEX_HOME/auth.json.
File,
/// Persist credentials in the keyring. Fail if unavailable.
Keyring,
/// Use keyring when available; otherwise, fall back to a file in CODEX_HOME.
Auto,
/// Store credentials in memory only for the current process.
Ephemeral,
}
/// Expected structure for $CODEX_HOME/auth.json.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
pub struct AuthDotJson {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub auth_mode: Option<AuthMode>,
#[serde(rename = "OPENAI_API_KEY")]
pub openai_api_key: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tokens: Option<TokenData>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_refresh: Option<DateTime<Utc>>,
}
@@ -108,11 +99,11 @@ impl AuthDotJson {
}
}
pub fn get_auth_file(codex_home: &Path) -> PathBuf {
fn get_auth_file(codex_home: &Path) -> PathBuf {
codex_home.join("auth.json")
}
pub(super) fn delete_file_if_exists(codex_home: &Path) -> std::io::Result<bool> {
fn delete_file_if_exists(codex_home: &Path) -> std::io::Result<bool> {
let auth_file = get_auth_file(codex_home);
match std::fs::remove_file(&auth_file) {
Ok(()) => Ok(true),
@@ -128,23 +119,20 @@ pub trait AuthStorageBackend: Debug + Send + Sync {
}
#[derive(Clone, Debug)]
pub struct FileAuthStorage {
struct FileAuthStorage {
codex_home: PathBuf,
}
impl FileAuthStorage {
pub fn new(codex_home: PathBuf) -> Self {
fn new(codex_home: PathBuf) -> Self {
Self { codex_home }
}
/// Attempt to read and parse the `auth.json` file in the given `CODEX_HOME` directory.
/// Returns the full AuthDotJson structure.
pub fn try_read_auth_json(&self, auth_file: &Path) -> std::io::Result<AuthDotJson> {
fn try_read_auth_json(&self, auth_file: &Path) -> std::io::Result<AuthDotJson> {
let mut file = File::open(auth_file)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let auth_dot_json: AuthDotJson = serde_json::from_str(&contents)?;
Ok(auth_dot_json)
}
}
@@ -162,7 +150,6 @@ impl AuthStorageBackend for FileAuthStorage {
fn save(&self, auth_dot_json: &AuthDotJson) -> std::io::Result<()> {
let auth_file = get_auth_file(&self.codex_home);
if let Some(parent) = auth_file.parent() {
std::fs::create_dir_all(parent)?;
}
@@ -186,8 +173,7 @@ impl AuthStorageBackend for FileAuthStorage {
const KEYRING_SERVICE: &str = "Codex Auth";
// turns codex_home path into a stable, short key string
pub(crate) fn compute_store_key(codex_home: &Path) -> std::io::Result<String> {
fn compute_store_key(codex_home: &Path) -> std::io::Result<String> {
let canonical = codex_home
.canonicalize()
.unwrap_or_else(|_| codex_home.to_path_buf());
@@ -201,13 +187,13 @@ pub(crate) fn compute_store_key(codex_home: &Path) -> std::io::Result<String> {
}
#[derive(Clone, Debug)]
pub(crate) struct KeyringAuthStorage {
pub(crate) codex_home: PathBuf,
pub(crate) keyring_store: Arc<dyn KeyringStore>,
struct KeyringAuthStorage {
codex_home: PathBuf,
keyring_store: Arc<dyn KeyringStore>,
}
impl KeyringAuthStorage {
pub(crate) fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
Self {
codex_home,
keyring_store,
@@ -252,7 +238,6 @@ impl AuthStorageBackend for KeyringAuthStorage {
fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
let key = compute_store_key(&self.codex_home)?;
// Simpler error mapping per style: prefer method reference over closure
let serialized = serde_json::to_string(auth).map_err(std::io::Error::other)?;
self.save_to_keyring(&key, &serialized)?;
if let Err(err) = delete_file_if_exists(&self.codex_home) {
@@ -275,13 +260,13 @@ impl AuthStorageBackend for KeyringAuthStorage {
}
#[derive(Clone, Debug)]
pub(crate) struct AutoAuthStorage {
pub(crate) keyring_storage: Arc<KeyringAuthStorage>,
pub(crate) file_storage: Arc<FileAuthStorage>,
struct AutoAuthStorage {
keyring_storage: Arc<KeyringAuthStorage>,
file_storage: Arc<FileAuthStorage>,
}
impl AutoAuthStorage {
pub(crate) fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
Self {
keyring_storage: Arc::new(KeyringAuthStorage::new(codex_home.clone(), keyring_store)),
file_storage: Arc::new(FileAuthStorage::new(codex_home)),
@@ -312,12 +297,10 @@ impl AuthStorageBackend for AutoAuthStorage {
}
fn delete(&self) -> std::io::Result<bool> {
// Keyring storage will delete from disk as well
self.keyring_storage.delete()
}
}
// A global in-memory store for mapping codex_home -> AuthDotJson.
static EPHEMERAL_AUTH_STORE: Lazy<Mutex<HashMap<String, AuthDotJson>>> =
Lazy::new(|| Mutex::new(HashMap::new()));

View File

@@ -0,0 +1,289 @@
use super::*;
use crate::token_data::IdTokenInfo;
use anyhow::Context;
use base64::Engine;
use codex_keyring_store::tests::MockKeyringStore;
use keyring::Error as KeyringError;
use pretty_assertions::assert_eq;
use serde::Serialize;
use serde_json::json;
use tempfile::tempdir;
// Round-trip: what FileAuthStorage saves, load() returns unchanged.
#[tokio::test]
async fn file_storage_load_returns_auth_dot_json() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
    let auth_dot_json = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("test-key".to_string()),
        tokens: None,
        last_refresh: Some(Utc::now()),
    };
    storage
        .save(&auth_dot_json)
        .context("failed to save auth file")?;
    let loaded = storage.load().context("failed to load auth file")?;
    assert_eq!(Some(auth_dot_json), loaded);
    Ok(())
}
// save() must write a parseable auth.json; verified by reading the file back
// through try_read_auth_json rather than load().
#[tokio::test]
async fn file_storage_save_persists_auth_dot_json() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
    let auth_dot_json = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("test-key".to_string()),
        tokens: None,
        last_refresh: Some(Utc::now()),
    };
    let file = get_auth_file(codex_home.path());
    storage
        .save(&auth_dot_json)
        .context("failed to save auth file")?;
    let same_auth_dot_json = storage
        .try_read_auth_json(&file)
        .context("failed to read auth file after save")?;
    assert_eq!(auth_dot_json, same_auth_dot_json);
    Ok(())
}
// delete() on file storage removes auth.json and reports that it existed.
#[test]
fn file_storage_delete_removes_auth_file() -> anyhow::Result<()> {
    let dir = tempdir()?;
    let auth_dot_json = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("sk-test-key".to_string()),
        tokens: None,
        last_refresh: None,
    };
    // Seed via the factory to exercise the public construction path.
    let storage = create_auth_storage(dir.path().to_path_buf(), AuthCredentialsStoreMode::File);
    storage.save(&auth_dot_json)?;
    assert!(dir.path().join("auth.json").exists());
    let storage = FileAuthStorage::new(dir.path().to_path_buf());
    let removed = storage.delete()?;
    assert!(removed);
    assert!(!dir.path().join("auth.json").exists());
    Ok(())
}
// Ephemeral storage keeps a full save/load/delete lifecycle in memory and
// never creates auth.json on disk.
#[test]
fn ephemeral_storage_save_load_delete_is_in_memory_only() -> anyhow::Result<()> {
    let dir = tempdir()?;
    let storage = create_auth_storage(
        dir.path().to_path_buf(),
        AuthCredentialsStoreMode::Ephemeral,
    );
    let auth_dot_json = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("sk-ephemeral".to_string()),
        tokens: None,
        last_refresh: Some(Utc::now()),
    };
    storage.save(&auth_dot_json)?;
    let loaded = storage.load()?;
    assert_eq!(Some(auth_dot_json), loaded);
    let removed = storage.delete()?;
    assert!(removed);
    let loaded = storage.load()?;
    assert_eq!(None, loaded);
    // The on-disk fallback must never have been touched.
    assert!(!get_auth_file(dir.path()).exists());
    Ok(())
}
// Test helper: seeds both a keyring entry and a stale fallback auth.json so a
// delete test can assert that both are removed. Returns the keyring key and
// the fallback file path.
fn seed_keyring_and_fallback_auth_file_for_delete<F>(
    mock_keyring: &MockKeyringStore,
    codex_home: &Path,
    compute_key: F,
) -> anyhow::Result<(String, PathBuf)>
where
    F: FnOnce() -> std::io::Result<String>,
{
    let key = compute_key()?;
    mock_keyring.save(KEYRING_SERVICE, &key, "{}")?;
    let auth_file = get_auth_file(codex_home);
    std::fs::write(&auth_file, "stale")?;
    Ok((key, auth_file))
}
fn seed_keyring_with_auth<F>(
mock_keyring: &MockKeyringStore,
compute_key: F,
auth: &AuthDotJson,
) -> anyhow::Result<()>
where
F: FnOnce() -> std::io::Result<String>,
{
let key = compute_key()?;
let serialized = serde_json::to_string(auth)?;
mock_keyring.save(KEYRING_SERVICE, &key, &serialized)?;
Ok(())
}
// Test helper: asserts the keyring holds exactly the serialized `expected`
// auth AND that the on-disk fallback auth.json was removed after the save.
fn assert_keyring_saved_auth_and_removed_fallback(
    mock_keyring: &MockKeyringStore,
    key: &str,
    codex_home: &Path,
    expected: &AuthDotJson,
) {
    let saved_value = mock_keyring
        .saved_value(key)
        .expect("keyring entry should exist");
    let expected_serialized = serde_json::to_string(expected).expect("serialize expected auth");
    assert_eq!(saved_value, expected_serialized);
    let auth_file = get_auth_file(codex_home);
    assert!(
        !auth_file.exists(),
        "fallback auth.json should be removed after keyring save"
    );
}
// Test helper: builds an unsigned (alg "none") JWT whose claims embed
// `prefix`, then parses it into an IdTokenInfo. The signature segment is a
// dummy — parse_chatgpt_jwt_claims only decodes, it does not verify.
fn id_token_with_prefix(prefix: &str) -> IdTokenInfo {
    #[derive(Serialize)]
    struct Header {
        alg: &'static str,
        typ: &'static str,
    }
    let header = Header {
        alg: "none",
        typ: "JWT",
    };
    let payload = json!({
        "email": format!("{prefix}@example.com"),
        "https://api.openai.com/auth": {
            "chatgpt_account_id": format!("{prefix}-account"),
        },
    });
    // JWTs use URL-safe base64 without padding for all three segments.
    let encode = |bytes: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(bytes);
    let header_b64 = encode(&serde_json::to_vec(&header).expect("serialize header"));
    let payload_b64 = encode(&serde_json::to_vec(&payload).expect("serialize payload"));
    let signature_b64 = encode(b"sig");
    let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");
    crate::token_data::parse_chatgpt_jwt_claims(&fake_jwt).expect("fake JWT should parse")
}
// Test helper: builds a fully populated AuthDotJson whose every field is
// derived from `prefix`, so assertions can tell fixtures apart.
fn auth_with_prefix(prefix: &str) -> AuthDotJson {
    AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some(format!("{prefix}-api-key")),
        tokens: Some(TokenData {
            id_token: id_token_with_prefix(prefix),
            access_token: format!("{prefix}-access"),
            refresh_token: format!("{prefix}-refresh"),
            account_id: Some(format!("{prefix}-account-id")),
        }),
        last_refresh: None,
    }
}
// load() deserializes whatever JSON was stored under the computed key.
#[test]
fn keyring_auth_storage_load_returns_deserialized_auth() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let mock_keyring = MockKeyringStore::default();
    let storage = KeyringAuthStorage::new(
        codex_home.path().to_path_buf(),
        Arc::new(mock_keyring.clone()),
    );
    let expected = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("sk-test".to_string()),
        tokens: None,
        last_refresh: None,
    };
    seed_keyring_with_auth(
        &mock_keyring,
        || compute_store_key(codex_home.path()),
        &expected,
    )?;
    let loaded = storage.load()?;
    assert_eq!(Some(expected), loaded);
    Ok(())
}
// Pins the key-derivation output for a fixed path so the hash scheme cannot
// change silently (existing keyring entries would become unreachable).
#[test]
fn keyring_auth_storage_compute_store_key_for_home_directory() -> anyhow::Result<()> {
    let codex_home = PathBuf::from("~/.codex");
    let key = compute_store_key(codex_home.as_path())?;
    assert_eq!(key, "cli|940db7b1d0e4eb40");
    Ok(())
}
// save() writes the serialized auth to the keyring and cleans up any stale
// fallback auth.json left over from file-mode storage.
#[test]
fn keyring_auth_storage_save_persists_and_removes_fallback_file() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let mock_keyring = MockKeyringStore::default();
    let storage = KeyringAuthStorage::new(
        codex_home.path().to_path_buf(),
        Arc::new(mock_keyring.clone()),
    );
    // Pre-existing stale file that the save must remove.
    let auth_file = get_auth_file(codex_home.path());
    std::fs::write(&auth_file, "stale")?;
    let auth = AuthDotJson {
        auth_mode: Some(AuthMode::Chatgpt),
        openai_api_key: None,
        tokens: Some(TokenData {
            id_token: Default::default(),
            access_token: "access".to_string(),
            refresh_token: "refresh".to_string(),
            account_id: Some("account".to_string()),
        }),
        last_refresh: Some(Utc::now()),
    };
    storage.save(&auth)?;
    let key = compute_store_key(codex_home.path())?;
    assert_keyring_saved_auth_and_removed_fallback(&mock_keyring, &key, codex_home.path(), &auth);
    Ok(())
}
// delete() removes both the keyring entry and the on-disk fallback file.
#[test]
fn keyring_auth_storage_delete_removes_keyring_and_file() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let mock_keyring = MockKeyringStore::default();
    let storage = KeyringAuthStorage::new(
        codex_home.path().to_path_buf(),
        Arc::new(mock_keyring.clone()),
    );
    let (key, auth_file) =
        seed_keyring_and_fallback_auth_file_for_delete(&mock_keyring, codex_home.path(), || {
            compute_store_key(codex_home.path())
        })?;
    let removed = storage.delete()?;
    assert!(removed);
    assert!(!mock_keyring.contains(&key));
    assert!(!auth_file.exists());
    Ok(())
}
// Auto mode: when the keyring errors on load, the file backend's contents
// are returned instead.
#[test]
fn auto_auth_storage_falls_back_to_file_on_keyring_load_error() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let mock_keyring = MockKeyringStore::default();
    // Force the keyring path to fail for this key.
    let key = compute_store_key(codex_home.path())?;
    mock_keyring.set_error(&key, KeyringError::NoEntry);
    let storage = AutoAuthStorage::new(
        codex_home.path().to_path_buf(),
        Arc::new(mock_keyring.clone()),
    );
    let expected = auth_with_prefix("fallback");
    FileAuthStorage::new(codex_home.path().to_path_buf()).save(&expected)?;
    let loaded = storage.load()?;
    assert_eq!(Some(expected), loaded);
    Ok(())
}

View File

@@ -161,3 +161,7 @@ where
{
serializer.serialize_str(&id_token.raw_jwt)
}
#[cfg(test)]
#[path = "token_data_tests.rs"]
mod tests;

View File

@@ -0,0 +1,20 @@
use super::*;
use pretty_assertions::assert_eq;
// Decodes a fixed unsigned JWT and checks every claim field, including that
// the raw JWT string is retained verbatim on the parsed struct.
#[test]
fn parses_id_token_claims() {
    // Payload encodes: email, plan type "pro", user "user-1", account "ws-1".
    let jwt = "eyJhbGciOiJub25lIn0.eyJlbWFpbCI6InVzZXJAZXhhbXBsZS5jb20iLCJodHRwczovL2FwaS5vcGVuYWkuY29tL2F1dGgiOnsiY2hhdGdwdF9wbGFuX3R5cGUiOiJwcm8iLCJjaGF0Z3B0X3VzZXJfaWQiOiJ1c2VyLTEiLCJjaGF0Z3B0X2FjY291bnRfaWQiOiJ3cy0xIn19.c2ln";
    let claims = parse_chatgpt_jwt_claims(jwt).expect("jwt should parse");
    assert_eq!(
        claims,
        IdTokenInfo {
            email: Some("user@example.com".to_string()),
            chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
            chatgpt_user_id: Some("user-1".to_string()),
            chatgpt_account_id: Some("ws-1".to_string()),
            raw_jwt: jwt.to_string(),
        }
    );
}

View File

@@ -1,10 +1,10 @@
use codex_core::AuthManager;
use codex_core::TokenData;
use std::path::Path;
use std::sync::LazyLock;
use std::sync::RwLock;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::token_data::TokenData;
static CHATGPT_TOKEN: LazyLock<RwLock<Option<TokenData>>> = LazyLock::new(|| RwLock::new(None));

View File

@@ -1,6 +1,6 @@
use codex_core::AuthManager;
use codex_core::TokenData;
use codex_core::config::Config;
use codex_core::token_data::TokenData;
use std::collections::HashSet;
use std::time::Duration;

View File

@@ -4,25 +4,17 @@ mod pid_tracker;
mod seatbelt;
use std::path::PathBuf;
use std::process::Stdio;
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config::ConfigOverrides;
use codex_core::config::NetworkProxyAuditMetadata;
use codex_core::exec_env::create_env;
use codex_core::landlock::create_linux_sandbox_command_args_for_policies;
use codex_core::landlock::spawn_command_under_linux_sandbox;
#[cfg(target_os = "macos")]
use codex_core::seatbelt::create_seatbelt_command_args_for_policies_with_extensions;
#[cfg(target_os = "macos")]
use codex_core::spawn::CODEX_SANDBOX_ENV_VAR;
use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR;
use codex_core::seatbelt::spawn_command_under_seatbelt;
use codex_core::spawn::StdioPolicy;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::permissions::NetworkSandboxPolicy;
use codex_utils_cli::CliConfigOverrides;
use tokio::process::Child;
use tokio::process::Command as TokioCommand;
use toml::Value as TomlValue;
use crate::LandlockCommand;
use crate::SeatbeltCommand;
@@ -117,12 +109,16 @@ async fn run_command_under_sandbox(
sandbox_type: SandboxType,
log_denials: bool,
) -> anyhow::Result<()> {
let config = load_debug_sandbox_config(
let sandbox_mode = create_sandbox_mode(full_auto);
let config = Config::load_with_cli_overrides_and_harness_overrides(
config_overrides
.parse_overrides()
.map_err(anyhow::Error::msg)?,
codex_linux_sandbox_exe,
full_auto,
ConfigOverrides {
sandbox_mode: Some(sandbox_mode),
codex_linux_sandbox_exe,
..Default::default()
},
)
.await?;
@@ -134,6 +130,7 @@ async fn run_command_under_sandbox(
// separately.
let sandbox_policy_cwd = cwd.clone();
let stdio_policy = StdioPolicy::Inherit;
let env = create_env(
&config.permissions.shell_environment_policy,
/*thread_id*/ None,
@@ -246,29 +243,14 @@ async fn run_command_under_sandbox(
let mut child = match sandbox_type {
#[cfg(target_os = "macos")]
SandboxType::Seatbelt => {
let args = create_seatbelt_command_args_for_policies_with_extensions(
spawn_command_under_seatbelt(
command,
&config.permissions.file_system_sandbox_policy,
config.permissions.network_sandbox_policy,
sandbox_policy_cwd.as_path(),
false,
network.as_ref(),
None,
);
let network_policy = config.permissions.network_sandbox_policy;
spawn_debug_sandbox_child(
PathBuf::from("/usr/bin/sandbox-exec"),
args,
None,
cwd,
network_policy,
config.permissions.sandbox_policy.get(),
sandbox_policy_cwd.as_path(),
stdio_policy,
network.as_ref(),
env,
|env_map| {
env_map.insert(CODEX_SANDBOX_ENV_VAR.to_string(), "seatbelt".to_string());
if let Some(network) = network.as_ref() {
network.apply_to_env(env_map);
}
},
)
.await?
}
@@ -278,29 +260,16 @@ async fn run_command_under_sandbox(
.codex_linux_sandbox_exe
.expect("codex-linux-sandbox executable not found");
let use_legacy_landlock = config.features.use_legacy_landlock();
let args = create_linux_sandbox_command_args_for_policies(
spawn_command_under_linux_sandbox(
codex_linux_sandbox_exe,
command,
cwd.as_path(),
cwd,
config.permissions.sandbox_policy.get(),
&config.permissions.file_system_sandbox_policy,
config.permissions.network_sandbox_policy,
sandbox_policy_cwd.as_path(),
use_legacy_landlock,
/*allow_network_for_proxy*/ false,
);
let network_policy = config.permissions.network_sandbox_policy;
spawn_debug_sandbox_child(
codex_linux_sandbox_exe,
args,
Some("codex-linux-sandbox"),
cwd,
network_policy,
stdio_policy,
network.as_ref(),
env,
|env_map| {
if let Some(network) = network.as_ref() {
network.apply_to_env(env_map);
}
},
)
.await?
}
@@ -339,218 +308,3 @@ pub fn create_sandbox_mode(full_auto: bool) -> SandboxMode {
SandboxMode::ReadOnly
}
}
async fn spawn_debug_sandbox_child(
program: PathBuf,
args: Vec<String>,
arg0: Option<&str>,
cwd: PathBuf,
network_sandbox_policy: NetworkSandboxPolicy,
mut env: std::collections::HashMap<String, String>,
apply_env: impl FnOnce(&mut std::collections::HashMap<String, String>),
) -> std::io::Result<Child> {
let mut cmd = TokioCommand::new(&program);
#[cfg(unix)]
cmd.arg0(arg0.map_or_else(|| program.to_string_lossy().to_string(), String::from));
#[cfg(not(unix))]
let _ = arg0;
cmd.args(args);
cmd.current_dir(cwd);
apply_env(&mut env);
cmd.env_clear();
cmd.envs(env);
if !network_sandbox_policy.is_enabled() {
cmd.env(CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR, "1");
}
cmd.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.kill_on_drop(true)
.spawn()
}
async fn load_debug_sandbox_config(
cli_overrides: Vec<(String, TomlValue)>,
codex_linux_sandbox_exe: Option<PathBuf>,
full_auto: bool,
) -> anyhow::Result<Config> {
load_debug_sandbox_config_with_codex_home(
cli_overrides,
codex_linux_sandbox_exe,
full_auto,
/*codex_home*/ None,
)
.await
}
async fn load_debug_sandbox_config_with_codex_home(
cli_overrides: Vec<(String, TomlValue)>,
codex_linux_sandbox_exe: Option<PathBuf>,
full_auto: bool,
codex_home: Option<PathBuf>,
) -> anyhow::Result<Config> {
let config = build_debug_sandbox_config(
cli_overrides.clone(),
ConfigOverrides {
codex_linux_sandbox_exe: codex_linux_sandbox_exe.clone(),
..Default::default()
},
codex_home.clone(),
)
.await?;
if config_uses_permission_profiles(&config) {
if full_auto {
anyhow::bail!(
"`codex sandbox --full-auto` is only supported for legacy `sandbox_mode` configs; choose a writable `[permissions]` profile instead"
);
}
return Ok(config);
}
build_debug_sandbox_config(
cli_overrides,
ConfigOverrides {
sandbox_mode: Some(create_sandbox_mode(full_auto)),
codex_linux_sandbox_exe,
..Default::default()
},
codex_home,
)
.await
.map_err(Into::into)
}
async fn build_debug_sandbox_config(
cli_overrides: Vec<(String, TomlValue)>,
harness_overrides: ConfigOverrides,
codex_home: Option<PathBuf>,
) -> std::io::Result<Config> {
let mut builder = ConfigBuilder::default()
.cli_overrides(cli_overrides)
.harness_overrides(harness_overrides);
if let Some(codex_home) = codex_home {
builder = builder
.codex_home(codex_home.clone())
.fallback_cwd(Some(codex_home));
}
builder.build().await
}
fn config_uses_permission_profiles(config: &Config) -> bool {
config
.config_layer_stack
.effective_config()
.get("default_permissions")
.is_some()
}
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
fn escape_toml_path(path: &std::path::Path) -> String {
path.display().to_string().replace('\\', "\\\\")
}
fn write_permissions_profile_config(
codex_home: &TempDir,
docs: &std::path::Path,
private: &std::path::Path,
) -> std::io::Result<()> {
std::fs::create_dir_all(private)?;
let config = format!(
"default_permissions = \"limited-read-test\"\n\
[permissions.limited-read-test.filesystem]\n\
\":minimal\" = \"read\"\n\
\"{}\" = \"read\"\n\
\"{}\" = \"none\"\n\
\n\
[permissions.limited-read-test.network]\n\
enabled = true\n",
escape_toml_path(docs),
escape_toml_path(private),
);
std::fs::write(codex_home.path().join("config.toml"), config)?;
Ok(())
}
#[tokio::test]
async fn debug_sandbox_honors_active_permission_profiles() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
let sandbox_paths = TempDir::new()?;
let docs = sandbox_paths.path().join("docs");
let private = docs.join("private");
write_permissions_profile_config(&codex_home, &docs, &private)?;
let codex_home_path = codex_home.path().to_path_buf();
let profile_config = build_debug_sandbox_config(
Vec::new(),
ConfigOverrides::default(),
Some(codex_home_path.clone()),
)
.await?;
let legacy_config = build_debug_sandbox_config(
Vec::new(),
ConfigOverrides {
sandbox_mode: Some(create_sandbox_mode(false)),
..Default::default()
},
Some(codex_home_path.clone()),
)
.await?;
let config = load_debug_sandbox_config_with_codex_home(
Vec::new(),
None,
false,
Some(codex_home_path),
)
.await?;
assert!(config_uses_permission_profiles(&config));
assert!(
profile_config.permissions.file_system_sandbox_policy
!= legacy_config.permissions.file_system_sandbox_policy,
"test fixture should distinguish profile syntax from legacy sandbox_mode"
);
assert_eq!(
config.permissions.file_system_sandbox_policy,
profile_config.permissions.file_system_sandbox_policy,
);
assert_ne!(
config.permissions.file_system_sandbox_policy,
legacy_config.permissions.file_system_sandbox_policy,
);
Ok(())
}
#[tokio::test]
async fn debug_sandbox_rejects_full_auto_for_permission_profiles() -> anyhow::Result<()> {
let codex_home = TempDir::new()?;
let sandbox_paths = TempDir::new()?;
let docs = sandbox_paths.path().join("docs");
let private = docs.join("private");
write_permissions_profile_config(&codex_home, &docs, &private)?;
let err = load_debug_sandbox_config_with_codex_home(
Vec::new(),
None,
true,
Some(codex_home.path().to_path_buf()),
)
.await
.expect_err("full-auto should be rejected for active permission profiles");
assert!(
err.to_string().contains("--full-auto"),
"unexpected error: {err}"
);
Ok(())
}
}

View File

@@ -331,17 +331,6 @@ struct AppServerCommand {
)]
listen: codex_app_server::AppServerTransport,
/// Session source stamped into new threads started by this app-server.
///
/// Known values such as `vscode`, `cli`, `exec`, and `mcp` map to built-in
/// sources. Any other non-empty value is recorded as a custom source.
#[arg(
long = "session-source",
value_name = "SOURCE",
default_value = "vscode"
)]
session_source: String,
/// Controls whether analytics are enabled by default.
///
/// Analytics are disabled by default for app-server. Users have to explicitly opt in
@@ -654,17 +643,12 @@ async fn cli_main(arg0_paths: Arg0DispatchPaths) -> anyhow::Result<()> {
None => {
reject_remote_mode_for_subcommand(root_remote.as_deref(), "app-server")?;
let transport = app_server_cli.listen;
let session_source = codex_protocol::protocol::SessionSource::from_startup_arg(
app_server_cli.session_source.as_str(),
)
.map_err(|err| anyhow::anyhow!("invalid --session-source: {err}"))?;
codex_app_server::run_main_with_transport(
arg0_paths.clone(),
root_config_overrides,
codex_core::config_loader::LoaderOverrides::default(),
app_server_cli.analytics_default_enabled,
transport,
session_source,
)
.await?;
}
@@ -1631,7 +1615,6 @@ mod tests {
app_server.listen,
codex_app_server::AppServerTransport::Stdio
);
assert_eq!(app_server.session_source, "vscode");
}
#[test]
@@ -1641,13 +1624,6 @@ mod tests {
assert!(app_server.analytics_default_enabled);
}
#[test]
fn app_server_session_source_accepts_custom_value() {
let app_server =
app_server_from_args(["codex", "app-server", "--session-source", "atlas"].as_ref());
assert_eq!(app_server.session_source, "atlas");
}
#[test]
fn remote_flag_parses_for_interactive_root() {
let cli = MultitoolCli::try_parse_from(["codex", "--remote", "ws://127.0.0.1:4500"])

View File

@@ -1122,7 +1122,6 @@ mod tests {
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1167,7 +1166,6 @@ mod tests {
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1248,7 +1246,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1300,7 +1297,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1352,7 +1348,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1514,7 +1509,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1544,7 +1538,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1594,7 +1587,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::OnRequest]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1643,7 +1635,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::OnRequest]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1696,7 +1687,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1750,7 +1740,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1804,7 +1793,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1891,7 +1879,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::Never]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,
@@ -1917,7 +1904,6 @@ enabled = false
allowed_approval_policies: Some(vec![AskForApproval::OnRequest]),
allowed_sandbox_modes: None,
allowed_web_search_modes: None,
guardian_developer_instructions: None,
feature_requirements: None,
mcp_servers: None,
apps: None,

View File

@@ -93,6 +93,7 @@ async fn models_client_hits_models_endpoint() {
effective_context_window_percent: 95,
experimental_supported_tools: Vec::new(),
input_modalities: default_input_modalities(),
prefer_websockets: false,
used_fallback_model_metadata: false,
supports_search_tool: false,
}],

View File

@@ -1,23 +0,0 @@
[package]
name = "codex-auth"
version.workspace = true
edition.workspace = true
license.workspace = true
[lints]
workspace = true
[dependencies]
base64 = { workspace = true }
codex-api = { workspace = true }
codex-app-server-protocol = { workspace = true }
http = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
[dev-dependencies]
maplit = { workspace = true }
pretty_assertions = { workspace = true }
toml = { workspace = true }

View File

@@ -1,15 +0,0 @@
/// Error raised when a provider's configured environment variable is missing
/// or blank.
#[derive(Debug)]
pub struct EnvVarError {
    /// Name of the environment variable that could not be resolved.
    pub var: String,
    /// Optional provider-supplied guidance on how to set the variable.
    pub instructions: Option<String>,
}

impl std::fmt::Display for EnvVarError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render the base message, appending the instructions (separated by a
        // single space) only when they are present.
        match &self.instructions {
            Some(extra) => write!(
                f,
                "Missing environment variable: `{var}`. {extra}",
                var = self.var
            ),
            None => write!(f, "Missing environment variable: `{var}`.", var = self.var),
        }
    }
}

View File

@@ -1,19 +0,0 @@
//! Provider configuration: model-provider definitions, env-var errors, and
//! token data.

pub mod error;
pub mod provider;
pub mod token_data;

#[cfg(test)]
mod model_provider_info_tests;
#[cfg(test)]
mod token_data_tests;

// Re-exported at the crate root so callers don't have to name the submodules.
pub use error::EnvVarError;
pub use provider::DEFAULT_LMSTUDIO_PORT;
pub use provider::DEFAULT_OLLAMA_PORT;
pub use provider::LMSTUDIO_OSS_PROVIDER_ID;
pub use provider::ModelProviderInfo;
pub use provider::OLLAMA_OSS_PROVIDER_ID;
pub use provider::OPENAI_PROVIDER_ID;
pub use provider::WireApi;
pub use provider::built_in_model_providers;
pub use provider::create_oss_provider_with_base_url;

View File

@@ -1,291 +0,0 @@
use crate::error::EnvVarError;
use codex_api::Provider as ApiProvider;
use codex_api::provider::RetryConfig as ApiRetryConfig;
use codex_app_server_protocol::AuthMode as ApiAuthMode;
use http::HeaderMap;
use http::header::HeaderName;
use http::header::HeaderValue;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use std::collections::HashMap;
use std::fmt;
use std::time::Duration;
// Defaults for the stream idle timeout (ms) and retry budgets; see the
// accessor methods on `ModelProviderInfo` for how they are applied.
const DEFAULT_STREAM_IDLE_TIMEOUT_MS: u64 = 300_000;
const DEFAULT_STREAM_MAX_RETRIES: u64 = 5;
const DEFAULT_REQUEST_MAX_RETRIES: u64 = 4;
pub const DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS: u64 = 15_000;
// Hard caps applied to user-configured retry counts.
const MAX_STREAM_MAX_RETRIES: u64 = 100;
const MAX_REQUEST_MAX_RETRIES: u64 = 100;
// Display name and stable id of the built-in OpenAI provider.
const OPENAI_PROVIDER_NAME: &str = "OpenAI";
pub const OPENAI_PROVIDER_ID: &str = "openai";
// User-facing migration errors for removed legacy configuration values.
pub const CHAT_WIRE_API_REMOVED_ERROR: &str = "`wire_api = \"chat\"` is no longer supported.\nHow to fix: set `wire_api = \"responses\"` in your provider config.\nMore info: https://github.com/openai/codex/discussions/7782";
pub const LEGACY_OLLAMA_CHAT_PROVIDER_ID: &str = "ollama-chat";
pub const OLLAMA_CHAT_PROVIDER_REMOVED_ERROR: &str = "`ollama-chat` is no longer supported.\nHow to fix: replace `ollama-chat` with `ollama` in `model_provider`, `oss_provider`, or `--local-provider`.\nMore info: https://github.com/openai/codex/discussions/7782";
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
#[schemars(rename = "WireApi")]
pub enum WireApi {
#[default]
Responses,
}
impl fmt::Display for WireApi {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let value = match self {
Self::Responses => "responses",
};
f.write_str(value)
}
}
impl<'de> Deserialize<'de> for WireApi {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let value = String::deserialize(deserializer)?;
match value.as_str() {
"responses" => Ok(Self::Responses),
"chat" => Err(serde::de::Error::custom(CHAT_WIRE_API_REMOVED_ERROR)),
_ => Err(serde::de::Error::unknown_variant(&value, &["responses"])),
}
}
}
// Configuration describing a single model provider. Field comments use `//`
// rather than `///` so the generated JSON schema is unchanged.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema)]
#[schemars(deny_unknown_fields)]
#[schemars(rename = "ModelProviderInfo")]
pub struct ModelProviderInfo {
    // Human-readable display name (e.g. "OpenAI").
    pub name: String,
    // Override for the API base URL; when unset, a default is chosen based
    // on the auth mode (see `to_api_provider`).
    pub base_url: Option<String>,
    // Environment variable holding the API key, if the provider uses one.
    pub env_key: Option<String>,
    // Shown to the user when the `env_key` variable is missing or blank.
    pub env_key_instructions: Option<String>,
    pub experimental_bearer_token: Option<String>,
    // Wire protocol; only the Responses API is accepted (see `WireApi`).
    #[serde(default)]
    pub wire_api: WireApi,
    // Extra query parameters appended to requests.
    pub query_params: Option<HashMap<String, String>>,
    // Static headers sent on every request.
    pub http_headers: Option<HashMap<String, String>>,
    // Headers sourced from environment variables (header name -> env var).
    pub env_http_headers: Option<HashMap<String, String>>,
    // Retry/timeout knobs; the accessor methods apply defaults and caps.
    pub request_max_retries: Option<u64>,
    pub stream_max_retries: Option<u64>,
    pub stream_idle_timeout_ms: Option<u64>,
    #[schemars(
        description = "Maximum time (in milliseconds) to wait for a websocket connection attempt before treating it as failed."
    )]
    pub websocket_connect_timeout_ms: Option<u64>,
    // Whether the provider authenticates via OpenAI credentials.
    #[serde(default)]
    pub requires_openai_auth: bool,
    #[serde(default)]
    pub supports_websockets: bool,
}
impl ModelProviderInfo {
    /// Assembles the outgoing HTTP headers from the static `http_headers`
    /// map plus any `env_http_headers` entry whose environment variable is
    /// set to a non-blank value. Entries that fail header-name/value
    /// validation are skipped silently.
    fn build_header_map(&self) -> HeaderMap {
        let capacity = self.http_headers.as_ref().map_or(0, HashMap::len)
            + self.env_http_headers.as_ref().map_or(0, HashMap::len);
        let mut headers = HeaderMap::with_capacity(capacity);
        if let Some(extra) = &self.http_headers {
            for (k, v) in extra {
                // Drop entries that are not valid HTTP header names/values.
                if let (Ok(name), Ok(value)) = (HeaderName::try_from(k), HeaderValue::try_from(v)) {
                    headers.insert(name, value);
                }
            }
        }
        if let Some(env_headers) = &self.env_http_headers {
            for (header, env_var) in env_headers {
                // Forward env-derived headers only when the variable is set
                // to a non-blank value and both name and value parse.
                if let Ok(val) = std::env::var(env_var)
                    && !val.trim().is_empty()
                    && let (Ok(name), Ok(value)) =
                        (HeaderName::try_from(header), HeaderValue::try_from(val))
                {
                    headers.insert(name, value);
                }
            }
        }
        headers
    }

    /// Lowers this config into the transport-level `codex_api::Provider`.
    ///
    /// When `base_url` is unset, ChatGPT-style auth modes default to the
    /// ChatGPT backend URL and everything else defaults to the public OpenAI
    /// API URL.
    ///
    /// NOTE: this currently always returns `Ok`; the `Result` return type is
    /// kept for interface stability.
    pub fn to_api_provider(
        &self,
        auth_mode: Option<ApiAuthMode>,
    ) -> Result<ApiProvider, EnvVarError> {
        let default_base_url = if matches!(
            auth_mode,
            Some(ApiAuthMode::Chatgpt | ApiAuthMode::ChatgptAuthTokens)
        ) {
            "https://chatgpt.com/backend-api/codex"
        } else {
            "https://api.openai.com/v1"
        };
        let base_url = self
            .base_url
            .clone()
            .unwrap_or_else(|| default_base_url.to_string());
        let retry = ApiRetryConfig {
            max_attempts: self.request_max_retries(),
            base_delay: Duration::from_millis(200),
            retry_429: false,
            retry_5xx: true,
            retry_transport: true,
        };
        Ok(ApiProvider {
            name: self.name.clone(),
            base_url,
            query_params: self.query_params.clone(),
            headers: self.build_header_map(),
            retry,
            stream_idle_timeout: self.stream_idle_timeout(),
        })
    }

    /// Resolves the provider's API key from the configured `env_key`.
    ///
    /// Returns `Ok(None)` when no `env_key` is configured. When one is
    /// configured but the variable is missing or blank, returns an
    /// `EnvVarError` carrying `env_key_instructions` for the user.
    pub fn api_key(&self) -> Result<Option<String>, EnvVarError> {
        match &self.env_key {
            Some(env_key) => {
                let api_key = std::env::var(env_key)
                    .ok()
                    .filter(|v| !v.trim().is_empty())
                    .ok_or_else(|| EnvVarError {
                        var: env_key.clone(),
                        instructions: self.env_key_instructions.clone(),
                    })?;
                Ok(Some(api_key))
            }
            None => Ok(None),
        }
    }

    /// Configured request retry budget, defaulted and clamped to the cap.
    pub fn request_max_retries(&self) -> u64 {
        self.request_max_retries
            .unwrap_or(DEFAULT_REQUEST_MAX_RETRIES)
            .min(MAX_REQUEST_MAX_RETRIES)
    }

    /// Configured stream retry budget, defaulted and clamped to the cap.
    pub fn stream_max_retries(&self) -> u64 {
        self.stream_max_retries
            .unwrap_or(DEFAULT_STREAM_MAX_RETRIES)
            .min(MAX_STREAM_MAX_RETRIES)
    }

    /// How long a response stream may stay idle before it is treated as
    /// stalled.
    pub fn stream_idle_timeout(&self) -> Duration {
        self.stream_idle_timeout_ms
            .map(Duration::from_millis)
            .unwrap_or(Duration::from_millis(DEFAULT_STREAM_IDLE_TIMEOUT_MS))
    }

    /// Maximum time to wait for a websocket connection attempt.
    pub fn websocket_connect_timeout(&self) -> Duration {
        self.websocket_connect_timeout_ms
            .map(Duration::from_millis)
            .unwrap_or(Duration::from_millis(DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS))
    }

    /// Builds the built-in OpenAI provider definition.
    ///
    /// Sends the crate version as a `version` header, forwards the optional
    /// `OPENAI_ORGANIZATION` / `OPENAI_PROJECT` env vars as headers,
    /// requires OpenAI auth, and supports websockets.
    pub fn create_openai_provider(base_url: Option<String>) -> ModelProviderInfo {
        ModelProviderInfo {
            name: OPENAI_PROVIDER_NAME.into(),
            base_url,
            env_key: None,
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: Some(
                [("version".to_string(), env!("CARGO_PKG_VERSION").to_string())]
                    .into_iter()
                    .collect(),
            ),
            env_http_headers: Some(
                [
                    (
                        "OpenAI-Organization".to_string(),
                        "OPENAI_ORGANIZATION".to_string(),
                    ),
                    ("OpenAI-Project".to_string(), "OPENAI_PROJECT".to_string()),
                ]
                .into_iter()
                .collect(),
            ),
            request_max_retries: None,
            stream_max_retries: None,
            stream_idle_timeout_ms: None,
            websocket_connect_timeout_ms: None,
            requires_openai_auth: true,
            supports_websockets: true,
        }
    }

    /// True when this provider's display name matches the built-in OpenAI
    /// provider. Note this compares the display name, not the provider id.
    pub fn is_openai(&self) -> bool {
        self.name == OPENAI_PROVIDER_NAME
    }
}
/// Default local ports for the supported OSS model servers.
pub const DEFAULT_LMSTUDIO_PORT: u16 = 1234;
pub const DEFAULT_OLLAMA_PORT: u16 = 11434;
/// Stable provider ids for the built-in OSS providers.
pub const LMSTUDIO_OSS_PROVIDER_ID: &str = "lmstudio";
pub const OLLAMA_OSS_PROVIDER_ID: &str = "ollama";
pub fn built_in_model_providers(
openai_base_url: Option<String>,
) -> HashMap<String, ModelProviderInfo> {
use ModelProviderInfo as P;
let openai_provider = P::create_openai_provider(openai_base_url);
[
(OPENAI_PROVIDER_ID, openai_provider),
(
OLLAMA_OSS_PROVIDER_ID,
create_oss_provider(DEFAULT_OLLAMA_PORT, WireApi::Responses),
),
(
LMSTUDIO_OSS_PROVIDER_ID,
create_oss_provider(DEFAULT_LMSTUDIO_PORT, WireApi::Responses),
),
]
.into_iter()
.map(|(k, v)| (k.to_string(), v))
.collect()
}
/// Builds the local OSS provider, honoring `CODEX_OSS_BASE_URL` (full URL
/// override) and `CODEX_OSS_PORT` (port override for the default
/// `http://localhost:<port>/v1` URL).
pub fn create_oss_provider(default_provider_port: u16, wire_api: WireApi) -> ModelProviderInfo {
    // Port precedence: CODEX_OSS_PORT when set, non-blank, and parseable as
    // u16; otherwise the caller-supplied default.
    let port = std::env::var("CODEX_OSS_PORT")
        .ok()
        .filter(|value| !value.trim().is_empty())
        .and_then(|value| value.parse::<u16>().ok())
        .unwrap_or(default_provider_port);
    // A non-blank CODEX_OSS_BASE_URL wins over the port-derived default.
    let base_url = match std::env::var("CODEX_OSS_BASE_URL") {
        Ok(url) if !url.trim().is_empty() => url,
        _ => format!("http://localhost:{port}/v1"),
    };
    create_oss_provider_with_base_url(&base_url, wire_api)
}
/// Builds the `gpt-oss` provider description pointing at `base_url`.
///
/// OSS providers use no API key and no extra headers, keep the default
/// retry/timeout settings, do not require OpenAI auth, and do not support
/// websockets.
pub fn create_oss_provider_with_base_url(base_url: &str, wire_api: WireApi) -> ModelProviderInfo {
    ModelProviderInfo {
        name: "gpt-oss".into(),
        base_url: Some(base_url.into()),
        env_key: None,
        env_key_instructions: None,
        experimental_bearer_token: None,
        wire_api,
        query_params: None,
        http_headers: None,
        env_http_headers: None,
        request_max_retries: None,
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        websocket_connect_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    }
}

View File

@@ -4,10 +4,14 @@ version.workspace = true
edition.workspace = true
license.workspace = true
[lib]
doctest = false
[lints]
workspace = true
[dependencies]
base64 = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-execpolicy = { workspace = true }
codex-protocol = { workspace = true }
@@ -24,6 +28,16 @@ toml = { workspace = true }
toml_edit = { workspace = true }
tracing = { workspace = true }
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation = "0.9"
[target.'cfg(target_os = "windows")'.dependencies]
windows-sys = { version = "0.52", features = [
"Win32_Foundation",
"Win32_System_Com",
"Win32_UI_Shell",
] }
[dev-dependencies]
anyhow = { workspace = true }
pretty_assertions = { workspace = true }

View File

@@ -299,7 +299,6 @@ pub struct ConfigRequirementsToml {
pub enforce_residency: Option<ResidencyRequirement>,
#[serde(rename = "experimental_network")]
pub network: Option<NetworkRequirementsToml>,
pub guardian_developer_instructions: Option<String>,
}
/// Value paired with the requirement source it came from, for better error
@@ -335,7 +334,6 @@ pub struct ConfigRequirementsWithSources {
pub rules: Option<Sourced<RequirementsExecPolicyToml>>,
pub enforce_residency: Option<Sourced<ResidencyRequirement>>,
pub network: Option<Sourced<NetworkRequirementsToml>>,
pub guardian_developer_instructions: Option<Sourced<String>>,
}
impl ConfigRequirementsWithSources {
@@ -366,17 +364,9 @@ impl ConfigRequirementsWithSources {
rules: _,
enforce_residency: _,
network: _,
guardian_developer_instructions: _,
} = &other;
let mut other = other;
if other
.guardian_developer_instructions
.as_deref()
.is_some_and(|value| value.trim().is_empty())
{
other.guardian_developer_instructions = None;
}
fill_missing_take!(
self,
other,
@@ -390,7 +380,6 @@ impl ConfigRequirementsWithSources {
rules,
enforce_residency,
network,
guardian_developer_instructions,
}
);
@@ -414,7 +403,6 @@ impl ConfigRequirementsWithSources {
rules,
enforce_residency,
network,
guardian_developer_instructions,
} = self;
ConfigRequirementsToml {
allowed_approval_policies: allowed_approval_policies.map(|sourced| sourced.value),
@@ -426,8 +414,6 @@ impl ConfigRequirementsWithSources {
rules: rules.map(|sourced| sourced.value),
enforce_residency: enforce_residency.map(|sourced| sourced.value),
network: network.map(|sourced| sourced.value),
guardian_developer_instructions: guardian_developer_instructions
.map(|sourced| sourced.value),
}
}
}
@@ -482,10 +468,6 @@ impl ConfigRequirementsToml {
&& self.rules.is_none()
&& self.enforce_residency.is_none()
&& self.network.is_none()
&& self
.guardian_developer_instructions
.as_deref()
.is_none_or(|value| value.trim().is_empty())
}
}
@@ -503,7 +485,6 @@ impl TryFrom<ConfigRequirementsWithSources> for ConfigRequirements {
rules,
enforce_residency,
network,
guardian_developer_instructions: _guardian_developer_instructions,
} = toml;
let approval_policy = match allowed_approval_policies {
@@ -724,7 +705,6 @@ mod tests {
rules,
enforce_residency,
network,
guardian_developer_instructions,
} = toml;
ConfigRequirementsWithSources {
allowed_approval_policies: allowed_approval_policies
@@ -741,8 +721,6 @@ mod tests {
enforce_residency: enforce_residency
.map(|value| Sourced::new(value, RequirementSource::Unknown)),
network: network.map(|value| Sourced::new(value, RequirementSource::Unknown)),
guardian_developer_instructions: guardian_developer_instructions
.map(|value| Sourced::new(value, RequirementSource::Unknown)),
}
}
@@ -765,8 +743,6 @@ mod tests {
};
let enforce_residency = ResidencyRequirement::Us;
let enforce_source = source.clone();
let guardian_developer_instructions =
"Use the company-managed guardian policy.".to_string();
// Intentionally constructed without `..Default::default()` so adding a new field to
// `ConfigRequirementsToml` forces this test to be updated.
@@ -780,7 +756,6 @@ mod tests {
rules: None,
enforce_residency: Some(enforce_residency),
network: None,
guardian_developer_instructions: Some(guardian_developer_instructions.clone()),
};
target.merge_unset_fields(source.clone(), other);
@@ -792,7 +767,7 @@ mod tests {
allowed_approval_policies,
source.clone()
)),
allowed_sandbox_modes: Some(Sourced::new(allowed_sandbox_modes, source.clone(),)),
allowed_sandbox_modes: Some(Sourced::new(allowed_sandbox_modes, source)),
allowed_web_search_modes: Some(Sourced::new(
allowed_web_search_modes,
enforce_source.clone(),
@@ -806,10 +781,6 @@ mod tests {
rules: None,
enforce_residency: Some(Sourced::new(enforce_residency, enforce_source)),
network: None,
guardian_developer_instructions: Some(Sourced::new(
guardian_developer_instructions,
source,
)),
}
);
}
@@ -844,7 +815,6 @@ mod tests {
rules: None,
enforce_residency: None,
network: None,
guardian_developer_instructions: None,
}
);
Ok(())
@@ -887,78 +857,11 @@ mod tests {
rules: None,
enforce_residency: None,
network: None,
guardian_developer_instructions: None,
}
);
Ok(())
}
#[test]
fn merge_unset_fields_ignores_blank_guardian_override() {
let mut target = ConfigRequirementsWithSources::default();
target.merge_unset_fields(
RequirementSource::CloudRequirements,
ConfigRequirementsToml {
guardian_developer_instructions: Some(" \n\t".to_string()),
..Default::default()
},
);
target.merge_unset_fields(
RequirementSource::SystemRequirementsToml {
file: system_requirements_toml_file_for_test()
.expect("system requirements.toml path"),
},
ConfigRequirementsToml {
guardian_developer_instructions: Some(
"Use the system guardian policy.".to_string(),
),
..Default::default()
},
);
assert_eq!(
target.guardian_developer_instructions,
Some(Sourced::new(
"Use the system guardian policy.".to_string(),
RequirementSource::SystemRequirementsToml {
file: system_requirements_toml_file_for_test()
.expect("system requirements.toml path"),
},
)),
);
}
#[test]
fn deserialize_guardian_developer_instructions() -> Result<()> {
let requirements: ConfigRequirementsToml = from_str(
r#"
guardian_developer_instructions = """
Use the cloud-managed guardian policy.
"""
"#,
)?;
assert_eq!(
requirements.guardian_developer_instructions.as_deref(),
Some("Use the cloud-managed guardian policy.\n")
);
Ok(())
}
#[test]
fn blank_guardian_developer_instructions_is_empty() -> Result<()> {
let requirements: ConfigRequirementsToml = from_str(
r#"
guardian_developer_instructions = """
"""
"#,
)?;
assert!(requirements.is_empty());
Ok(())
}
#[test]
fn deserialize_apps_requirements() -> Result<()> {
let toml_str = r#"

View File

@@ -1,10 +1,10 @@
use super::LoaderOverrides;
use crate::LoaderOverrides;
use crate::config_error_from_toml;
use crate::io_error_from_config_error;
#[cfg(target_os = "macos")]
use super::macos::ManagedAdminConfigLayer;
use crate::macos::ManagedAdminConfigLayer;
#[cfg(target_os = "macos")]
use super::macos::load_managed_admin_config_layer;
use codex_config::config_error_from_toml;
use codex_config::io_error_from_config_error;
use crate::macos::load_managed_admin_config_layer;
use codex_utils_absolute_path::AbsolutePathBuf;
use std::io;
use std::path::Path;
@@ -16,26 +16,26 @@ use toml::Value as TomlValue;
const CODEX_MANAGED_CONFIG_SYSTEM_PATH: &str = "/etc/codex/managed_config.toml";
#[derive(Debug, Clone)]
pub(super) struct MangedConfigFromFile {
pub struct ManagedConfigFromFile {
pub managed_config: TomlValue,
pub file: AbsolutePathBuf,
}
#[derive(Debug, Clone)]
pub(super) struct ManagedConfigFromMdm {
pub struct ManagedConfigFromMdm {
pub managed_config: TomlValue,
pub raw_toml: String,
}
#[derive(Debug, Clone)]
pub(super) struct LoadedConfigLayers {
pub struct LoadedConfigLayers {
/// If present, data read from a file such as `/etc/codex/managed_config.toml`.
pub managed_config: Option<MangedConfigFromFile>,
pub managed_config: Option<ManagedConfigFromFile>,
/// If present, data read from managed preferences (macOS only).
pub managed_config_from_mdm: Option<ManagedConfigFromMdm>,
}
pub(super) async fn load_config_layers_internal(
pub async fn load_config_layers_internal(
codex_home: &Path,
overrides: LoaderOverrides,
) -> io::Result<LoadedConfigLayers> {
@@ -59,7 +59,7 @@ pub(super) async fn load_config_layers_internal(
let managed_config =
read_config_from_path(&managed_config_path, /*log_missing_as_info*/ false)
.await?
.map(|managed_config| MangedConfigFromFile {
.map(|managed_config| ManagedConfigFromFile {
managed_config,
file: managed_config_path.clone(),
});
@@ -88,7 +88,7 @@ fn map_managed_admin_layer(layer: ManagedAdminConfigLayer) -> ManagedConfigFromM
}
}
pub(super) async fn read_config_from_path(
async fn read_config_from_path(
path: impl AsRef<Path>,
log_missing_as_info: bool,
) -> io::Result<Option<TomlValue>> {
@@ -120,8 +120,7 @@ pub(super) async fn read_config_from_path(
}
}
/// Return the default managed config path.
pub(super) fn managed_config_default_path(codex_home: &Path) -> PathBuf {
fn managed_config_default_path(codex_home: &Path) -> PathBuf {
#[cfg(unix)]
{
let _ = codex_home;

View File

@@ -3,6 +3,10 @@ mod config_requirements;
mod constraint;
mod diagnostics;
mod fingerprint;
mod layer_io;
mod loader;
#[cfg(target_os = "macos")]
mod macos;
mod merge;
mod overrides;
mod requirements_exec_policy;
@@ -44,6 +48,15 @@ pub use diagnostics::format_config_error;
pub use diagnostics::format_config_error_with_source;
pub use diagnostics::io_error_from_config_error;
pub use fingerprint::version_for_toml;
pub use layer_io::LoadedConfigLayers;
pub use layer_io::ManagedConfigFromFile;
pub use layer_io::ManagedConfigFromMdm;
pub use layer_io::load_config_layers_internal;
pub use loader::load_managed_admin_requirements;
pub use loader::load_requirements_from_legacy_scheme;
pub use loader::load_requirements_toml;
pub use loader::system_config_toml_file;
pub use loader::system_requirements_toml_file;
pub use merge::merge_toml_values;
pub use overrides::build_cli_overrides_layer;
pub use requirements_exec_policy::RequirementsExecPolicy;

View File

@@ -0,0 +1,236 @@
use crate::ConfigRequirementsToml;
use crate::ConfigRequirementsWithSources;
use crate::LoadedConfigLayers;
use crate::RequirementSource;
#[cfg(target_os = "macos")]
use crate::macos::load_managed_admin_requirements_toml;
use codex_protocol::config_types::SandboxMode;
use codex_protocol::protocol::AskForApproval;
use codex_utils_absolute_path::AbsolutePathBuf;
use serde::Deserialize;
use std::io;
use std::path::Path;
#[cfg(windows)]
use std::path::PathBuf;
// Fixed system-wide config location on Unix-like systems.
#[cfg(unix)]
pub const SYSTEM_CONFIG_TOML_FILE_UNIX: &str = "/etc/codex/config.toml";
// Fallback ProgramData root used when the Windows known-folder lookup fails.
#[cfg(windows)]
const DEFAULT_PROGRAM_DATA_DIR_WINDOWS: &str = r"C:\ProgramData";
/// Loads `requirements_toml_file` (if present) and merges its values into
/// `config_requirements_toml`, filling only fields that are still unset and
/// attributing them to the system `requirements.toml` source.
///
/// # Errors
/// - A missing file is silently ignored (no system requirements).
/// - Parse failures are reported as `InvalidData` with the file path.
/// - Other read failures keep their original `io::ErrorKind`, with the file
///   path added to the message.
pub async fn load_requirements_toml(
    config_requirements_toml: &mut ConfigRequirementsWithSources,
    requirements_toml_file: impl AsRef<Path>,
) -> io::Result<()> {
    // Normalize to an absolute path up front so error messages and the
    // recorded requirement source both carry a stable path.
    let requirements_toml_file =
        AbsolutePathBuf::from_absolute_path(requirements_toml_file.as_ref())?;
    match tokio::fs::read_to_string(&requirements_toml_file).await {
        Ok(contents) => {
            let requirements_config: ConfigRequirementsToml =
                toml::from_str(&contents).map_err(|err| {
                    io::Error::new(
                        io::ErrorKind::InvalidData,
                        format!(
                            "Error parsing requirements file {}: {err}",
                            requirements_toml_file.as_ref().display(),
                        ),
                    )
                })?;
            config_requirements_toml.merge_unset_fields(
                RequirementSource::SystemRequirementsToml {
                    file: requirements_toml_file.clone(),
                },
                requirements_config,
            );
        }
        // Absent file simply means there are no system requirements.
        Err(err) if err.kind() == io::ErrorKind::NotFound => {}
        Err(err) => {
            return Err(io::Error::new(
                err.kind(),
                format!(
                    "Failed to read requirements file {}: {err}",
                    requirements_toml_file.as_ref().display(),
                ),
            ));
        }
    }
    Ok(())
}
/// Merges admin-managed (MDM) requirements into `config_requirements_toml`.
///
/// On macOS this reads managed preferences (optionally overridden by the
/// base64-encoded TOML in `managed_config_requirements_base64`); on all
/// other platforms it is a no-op.
pub async fn load_managed_admin_requirements(
    config_requirements_toml: &mut ConfigRequirementsWithSources,
    managed_config_requirements_base64: Option<&str>,
) -> io::Result<()> {
    #[cfg(target_os = "macos")]
    {
        load_managed_admin_requirements_toml(
            config_requirements_toml,
            managed_config_requirements_base64,
        )
        .await
    }
    #[cfg(not(target_os = "macos"))]
    {
        // Explicitly discard the arguments so non-macOS builds stay
        // warning-free.
        let _ = config_requirements_toml;
        let _ = managed_config_requirements_base64;
        Ok(())
    }
}
/// Absolute path of the system-wide `requirements.toml` (Unix: `/etc/codex`).
#[cfg(unix)]
pub fn system_requirements_toml_file() -> io::Result<AbsolutePathBuf> {
    AbsolutePathBuf::from_absolute_path(Path::new("/etc/codex/requirements.toml"))
}

/// Absolute path of the system-wide `requirements.toml` (Windows: under the
/// ProgramData Codex directory).
#[cfg(windows)]
pub fn system_requirements_toml_file() -> io::Result<AbsolutePathBuf> {
    windows_system_requirements_toml_file()
}

/// Absolute path of the system-wide `config.toml` (Unix: `/etc/codex`).
#[cfg(unix)]
pub fn system_config_toml_file() -> io::Result<AbsolutePathBuf> {
    AbsolutePathBuf::from_absolute_path(Path::new(SYSTEM_CONFIG_TOML_FILE_UNIX))
}

/// Absolute path of the system-wide `config.toml` (Windows: under the
/// ProgramData Codex directory).
#[cfg(windows)]
pub fn system_config_toml_file() -> io::Result<AbsolutePathBuf> {
    windows_system_config_toml_file()
}
/// Resolves `<ProgramData>\OpenAI\Codex`, falling back to the default
/// `C:\ProgramData` when the known-folder lookup fails (the failure is
/// logged, not propagated).
#[cfg(windows)]
fn windows_codex_system_dir() -> PathBuf {
    let program_data = windows_program_data_dir_from_known_folder().unwrap_or_else(|err| {
        tracing::warn!(
            error = %err,
            "Failed to resolve ProgramData known folder; using default path"
        );
        PathBuf::from(DEFAULT_PROGRAM_DATA_DIR_WINDOWS)
    });
    program_data.join("OpenAI").join("Codex")
}

/// System-wide `requirements.toml` location on Windows.
#[cfg(windows)]
fn windows_system_requirements_toml_file() -> io::Result<AbsolutePathBuf> {
    let requirements_toml_file = windows_codex_system_dir().join("requirements.toml");
    AbsolutePathBuf::try_from(requirements_toml_file)
}

/// System-wide `config.toml` location on Windows.
#[cfg(windows)]
fn windows_system_config_toml_file() -> io::Result<AbsolutePathBuf> {
    let config_toml_file = windows_codex_system_dir().join("config.toml");
    AbsolutePathBuf::try_from(config_toml_file)
}
/// Asks the shell for the `ProgramData` known folder via
/// `SHGetKnownFolderPath` and converts the returned UTF-16 buffer into a
/// `PathBuf`.
///
/// # Errors
/// Returns `io::Error::other` when the API reports a failing HRESULT or
/// hands back a null pointer.
#[cfg(windows)]
fn windows_program_data_dir_from_known_folder() -> io::Result<PathBuf> {
    use std::ffi::OsString;
    use std::os::windows::ffi::OsStringExt;
    use windows_sys::Win32::System::Com::CoTaskMemFree;
    use windows_sys::Win32::UI::Shell::FOLDERID_ProgramData;
    use windows_sys::Win32::UI::Shell::KF_FLAG_DEFAULT;
    use windows_sys::Win32::UI::Shell::SHGetKnownFolderPath;
    let mut path_ptr = std::ptr::null_mut::<u16>();
    let known_folder_flags = u32::try_from(KF_FLAG_DEFAULT).map_err(|_| {
        io::Error::other(format!(
            "KF_FLAG_DEFAULT did not fit in u32: {KF_FLAG_DEFAULT}"
        ))
    })?;
    // SAFETY: `FOLDERID_ProgramData` is a valid known-folder GUID and
    // `path_ptr` is a valid out-pointer. On success the OS allocates the
    // returned buffer, which is released below with `CoTaskMemFree`.
    let hr = unsafe {
        SHGetKnownFolderPath(&FOLDERID_ProgramData, known_folder_flags, 0, &mut path_ptr)
    };
    if hr != 0 {
        return Err(io::Error::other(format!(
            "SHGetKnownFolderPath(FOLDERID_ProgramData) failed with HRESULT {hr:#010x}"
        )));
    }
    if path_ptr.is_null() {
        return Err(io::Error::other(
            "SHGetKnownFolderPath(FOLDERID_ProgramData) returned a null pointer",
        ));
    }
    // SAFETY: `path_ptr` was checked non-null above and points to a
    // NUL-terminated UTF-16 string allocated by `SHGetKnownFolderPath`; we
    // measure its length, copy it into an owned `PathBuf`, then free the OS
    // buffer exactly once.
    let path = unsafe {
        let mut len = 0usize;
        while *path_ptr.add(len) != 0 {
            len += 1;
        }
        let wide = std::slice::from_raw_parts(path_ptr, len);
        let path = PathBuf::from(OsString::from_wide(wide));
        CoTaskMemFree(path_ptr.cast());
        path
    };
    Ok(path)
}
/// Back-compat requirements loading: derives requirements from the legacy
/// `managed_config.toml` layers (the MDM-provided layer first, then the
/// on-disk file) and merges them into fields that are still unset.
pub async fn load_requirements_from_legacy_scheme(
    config_requirements_toml: &mut ConfigRequirementsWithSources,
    loaded_config_layers: LoadedConfigLayers,
) -> io::Result<()> {
    let LoadedConfigLayers {
        managed_config,
        managed_config_from_mdm,
    } = loaded_config_layers;
    // Process the MDM layer before the file layer: since merging only fills
    // unset fields, the MDM values take precedence.
    for (source, config) in managed_config_from_mdm
        .map(|config| {
            (
                RequirementSource::LegacyManagedConfigTomlFromMdm,
                config.managed_config,
            )
        })
        .into_iter()
        .chain(managed_config.map(|config| {
            (
                RequirementSource::LegacyManagedConfigTomlFromFile { file: config.file },
                config.managed_config,
            )
        }))
    {
        let legacy_config: LegacyManagedConfigToml =
            config.try_into().map_err(|err: toml::de::Error| {
                io::Error::new(
                    io::ErrorKind::InvalidData,
                    format!("Failed to parse config requirements as TOML: {err}"),
                )
            })?;
        let requirements = ConfigRequirementsToml::from(legacy_config);
        config_requirements_toml.merge_unset_fields(source, requirements);
    }
    Ok(())
}
/// The subset of the legacy `managed_config.toml` schema that maps onto
/// config requirements; unrecognized keys are ignored during deserialization.
#[derive(Deserialize, Debug, Clone, Default, PartialEq)]
struct LegacyManagedConfigToml {
    approval_policy: Option<AskForApproval>,
    sandbox_mode: Option<SandboxMode>,
}
impl From<LegacyManagedConfigToml> for ConfigRequirementsToml {
    /// Maps the legacy keys onto requirement fields:
    /// - `approval_policy` becomes the single allowed approval policy;
    /// - `sandbox_mode` becomes the allowed sandbox modes, with `ReadOnly`
    ///   always included alongside the configured mode.
    fn from(legacy: LegacyManagedConfigToml) -> Self {
        let mut config_requirements_toml = ConfigRequirementsToml::default();
        let LegacyManagedConfigToml {
            approval_policy,
            sandbox_mode,
        } = legacy;
        if let Some(approval_policy) = approval_policy {
            config_requirements_toml.allowed_approval_policies = Some(vec![approval_policy]);
        }
        if let Some(sandbox_mode) = sandbox_mode {
            let required_mode = sandbox_mode.into();
            // `ReadOnly` is always permitted; add the configured mode only
            // when it differs, to avoid a duplicate entry.
            let mut allowed_modes = vec![crate::SandboxModeRequirement::ReadOnly];
            if required_mode != crate::SandboxModeRequirement::ReadOnly {
                allowed_modes.push(required_mode);
            }
            config_requirements_toml.allowed_sandbox_modes = Some(allowed_modes);
        }
        config_requirements_toml
    }
}

View File

@@ -1,6 +1,6 @@
use super::ConfigRequirementsToml;
use super::ConfigRequirementsWithSources;
use super::RequirementSource;
use crate::ConfigRequirementsToml;
use crate::ConfigRequirementsWithSources;
use crate::RequirementSource;
use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use core_foundation::base::TCFType;
@@ -16,19 +16,19 @@ const MANAGED_PREFERENCES_CONFIG_KEY: &str = "config_toml_base64";
const MANAGED_PREFERENCES_REQUIREMENTS_KEY: &str = "requirements_toml_base64";
#[derive(Debug, Clone)]
pub(super) struct ManagedAdminConfigLayer {
pub struct ManagedAdminConfigLayer {
pub config: TomlValue,
pub raw_toml: String,
}
pub(super) fn managed_preferences_requirements_source() -> RequirementSource {
fn managed_preferences_requirements_source() -> RequirementSource {
RequirementSource::MdmManagedPreferences {
domain: MANAGED_PREFERENCES_APPLICATION_ID.to_string(),
key: MANAGED_PREFERENCES_REQUIREMENTS_KEY.to_string(),
}
}
pub(crate) async fn load_managed_admin_config_layer(
pub async fn load_managed_admin_config_layer(
override_base64: Option<&str>,
) -> io::Result<Option<ManagedAdminConfigLayer>> {
if let Some(encoded) = override_base64 {
@@ -61,7 +61,7 @@ fn load_managed_admin_config() -> io::Result<Option<ManagedAdminConfigLayer>> {
.transpose()
}
pub(crate) async fn load_managed_admin_requirements_toml(
pub async fn load_managed_admin_requirements_toml(
target: &mut ConfigRequirementsWithSources,
override_base64: Option<&str>,
) -> io::Result<()> {

View File

@@ -35,8 +35,6 @@ codex-async-utils = { workspace = true }
codex-client = { workspace = true }
codex-connectors = { workspace = true }
codex-config = { workspace = true }
codex-core-auth = { workspace = true }
codex-environment = { workspace = true }
codex-shell-command = { workspace = true }
codex-skills = { workspace = true }
codex-execpolicy = { workspace = true }
@@ -126,7 +124,6 @@ landlock = { workspace = true }
seccompiler = { workspace = true }
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation = "0.9"
keyring = { workspace = true, features = ["apple-native"] }
# Build OpenSSL from source for musl builds.
@@ -139,11 +136,6 @@ openssl-sys = { workspace = true, features = ["vendored"] }
[target.'cfg(target_os = "windows")'.dependencies]
keyring = { workspace = true, features = ["windows-native"] }
windows-sys = { version = "0.52", features = [
"Win32_Foundation",
"Win32_System_Com",
"Win32_UI_Shell",
] }
[target.'cfg(any(target_os = "freebsd", target_os = "openbsd"))'.dependencies]
keyring = { workspace = true, features = ["sync-secret-service"] }

View File

@@ -60,35 +60,6 @@ only when the split filesystem policy round-trips through the legacy
cases like `/repo = write`, `/repo/a = none`, `/repo/a/b = write`, where the
more specific writable child must reopen under a denied parent.
The Linux sandbox helper prefers `/usr/bin/bwrap` whenever it is available and
falls back to the vendored bubblewrap path otherwise. When `/usr/bin/bwrap` is
missing, Codex also surfaces a startup warning through its normal notification
path instead of printing directly from the sandbox helper.
### Windows
Legacy `SandboxPolicy` / `sandbox_mode` configs are still supported on
Windows.
The elevated setup/runner backend supports legacy `ReadOnlyAccess::Restricted`
for `read-only` and `workspace-write` policies. Restricted read access honors
explicit readable roots plus the command `cwd`, and keeps writable roots
readable when `workspace-write` is used.
When `include_platform_defaults = true`, the elevated Windows backend adds
backend-managed system read roots required for basic execution, such as
`C:\Windows`, `C:\Program Files`, `C:\Program Files (x86)`, and
`C:\ProgramData`. When it is `false`, those extra system roots are omitted.
The unelevated restricted-token backend still supports the legacy full-read
Windows model only. Restricted read-only policies continue to fail closed there
instead of running with weaker read enforcement.
New `[permissions]` / split filesystem policies remain supported on Windows
only when they round-trip through the legacy `SandboxPolicy` model without
changing semantics. Richer split-only carveouts still fail closed instead of
running with weaker enforcement.
### All Platforms
Expects the binary containing `codex-core` to simulate the virtual `apply_patch` CLI when `arg1` is `--codex-run-as-apply-patch`. See the `codex-arg0` crate for details.

View File

@@ -1,14 +0,0 @@
//! Auth storage backend for Codex CLI credentials.
//!
//! This crate provides the storage layer for auth.json (file, keyring, auto, ephemeral)
//! and the AuthDotJson / AuthCredentialsStoreMode types. The higher-level auth logic
//! (CodexAuth, AuthManager, token refresh) lives in codex-core.
pub mod storage;
pub use storage::AuthCredentialsStoreMode;
pub use storage::AuthDotJson;
pub use storage::AuthStorageBackend;
pub use storage::FileAuthStorage;
pub use storage::create_auth_storage;
pub use storage::get_auth_file;

View File

@@ -1549,42 +1549,6 @@
},
"type": "object"
},
"ToolSuggestConfig": {
"additionalProperties": false,
"properties": {
"discoverables": {
"default": [],
"items": {
"$ref": "#/definitions/ToolSuggestDiscoverable"
},
"type": "array"
}
},
"type": "object"
},
"ToolSuggestDiscoverable": {
"additionalProperties": false,
"properties": {
"id": {
"type": "string"
},
"type": {
"$ref": "#/definitions/ToolSuggestDiscoverableType"
}
},
"required": [
"id",
"type"
],
"type": "object"
},
"ToolSuggestDiscoverableType": {
"enum": [
"connector",
"plugin"
],
"type": "string"
},
"ToolsToml": {
"additionalProperties": false,
"properties": {
@@ -2467,14 +2431,6 @@
"minimum": 0.0,
"type": "integer"
},
"tool_suggest": {
"allOf": [
{
"$ref": "#/definitions/ToolSuggestConfig"
}
],
"description": "Additional discoverable tools that can be suggested for installation."
},
"tools": {
"allOf": [
{
@@ -2523,4 +2479,4 @@
},
"title": "ConfigToml",
"type": "object"
}
}

Some files were not shown because too many files have changed in this diff Show More