Compare commits

..

2 Commits

Author SHA1 Message Date
Shayne Sweeney
06a02e0263 Merge origin/main into dev/shayne/scope-mcp-startup
Co-authored-by: Codex <noreply@openai.com>
2026-05-07 17:25:53 -04:00
Shayne Sweeney
06fd1307aa fix(tui): scope MCP startup events to emitting thread
Sub-agent McpConnectionManager startup events leaked into the leader
TUI's status header because McpServerStatusUpdated notifications carried
no thread identity and were routed Global, which fell through to the
leader's chat_widget and overwrote its mcp_startup_status map. Once a
sub-agent re-emitted Starting for a server the leader already had as
Ready, the leader's spinner reopened and the settle check could never
pass.

Add an optional thread_id to McpServerStatusUpdatedNotification, set it
from the emitting session in apply_bespoke_event_handling, and route
notifications carrying a thread_id through ServerNotificationThreadTarget
so non-leader threads enqueue to their own buffered store rather than
mutating the leader's map. A missing thread_id keeps the existing Global
behaviour for backward compatibility with older app servers.

Adds:
- routing unit tests covering both presence and absence of thread_id
- an app-level test asserting a sub-agent McpServerStatusUpdated does
  not flip the leader's task-running state
- thread_id assertion in the existing app-server integration test

Closes #18068
Refs #16821, #19542

Co-authored-by: Codex <noreply@openai.com>
2026-05-02 11:35:26 -04:00
272 changed files with 3937 additions and 4899 deletions

View File

@@ -2,6 +2,7 @@ name: 💻 CLI Bug
description: Report an issue in the Codex CLI
labels:
- bug
- needs triage
body:
- type: markdown
attributes:
@@ -40,9 +41,9 @@ body:
id: terminal
attributes:
label: What terminal emulator and version are you using (if applicable)?
description: Also note any multiplexer in use (screen / tmux / zellij)
description: |
Also note any multiplexer in use (screen / tmux / zellij).
E.g., VS Code, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
E.g, VSCode, Terminal.app, iTerm2, Ghostty, Windows Terminal (WSL / PowerShell)
- type: textarea
id: actual
attributes:

View File

@@ -10,7 +10,7 @@ body:
Before you submit a feature:
1. Search existing issues for similar features. If you find one, 👍 it rather than opening a new one.
2. The Codex team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. See [Contributing](https://github.com/openai/codex/blob/main/docs/contributing.md) for more details.
2. The Codex team will try to balance the varying needs of the community when prioritizing or rejecting new features. Not all features will be accepted. See [Contributing](https://github.com/openai/codex#contributing) for more details.
- type: input
id: variant

View File

@@ -1,6 +1,6 @@
name: 📗 Documentation Issue
description: Tell us if there is missing or incorrect documentation
labels: [documentation]
labels: [docs]
body:
- type: markdown
attributes:
@@ -24,4 +24,4 @@ body:
- type: textarea
attributes:
label: Where did you find it?
description: If possible, please provide the URL(s) where you found this issue.
description: If possible, please provide the URL(s) where you found this issue.

View File

@@ -50,7 +50,7 @@ runs:
- name: Restore bazel repository cache
id: cache_bazel_repository_restore
continue-on-error: true
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.setup_bazel.outputs.repository-cache-path }}
key: ${{ steps.cache_bazel_repository_key.outputs.repository-cache-key }}

View File

@@ -30,7 +30,7 @@ runs:
using: composite
steps:
- name: Azure login for Trusted Signing (OIDC)
uses: azure/login@a457da9ea143d694b1b9c7c869ebb04ebe844ef5 # v2.3.0
uses: azure/login@a457da9ea143d694b1b9c7c869ebb04ebe844ef5 # v2
with:
client-id: ${{ inputs.client-id }}
tenant-id: ${{ inputs.tenant-id }}
@@ -54,7 +54,7 @@ runs:
} >> "$GITHUB_OUTPUT"
- name: Sign Windows binaries with Azure Trusted Signing
uses: azure/trusted-signing-action@1d365fec12862c4aa68fcac418143d73f0cea293 # v0.5.11
uses: azure/trusted-signing-action@1d365fec12862c4aa68fcac418143d73f0cea293 # v0
with:
endpoint: ${{ inputs.endpoint }}
trusted-signing-account-name: ${{ inputs.account-name }}

View File

@@ -6,37 +6,25 @@ updates:
directory: .github/actions/codex
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: cargo
directories:
- codex-rs
- codex-rs/*
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: devcontainers
directory: /
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: docker
directory: codex-cli
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly
cooldown:
default-days: 7
- package-ecosystem: rust-toolchain
directory: codex-rs
schedule:
interval: weekly
cooldown:
default-days: 7

View File

@@ -56,7 +56,7 @@ jobs:
name: Bazel test on ${{ matrix.os }} for ${{ matrix.target }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Check rusty_v8 MODULE.bazel checksums
if: matrix.os == 'ubuntu-24.04' && matrix.target == 'x86_64-unknown-linux-gnu'
@@ -122,7 +122,7 @@ jobs:
- name: Upload Bazel execution logs
if: always() && !cancelled()
continue-on-error: true
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: bazel-execution-logs-test-${{ matrix.target }}
path: ${{ runner.temp }}/bazel-execution-logs
@@ -133,7 +133,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.prepare_bazel.outputs.repository-cache-path }}
key: ${{ steps.prepare_bazel.outputs.repository-cache-key }}
@@ -148,7 +148,7 @@ jobs:
name: Bazel test on windows-latest for x86_64-pc-windows-gnullvm (native main)
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Bazel CI
id: prepare_bazel
@@ -195,7 +195,7 @@ jobs:
- name: Upload Bazel execution logs
if: always() && !cancelled()
continue-on-error: true
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: bazel-execution-logs-test-windows-native-x86_64-pc-windows-gnullvm
path: ${{ runner.temp }}/bazel-execution-logs
@@ -206,7 +206,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.prepare_bazel.outputs.repository-cache-path }}
key: ${{ steps.prepare_bazel.outputs.repository-cache-key }}
@@ -231,7 +231,7 @@ jobs:
name: Bazel clippy on ${{ matrix.os }} for ${{ matrix.target }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Bazel CI
id: prepare_bazel
@@ -286,7 +286,7 @@ jobs:
- name: Upload Bazel execution logs
if: always() && !cancelled()
continue-on-error: true
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: bazel-execution-logs-clippy-${{ matrix.target }}
path: ${{ runner.temp }}/bazel-execution-logs
@@ -297,7 +297,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.prepare_bazel.outputs.repository-cache-path }}
key: ${{ steps.prepare_bazel.outputs.repository-cache-key }}
@@ -318,7 +318,7 @@ jobs:
name: Verify release build on ${{ matrix.os }} for ${{ matrix.target }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Bazel CI
id: prepare_bazel
@@ -390,7 +390,7 @@ jobs:
- name: Upload Bazel execution logs
if: always() && !cancelled()
continue-on-error: true
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: bazel-execution-logs-verify-release-build-${{ matrix.target }}
path: ${{ runner.temp }}/bazel-execution-logs
@@ -401,7 +401,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.prepare_bazel.outputs.repository-cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ steps.prepare_bazel.outputs.repository-cache-path }}
key: ${{ steps.prepare_bazel.outputs.repository-cache-key }}

View File

@@ -8,7 +8,7 @@ jobs:
name: Blob size policy
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0

View File

@@ -14,7 +14,7 @@ jobs:
working-directory: ./codex-rs
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0

View File

@@ -12,7 +12,7 @@ jobs:
NODE_OPTIONS: --max-old-space-size=4096
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Verify codex-rs Cargo manifests inherit workspace settings
run: python3 .github/scripts/verify_cargo_workspace_manifests.py
@@ -29,7 +29,7 @@ jobs:
run_install: false
- name: Setup Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 22
@@ -63,7 +63,7 @@ jobs:
echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT"
- name: Upload staged npm package artifact
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: codex-npm-staging
path: ${{ steps.stage_npm_package.outputs.pack_output }}

View File

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Close inactive PRs from contributors
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

View File

@@ -18,9 +18,9 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Annotate locations with typos
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1.1.0
uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1
- name: Codespell
uses: codespell-project/actions-codespell@8f01853be192eb0f849a5c7d721450e7a467c579 # v2.2
with:

View File

@@ -19,7 +19,7 @@ jobs:
reason: ${{ steps.normalize-all.outputs.reason }}
has_matches: ${{ steps.normalize-all.outputs.has_matches }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Codex inputs
env:
@@ -155,7 +155,7 @@ jobs:
reason: ${{ steps.normalize-open.outputs.reason }}
has_matches: ${{ steps.normalize-open.outputs.has_matches }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Prepare Codex inputs
env:
@@ -342,7 +342,7 @@ jobs:
issues: write
steps:
- name: Comment on issue
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
CODEX_OUTPUT: ${{ needs.select-final.outputs.codex_output }}
with:

View File

@@ -17,7 +17,7 @@ jobs:
outputs:
codex_output: ${{ steps.codex.outputs.final-message }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- id: codex
uses: openai/codex-action@5c3f4ccdb2b8790f73d6b21751ac00e602aa0c02 # v1.7

View File

@@ -17,7 +17,7 @@ jobs:
run:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
components: rustfmt
@@ -31,9 +31,9 @@ jobs:
run:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-shear
version: 1.11.2
@@ -47,14 +47,14 @@ jobs:
CARGO_DYLINT_VERSION: 5.0.0
DYLINT_LINK_VERSION: 5.0.0
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
toolchain: nightly-2025-09-18
components: llvm-tools-preview, rustc-dev, rust-src
- name: Cache cargo-dylint tooling
id: cargo_dylint_cache
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/cargo-dylint
@@ -97,7 +97,7 @@ jobs:
group: codex-runners
labels: codex-windows-x64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: ./.github/actions/setup-bazel-ci
with:
target: ${{ runner.os }}
@@ -233,7 +233,7 @@ jobs:
labels: codex-windows-arm64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install Linux build dependencies
if: ${{ runner.os == 'Linux' }}
shell: bash
@@ -276,7 +276,7 @@ jobs:
# avoid caching the large target dir on the gnu-dev job.
- name: Restore cargo home cache
id: cache_cargo_home_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/
@@ -294,7 +294,7 @@ jobs:
# Install and restore sccache cache
- name: Install sccache
if: ${{ env.USE_SCCACHE == 'true' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: sccache
version: 0.7.5
@@ -321,7 +321,7 @@ jobs:
- name: Restore sccache cache (fallback)
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
id: cache_sccache_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
@@ -348,7 +348,7 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Restore APT cache (musl)
id: cache_apt_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
/var/cache/apt
@@ -356,7 +356,7 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2.2.1
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2
with:
version: 0.14.0
@@ -430,7 +430,7 @@ jobs:
- name: Install cargo-chef
if: ${{ matrix.profile == 'release' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-chef
version: 0.1.71
@@ -449,7 +449,7 @@ jobs:
- name: Upload Cargo timings (clippy)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: cargo-timings-rust-ci-clippy-${{ matrix.target }}-${{ matrix.profile }}
path: codex-rs/target/**/cargo-timings/cargo-timing.html
@@ -460,7 +460,7 @@ jobs:
- name: Save cargo home cache
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/
@@ -476,7 +476,7 @@ jobs:
- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
@@ -501,7 +501,7 @@ jobs:
- name: Save APT cache (musl)
if: always() && !cancelled() && (matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl') && steps.cache_apt_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
/var/cache/apt
@@ -559,7 +559,7 @@ jobs:
labels: codex-windows-arm64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install Linux build dependencies
if: ${{ runner.os == 'Linux' }}
shell: bash
@@ -590,7 +590,7 @@ jobs:
- name: Restore cargo home cache
id: cache_cargo_home_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/
@@ -603,7 +603,7 @@ jobs:
- name: Install sccache
if: ${{ env.USE_SCCACHE == 'true' }}
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: sccache
version: 0.7.5
@@ -630,7 +630,7 @@ jobs:
- name: Restore sccache cache (fallback)
if: ${{ env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true' }}
id: cache_sccache_restore
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}
@@ -638,7 +638,7 @@ jobs:
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-
sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: nextest
version: 0.9.103
@@ -674,7 +674,7 @@ jobs:
- name: Upload Cargo timings (nextest)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: cargo-timings-rust-ci-nextest-${{ matrix.target }}-${{ matrix.profile }}
path: codex-rs/target/**/cargo-timings/cargo-timing.html
@@ -683,7 +683,7 @@ jobs:
- name: Save cargo home cache
if: always() && !cancelled() && steps.cache_cargo_home_restore.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/
@@ -695,7 +695,7 @@ jobs:
- name: Save sccache cache (fallback)
if: always() && !cancelled() && env.USE_SCCACHE == 'true' && env.SCCACHE_GHA_ENABLED != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: ${{ github.workspace }}/.sccache/
key: sccache-${{ matrix.runner }}-${{ matrix.target }}-${{ matrix.profile }}-${{ steps.lockhash.outputs.hash }}-${{ github.run_id }}

View File

@@ -14,7 +14,7 @@ jobs:
codex: ${{ steps.detect.outputs.codex }}
workflows: ${{ steps.detect.outputs.workflows }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
- name: Detect changed paths (no external action)
@@ -61,7 +61,7 @@ jobs:
run:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
components: rustfmt
@@ -77,9 +77,9 @@ jobs:
run:
working-directory: codex-rs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49
- uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
with:
tool: cargo-shear
version: 1.11.2
@@ -95,7 +95,7 @@ jobs:
CARGO_DYLINT_VERSION: 5.0.0
DYLINT_LINK_VERSION: 5.0.0
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- name: Install nightly argument-comment-lint toolchain
shell: bash
@@ -109,7 +109,7 @@ jobs:
rustup default nightly-2025-09-18
- name: Cache cargo-dylint tooling
id: cargo_dylint_cache
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cargo/bin/cargo-dylint
@@ -170,7 +170,7 @@ jobs:
echo "No argument-comment-lint relevant changes."
echo "run=false" >> "$GITHUB_OUTPUT"
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}
- name: Run argument comment lint on codex-rs via Bazel
if: ${{ steps.argument_comment_lint_gate.outputs.run == 'true' }}

View File

@@ -56,7 +56,7 @@ jobs:
labels: codex-windows-x64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
with:
@@ -100,7 +100,7 @@ jobs:
(cd "${RUNNER_TEMP}" && tar -czf "$GITHUB_WORKSPACE/$archive_path" argument-comment-lint)
fi
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: argument-comment-lint-${{ matrix.target }}
path: dist/argument-comment-lint/${{ matrix.target }}/*

View File

@@ -18,7 +18,7 @@ jobs:
if: github.repository == 'openai/codex'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: main
fetch-depth: 0
@@ -43,7 +43,7 @@ jobs:
curl --http1.1 --fail --show-error --location "${headers[@]}" "${url}" | jq '.' > codex-rs/models-manager/models.json
- name: Open pull request (if changed)
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8
with:
commit-message: "Update models.json"
title: "Update models.json"

View File

@@ -83,7 +83,7 @@ jobs:
labels: codex-windows-arm64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Print runner specs (Windows)
shell: powershell
run: |
@@ -112,7 +112,7 @@ jobs:
cargo build --target ${{ matrix.target }} --release --timings "${build_args[@]}"
- name: Upload Cargo timings
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: cargo-timings-rust-release-windows-${{ matrix.target }}-${{ matrix.bundle }}
path: codex-rs/target/**/cargo-timings/cargo-timing.html
@@ -128,7 +128,7 @@ jobs:
done
- name: Upload Windows binaries
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: windows-binaries-${{ matrix.target }}-${{ matrix.bundle }}
path: |
@@ -165,22 +165,22 @@ jobs:
labels: codex-windows-arm64
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Download prebuilt Windows primary binaries
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: windows-binaries-${{ matrix.target }}-primary
path: codex-rs/target/${{ matrix.target }}/release
- name: Download prebuilt Windows helper binaries
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: windows-binaries-${{ matrix.target }}-helpers
path: codex-rs/target/${{ matrix.target }}/release
- name: Download prebuilt Windows app-server binary
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: windows-binaries-${{ matrix.target }}-app-server
path: codex-rs/target/${{ matrix.target }}/release
@@ -281,7 +281,7 @@ jobs:
"${GITHUB_WORKSPACE}/.github/workflows/zstd" -T0 -19 "$dest/$base"
done
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: ${{ matrix.target }}
path: |

View File

@@ -45,7 +45,7 @@ jobs:
git \
libncursesw5-dev
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Build, smoke-test, and stage zsh artifact
shell: bash
@@ -53,7 +53,7 @@ jobs:
"${GITHUB_WORKSPACE}/.github/scripts/build-zsh-release-artifact.sh" \
"dist/zsh/${{ matrix.target }}/${{ matrix.archive_name }}"
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: codex-zsh-${{ matrix.target }}
path: dist/zsh/${{ matrix.target }}/*
@@ -81,7 +81,7 @@ jobs:
brew install autoconf
fi
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Build, smoke-test, and stage zsh artifact
shell: bash
@@ -89,7 +89,7 @@ jobs:
"${GITHUB_WORKSPACE}/.github/scripts/build-zsh-release-artifact.sh" \
"dist/zsh/${{ matrix.target }}/${{ matrix.archive_name }}"
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: codex-zsh-${{ matrix.target }}
path: dist/zsh/${{ matrix.target }}/*

View File

@@ -19,7 +19,7 @@ jobs:
tag-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- uses: dtolnay/rust-toolchain@a0b273b48ed29de4470960879e8381ff45632f26 # 1.93.0
- name: Validate tag matches Cargo.toml version
shell: bash
@@ -118,7 +118,7 @@ jobs:
build_dmg: "false"
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Print runner specs (Linux)
if: ${{ runner.os == 'Linux' }}
shell: bash
@@ -181,7 +181,7 @@ jobs:
- if: ${{ matrix.target == 'x86_64-unknown-linux-musl' || matrix.target == 'aarch64-unknown-linux-musl'}}
name: Install Zig
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2.2.1
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2
with:
version: 0.14.0
@@ -284,7 +284,7 @@ jobs:
cargo build --target ${{ matrix.target }} --release --timings "${build_args[@]}"
- name: Upload Cargo timings
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: cargo-timings-rust-release-${{ matrix.target }}-${{ matrix.bundle }}
path: codex-rs/target/**/cargo-timings/cargo-timing.html
@@ -430,7 +430,7 @@ jobs:
zstd -T0 -19 --rm "$dest/$base"
done
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: ${{ matrix.artifact_name }}
# Upload the per-binary .zst files, .tar.gz equivalents, and any
@@ -476,7 +476,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Generate release notes from tag commit message
id: release_notes
@@ -498,7 +498,7 @@ jobs:
echo "path=${notes_path}" >> "${GITHUB_OUTPUT}"
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
path: dist
@@ -553,7 +553,7 @@ jobs:
run_install: false
- name: Setup Node.js for npm packaging
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 22
@@ -579,7 +579,7 @@ jobs:
cp scripts/install/install.ps1 dist/install.ps1
- name: Create GitHub Release
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2
with:
name: ${{ steps.release_name.outputs.name }}
tag_name: ${{ github.ref_name }}
@@ -638,7 +638,7 @@ jobs:
steps:
- name: Setup Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
# Node 24 bundles npm >= 11.5.1, which trusted publishing requires.
node-version: 24

View File

@@ -17,10 +17,10 @@ jobs:
v8_version: ${{ steps.v8_version.outputs.version }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.12"
@@ -69,7 +69,7 @@ jobs:
target: aarch64-unknown-linux-musl
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Bazel
uses: ./.github/actions/setup-bazel-ci
@@ -77,7 +77,7 @@ jobs:
target: ${{ matrix.target }}
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.12"
@@ -133,7 +133,7 @@ jobs:
--output-dir "dist/${TARGET}"
- name: Upload staged musl artifacts
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: rusty-v8-${{ needs.metadata.outputs.v8_version }}-${{ matrix.target }}
path: dist/${{ matrix.target }}/*
@@ -161,12 +161,12 @@ jobs:
exit 1
fi
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
path: dist
- name: Create GitHub Release
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2
with:
tag_name: ${{ needs.metadata.outputs.release_tag }}
name: ${{ needs.metadata.outputs.release_tag }}

View File

@@ -13,7 +13,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install Linux bwrap build dependencies
shell: bash
@@ -28,7 +28,7 @@ jobs:
run_install: false
- name: Setup Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: 22
cache: pnpm
@@ -115,7 +115,7 @@ jobs:
- name: Save bazel repository cache
if: always() && !cancelled() && steps.setup_bazel.outputs.cache-hit != 'true'
continue-on-error: true
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5
with:
path: |
~/.cache/bazel-repo-cache

View File

@@ -40,10 +40,10 @@ jobs:
v8_version: ${{ steps.v8_version.outputs.version }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.12"
@@ -74,7 +74,7 @@ jobs:
target: aarch64-unknown-linux-musl
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Set up Bazel
uses: ./.github/actions/setup-bazel-ci
@@ -82,7 +82,7 @@ jobs:
target: ${{ matrix.target }}
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
with:
python-version: "3.12"
@@ -132,7 +132,7 @@ jobs:
--output-dir "dist/${TARGET}"
- name: Upload staged musl artifacts
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: v8-canary-${{ needs.metadata.outputs.v8_version }}-${{ matrix.target }}
path: dist/${{ matrix.target }}/*

View File

@@ -130,7 +130,7 @@ When UI or text output changes intentionally, update the snapshots as follows:
If you dont have the tool:
- `cargo install --locked cargo-insta`
- `cargo install cargo-insta`
### Test assertions

11
MODULE.bazel.lock generated

File diff suppressed because one or more lines are too long

152
codex-rs/Cargo.lock generated
View File

@@ -757,7 +757,6 @@ checksum = "96571e6996817bf3d58f6b569e4b9fd2e9d2fcf9f7424eed07b2ce9bb87535e5"
dependencies = [
"aws-credential-types",
"aws-runtime",
"aws-sdk-signin",
"aws-sdk-sso",
"aws-sdk-ssooidc",
"aws-sdk-sts",
@@ -768,20 +767,15 @@ dependencies = [
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
"base64-simd",
"bytes",
"fastrand",
"hex",
"http 1.4.0",
"p256",
"rand 0.8.5",
"ring",
"sha2",
"time",
"tokio",
"tracing",
"url",
"uuid",
"zeroize",
]
@@ -844,28 +838,6 @@ dependencies = [
"uuid",
]
[[package]]
name = "aws-sdk-signin"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c084bd63941916e1348cb8d9e05ac2e49bdd40a380e9167702683184c6c6be53"
dependencies = [
"aws-credential-types",
"aws-runtime",
"aws-smithy-async",
"aws-smithy-http",
"aws-smithy-json",
"aws-smithy-runtime",
"aws-smithy-runtime-api",
"aws-smithy-types",
"aws-types",
"bytes",
"fastrand",
"http 0.2.12",
"regex-lite",
"tracing",
]
[[package]]
name = "aws-sdk-sso"
version = "1.91.0"
@@ -1208,12 +1180,6 @@ dependencies = [
"windows-link",
]
[[package]]
name = "base16ct"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
[[package]]
name = "base64"
version = "0.21.7"
@@ -2740,7 +2706,6 @@ dependencies = [
"tokio",
"tokio-tungstenite",
"tokio-util",
"toml 0.9.11+spec-1.1.0",
"tracing",
"uuid",
"wiremock",
@@ -3638,11 +3603,16 @@ dependencies = [
"codex-rollout",
"codex-state",
"pretty_assertions",
"prost 0.14.3",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.18",
"tokio",
"tokio-stream",
"tonic",
"tonic-prost",
"tonic-prost-build",
"tracing",
"uuid",
]
@@ -4449,18 +4419,6 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-bigint"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
dependencies = [
"generic-array",
"rand_core 0.6.4",
"subtle",
"zeroize",
]
[[package]]
name = "crypto-common"
version = "0.1.7"
@@ -5174,20 +5132,6 @@ version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
[[package]]
name = "ecdsa"
version = "0.16.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca"
dependencies = [
"der",
"digest",
"elliptic-curve",
"rfc6979",
"signature",
"spki",
]
[[package]]
name = "ed25519"
version = "2.2.3"
@@ -5221,26 +5165,6 @@ dependencies = [
"serde",
]
[[package]]
name = "elliptic-curve"
version = "0.13.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47"
dependencies = [
"base16ct",
"crypto-bigint",
"digest",
"ff",
"generic-array",
"group",
"pem-rfc7468",
"pkcs8",
"rand_core 0.6.4",
"sec1",
"subtle",
"zeroize",
]
[[package]]
name = "ena"
version = "0.14.3"
@@ -5494,16 +5418,6 @@ dependencies = [
"simd-adler32",
]
[[package]]
name = "ff"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393"
dependencies = [
"rand_core 0.6.4",
"subtle",
]
[[package]]
name = "fiat-crypto"
version = "0.2.9"
@@ -6898,17 +6812,6 @@ dependencies = [
"system-deps",
]
[[package]]
name = "group"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63"
dependencies = [
"ff",
"rand_core 0.6.4",
"subtle",
]
[[package]]
name = "gzip-header"
version = "1.0.0"
@@ -9408,18 +9311,6 @@ dependencies = [
"supports-color 3.0.2",
]
[[package]]
name = "p256"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b"
dependencies = [
"ecdsa",
"elliptic-curve",
"primeorder",
"sha2",
]
[[package]]
name = "parking"
version = "2.2.1"
@@ -9849,15 +9740,6 @@ dependencies = [
"syn 2.0.114",
]
[[package]]
name = "primeorder"
version = "0.13.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6"
dependencies = [
"elliptic-curve",
]
[[package]]
name = "proc-macro-crate"
version = "3.4.0"
@@ -10808,16 +10690,6 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7"
[[package]]
name = "rfc6979"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2"
dependencies = [
"hmac",
"subtle",
]
[[package]]
name = "ring"
version = "0.17.14"
@@ -11277,20 +11149,6 @@ version = "3.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "490dcfcbfef26be6800d11870ff2df8774fa6e86d047e3e8c8a76b25655e41ca"
[[package]]
name = "sec1"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc"
dependencies = [
"base16ct",
"der",
"generic-array",
"pkcs8",
"subtle",
"zeroize",
]
[[package]]
name = "seccompiler"
version = "0.5.0"

View File

@@ -475,13 +475,13 @@ ignored = [
[profile.dev]
# Keep line tables/backtraces while avoiding expensive full variable debug info
# across local dev builds.
debug = "limited"
debug = 1
[profile.dev-small]
inherits = "dev"
opt-level = 0
debug = "none"
strip = "symbols"
debug = 0
strip = true
[profile.release]
lto = "fat"
@@ -493,15 +493,8 @@ strip = "symbols"
# See https://github.com/openai/codex/issues/1411 for details.
codegen-units = 1
[profile.profiling]
inherits = "release"
debug = "full"
lto = false
strip = false
[profile.ci-test]
# Reduce binary size to reduce disk pressure.
debug = "limited"
debug = 1 # Reduce debug symbol size
inherits = "test"
opt-level = 0

View File

@@ -7,7 +7,6 @@ version.workspace = true
[lib]
name = "codex_agent_graph_store"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

View File

@@ -340,9 +340,8 @@ impl AnalyticsEventsClient {
});
}
pub fn track_server_response(&self, completed_at_ms: u64, response: ServerResponse) {
pub fn track_server_response(&self, response: ServerResponse) {
self.record_fact(AnalyticsFact::ServerResponse {
completed_at_ms,
response: Box::new(response),
});
}

View File

@@ -18,7 +18,6 @@ use crate::facts::TurnStatus;
use crate::facts::TurnSteerRejectionReason;
use crate::facts::TurnSteerResult;
use crate::facts::TurnSubmissionType;
use crate::now_unix_millis;
use crate::now_unix_seconds;
use codex_app_server_protocol::CodexErrorInfo;
use codex_app_server_protocol::CommandExecutionSource;
@@ -262,7 +261,7 @@ pub struct GuardianReviewTrackContext {
approval_request_source: GuardianApprovalRequestSource,
reviewed_action: GuardianReviewedAction,
review_timeout_ms: u64,
pub started_at_ms: u64,
started_at: u64,
started_instant: Instant,
}
@@ -284,7 +283,7 @@ impl GuardianReviewTrackContext {
approval_request_source,
reviewed_action,
review_timeout_ms,
started_at_ms: now_unix_millis(),
started_at: now_unix_seconds(),
started_instant: Instant::now(),
}
}
@@ -317,7 +316,7 @@ impl GuardianReviewTrackContext {
tool_call_count: None,
time_to_first_token_ms: result.time_to_first_token_ms,
completion_latency_ms: Some(self.started_instant.elapsed().as_millis() as u64),
started_at: self.started_at_ms / 1_000,
started_at: self.started_at,
completed_at: Some(now_unix_seconds()),
input_tokens: result.token_usage.as_ref().map(|usage| usage.input_tokens),
cached_input_tokens: result

View File

@@ -296,7 +296,6 @@ pub(crate) enum AnalyticsFact {
request: Box<ServerRequest>,
},
ServerResponse {
completed_at_ms: u64,
response: Box<ServerResponse>,
},
Notification(Box<ServerNotification>),

View File

@@ -325,7 +325,6 @@ impl AnalyticsReducer {
} => {}
AnalyticsFact::ServerResponse {
response: _response,
..
} => {}
AnalyticsFact::Custom(input) => match input {
CustomAnalyticsFact::SubAgentThreadStarted(input) => {

View File

@@ -7,8 +7,6 @@ license.workspace = true
[lib]
name = "codex_ansi_escape"
path = "src/lib.rs"
test = false
doctest = false
[lints]
workspace = true

View File

@@ -7,7 +7,6 @@ license.workspace = true
[lib]
name = "codex_app_server_client"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

View File

@@ -7,7 +7,6 @@ license.workspace = true
[lib]
name = "codex_app_server_protocol"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

View File

@@ -2091,18 +2091,8 @@
],
"type": "object"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginShareUpdateTargetsParams": {
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -2114,7 +2104,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],
@@ -3223,6 +3212,24 @@
],
"type": "object"
},
"SkillsListExtraRootsForCwd": {
"properties": {
"cwd": {
"type": "string"
},
"extraUserRoots": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"cwd",
"extraUserRoots"
],
"type": "object"
},
"SkillsListParams": {
"properties": {
"cwds": {
@@ -3235,6 +3242,17 @@
"forceReload": {
"description": "When true, bypass the skills cache and re-scan skills from disk.",
"type": "boolean"
},
"perCwdExtraUserRoots": {
"default": null,
"description": "Optional per-cwd extra roots to scan as user-scoped skills.",
"items": {
"$ref": "#/definitions/SkillsListExtraRootsForCwd"
},
"type": [
"array",
"null"
]
}
},
"type": "object"
@@ -4078,31 +4096,6 @@
],
"type": "object"
},
"TurnItemsView": {
"oneOf": [
{
"description": "`items` was not loaded for this turn. The field is intentionally empty.",
"enum": [
"notLoaded"
],
"type": "string"
},
{
"description": "`items` contains only a display summary for this turn.",
"enum": [
"summary"
],
"type": "string"
},
{
"description": "`items` contains every ThreadItem available from persisted app-server history for this turn.",
"enum": [
"full"
],
"type": "string"
}
]
},
"TurnStartParams": {
"properties": {
"approvalPolicy": {

View File

@@ -593,11 +593,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -607,7 +602,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -18,11 +18,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -32,7 +27,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -297,11 +297,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -313,7 +308,6 @@
"cwd",
"itemId",
"permissions",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -1963,11 +1963,6 @@
"action": {
"$ref": "#/definitions/GuardianApprovalReviewAction"
},
"completedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review completed.",
"format": "int64",
"type": "integer"
},
"decisionSource": {
"$ref": "#/definitions/AutoReviewDecisionSource"
},
@@ -1978,11 +1973,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -1999,11 +1989,9 @@
},
"required": [
"action",
"completedAtMs",
"decisionSource",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -2022,11 +2010,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -2045,7 +2028,6 @@
"action",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -2119,6 +2101,14 @@
},
"status": {
"$ref": "#/definitions/McpServerStartupState"
},
"threadId": {
"default": null,
"description": "Identifier of the thread whose `McpConnectionManager` produced this update. Always set on notifications emitted by current servers; remains `Option` so legacy clients that omit the field still deserialise and route as a global event.",
"type": [
"string",
"null"
]
}
},
"required": [

View File

@@ -417,11 +417,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -431,7 +426,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -604,11 +598,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -618,7 +607,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -1599,11 +1587,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -1615,7 +1598,6 @@
"cwd",
"itemId",
"permissions",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -2146,11 +2146,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -2160,7 +2155,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -2417,11 +2411,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -2431,7 +2420,6 @@
},
"required": [
"itemId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -3603,11 +3591,6 @@
"null"
]
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this approval request started.",
"format": "int64",
"type": "integer"
},
"threadId": {
"type": "string"
},
@@ -3619,7 +3602,6 @@
"cwd",
"itemId",
"permissions",
"startedAtMs",
"threadId",
"turnId"
],
@@ -9896,11 +9878,6 @@
"action": {
"$ref": "#/definitions/v2/GuardianApprovalReviewAction"
},
"completedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review completed.",
"format": "int64",
"type": "integer"
},
"decisionSource": {
"$ref": "#/definitions/v2/AutoReviewDecisionSource"
},
@@ -9911,11 +9888,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -9932,11 +9904,9 @@
},
"required": [
"action",
"completedAtMs",
"decisionSource",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -9957,11 +9927,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -9980,7 +9945,6 @@
"action",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -10767,6 +10731,14 @@
},
"status": {
"$ref": "#/definitions/v2/McpServerStartupState"
},
"threadId": {
"default": null,
"description": "Identifier of the thread whose `McpConnectionManager` produced this update. Always set on notifications emitted by current servers; remains `Option` so legacy clients that omit the field still deserialise and route as a global event.",
"type": [
"string",
"null"
]
}
},
"required": [
@@ -12414,19 +12386,9 @@
],
"type": "object"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginShareUpdateTargetsParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"discoverability": {
"$ref": "#/definitions/v2/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -12438,7 +12400,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],
@@ -12448,9 +12409,6 @@
"PluginShareUpdateTargetsResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"discoverability": {
"$ref": "#/definitions/v2/PluginShareDiscoverability"
},
"principals": {
"items": {
"$ref": "#/definitions/v2/PluginSharePrincipal"
@@ -12459,7 +12417,6 @@
}
},
"required": [
"discoverability",
"principals"
],
"title": "PluginShareUpdateTargetsResponse",
@@ -14781,6 +14738,24 @@
],
"type": "object"
},
"SkillsListExtraRootsForCwd": {
"properties": {
"cwd": {
"type": "string"
},
"extraUserRoots": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"cwd",
"extraUserRoots"
],
"type": "object"
},
"SkillsListParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
@@ -14794,6 +14769,17 @@
"forceReload": {
"description": "When true, bypass the skills cache and re-scan skills from disk.",
"type": "boolean"
},
"perCwdExtraUserRoots": {
"default": null,
"description": "Optional per-cwd extra roots to scan as user-scoped skills.",
"items": {
"$ref": "#/definitions/v2/SkillsListExtraRootsForCwd"
},
"type": [
"array",
"null"
]
}
},
"title": "SkillsListParams",

View File

@@ -6489,11 +6489,6 @@
"action": {
"$ref": "#/definitions/GuardianApprovalReviewAction"
},
"completedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review completed.",
"format": "int64",
"type": "integer"
},
"decisionSource": {
"$ref": "#/definitions/AutoReviewDecisionSource"
},
@@ -6504,11 +6499,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -6525,11 +6515,9 @@
},
"required": [
"action",
"completedAtMs",
"decisionSource",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -6550,11 +6538,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -6573,7 +6556,6 @@
"action",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],
@@ -7360,6 +7342,14 @@
},
"status": {
"$ref": "#/definitions/McpServerStartupState"
},
"threadId": {
"default": null,
"description": "Identifier of the thread whose `McpConnectionManager` produced this update. Always set on notifications emitted by current servers; remains `Option` so legacy clients that omit the field still deserialise and route as a global event.",
"type": [
"string",
"null"
]
}
},
"required": [
@@ -9007,19 +8997,9 @@
],
"type": "object"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginShareUpdateTargetsParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -9031,7 +9011,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],
@@ -9041,9 +9020,6 @@
"PluginShareUpdateTargetsResponse": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareDiscoverability"
},
"principals": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
@@ -9052,7 +9028,6 @@
}
},
"required": [
"discoverability",
"principals"
],
"title": "PluginShareUpdateTargetsResponse",
@@ -12649,6 +12624,24 @@
],
"type": "object"
},
"SkillsListExtraRootsForCwd": {
"properties": {
"cwd": {
"type": "string"
},
"extraUserRoots": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"cwd",
"extraUserRoots"
],
"type": "object"
},
"SkillsListParams": {
"$schema": "http://json-schema.org/draft-07/schema#",
"properties": {
@@ -12662,6 +12655,17 @@
"forceReload": {
"description": "When true, bypass the skills cache and re-scan skills from disk.",
"type": "boolean"
},
"perCwdExtraUserRoots": {
"default": null,
"description": "Optional per-cwd extra roots to scan as user-scoped skills.",
"items": {
"$ref": "#/definitions/SkillsListExtraRootsForCwd"
},
"type": [
"array",
"null"
]
}
},
"title": "SkillsListParams",

View File

@@ -574,11 +574,6 @@
"action": {
"$ref": "#/definitions/GuardianApprovalReviewAction"
},
"completedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review completed.",
"format": "int64",
"type": "integer"
},
"decisionSource": {
"$ref": "#/definitions/AutoReviewDecisionSource"
},
@@ -589,11 +584,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -610,11 +600,9 @@
},
"required": [
"action",
"completedAtMs",
"decisionSource",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -574,11 +574,6 @@
"description": "Stable identifier for this review.",
"type": "string"
},
"startedAtMs": {
"description": "Unix timestamp (in milliseconds) when this review started.",
"format": "int64",
"type": "integer"
},
"targetItemId": {
"description": "Identifier for the reviewed item or tool call when one exists.\n\nIn most cases, one review maps to one target item. The exceptions are - execve reviews, where a single command may contain multiple execve calls to review (only possible when using the shell_zsh_fork feature) - network policy reviews, where there is no target item\n\nA network call is triggered by a CommandExecution item, so having a target_item_id set to the CommandExecution item would be misleading because the review is about the network call, not the command execution. Therefore, target_item_id is set to None for network policy reviews.",
"type": [
@@ -597,7 +592,6 @@
"action",
"review",
"reviewId",
"startedAtMs",
"threadId",
"turnId"
],

View File

@@ -23,6 +23,14 @@
},
"status": {
"$ref": "#/definitions/McpServerStartupState"
},
"threadId": {
"default": null,
"description": "Identifier of the thread whose `McpConnectionManager` produced this update. Always set on notifications emitted by current servers; remains `Option` so legacy clients that omit the field still deserialise and route as a global event.",
"type": [
"string",
"null"
]
}
},
"required": [

View File

@@ -23,19 +23,9 @@
"principalType"
],
"type": "object"
},
"PluginShareUpdateDiscoverability": {
"enum": [
"UNLISTED",
"PRIVATE"
],
"type": "string"
}
},
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareUpdateDiscoverability"
},
"remotePluginId": {
"type": "string"
},
@@ -47,7 +37,6 @@
}
},
"required": [
"discoverability",
"remotePluginId",
"shareTargets"
],

View File

@@ -1,14 +1,6 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"PluginShareDiscoverability": {
"enum": [
"LISTED",
"UNLISTED",
"PRIVATE"
],
"type": "string"
},
"PluginSharePrincipal": {
"properties": {
"name": {
@@ -38,9 +30,6 @@
}
},
"properties": {
"discoverability": {
"$ref": "#/definitions/PluginShareDiscoverability"
},
"principals": {
"items": {
"$ref": "#/definitions/PluginSharePrincipal"
@@ -49,7 +38,6 @@
}
},
"required": [
"discoverability",
"principals"
],
"title": "PluginShareUpdateTargetsResponse",

View File

@@ -1,5 +1,25 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"definitions": {
"SkillsListExtraRootsForCwd": {
"properties": {
"cwd": {
"type": "string"
},
"extraUserRoots": {
"items": {
"type": "string"
},
"type": "array"
}
},
"required": [
"cwd",
"extraUserRoots"
],
"type": "object"
}
},
"properties": {
"cwds": {
"description": "When empty, defaults to the current session working directory.",
@@ -11,6 +31,17 @@
"forceReload": {
"description": "When true, bypass the skills cache and re-scan skills from disk.",
"type": "boolean"
},
"perCwdExtraUserRoots": {
"default": null,
"description": "Optional per-cwd extra roots to scan as user-scoped skills.",
"items": {
"$ref": "#/definitions/SkillsListExtraRootsForCwd"
},
"type": [
"array",
"null"
]
}
},
"title": "SkillsListParams",

View File

@@ -8,9 +8,6 @@ import type { NetworkApprovalContext } from "./NetworkApprovalContext";
import type { NetworkPolicyAmendment } from "./NetworkPolicyAmendment";
export type CommandExecutionRequestApprovalParams = {threadId: string, turnId: string, itemId: string, /**
* Unix timestamp (in milliseconds) when this approval request started.
*/
startedAtMs: number, /**
* Unique identifier for this specific approval callback.
*
* For regular shell/unified_exec approvals, this is null.

View File

@@ -3,10 +3,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type FileChangeRequestApprovalParams = { threadId: string, turnId: string, itemId: string,
/**
* Unix timestamp (in milliseconds) when this approval request started.
*/
startedAtMs: number,
/**
* Optional explanatory reason (e.g. request for extra write access).
*/

View File

@@ -10,14 +10,6 @@ import type { GuardianApprovalReviewAction } from "./GuardianApprovalReviewActio
* shape is expected to change soon.
*/
export type ItemGuardianApprovalReviewCompletedNotification = { threadId: string, turnId: string,
/**
* Unix timestamp (in milliseconds) when this review started.
*/
startedAtMs: number,
/**
* Unix timestamp (in milliseconds) when this review completed.
*/
completedAtMs: number,
/**
* Stable identifier for this review.
*/

View File

@@ -9,10 +9,6 @@ import type { GuardianApprovalReviewAction } from "./GuardianApprovalReviewActio
* shape is expected to change soon.
*/
export type ItemGuardianApprovalReviewStartedNotification = { threadId: string, turnId: string,
/**
* Unix timestamp (in milliseconds) when this review started.
*/
startedAtMs: number,
/**
* Stable identifier for this review.
*/

View File

@@ -3,4 +3,11 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { McpServerStartupState } from "./McpServerStartupState";
export type McpServerStatusUpdatedNotification = { name: string, status: McpServerStartupState, error: string | null, };
export type McpServerStatusUpdatedNotification = {
/**
* Identifier of the thread whose `McpConnectionManager` produced this
* update. Always set on notifications emitted by current servers;
* remains `Option` so legacy clients that omit the field still
* deserialise and route as a global event.
*/
threadId: string | null, name: string, status: McpServerStartupState, error: string | null, };

View File

@@ -4,8 +4,4 @@
import type { AbsolutePathBuf } from "../AbsolutePathBuf";
import type { RequestPermissionProfile } from "./RequestPermissionProfile";
export type PermissionsRequestApprovalParams = { threadId: string, turnId: string, itemId: string,
/**
* Unix timestamp (in milliseconds) when this approval request started.
*/
startedAtMs: number, cwd: AbsolutePathBuf, reason: string | null, permissions: RequestPermissionProfile, };
export type PermissionsRequestApprovalParams = { threadId: string, turnId: string, itemId: string, cwd: AbsolutePathBuf, reason: string | null, permissions: RequestPermissionProfile, };

View File

@@ -2,6 +2,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PluginShareTarget } from "./PluginShareTarget";
import type { PluginShareUpdateDiscoverability } from "./PluginShareUpdateDiscoverability";
export type PluginShareUpdateTargetsParams = { remotePluginId: string, discoverability: PluginShareUpdateDiscoverability, shareTargets: Array<PluginShareTarget>, };
export type PluginShareUpdateTargetsParams = { remotePluginId: string, shareTargets: Array<PluginShareTarget>, };

View File

@@ -1,7 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { PluginShareDiscoverability } from "./PluginShareDiscoverability";
import type { PluginSharePrincipal } from "./PluginSharePrincipal";
export type PluginShareUpdateTargetsResponse = { principals: Array<PluginSharePrincipal>, discoverability: PluginShareDiscoverability, };
export type PluginShareUpdateTargetsResponse = { principals: Array<PluginSharePrincipal>, };

View File

@@ -2,4 +2,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type PluginShareUpdateDiscoverability = "UNLISTED" | "PRIVATE";
export type SkillsListExtraRootsForCwd = { cwd: string, extraUserRoots: Array<string>, };

View File

@@ -1,6 +1,7 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SkillsListExtraRootsForCwd } from "./SkillsListExtraRootsForCwd";
export type SkillsListParams = {
/**
@@ -10,4 +11,8 @@ cwds?: Array<string>,
/**
* When true, bypass the skills cache and re-scan skills from disk.
*/
forceReload?: boolean, };
forceReload?: boolean,
/**
* Optional per-cwd extra roots to scan as user-scoped skills.
*/
perCwdExtraUserRoots?: Array<SkillsListExtraRootsForCwd> | null, };

View File

@@ -287,7 +287,6 @@ export type { PluginSharePrincipalType } from "./PluginSharePrincipalType";
export type { PluginShareSaveParams } from "./PluginShareSaveParams";
export type { PluginShareSaveResponse } from "./PluginShareSaveResponse";
export type { PluginShareTarget } from "./PluginShareTarget";
export type { PluginShareUpdateDiscoverability } from "./PluginShareUpdateDiscoverability";
export type { PluginShareUpdateTargetsParams } from "./PluginShareUpdateTargetsParams";
export type { PluginShareUpdateTargetsResponse } from "./PluginShareUpdateTargetsResponse";
export type { PluginSkillReadParams } from "./PluginSkillReadParams";
@@ -337,6 +336,7 @@ export type { SkillsChangedNotification } from "./SkillsChangedNotification";
export type { SkillsConfigWriteParams } from "./SkillsConfigWriteParams";
export type { SkillsConfigWriteResponse } from "./SkillsConfigWriteResponse";
export type { SkillsListEntry } from "./SkillsListEntry";
export type { SkillsListExtraRootsForCwd } from "./SkillsListExtraRootsForCwd";
export type { SkillsListParams } from "./SkillsListParams";
export type { SkillsListResponse } from "./SkillsListResponse";
export type { SortDirection } from "./SortDirection";

View File

@@ -581,13 +581,6 @@ client_request_definitions! {
serialization: None,
response: v2::ThreadTurnsListResponse,
},
#[experimental("thread/turns/items/list")]
ThreadTurnsItemsList => "thread/turns/items/list" {
params: v2::ThreadTurnsItemsListParams,
// Explicitly concurrent: this primarily reads append-only rollout storage.
serialization: None,
response: v2::ThreadTurnsItemsListResponse,
},
/// Append raw Responses API items to the thread history without starting a user turn.
ThreadInjectItems => "thread/inject_items" {
params: v2::ThreadInjectItemsParams,
@@ -1656,6 +1649,7 @@ mod tests {
params: v2::SkillsListParams {
cwds: Vec::new(),
force_reload: false,
per_cwd_extra_user_roots: None,
},
};
assert_eq!(
@@ -1849,23 +1843,10 @@ mod tests {
cursor: None,
limit: None,
sort_direction: None,
items_view: None,
},
};
assert_eq!(thread_turns_list.serialization_scope(), None);
let thread_turns_items_list = ClientRequest::ThreadTurnsItemsList {
request_id: request_id(),
params: v2::ThreadTurnsItemsListParams {
thread_id: "thread-1".to_string(),
turn_id: "turn-1".to_string(),
cursor: None,
limit: None,
sort_direction: None,
},
};
assert_eq!(thread_turns_items_list.serialization_scope(), None);
let mcp_resource_read = ClientRequest::McpResourceRead {
request_id: request_id(),
params: v2::McpResourceReadParams {
@@ -2966,7 +2947,6 @@ mod tests {
thread_id: "thr_123".to_string(),
turn_id: "turn_123".to_string(),
item_id: "call_123".to_string(),
started_at_ms: 0,
approval_id: None,
reason: None,
network_approval_context: None,

View File

@@ -243,7 +243,6 @@ pub fn guardian_auto_approval_review_notification(
thread_id: conversation_id.to_string(),
turn_id,
review_id: assessment.id.clone(),
started_at_ms: assessment.started_at_ms,
target_item_id: assessment.target_item_id.clone(),
review,
action,
@@ -259,10 +258,6 @@ pub fn guardian_auto_approval_review_notification(
thread_id: conversation_id.to_string(),
turn_id,
review_id: assessment.id.clone(),
started_at_ms: assessment.started_at_ms,
completed_at_ms: assessment
.completed_at_ms
.unwrap_or(assessment.started_at_ms),
target_item_id: assessment.target_item_id.clone(),
decision_source: assessment
.decision_source

View File

@@ -2143,8 +2143,6 @@ mod tests {
id: "review-guardian-exec".into(),
target_item_id: Some("guardian-exec".into()),
turn_id: "turn-1".into(),
started_at_ms: 1_000,
completed_at_ms: None,
status: GuardianAssessmentStatus::InProgress,
risk_level: None,
user_authorization: None,
@@ -2162,8 +2160,6 @@ mod tests {
id: "review-guardian-exec".into(),
target_item_id: Some("guardian-exec".into()),
turn_id: "turn-1".into(),
started_at_ms: 1_000,
completed_at_ms: Some(1_042),
status: GuardianAssessmentStatus::Denied,
risk_level: Some(codex_protocol::protocol::GuardianRiskLevel::High),
user_authorization: Some(codex_protocol::protocol::GuardianUserAuthorization::Low),
@@ -2226,8 +2222,6 @@ mod tests {
id: "review-guardian-execve".into(),
target_item_id: Some("guardian-execve".into()),
turn_id: "turn-1".into(),
started_at_ms: 2_000,
completed_at_ms: None,
status: GuardianAssessmentStatus::InProgress,
risk_level: None,
user_authorization: None,
@@ -2531,7 +2525,6 @@ mod tests {
EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
call_id: "patch-call".into(),
turn_id: turn_id.to_string(),
started_at_ms: 0,
changes: [(
PathBuf::from("README.md"),
codex_protocol::protocol::FileChange::Add {

View File

@@ -1073,9 +1073,6 @@ pub struct ItemStartedNotification {
pub struct ItemGuardianApprovalReviewStartedNotification {
pub thread_id: String,
pub turn_id: String,
/// Unix timestamp (in milliseconds) when this review started.
#[ts(type = "number")]
pub started_at_ms: i64,
/// Stable identifier for this review.
pub review_id: String,
/// Identifier for the reviewed item or tool call when one exists.
@@ -1102,12 +1099,6 @@ pub struct ItemGuardianApprovalReviewStartedNotification {
pub struct ItemGuardianApprovalReviewCompletedNotification {
pub thread_id: String,
pub turn_id: String,
/// Unix timestamp (in milliseconds) when this review started.
#[ts(type = "number")]
pub started_at_ms: i64,
/// Unix timestamp (in milliseconds) when this review completed.
#[ts(type = "number")]
pub completed_at_ms: i64,
/// Stable identifier for this review.
pub review_id: String,
/// Identifier for the reviewed item or tool call when one exists.
@@ -1257,9 +1248,6 @@ pub struct CommandExecutionRequestApprovalParams {
pub thread_id: String,
pub turn_id: String,
pub item_id: String,
/// Unix timestamp (in milliseconds) when this approval request started.
#[ts(type = "number")]
pub started_at_ms: i64,
/// Unique identifier for this specific approval callback.
///
/// For regular shell/unified_exec approvals, this is null.
@@ -1333,9 +1321,6 @@ pub struct FileChangeRequestApprovalParams {
pub thread_id: String,
pub turn_id: String,
pub item_id: String,
/// Unix timestamp (in milliseconds) when this approval request started.
#[ts(type = "number")]
pub started_at_ms: i64,
/// Optional explanatory reason (e.g. request for extra write access).
#[ts(optional = nullable)]
pub reason: Option<String>,

View File

@@ -231,6 +231,12 @@ pub enum McpServerStartupState {
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct McpServerStatusUpdatedNotification {
/// Identifier of the thread whose `McpConnectionManager` produced this
/// update. Always set on notifications emitted by current servers;
/// remains `Option` so legacy clients that omit the field still
/// deserialise and route as a global event.
#[serde(default)]
pub thread_id: Option<String>,
pub name: String,
pub status: McpServerStartupState,
pub error: Option<String>,

View File

@@ -826,9 +826,6 @@ pub struct PermissionsRequestApprovalParams {
pub thread_id: String,
pub turn_id: String,
pub item_id: String,
/// Unix timestamp (in milliseconds) when this approval request started.
#[ts(type = "number")]
pub started_at_ms: i64,
pub cwd: AbsolutePathBuf,
pub reason: Option<String>,
pub permissions: RequestPermissionProfile,

View File

@@ -15,7 +15,7 @@ use serde::Serialize;
use std::path::PathBuf;
use ts_rs::TS;
#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, JsonSchema, TS)]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SkillsListParams {
@@ -26,6 +26,19 @@ pub struct SkillsListParams {
/// When true, bypass the skills cache and re-scan skills from disk.
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub force_reload: bool,
/// Optional per-cwd extra roots to scan as user-scoped skills.
#[serde(default)]
#[ts(optional = nullable)]
pub per_cwd_extra_user_roots: Option<Vec<SkillsListExtraRootsForCwd>>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct SkillsListExtraRootsForCwd {
pub cwd: PathBuf,
pub extra_user_roots: Vec<PathBuf>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -218,7 +231,6 @@ pub struct PluginShareSaveResponse {
#[ts(export_to = "v2/")]
pub struct PluginShareUpdateTargetsParams {
pub remote_plugin_id: String,
pub discoverability: PluginShareUpdateDiscoverability,
pub share_targets: Vec<PluginShareTarget>,
}
@@ -227,7 +239,6 @@ pub struct PluginShareUpdateTargetsParams {
#[ts(export_to = "v2/")]
pub struct PluginShareUpdateTargetsResponse {
pub principals: Vec<PluginSharePrincipal>,
pub discoverability: PluginShareDiscoverability,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -277,17 +288,6 @@ pub enum PluginShareDiscoverability {
Private,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[ts(export_to = "v2/")]
pub enum PluginShareUpdateDiscoverability {
#[serde(rename = "UNLISTED")]
#[ts(rename = "UNLISTED")]
Unlisted,
#[serde(rename = "PRIVATE")]
#[ts(rename = "PRIVATE")]
Private,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
#[ts(export_to = "v2/")]
pub enum PluginSharePrincipalType {

View File

@@ -95,59 +95,6 @@ fn turn_defaults_legacy_missing_items_view_to_full() {
assert_eq!(turn.items_view, TurnItemsView::Full);
}
#[test]
fn thread_turns_list_params_accepts_items_view() {
let params = serde_json::from_value::<ThreadTurnsListParams>(json!({
"threadId": "thr_123",
"cursor": null,
"limit": 25,
"sortDirection": "desc",
"itemsView": "notLoaded",
}))
.expect("thread turns list params should deserialize");
assert_eq!(params.thread_id, "thr_123");
assert_eq!(params.items_view, Some(TurnItemsView::NotLoaded));
}
#[test]
fn thread_turns_items_list_round_trips() {
let params = ThreadTurnsItemsListParams {
thread_id: "thr_123".to_string(),
turn_id: "turn_456".to_string(),
cursor: Some("cursor_1".to_string()),
limit: Some(50),
sort_direction: Some(SortDirection::Asc),
};
assert_eq!(
serde_json::to_value(&params).expect("serialize params"),
json!({
"threadId": "thr_123",
"turnId": "turn_456",
"cursor": "cursor_1",
"limit": 50,
"sortDirection": "asc",
})
);
let response = ThreadTurnsItemsListResponse {
data: vec![ThreadItem::ContextCompaction {
id: "item_1".to_string(),
}],
next_cursor: None,
backwards_cursor: Some("cursor_0".to_string()),
};
assert_eq!(
serde_json::to_value(&response).expect("serialize response"),
json!({
"data": [{"type": "contextCompaction", "id": "item_1"}],
"nextCursor": null,
"backwardsCursor": "cursor_0",
})
);
}
#[test]
fn thread_list_params_accepts_single_cwd() {
let params = serde_json::from_value::<ThreadListParams>(json!({
@@ -277,7 +224,6 @@ fn command_execution_request_approval_rejects_relative_additional_permission_pat
"threadId": "thr_123",
"turnId": "turn_123",
"itemId": "call_123",
"startedAtMs": 1,
"command": "cat file",
"cwd": absolute_path_string("tmp"),
"commandActions": null,
@@ -318,7 +264,6 @@ fn permissions_request_approval_uses_request_permission_profile() {
"threadId": "thr_123",
"turnId": "turn_123",
"itemId": "call_123",
"startedAtMs": 1,
"cwd": absolute_path_string("repo"),
"reason": "Select a workspace root",
"permissions": {
@@ -381,7 +326,6 @@ fn permissions_request_approval_rejects_macos_permissions() {
"threadId": "thr_123",
"turnId": "turn_123",
"itemId": "call_123",
"startedAtMs": 1,
"cwd": absolute_path_string("repo"),
"reason": "Select a workspace root",
"permissions": {
@@ -2508,20 +2452,33 @@ fn skills_list_params_serialization_uses_force_reload() {
serde_json::to_value(SkillsListParams {
cwds: Vec::new(),
force_reload: false,
per_cwd_extra_user_roots: None,
})
.unwrap(),
json!({}),
json!({
"perCwdExtraUserRoots": null,
}),
);
assert_eq!(
serde_json::to_value(SkillsListParams {
cwds: vec![PathBuf::from("/repo")],
force_reload: true,
per_cwd_extra_user_roots: Some(vec![SkillsListExtraRootsForCwd {
cwd: PathBuf::from("/repo"),
extra_user_roots: vec![PathBuf::from("/shared/skills"), PathBuf::from("/tmp/x")],
}]),
})
.unwrap(),
json!({
"cwds": ["/repo"],
"forceReload": true,
"perCwdExtraUserRoots": [
{
"cwd": "/repo",
"extraUserRoots": ["/shared/skills", "/tmp/x"],
}
],
}),
);
}
@@ -2936,7 +2893,6 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
assert_eq!(
serde_json::to_value(PluginShareUpdateTargetsParams {
remote_plugin_id: "plugins~Plugin_00000000000000000000000000000000".to_string(),
discoverability: PluginShareUpdateDiscoverability::Unlisted,
share_targets: vec![PluginShareTarget {
principal_type: PluginSharePrincipalType::Group,
principal_id: "group-1".to_string(),
@@ -2945,7 +2901,6 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
.unwrap(),
json!({
"remotePluginId": "plugins~Plugin_00000000000000000000000000000000",
"discoverability": "UNLISTED",
"shareTargets": [{
"principalType": "group",
"principalId": "group-1",
@@ -2960,7 +2915,6 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
principal_id: "user-1".to_string(),
name: "Gavin".to_string(),
}],
discoverability: PluginShareDiscoverability::Unlisted,
})
.unwrap(),
json!({
@@ -2969,7 +2923,6 @@ fn plugin_share_params_and_response_serialization_use_camel_case_fields() {
"principalId": "user-1",
"name": "Gavin",
}],
"discoverability": "UNLISTED",
}),
);

View File

@@ -6,11 +6,9 @@ use super::PermissionProfileSelectionParams;
use super::SandboxMode;
use super::SandboxPolicy;
use super::Thread;
use super::ThreadItem;
use super::ThreadSource;
use super::Turn;
use super::TurnEnvironmentParams;
use super::TurnItemsView;
use super::shared::v2_enum_from_core;
use codex_experimental_api_macros::ExperimentalApi;
use codex_protocol::config_types::Personality;
@@ -1007,9 +1005,6 @@ pub struct ThreadTurnsListParams {
/// Optional turn pagination direction; defaults to descending.
#[ts(optional = nullable)]
pub sort_direction: Option<SortDirection>,
/// How much item detail to include for each returned turn; defaults to summary.
#[ts(optional = nullable)]
pub items_view: Option<TurnItemsView>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -1027,36 +1022,6 @@ pub struct ThreadTurnsListResponse {
pub backwards_cursor: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadTurnsItemsListParams {
pub thread_id: String,
pub turn_id: String,
/// Opaque cursor to pass to the next call to continue after the last item.
#[ts(optional = nullable)]
pub cursor: Option<String>,
/// Optional item page size.
#[ts(optional = nullable)]
pub limit: Option<u32>,
/// Optional item pagination direction; defaults to ascending.
#[ts(optional = nullable)]
pub sort_direction: Option<SortDirection>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
pub struct ThreadTurnsItemsListResponse {
pub data: Vec<ThreadItem>,
/// Opaque cursor to pass to the next call to continue after the last item.
/// if None, there are no more items to return.
pub next_cursor: Option<String>,
/// Opaque cursor to pass as `cursor` when reversing `sortDirection`.
/// This is only populated when the page contains at least one item.
pub backwards_cursor: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]

View File

@@ -23,7 +23,3 @@ tracing-subscriber = { workspace = true }
tungstenite = { workspace = true }
url = { workspace = true }
uuid = { workspace = true, features = ["v4"] }
[lib]
test = false
doctest = false

View File

@@ -1945,7 +1945,6 @@ impl CodexClient {
thread_id,
turn_id,
item_id,
started_at_ms: _,
approval_id,
reason,
network_approval_context,
@@ -2021,7 +2020,6 @@ impl CodexClient {
thread_id,
turn_id,
item_id,
started_at_ms: _,
reason,
grant_root,
} = params;

View File

@@ -7,7 +7,6 @@ license.workspace = true
[lib]
name = "codex_app_server_transport"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

View File

@@ -15,7 +15,6 @@ path = "src/bin/notify_capture.rs"
[lib]
name = "codex_app_server"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

View File

@@ -149,8 +149,7 @@ Example with notification opt-out:
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders`, `sourceKinds`, `archived`, `cwd`, and `searchTerm` filters. Each returned `thread` includes `status` (`ThreadStatus`), defaulting to `notLoaded` when the thread is not currently loaded.
- `thread/loaded/list` — list the thread ids currently loaded in memory.
- `thread/read` — read a stored thread by id without resuming it; optionally include turns via `includeTurns`. The returned `thread` includes `status` (`ThreadStatus`), defaulting to `notLoaded` when the thread is not currently loaded.
- `thread/turns/list` — experimental; page through a stored threads turn history without resuming it; supports cursor-based pagination with `sortDirection`, `itemsView`, `nextCursor`, and `backwardsCursor`.
- `thread/turns/items/list` — experimental; reserved for paging full items for one turn. The API shape is present, but app-server currently returns an unsupported-method JSON-RPC error.
- `thread/turns/list` — experimental; page through a stored threads turn history without resuming it; supports cursor-based pagination with `sortDirection`, `nextCursor`, and `backwardsCursor`.
- `thread/metadata/update` — patch stored thread metadata in sqlite; currently supports updating persisted `gitInfo` fields and returns the refreshed `thread`.
- `thread/memoryMode/set` — experimental; set a threads persisted memory eligibility to `"enabled"` or `"disabled"` for either a loaded thread or a stored rollout; returns `{}` on success.
- `memory/reset` — experimental; clear the current `CODEX_HOME/memories` directory and reset persisted memory stage data in sqlite while preserving existing thread memory modes; returns `{}` on success.
@@ -425,14 +424,13 @@ Use `thread/read` to fetch a stored thread by id without resuming it. Pass `incl
Use `thread/turns/list` with `capabilities.experimentalApi = true` to page a stored threads turn history without resuming it. By default, results are sorted descending so clients can start at the present and fetch older turns with `nextCursor`. The response also includes `backwardsCursor`; pass it as `cursor` on a later request with `sortDirection: "asc"` to fetch turns newer than the first item from the earlier page.
Every returned `Turn` includes `itemsView`, which tells clients whether the `items` array was omitted intentionally (`notLoaded`), contains only summary items (`summary`), or contains every item available from persisted app-server history (`full`). Pass `itemsView` to choose the returned detail level; omitted `itemsView` defaults to `"summary"`.
Every returned `Turn` includes `itemsView`, which tells clients whether the `items` array was omitted intentionally (`notLoaded`), contains only summary items (`summary`), or contains every item available from persisted app-server history (`full`). Current `thread/turns/list` responses return `full` turns.
```json
{ "method": "thread/turns/list", "id": 24, "params": {
"threadId": "thr_123",
"limit": 50,
"sortDirection": "desc",
"itemsView": "summary"
"sortDirection": "desc"
} }
{ "id": 24, "result": {
"data": [ ... ],
@@ -441,19 +439,6 @@ Every returned `Turn` includes `itemsView`, which tells clients whether the `ite
} }
```
`thread/turns/items/list` is the planned hydration API for fetching full items for one turn:
```json
{ "method": "thread/turns/items/list", "id": 25, "params": {
"threadId": "thr_123",
"turnId": "turn_456",
"limit": 100,
"sortDirection": "asc"
} }
```
This method currently returns JSON-RPC `-32601` with message `thread/turns/items/list is not supported yet`.
### Example: Update stored thread metadata
Use `thread/metadata/update` to patch sqlite-backed metadata for a thread without resuming it. Today this supports persisted `gitInfo`; omitted fields are left unchanged, while explicit `null` clears a stored value.
@@ -1485,13 +1470,21 @@ $skill-creator Add a new skill for triaging flaky CI and include step-by-step us
```
Use `skills/list` to fetch the available skills (optionally scoped by `cwds`, with `forceReload`).
You can also add `perCwdExtraUserRoots` to scan additional absolute paths as `user` scope for specific `cwd` entries.
Entries whose `cwd` is not present in `cwds` are ignored.
`skills/list` might reuse a cached skills result per `cwd`; setting `forceReload` to `true` refreshes the result from disk.
The server also emits `skills/changed` notifications when watched local skill files change. Treat this as an invalidation signal and re-run `skills/list` with your current params when needed.
```json
{ "method": "skills/list", "id": 25, "params": {
"cwds": ["/Users/me/project", "/Users/me/other-project"],
"forceReload": true
"forceReload": true,
"perCwdExtraUserRoots": [
{
"cwd": "/Users/me/project",
"extraUserRoots": ["/Users/me/shared-skills"]
}
]
} }
{ "id": 25, "result": {
"data": [{

View File

@@ -217,6 +217,7 @@ pub(crate) async fn apply_bespoke_event_handling(
}
};
let notification = McpServerStatusUpdatedNotification {
thread_id: Some(conversation_id.to_string()),
name: update.server,
status,
error,
@@ -511,7 +512,6 @@ pub(crate) async fn apply_bespoke_event_handling(
thread_id: conversation_id.to_string(),
turn_id: event.turn_id.clone(),
item_id: item_id.clone(),
started_at_ms: event.started_at_ms,
reason: event.reason.clone(),
grant_root: event.grant_root.clone(),
};
@@ -543,7 +543,6 @@ pub(crate) async fn apply_bespoke_event_handling(
call_id,
approval_id,
turn_id,
started_at_ms,
command,
cwd,
reason,
@@ -617,7 +616,6 @@ pub(crate) async fn apply_bespoke_event_handling(
thread_id: conversation_id.to_string(),
turn_id: turn_id.clone(),
item_id: call_id.clone(),
started_at_ms,
approval_id: approval_id.clone(),
reason,
network_approval_context,
@@ -767,7 +765,6 @@ pub(crate) async fn apply_bespoke_event_handling(
thread_id: conversation_id.to_string(),
turn_id: request.turn_id.clone(),
item_id: request.call_id.clone(),
started_at_ms: request.started_at_ms,
cwd: request_cwd.clone(),
reason: request.reason,
permissions: request.permissions.into(),
@@ -2253,9 +2250,6 @@ mod tests {
id: format!("review-{id}"),
target_item_id: Some(id.to_string()),
turn_id: turn_id.to_string(),
started_at_ms: 1_000,
completed_at_ms: (!matches!(status, GuardianAssessmentStatus::InProgress))
.then_some(1_042),
status,
risk_level,
user_authorization,
@@ -2320,8 +2314,6 @@ mod tests {
id: "review-1".to_string(),
target_item_id: Some("item-1".to_string()),
turn_id: String::new(),
started_at_ms: 1_000,
completed_at_ms: None,
status: codex_protocol::protocol::GuardianAssessmentStatus::InProgress,
risk_level: None,
user_authorization: None,
@@ -2335,7 +2327,6 @@ mod tests {
ServerNotification::ItemGuardianApprovalReviewStarted(payload) => {
assert_eq!(payload.thread_id, conversation_id.to_string());
assert_eq!(payload.turn_id, "turn-from-event");
assert_eq!(payload.started_at_ms, 1_000);
assert_eq!(payload.review_id, "review-1");
assert_eq!(payload.target_item_id.as_deref(), Some("item-1"));
assert_eq!(
@@ -2366,8 +2357,6 @@ mod tests {
id: "review-2".to_string(),
target_item_id: Some("item-2".to_string()),
turn_id: "turn-from-assessment".to_string(),
started_at_ms: 1_000,
completed_at_ms: Some(1_042),
status: codex_protocol::protocol::GuardianAssessmentStatus::Denied,
risk_level: Some(codex_protocol::protocol::GuardianRiskLevel::High),
user_authorization: Some(codex_protocol::protocol::GuardianUserAuthorization::Low),
@@ -2383,8 +2372,6 @@ mod tests {
ServerNotification::ItemGuardianApprovalReviewCompleted(payload) => {
assert_eq!(payload.thread_id, conversation_id.to_string());
assert_eq!(payload.turn_id, "turn-from-assessment");
assert_eq!(payload.started_at_ms, 1_000);
assert_eq!(payload.completed_at_ms, 1_042);
assert_eq!(payload.review_id, "review-2");
assert_eq!(payload.target_item_id.as_deref(), Some("item-2"));
assert_eq!(payload.decision_source, AutoReviewDecisionSource::Agent);
@@ -2420,8 +2407,6 @@ mod tests {
id: "review-3".to_string(),
target_item_id: None,
turn_id: "turn-from-assessment".to_string(),
started_at_ms: 1_000,
completed_at_ms: Some(1_042),
status: codex_protocol::protocol::GuardianAssessmentStatus::Aborted,
risk_level: None,
user_authorization: None,

View File

@@ -1,7 +1,6 @@
use codex_app_server_protocol::JSONRPCErrorError;
pub(crate) const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
pub(crate) const METHOD_NOT_FOUND_ERROR_CODE: i64 = -32601;
pub const INVALID_PARAMS_ERROR_CODE: i64 = -32602;
pub(crate) const INTERNAL_ERROR_CODE: i64 = -32603;
pub(crate) const OVERLOADED_ERROR_CODE: i64 = -32001;
@@ -11,10 +10,6 @@ pub(crate) fn invalid_request(message: impl Into<String>) -> JSONRPCErrorError {
error(INVALID_REQUEST_ERROR_CODE, message)
}
pub(crate) fn method_not_found(message: impl Into<String>) -> JSONRPCErrorError {
error(METHOD_NOT_FOUND_ERROR_CODE, message)
}
pub(crate) fn invalid_params(message: impl Into<String>) -> JSONRPCErrorError {
error(INVALID_PARAMS_ERROR_CODE, message)
}

View File

@@ -1008,9 +1008,6 @@ impl MessageProcessor {
ClientRequest::ThreadTurnsList { params, .. } => {
self.thread_processor.thread_turns_list(params).await
}
ClientRequest::ThreadTurnsItemsList { params, .. } => {
self.thread_processor.thread_turns_items_list(params).await
}
ClientRequest::ThreadShellCommand { params, .. } => {
self.thread_processor
.thread_shell_command(&request_id, params)

View File

@@ -2,8 +2,6 @@ use std::collections::HashMap;
use std::sync::Arc;
use std::sync::atomic::AtomicI64;
use std::sync::atomic::Ordering;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
use codex_analytics::AnalyticsEventsClient;
use codex_app_server_protocol::ClientResponsePayload;
@@ -359,10 +357,8 @@ impl OutgoingMessageSender {
match entry {
Some((id, entry)) => {
let completed_at_ms = now_unix_timestamp_ms();
if let Ok(response) = entry.request.response_from_result(result.clone()) {
self.analytics_events_client
.track_server_response(completed_at_ms, response);
self.analytics_events_client.track_server_response(response);
}
if let Err(err) = entry.callback.send(Ok(result)) {
warn!("could not notify callback for {id:?} due to: {err:?}");
@@ -652,15 +648,6 @@ impl OutgoingMessageSender {
}
}
fn now_unix_timestamp_ms() -> u64 {
SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap_or_default()
.as_millis()
.try_into()
.unwrap_or_default()
}
#[cfg(test)]
mod tests {
use std::time::Duration;
@@ -916,7 +903,6 @@ mod tests {
thread_id: "thread-1".to_string(),
turn_id: "turn-1".to_string(),
item_id: "item-1".to_string(),
started_at_ms: 0,
approval_id: None,
reason: None,
network_approval_context: None,
@@ -1209,7 +1195,6 @@ mod tests {
thread_id: thread_id.to_string(),
turn_id: "turn-1".to_string(),
item_id: "call-2".to_string(),
started_at_ms: 0,
reason: None,
grant_root: None,
},

View File

@@ -124,7 +124,6 @@ use codex_app_server_protocol::PluginSharePrincipalType;
use codex_app_server_protocol::PluginShareSaveParams;
use codex_app_server_protocol::PluginShareSaveResponse;
use codex_app_server_protocol::PluginShareTarget;
use codex_app_server_protocol::PluginShareUpdateDiscoverability;
use codex_app_server_protocol::PluginShareUpdateTargetsParams;
use codex_app_server_protocol::PluginShareUpdateTargetsResponse;
use codex_app_server_protocol::PluginSkillReadParams;
@@ -216,7 +215,6 @@ use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStartedNotification;
use codex_app_server_protocol::ThreadStatus;
use codex_app_server_protocol::ThreadTurnsItemsListParams;
use codex_app_server_protocol::ThreadTurnsListParams;
use codex_app_server_protocol::ThreadTurnsListResponse;
use codex_app_server_protocol::ThreadUnarchiveParams;

View File

@@ -382,12 +382,44 @@ impl CatalogRequestProcessor {
&self,
params: SkillsListParams,
) -> Result<SkillsListResponse, JSONRPCErrorError> {
let SkillsListParams { cwds, force_reload } = params;
let SkillsListParams {
cwds,
force_reload,
per_cwd_extra_user_roots,
} = params;
let cwds = if cwds.is_empty() {
vec![self.config.cwd.to_path_buf()]
} else {
cwds
};
let cwd_set: HashSet<PathBuf> = cwds.iter().cloned().collect();
let mut extra_roots_by_cwd: HashMap<PathBuf, Vec<AbsolutePathBuf>> = HashMap::new();
for entry in per_cwd_extra_user_roots.unwrap_or_default() {
if !cwd_set.contains(&entry.cwd) {
warn!(
cwd = %entry.cwd.display(),
"ignoring per-cwd extra roots for cwd not present in skills/list cwds"
);
continue;
}
let mut valid_extra_roots = Vec::new();
for root in entry.extra_user_roots {
let root =
AbsolutePathBuf::from_absolute_path_checked(root.as_path()).map_err(|_| {
invalid_request(format!(
"skills/list perCwdExtraUserRoots extraUserRoots paths must be absolute: {}",
root.display()
))
})?;
valid_extra_roots.push(root);
}
extra_roots_by_cwd
.entry(entry.cwd)
.or_default()
.extend(valid_extra_roots);
}
let config = self.load_latest_config(/*fallback_cwd*/ None).await?;
let auth = self.auth_manager.auth().await;
@@ -404,6 +436,7 @@ impl CatalogRequestProcessor {
let mut data = futures::stream::iter(cwds.into_iter().enumerate())
.map(|(index, cwd)| {
let config = &config;
let extra_roots_by_cwd = &extra_roots_by_cwd;
let fs = fs.clone();
let plugins_manager = &plugins_manager;
let skills_manager = &skills_manager;
@@ -425,6 +458,9 @@ impl CatalogRequestProcessor {
);
}
};
let extra_roots = extra_roots_by_cwd
.get(&cwd)
.map_or(&[][..], std::vec::Vec::as_slice);
let effective_skill_roots = if workspace_codex_plugins_enabled {
let plugins_input = config.plugins_config_input();
plugins_manager
@@ -443,7 +479,12 @@ impl CatalogRequestProcessor {
config.bundled_skills_enabled(),
);
let outcome = skills_manager
.skills_for_cwd(&skills_input, force_reload, fs)
.skills_for_cwd_with_extra_user_roots(
&skills_input,
force_reload,
extra_roots,
fs,
)
.await;
let errors = errors_to_info(&outcome.errors);
let skills = skills_to_info(&outcome.skills, &outcome.disabled_paths);

View File

@@ -133,33 +133,6 @@ fn remote_plugin_share_discoverability(
}
}
fn remote_plugin_share_update_discoverability(
discoverability: PluginShareUpdateDiscoverability,
) -> codex_core_plugins::remote::RemotePluginShareUpdateDiscoverability {
match discoverability {
PluginShareUpdateDiscoverability::Unlisted => {
codex_core_plugins::remote::RemotePluginShareUpdateDiscoverability::Unlisted
}
PluginShareUpdateDiscoverability::Private => {
codex_core_plugins::remote::RemotePluginShareUpdateDiscoverability::Private
}
}
}
fn validate_client_plugin_share_targets(
targets: &[PluginShareTarget],
) -> Result<(), JSONRPCErrorError> {
if targets
.iter()
.any(|target| target.principal_type == PluginSharePrincipalType::Workspace)
{
return Err(invalid_request(
"shareTargets cannot include workspace principals; use discoverability UNLISTED for workspace link access",
));
}
Ok(())
}
fn remote_plugin_share_targets(
targets: Vec<PluginShareTarget>,
) -> Vec<codex_core_plugins::remote::RemotePluginShareTarget> {
@@ -756,17 +729,9 @@ impl PluginRequestProcessor {
}
if remote_plugin_id.is_some() && (discoverability.is_some() || share_targets.is_some()) {
return Err(invalid_request(
"discoverability and shareTargets are only supported when creating a plugin share; use plugin/share/updateTargets to update share settings",
"discoverability and shareTargets are only supported when creating a plugin share; use plugin/share/updateTargets to update share targets",
));
}
if discoverability == Some(PluginShareDiscoverability::Listed) {
return Err(invalid_request(
"discoverability LISTED is not supported for plugin/share/save; use UNLISTED or PRIVATE",
));
}
if let Some(share_targets) = share_targets.as_ref() {
validate_client_plugin_share_targets(share_targets)?;
}
let remote_plugin_service_config = RemotePluginServiceConfig {
chatgpt_base_url: config.chatgpt_base_url.clone(),
@@ -800,14 +765,11 @@ impl PluginRequestProcessor {
let (config, auth) = self.load_plugin_share_config_and_auth().await?;
let PluginShareUpdateTargetsParams {
remote_plugin_id,
discoverability,
share_targets,
} = params;
if remote_plugin_id.is_empty() || !is_valid_remote_plugin_id(&remote_plugin_id) {
return Err(invalid_request("invalid remote plugin id"));
}
validate_client_plugin_share_targets(&share_targets)?;
let requested_share_targets = share_targets.clone();
let remote_plugin_service_config = RemotePluginServiceConfig {
chatgpt_base_url: config.chatgpt_base_url.clone(),
@@ -817,7 +779,6 @@ impl PluginRequestProcessor {
auth.as_ref(),
&remote_plugin_id,
remote_plugin_share_targets(share_targets),
remote_plugin_share_update_discoverability(discoverability),
)
.await
.map_err(|err| {
@@ -829,14 +790,7 @@ impl PluginRequestProcessor {
.principals
.into_iter()
.map(plugin_share_principal_from_remote)
.filter(|principal| {
requested_share_targets.iter().any(|target| {
target.principal_type == principal.principal_type
&& target.principal_id == principal.principal_id
})
})
.collect(),
discoverability: remote_plugin_share_discoverability_to_info(result.discoverability),
})
}
@@ -1533,22 +1487,6 @@ fn remote_plugin_share_context_to_info(
}
}
fn remote_plugin_share_discoverability_to_info(
discoverability: codex_core_plugins::remote::RemotePluginShareDiscoverability,
) -> PluginShareDiscoverability {
match discoverability {
codex_core_plugins::remote::RemotePluginShareDiscoverability::Listed => {
PluginShareDiscoverability::Listed
}
codex_core_plugins::remote::RemotePluginShareDiscoverability::Unlisted => {
PluginShareDiscoverability::Unlisted
}
codex_core_plugins::remote::RemotePluginShareDiscoverability::Private => {
PluginShareDiscoverability::Private
}
}
}
fn remote_plugin_detail_to_info(
detail: RemoteCatalogPluginDetail,
apps: Vec<AppSummary>,

View File

@@ -1,5 +1,4 @@
use super::*;
use crate::error_code::method_not_found;
const THREAD_LIST_DEFAULT_LIMIT: usize = 25;
const THREAD_LIST_MAX_LIMIT: usize = 100;
@@ -592,15 +591,6 @@ impl ThreadRequestProcessor {
.map(|response| Some(response.into()))
}
pub(crate) async fn thread_turns_items_list(
&self,
_params: ThreadTurnsItemsListParams,
) -> Result<Option<ClientResponsePayload>, JSONRPCErrorError> {
Err(method_not_found(
"thread/turns/items/list is not supported yet",
))
}
pub(crate) async fn thread_shell_command(
&self,
request_id: &ConnectionRequestId,
@@ -2082,9 +2072,7 @@ impl ThreadRequestProcessor {
cursor,
limit,
sort_direction,
items_view,
} = params;
let items_view = items_view.unwrap_or(TurnItemsView::Summary);
let thread_uuid = ThreadId::from_string(&thread_id)
.map_err(|err| invalid_request(format!("invalid thread id: {err}")))?;
@@ -2113,7 +2101,7 @@ impl ThreadRequestProcessor {
} else {
None
};
let mut turns = reconstruct_thread_turns_for_turns_list(
let turns = reconstruct_thread_turns_for_turns_list(
&items,
self.thread_watch_manager
.loaded_status_for_thread(&thread_uuid.to_string())
@@ -2121,41 +2109,6 @@ impl ThreadRequestProcessor {
has_live_running_thread,
active_turn,
);
for turn in &mut turns {
match items_view {
TurnItemsView::NotLoaded => {
turn.items.clear();
turn.items_view = TurnItemsView::NotLoaded;
}
TurnItemsView::Summary => {
let first_user_message = turn
.items
.iter()
.find(|item| matches!(item, ThreadItem::UserMessage { .. }))
.cloned();
let final_agent_message = turn
.items
.iter()
.rev()
.find(|item| matches!(item, ThreadItem::AgentMessage { .. }))
.cloned();
turn.items = match (first_user_message, final_agent_message) {
(Some(user_message), Some(agent_message))
if user_message.id() != agent_message.id() =>
{
vec![user_message, agent_message]
}
(Some(user_message), _) => vec![user_message],
(None, Some(agent_message)) => vec![agent_message],
(None, None) => Vec::new(),
};
turn.items_view = TurnItemsView::Summary;
}
TurnItemsView::Full => {
turn.items_view = TurnItemsView::Full;
}
}
}
let page = paginate_thread_turns(
turns,
cursor.as_deref(),
@@ -3543,30 +3496,19 @@ fn normalize_thread_turns_status(
enum ThreadReadViewError {
InvalidRequest(String),
Unsupported(&'static str),
Internal(String),
}
fn thread_read_view_error(err: ThreadReadViewError) -> JSONRPCErrorError {
match err {
ThreadReadViewError::InvalidRequest(message) => invalid_request(message),
ThreadReadViewError::Unsupported(operation) => {
unsupported_thread_store_operation(operation)
}
ThreadReadViewError::Internal(message) => internal_error(message),
}
}
fn unsupported_thread_store_operation(operation: &'static str) -> JSONRPCErrorError {
method_not_found(format!("{operation} is not supported yet"))
}
fn thread_store_list_error(err: ThreadStoreError) -> JSONRPCErrorError {
match err {
ThreadStoreError::InvalidRequest { message } => invalid_request(message),
ThreadStoreError::Unsupported { operation } => {
unsupported_thread_store_operation(operation)
}
err => internal_error(format!("failed to list threads: {err}")),
}
}
@@ -3574,9 +3516,6 @@ fn thread_store_list_error(err: ThreadStoreError) -> JSONRPCErrorError {
fn thread_store_resume_read_error(err: ThreadStoreError) -> JSONRPCErrorError {
match err {
ThreadStoreError::InvalidRequest { message } => invalid_request(message),
ThreadStoreError::Unsupported { operation } => {
unsupported_thread_store_operation(operation)
}
ThreadStoreError::ThreadNotFound { thread_id } => {
invalid_request(format!("no rollout found for thread id {thread_id}"))
}
@@ -3599,7 +3538,6 @@ fn thread_turns_list_history_load_error(
ThreadStoreError::InvalidRequest { message } => {
ThreadReadViewError::InvalidRequest(message)
}
ThreadStoreError::Unsupported { operation } => ThreadReadViewError::Unsupported(operation),
err => ThreadReadViewError::Internal(format!(
"failed to load thread history for thread {thread_id}: {err}"
)),
@@ -3626,7 +3564,6 @@ fn thread_read_history_load_error(
ThreadStoreError::InvalidRequest { message } => {
ThreadReadViewError::InvalidRequest(message)
}
ThreadStoreError::Unsupported { operation } => ThreadReadViewError::Unsupported(operation),
err => ThreadReadViewError::Internal(format!(
"failed to load thread history for thread {thread_id}: {err}"
)),
@@ -3642,9 +3579,6 @@ fn conversation_summary_thread_id_read_error(
ThreadStoreError::InvalidRequest { message } if message == no_rollout_message => {
conversation_summary_not_found_error(conversation_id)
}
ThreadStoreError::Unsupported { operation } => {
unsupported_thread_store_operation(operation)
}
ThreadStoreError::ThreadNotFound { thread_id } if thread_id == conversation_id => {
conversation_summary_not_found_error(conversation_id)
}
@@ -3667,9 +3601,6 @@ fn conversation_summary_rollout_path_read_error(
) -> JSONRPCErrorError {
match err {
ThreadStoreError::InvalidRequest { message } => invalid_request(message),
ThreadStoreError::Unsupported { operation } => {
unsupported_thread_store_operation(operation)
}
err => internal_error(format!(
"failed to load conversation summary from {}: {}",
path.display(),
@@ -3684,9 +3615,6 @@ fn thread_store_write_error(operation: &str, err: ThreadStoreError) -> JSONRPCEr
invalid_request(format!("thread not found: {thread_id}"))
}
ThreadStoreError::InvalidRequest { message } => invalid_request(message),
ThreadStoreError::Unsupported { operation } => {
unsupported_thread_store_operation(operation)
}
err => internal_error(format!("failed to {operation}: {err}")),
}
}
@@ -3694,9 +3622,6 @@ fn thread_store_write_error(operation: &str, err: ThreadStoreError) -> JSONRPCEr
fn thread_store_archive_error(operation: &str, err: ThreadStoreError) -> JSONRPCErrorError {
match err {
ThreadStoreError::InvalidRequest { message } => invalid_request(message),
ThreadStoreError::Unsupported {
operation: unsupported_operation,
} => unsupported_thread_store_operation(unsupported_operation),
err => internal_error(format!("failed to {operation} thread: {err}")),
}
}

View File

@@ -258,7 +258,6 @@ async fn command_execution_request_approval_strips_additional_permissions_withou
thread_id: "thr_123".to_string(),
turn_id: "turn_123".to_string(),
item_id: "call_123".to_string(),
started_at_ms: 0,
approval_id: None,
reason: Some("Need extra read access".to_string()),
network_approval_context: None,
@@ -323,7 +322,6 @@ async fn command_execution_request_approval_keeps_additional_permissions_with_ca
thread_id: "thr_123".to_string(),
turn_id: "turn_123".to_string(),
item_id: "call_123".to_string(),
started_at_ms: 0,
approval_id: None,
reason: Some("Need extra read access".to_string()),
network_approval_context: None,

View File

@@ -6,8 +6,6 @@ license.workspace = true
[lib]
path = "lib.rs"
test = false
doctest = false
[lints]
workspace = true

View File

@@ -89,7 +89,6 @@ use codex_app_server_protocol::ThreadRollbackParams;
use codex_app_server_protocol::ThreadSetNameParams;
use codex_app_server_protocol::ThreadShellCommandParams;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadTurnsItemsListParams;
use codex_app_server_protocol::ThreadTurnsListParams;
use codex_app_server_protocol::ThreadUnarchiveParams;
use codex_app_server_protocol::ThreadUnsubscribeParams;
@@ -523,15 +522,6 @@ impl McpProcess {
self.send_request("thread/turns/list", params).await
}
/// Send a `thread/turns/items/list` JSON-RPC request.
pub async fn send_thread_turns_items_list_request(
&mut self,
params: ThreadTurnsItemsListParams,
) -> anyhow::Result<i64> {
let params = Some(serde_json::to_value(params)?);
self.send_request("thread/turns/items/list", params).await
}
/// Send a `model/list` JSON-RPC request.
pub async fn send_list_models_request(
&mut self,

View File

@@ -12,6 +12,7 @@ use codex_app_server_protocol::GetConversationSummaryParams;
use codex_app_server_protocol::GetConversationSummaryResponse;
use codex_app_server_protocol::InitializeCapabilities;
use codex_app_server_protocol::InitializeParams;
use codex_app_server_protocol::JSONRPCError;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_arg0::Arg0DispatchPaths;
@@ -45,6 +46,7 @@ const CREATED_AT_RFC3339: &str = "2025-01-02T12:00:00.000Z";
const UPDATED_AT_RFC3339: &str = "2025-01-02T12:00:00.000Z";
const PREVIEW: &str = "Summarize this conversation";
const MODEL_PROVIDER: &str = "openai";
const INVALID_REQUEST_ERROR_CODE: i64 = -32600;
fn expected_summary(conversation_id: ThreadId, path: PathBuf) -> ConversationSummary {
ConversationSummary {
@@ -112,6 +114,37 @@ async fn get_conversation_summary_by_thread_id_reads_rollout() -> Result<()> {
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn get_conversation_summary_by_rollout_path_rejects_remote_thread_store() -> Result<()> {
let codex_home = TempDir::new()?;
std::fs::write(
codex_home.path().join("config.toml"),
r#"experimental_thread_store_endpoint = "http://127.0.0.1:1"
"#,
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_get_conversation_summary_request(GetConversationSummaryParams::RolloutPath {
rollout_path: PathBuf::from("sessions/2025/01/02/rollout.jsonl"),
})
.await?;
let error: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await??;
assert_eq!(error.error.code, INVALID_REQUEST_ERROR_CODE);
assert_eq!(
error.error.message,
"rollout path queries are only supported with the local thread store"
);
Ok(())
}
#[tokio::test]
async fn get_conversation_summary_by_thread_id_reads_pathless_store_thread() -> Result<()> {
let codex_home = TempDir::new()?;

View File

@@ -219,7 +219,7 @@ async fn plugin_share_save_forwards_access_policy() -> Result<()> {
.and(body_json(json!({
"file_id": "file_123",
"etag": "\"upload_etag_123\"",
"discoverability": "UNLISTED",
"discoverability": "PRIVATE",
"share_targets": [
{
"principal_type": "user",
@@ -227,7 +227,7 @@ async fn plugin_share_save_forwards_access_policy() -> Result<()> {
},
{
"principal_type": "workspace",
"principal_id": "account-123",
"principal_id": "workspace-1",
},
],
})))
@@ -247,12 +247,16 @@ async fn plugin_share_save_forwards_access_policy() -> Result<()> {
"plugin/share/save",
Some(json!({
"pluginPath": expected_plugin_path,
"discoverability": "UNLISTED",
"discoverability": "PRIVATE",
"shareTargets": [
{
"principalType": "user",
"principalId": "user-1",
},
{
"principalType": "workspace",
"principalId": "workspace-1",
},
],
})),
)
@@ -275,124 +279,6 @@ async fn plugin_share_save_forwards_access_policy() -> Result<()> {
Ok(())
}
#[tokio::test]
async fn plugin_share_save_rejects_listed_discoverability() -> Result<()> {
let codex_home = TempDir::new()?;
let plugin_root = TempDir::new()?;
let plugin_path = write_test_plugin(plugin_root.path(), "demo-plugin")?;
let server = MockServer::start().await;
write_remote_plugin_config(codex_home.path(), &format!("{}/backend-api", server.uri()))?;
write_chatgpt_auth(
codex_home.path(),
ChatGptAuthFixture::new("chatgpt-token")
.account_id("account-123")
.chatgpt_user_id("user-123")
.chatgpt_account_id("account-123"),
AuthCredentialsStoreMode::File,
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_raw_request(
"plugin/share/save",
Some(json!({
"pluginPath": AbsolutePathBuf::try_from(plugin_path)?,
"discoverability": "LISTED",
})),
)
.await?;
let error: JSONRPCError = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await??;
assert_eq!(error.error.code, -32600);
assert_eq!(
error.error.message,
"discoverability LISTED is not supported for plugin/share/save; use UNLISTED or PRIVATE"
);
Ok(())
}
#[tokio::test]
async fn plugin_share_rejects_workspace_targets_from_client() -> Result<()> {
let codex_home = TempDir::new()?;
let plugin_root = TempDir::new()?;
let plugin_path = write_test_plugin(plugin_root.path(), "demo-plugin")?;
let server = MockServer::start().await;
write_remote_plugin_config(codex_home.path(), &format!("{}/backend-api", server.uri()))?;
write_chatgpt_auth(
codex_home.path(),
ChatGptAuthFixture::new("chatgpt-token")
.account_id("account-123")
.chatgpt_user_id("user-123")
.chatgpt_account_id("account-123"),
AuthCredentialsStoreMode::File,
)?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_raw_request(
"plugin/share/save",
Some(json!({
"pluginPath": AbsolutePathBuf::try_from(plugin_path)?,
"discoverability": "UNLISTED",
"shareTargets": [
{
"principalType": "workspace",
"principalId": "account-123",
},
],
})),
)
.await?;
let error: JSONRPCError = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await??;
assert_eq!(error.error.code, -32600);
assert_eq!(
error.error.message,
"shareTargets cannot include workspace principals; use discoverability UNLISTED for workspace link access"
);
let request_id = mcp
.send_raw_request(
"plugin/share/updateTargets",
Some(json!({
"remotePluginId": "plugins_123",
"discoverability": "UNLISTED",
"shareTargets": [
{
"principalType": "workspace",
"principalId": "account-123",
},
],
})),
)
.await?;
let error: JSONRPCError = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await??;
assert_eq!(error.error.code, -32600);
assert_eq!(
error.error.message,
"shareTargets cannot include workspace principals; use discoverability UNLISTED for workspace link access"
);
Ok(())
}
#[tokio::test]
async fn plugin_share_save_rejects_access_policy_for_existing_plugin() -> Result<()> {
let codex_home = TempDir::new()?;
@@ -437,7 +323,7 @@ async fn plugin_share_save_rejects_access_policy_for_existing_plugin() -> Result
assert_eq!(error.error.code, -32600);
assert_eq!(
error.error.message,
"discoverability and shareTargets are only supported when creating a plugin share; use plugin/share/updateTargets to update share settings"
"discoverability and shareTargets are only supported when creating a plugin share; use plugin/share/updateTargets to update share targets"
);
Ok(())
}
@@ -534,39 +420,24 @@ async fn plugin_share_update_targets_updates_share_targets() -> Result<()> {
)?;
Mock::given(method("PUT"))
.and(path("/backend-api/ps/plugins/plugins_123/shares"))
.and(path("/backend-api/public/plugins/plugins_123/shares"))
.and(header("authorization", "Bearer chatgpt-token"))
.and(header("chatgpt-account-id", "account-123"))
.and(body_json(json!({
"discoverability": "UNLISTED",
"targets": [
{
"principal_type": "user",
"principal_id": "user-1",
},
{
"principal_type": "workspace",
"principal_id": "account-123",
},
],
})))
.respond_with(ResponseTemplate::new(200).set_body_json(json!({
"principals": [
{
"principal_type": "user",
"principal_id": "owner-1",
"name": "Owner",
},
{
"principal_type": "user",
"principal_id": "user-1",
"name": "Gavin",
},
{
"principal_type": "workspace",
"principal_id": "account-123",
"name": "Workspace",
},
],
})))
.expect(1)
@@ -580,7 +451,6 @@ async fn plugin_share_update_targets_updates_share_targets() -> Result<()> {
"plugin/share/updateTargets",
Some(json!({
"remotePluginId": "plugins_123",
"discoverability": "UNLISTED",
"shareTargets": [
{
"principalType": "user",
@@ -606,7 +476,6 @@ async fn plugin_share_update_targets_updates_share_targets() -> Result<()> {
principal_id: "user-1".to_string(),
name: "Gavin".to_string(),
}],
discoverability: codex_app_server_protocol::PluginShareDiscoverability::Unlisted,
}
);
Ok(())

View File

@@ -3,8 +3,9 @@
//!
//! The app-server startup path should honor `experimental_thread_store`
//! by routing all thread persistence through the configured store. This suite uses
//! the thread-store crate's test-only in-memory store to exercise the non-local
//! config-driven selection path without touching local rollout or sqlite storage.
//! the thread-store crate's test-only in-memory store, which exercises the same
//! config-driven selection path as a remote store without requiring the real gRPC
//! service.
//!
//! The important failure mode is accidentally materializing local persistence
//! while a non-local store is configured. After `thread/start` and a simple turn,

View File

@@ -11,6 +11,7 @@ use codex_app_server_protocol::PluginListParams;
use codex_app_server_protocol::PluginListResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::SkillsChangedNotification;
use codex_app_server_protocol::SkillsListExtraRootsForCwd;
use codex_app_server_protocol::SkillsListParams;
use codex_app_server_protocol::SkillsListResponse;
use codex_app_server_protocol::ThreadStartParams;
@@ -132,6 +133,44 @@ fn write_cached_remote_plugin_with_skill(
Ok(skill_path)
}
#[tokio::test]
async fn skills_list_includes_skills_from_per_cwd_extra_user_roots() -> Result<()> {
let codex_home = TempDir::new()?;
let cwd = TempDir::new()?;
let extra_root = TempDir::new()?;
write_skill(&extra_root, "extra-skill")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_skills_list_request(SkillsListParams {
cwds: vec![cwd.path().to_path_buf()],
force_reload: true,
per_cwd_extra_user_roots: Some(vec![SkillsListExtraRootsForCwd {
cwd: cwd.path().to_path_buf(),
extra_user_roots: vec![extra_root.path().to_path_buf()],
}]),
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let SkillsListResponse { data } = to_response(response)?;
assert_eq!(data.len(), 1);
assert_eq!(data[0].cwd.as_path(), cwd.path());
assert!(
data[0]
.skills
.iter()
.any(|skill| skill.name == "extra-skill")
);
Ok(())
}
#[tokio::test]
async fn skills_list_loads_remote_installed_plugin_skills_from_cache() -> Result<()> {
let codex_home = TempDir::new()?;
@@ -227,6 +266,7 @@ async fn skills_list_loads_remote_installed_plugin_skills_from_cache() -> Result
.send_skills_list_request(SkillsListParams {
cwds: vec![cwd.path().to_path_buf()],
force_reload: true,
per_cwd_extra_user_roots: None,
})
.await?;
let stale_skills_list_response: JSONRPCResponse = timeout(
@@ -277,6 +317,7 @@ async fn skills_list_loads_remote_installed_plugin_skills_from_cache() -> Result
.send_skills_list_request(SkillsListParams {
cwds: vec![cwd.path().to_path_buf()],
force_reload: false,
per_cwd_extra_user_roots: None,
})
.await?;
let skills_list_response: JSONRPCResponse = timeout(
@@ -351,6 +392,7 @@ async fn skills_list_excludes_plugin_skills_when_workspace_codex_plugins_disable
.send_skills_list_request(SkillsListParams {
cwds: vec![repo_root.path().to_path_buf()],
force_reload: true,
per_cwd_extra_user_roots: None,
})
.await?;
@@ -382,13 +424,9 @@ async fn skills_list_excludes_plugin_skills_when_workspace_codex_plugins_disable
async fn skills_list_skips_cwd_roots_when_environment_disabled() -> Result<()> {
let codex_home = TempDir::new()?;
let cwd = TempDir::new()?;
let extra_root = TempDir::new()?;
write_skill(&codex_home, "home-skill")?;
let repo_skill_dir = cwd.path().join(".codex/skills/repo-skill");
std::fs::create_dir_all(&repo_skill_dir)?;
std::fs::write(
repo_skill_dir.join("SKILL.md"),
"---\nname: repo-skill\ndescription: from repo root\n---\n\n# Body\n",
)?;
write_skill(&extra_root, "extra-skill")?;
let mut mcp = McpProcess::new_with_env(
codex_home.path(),
@@ -401,6 +439,10 @@ async fn skills_list_skips_cwd_roots_when_environment_disabled() -> Result<()> {
.send_skills_list_request(SkillsListParams {
cwds: vec![cwd.path().to_path_buf()],
force_reload: true,
per_cwd_extra_user_roots: Some(vec![SkillsListExtraRootsForCwd {
cwd: cwd.path().to_path_buf(),
extra_user_roots: vec![extra_root.path().to_path_buf()],
}]),
})
.await?;
@@ -423,7 +465,41 @@ async fn skills_list_skips_cwd_roots_when_environment_disabled() -> Result<()> {
data[0]
.skills
.iter()
.all(|skill| skill.name != "repo-skill")
.all(|skill| skill.name != "extra-skill")
);
Ok(())
}
#[tokio::test]
async fn skills_list_rejects_relative_extra_user_roots() -> Result<()> {
let codex_home = TempDir::new()?;
let cwd = TempDir::new()?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
let request_id = mcp
.send_skills_list_request(SkillsListParams {
cwds: vec![cwd.path().to_path_buf()],
force_reload: true,
per_cwd_extra_user_roots: Some(vec![SkillsListExtraRootsForCwd {
cwd: cwd.path().to_path_buf(),
extra_user_roots: vec![std::path::PathBuf::from("relative/skills")],
}]),
})
.await?;
let err = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
)
.await??;
assert!(
err.error
.message
.contains("perCwdExtraUserRoots extraUserRoots paths must be absolute"),
"unexpected error: {}",
err.error.message
);
Ok(())
}
@@ -441,6 +517,7 @@ async fn skills_list_accepts_relative_cwds() -> Result<()> {
.send_skills_list_request(SkillsListParams {
cwds: vec![relative_cwd.clone()],
force_reload: true,
per_cwd_extra_user_roots: None,
})
.await?;
@@ -472,6 +549,7 @@ async fn skills_list_preserves_requested_cwd_order() -> Result<()> {
second_cwd.path().to_path_buf(),
],
force_reload: true,
per_cwd_extra_user_roots: None,
})
.await?;
@@ -494,18 +572,60 @@ async fn skills_list_preserves_requested_cwd_order() -> Result<()> {
}
#[tokio::test]
async fn skills_list_uses_cached_result_until_force_reload() -> Result<()> {
/// `skills/list` should ignore `perCwdExtraUserRoots` entries whose `cwd`
/// was not among the requested `cwds`: the extra root's skill must not leak
/// into the result for the requested cwd.
async fn skills_list_ignores_per_cwd_extra_roots_for_unknown_cwd() -> Result<()> {
let codex_home = TempDir::new()?;
// NOTE(review): `cwd` is never used below — this looks like a stray removed
// line from the rendered diff; confirm against the actual file.
let cwd = TempDir::new()?;
let requested_cwd = TempDir::new()?;
let unknown_cwd = TempDir::new()?;
let extra_root = TempDir::new()?;
// A real skill lives under the extra root; it must NOT appear in the result
// because the extra root is keyed by an unrelated (unrequested) cwd.
write_skill(&extra_root, "ignored-extra-skill")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
// Seed the cwd cache before the cwd-local skill exists.
let request_id = mcp
.send_skills_list_request(SkillsListParams {
cwds: vec![requested_cwd.path().to_path_buf()],
force_reload: true,
// Extra roots attached to a cwd that was not requested above.
per_cwd_extra_user_roots: Some(vec![SkillsListExtraRootsForCwd {
cwd: unknown_cwd.path().to_path_buf(),
extra_user_roots: vec![extra_root.path().to_path_buf()],
}]),
})
.await?;
let response: JSONRPCResponse = timeout(
DEFAULT_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let SkillsListResponse { data } = to_response(response)?;
// Exactly one entry, for the requested cwd, with no leaked extra-root skill.
assert_eq!(data.len(), 1);
assert_eq!(data[0].cwd.as_path(), requested_cwd.path());
assert!(
data[0]
.skills
.iter()
.all(|skill| skill.name != "ignored-extra-skill")
);
Ok(())
}
#[tokio::test]
async fn skills_list_uses_cached_result_until_force_reload() -> Result<()> {
let codex_home = TempDir::new()?;
let cwd = TempDir::new()?;
let extra_root = TempDir::new()?;
write_skill(&extra_root, "late-extra-skill")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;
// Seed the cwd cache first without extra roots.
let first_request_id = mcp
.send_skills_list_request(SkillsListParams {
cwds: vec![cwd.path().to_path_buf()],
force_reload: false,
per_cwd_extra_user_roots: None,
})
.await?;
let first_response: JSONRPCResponse = timeout(
@@ -522,17 +642,14 @@ async fn skills_list_uses_cached_result_until_force_reload() -> Result<()> {
.all(|skill| skill.name != "late-extra-skill")
);
let skill_dir = cwd.path().join(".codex/skills/late-extra-skill");
std::fs::create_dir_all(&skill_dir)?;
std::fs::write(
skill_dir.join("SKILL.md"),
"---\nname: late-extra-skill\ndescription: late skill\n---\n\n# Body\n",
)?;
let second_request_id = mcp
.send_skills_list_request(SkillsListParams {
cwds: vec![cwd.path().to_path_buf()],
force_reload: false,
per_cwd_extra_user_roots: Some(vec![SkillsListExtraRootsForCwd {
cwd: cwd.path().to_path_buf(),
extra_user_roots: vec![extra_root.path().to_path_buf()],
}]),
})
.await?;
let second_response: JSONRPCResponse = timeout(
@@ -553,6 +670,10 @@ async fn skills_list_uses_cached_result_until_force_reload() -> Result<()> {
.send_skills_list_request(SkillsListParams {
cwds: vec![cwd.path().to_path_buf()],
force_reload: true,
per_cwd_extra_user_roots: Some(vec![SkillsListExtraRootsForCwd {
cwd: cwd.path().to_path_buf(),
extra_user_roots: vec![extra_root.path().to_path_buf()],
}]),
})
.await?;
let third_response: JSONRPCResponse = timeout(

View File

@@ -33,6 +33,7 @@ use pretty_assertions::assert_eq;
use serde_json::Value;
use serde_json::json;
use std::path::Path;
use std::path::PathBuf;
use tempfile::TempDir;
use tokio::time::timeout;
use wiremock::Mock;
@@ -50,6 +51,7 @@ use super::analytics::wait_for_analytics_payload;
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(25);
#[cfg(not(windows))]
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
// JSON-RPC 2.0 "Internal error" code; asserted on thread-store failures below.
const INTERNAL_ERROR_CODE: i64 = -32603;
#[tokio::test]
async fn thread_fork_creates_new_thread_and_emits_started() -> Result<()> {
@@ -267,6 +269,37 @@ async fn thread_fork_can_load_source_by_path() -> Result<()> {
Ok(())
}
#[tokio::test]
async fn thread_fork_by_path_uses_remote_thread_store_error() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_with_remote_thread_store(codex_home.path(), &server.uri())?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let fork_id = mcp
.send_thread_fork_request(ThreadForkParams {
thread_id: "not-a-valid-thread-id".to_string(),
path: Some(PathBuf::from("sessions/2025/01/05/rollout.jsonl")),
..Default::default()
})
.await?;
let fork_err: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(fork_id)),
)
.await??;
assert_eq!(fork_err.error.code, INTERNAL_ERROR_CODE);
assert_eq!(
fork_err.error.message,
"failed to read thread: thread-store internal error: remote thread store does not support read_thread_by_rollout_path"
);
Ok(())
}
#[tokio::test]
async fn thread_fork_emits_restored_token_usage_before_next_turn() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
@@ -746,6 +779,33 @@ stream_max_retries = 0
)
}
/// Writes a `config.toml` into `codex_home` that points the mock model
/// provider at `server_uri` and sets `experimental_thread_store_endpoint` to
/// an intentionally unreachable address, so thread-store calls fail fast.
fn create_config_toml_with_remote_thread_store(
    codex_home: &Path,
    server_uri: &str,
) -> std::io::Result<()> {
    // The config body is a runtime artifact read by the app server; it is
    // reproduced verbatim.
    let contents = format!(
        r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"
experimental_thread_store_endpoint = "http://127.0.0.1:1"
model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
    );
    std::fs::write(codex_home.join("config.toml"), contents)
}
fn create_config_toml_with_chatgpt_base_url(
codex_home: &Path,
server_uri: &str,

View File

@@ -31,7 +31,6 @@ use codex_app_server_protocol::ThreadSetNameResponse;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::ThreadStatus;
use codex_app_server_protocol::ThreadTurnsItemsListParams;
use codex_app_server_protocol::ThreadTurnsListParams;
use codex_app_server_protocol::ThreadTurnsListResponse;
use codex_app_server_protocol::TurnItemsView;
@@ -47,7 +46,6 @@ use codex_core::config::ConfigBuilder;
use codex_exec_server::EnvironmentManager;
use codex_feedback::CodexFeedback;
use codex_protocol::models::BaseInstructions;
use codex_protocol::protocol::AgentMessageEvent;
use codex_protocol::protocol::EventMsg;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::SessionSource as ProtocolSessionSource;
@@ -225,7 +223,6 @@ async fn thread_turns_list_can_page_backward_and_forward() -> Result<()> {
cursor: None,
limit: Some(2),
sort_direction: Some(SortDirection::Desc),
items_view: None,
})
.await?;
let read_resp: JSONRPCResponse = timeout(
@@ -241,7 +238,7 @@ async fn thread_turns_list_can_page_backward_and_forward() -> Result<()> {
assert_eq!(turn_user_texts(&data), vec!["third", "second"]);
assert!(
data.iter()
.all(|turn| turn.items_view == TurnItemsView::Summary)
.all(|turn| turn.items_view == TurnItemsView::Full)
);
let next_cursor = next_cursor.expect("expected nextCursor for older turns");
let backwards_cursor = backwards_cursor.expect("expected backwardsCursor for newest turn");
@@ -252,7 +249,6 @@ async fn thread_turns_list_can_page_backward_and_forward() -> Result<()> {
cursor: Some(next_cursor),
limit: Some(10),
sort_direction: Some(SortDirection::Desc),
items_view: None,
})
.await?;
let read_resp: JSONRPCResponse = timeout(
@@ -271,7 +267,6 @@ async fn thread_turns_list_can_page_backward_and_forward() -> Result<()> {
cursor: Some(backwards_cursor),
limit: Some(10),
sort_direction: Some(SortDirection::Asc),
items_view: None,
})
.await?;
let read_resp: JSONRPCResponse = timeout(
@@ -285,74 +280,6 @@ async fn thread_turns_list_can_page_backward_and_forward() -> Result<()> {
Ok(())
}
#[tokio::test]
/// Exercises `thread/turns/list` with each explicit `itemsView` value on a
/// single-turn thread and checks what each view includes.
async fn thread_turns_list_supports_requested_items_view() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml(codex_home.path(), &server.uri())?;
// Fake rollout with one user message ("first") at a fixed timestamp.
let filename_ts = "2025-01-05T12-00-00";
let conversation_id = create_fake_rollout_with_text_elements(
codex_home.path(),
filename_ts,
"2025-01-05T12:00:00Z",
"first",
vec![],
Some("mock_provider"),
/*git_info*/ None,
)?;
let rollout_path = rollout_path(codex_home.path(), filename_ts, &conversation_id);
// Two agent messages in the same turn; the views differ in how many survive.
append_agent_message(rollout_path.as_path(), "2025-01-05T12:01:00Z", "draft")?;
append_agent_message(rollout_path.as_path(), "2025-01-05T12:02:00Z", "final")?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
// Full view: every agent message is present, in order.
let full = read_single_turn_items_view(
&mut mcp,
conversation_id.as_str(),
Some(TurnItemsView::Full),
)
.await?;
assert_eq!(full.items_view, TurnItemsView::Full);
assert_eq!(
turn_agent_texts(std::slice::from_ref(&full)),
vec!["draft", "final"]
);
// Summary view: the user message plus only the last agent message.
let summary = read_single_turn_items_view(
&mut mcp,
conversation_id.as_str(),
Some(TurnItemsView::Summary),
)
.await?;
assert_eq!(summary.items_view, TurnItemsView::Summary);
assert_eq!(
turn_user_texts(std::slice::from_ref(&summary)),
vec!["first"]
);
assert_eq!(
turn_agent_texts(std::slice::from_ref(&summary)),
vec!["final"]
);
// NotLoaded view: metadata only — no items, same turn identity and timing.
let not_loaded = read_single_turn_items_view(
&mut mcp,
conversation_id.as_str(),
Some(TurnItemsView::NotLoaded),
)
.await?;
assert_eq!(not_loaded.items_view, TurnItemsView::NotLoaded);
assert!(not_loaded.items.is_empty());
assert_eq!(not_loaded.id, full.id);
assert_eq!(not_loaded.status, full.status);
assert_eq!(not_loaded.started_at, full.started_at);
assert_eq!(not_loaded.completed_at, full.completed_at);
assert_eq!(not_loaded.duration_ms, full.duration_ms);
Ok(())
}
#[tokio::test]
async fn thread_turns_list_reads_store_history_without_rollout_path() -> Result<()> {
let codex_home = TempDir::new()?;
@@ -407,7 +334,6 @@ async fn thread_turns_list_reads_store_history_without_rollout_path() -> Result<
cursor: None,
limit: Some(10),
sort_direction: Some(SortDirection::Asc),
items_view: None,
},
})
.await?
@@ -657,7 +583,6 @@ async fn thread_turns_list_rejects_cursor_when_anchor_turn_is_rolled_back() -> R
cursor: None,
limit: Some(2),
sort_direction: Some(SortDirection::Desc),
items_view: None,
})
.await?;
let read_resp: JSONRPCResponse = timeout(
@@ -682,7 +607,6 @@ async fn thread_turns_list_rejects_cursor_when_anchor_turn_is_rolled_back() -> R
cursor: Some(backwards_cursor),
limit: Some(10),
sort_direction: Some(SortDirection::Asc),
items_view: None,
})
.await?;
let read_err: JSONRPCError = timeout(
@@ -1039,7 +963,6 @@ async fn thread_turns_list_rejects_unmaterialized_loaded_thread() -> Result<()>
cursor: None,
limit: None,
sort_direction: None,
items_view: None,
})
.await?;
let read_err: JSONRPCError = timeout(
@@ -1060,39 +983,6 @@ async fn thread_turns_list_rejects_unmaterialized_loaded_thread() -> Result<()>
Ok(())
}
#[tokio::test]
async fn thread_turns_items_list_returns_unsupported() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml(codex_home.path(), &server.uri())?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let read_id = mcp
.send_thread_turns_items_list_request(ThreadTurnsItemsListParams {
thread_id: "thr_123".to_string(),
turn_id: "turn_456".to_string(),
cursor: None,
limit: None,
sort_direction: None,
})
.await?;
let read_err: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(read_id)),
)
.await??;
assert_eq!(read_err.error.code, -32601);
assert_eq!(
read_err.error.message,
"thread/turns/items/list is not supported yet"
);
Ok(())
}
#[tokio::test]
async fn thread_read_reports_system_error_idle_flag_after_failed_turn() -> Result<()> {
let server = responses::start_mock_server().await;
@@ -1178,24 +1068,6 @@ fn append_user_message(path: &Path, timestamp: &str, text: &str) -> std::io::Res
)
}
fn append_agent_message(path: &Path, timestamp: &str, text: &str) -> anyhow::Result<()> {
let mut file = std::fs::OpenOptions::new().append(true).open(path)?;
writeln!(
file,
"{}",
json!({
"timestamp": timestamp,
"type": "event_msg",
"payload": serde_json::to_value(EventMsg::AgentMessage(AgentMessageEvent {
message: text.to_string(),
phase: None,
memory_citation: None,
}))?,
})
)?;
Ok(())
}
fn append_thread_rollback(path: &Path, timestamp: &str, num_turns: u32) -> std::io::Result<()> {
let mut file = std::fs::OpenOptions::new().append(true).open(path)?;
writeln!(
@@ -1212,31 +1084,6 @@ fn append_thread_rollback(path: &Path, timestamp: &str, num_turns: u32) -> std::
)
}
/// Lists the turns of `thread_id` with the requested `items_view`, asserts
/// the thread has exactly one turn, and returns that turn.
async fn read_single_turn_items_view(
    mcp: &mut McpProcess,
    thread_id: &str,
    items_view: Option<TurnItemsView>,
) -> anyhow::Result<codex_app_server_protocol::Turn> {
    let params = ThreadTurnsListParams {
        thread_id: thread_id.to_string(),
        cursor: None,
        limit: Some(10),
        sort_direction: Some(SortDirection::Asc),
        items_view,
    };
    let request_id = mcp.send_thread_turns_list_request(params).await?;
    let response: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;
    let ThreadTurnsListResponse { mut data, .. } =
        to_response::<ThreadTurnsListResponse>(response)?;
    // Guard: these helpers are only used on single-turn threads.
    assert_eq!(data.len(), 1);
    Ok(data.pop().expect("exactly one turn"))
}
fn turn_user_texts(turns: &[codex_app_server_protocol::Turn]) -> Vec<&str> {
turns
.iter()
@@ -1253,17 +1100,6 @@ fn turn_user_texts(turns: &[codex_app_server_protocol::Turn]) -> Vec<&str> {
.collect()
}
/// Collects the text of every `AgentMessage` item across `turns`, preserving
/// encounter order.
fn turn_agent_texts(turns: &[codex_app_server_protocol::Turn]) -> Vec<&str> {
    let mut texts = Vec::new();
    for turn in turns {
        for item in &turn.items {
            if let ThreadItem::AgentMessage { text, .. } = item {
                texts.push(text.as_str());
            }
        }
    }
    texts
}
/// Identifier of the in-memory thread store used by these tests.
// NOTE(review): only the definition is visible in this chunk — confirm how
// `store_id` is produced and consumed before documenting further.
struct InMemoryThreadStoreId {
store_id: String,
}

View File

@@ -95,6 +95,7 @@ use super::analytics::wait_for_analytics_payload;
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(25);
#[cfg(not(windows))]
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
// JSON-RPC 2.0 "Internal error" code; asserted on thread-store failures below.
const INTERNAL_ERROR_CODE: i64 = -32603;
// NOTE(review): default Codex base-instructions text; presumably compared
// against thread configuration elsewhere in this file — confirm usage.
const CODEX_5_2_INSTRUCTIONS_TEMPLATE_DEFAULT: &str = "You are Codex, a coding agent based on GPT-5. You and the user share the same workspace and collaborate to achieve the user's goals.";
fn normalized_existing_path(path: impl AsRef<Path>) -> Result<PathBuf> {
@@ -744,6 +745,37 @@ async fn thread_goal_clear_deletes_goal_and_notifies() -> Result<()> {
Ok(())
}
#[tokio::test]
async fn thread_resume_by_path_uses_remote_thread_store_error() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
let codex_home = TempDir::new()?;
create_config_toml_with_remote_thread_store(codex_home.path(), &server.uri())?;
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
let resume_id = mcp
.send_thread_resume_request(ThreadResumeParams {
thread_id: "ignored-when-path-is-present".to_string(),
path: Some(PathBuf::from("sessions/2025/01/05/rollout.jsonl")),
..Default::default()
})
.await?;
let resume_err: JSONRPCError = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_error_message(RequestId::Integer(resume_id)),
)
.await??;
assert_eq!(resume_err.error.code, INTERNAL_ERROR_CODE);
assert_eq!(
resume_err.error.message,
"failed to read thread: thread-store internal error: remote thread store does not support read_thread_by_rollout_path"
);
Ok(())
}
#[tokio::test]
async fn thread_resume_emits_restored_token_usage_before_next_turn() -> Result<()> {
let server = create_mock_responses_server_repeating_assistant("Done").await;
@@ -2896,6 +2928,36 @@ stream_max_retries = 0
)
}
/// Writes a `config.toml` into `codex_home` with the personality feature
/// enabled, the mock model provider pointed at `server_uri`, and a remote
/// thread-store endpoint that is intentionally unreachable so thread-store
/// reads fail deterministically.
fn create_config_toml_with_remote_thread_store(
    codex_home: &std::path::Path,
    server_uri: &str,
) -> std::io::Result<()> {
    // The config body is a runtime artifact read by the app server; it is
    // reproduced verbatim.
    let contents = format!(
        r#"
model = "gpt-5.3-codex"
approval_policy = "never"
sandbox_mode = "read-only"
experimental_thread_store_endpoint = "http://127.0.0.1:1"
model_provider = "mock_provider"
[features]
personality = true
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
    );
    std::fs::write(codex_home.join("config.toml"), contents)
}
fn create_config_toml_with_chatgpt_base_url(
codex_home: &std::path::Path,
server_uri: &str,

View File

@@ -151,7 +151,6 @@ async fn thread_shell_command_history_responses_exclude_persisted_command_execut
cursor: None,
limit: None,
sort_direction: Some(SortDirection::Asc),
items_view: None,
})
.await?;
let turns_list_resp: JSONRPCResponse = timeout(

View File

@@ -552,7 +552,7 @@ async fn thread_start_emits_mcp_server_status_updated_notifications() -> Result<
.send_thread_start_request(ThreadStartParams::default())
.await?;
let _: ThreadStartResponse = to_response(
let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(
timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(req_id)),
@@ -589,6 +589,7 @@ async fn thread_start_emits_mcp_server_status_updated_notifications() -> Result<
assert_eq!(
starting,
McpServerStatusUpdatedNotification {
thread_id: Some(thread.id.clone()),
name: "optional_broken".to_string(),
status: McpServerStartupState::Starting,
error: None,
@@ -621,6 +622,7 @@ async fn thread_start_emits_mcp_server_status_updated_notifications() -> Result<
let ServerNotification::McpServerStatusUpdated(failed) = failed else {
anyhow::bail!("unexpected notification variant");
};
assert_eq!(failed.thread_id.as_deref(), Some(thread.id.as_str()));
assert_eq!(failed.name, "optional_broken");
assert_eq!(failed.status, McpServerStartupState::Failed);
assert!(

View File

@@ -7,7 +7,6 @@ license.workspace = true
[lib]
name = "codex_apply_patch"
path = "src/lib.rs"
doctest = false
[[bin]]
name = "apply_patch"

View File

@@ -7,7 +7,6 @@ license.workspace = true
[lib]
name = "codex_arg0"
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

View File

@@ -14,6 +14,3 @@ tokio-util.workspace = true
[dev-dependencies]
pretty_assertions.workspace = true
[lib]
doctest = false

View File

@@ -13,7 +13,7 @@ path = "src/lib.rs"
workspace = true
[dependencies]
aws-config = { workspace = true, features = ["credentials-login"] }
aws-config = { workspace = true }
aws-credential-types = { workspace = true }
aws-sigv4 = { workspace = true }
aws-types = { workspace = true }

View File

@@ -7,7 +7,6 @@ publish = false
[lib]
path = "src/lib.rs"
doctest = false
[lints]
workspace = true

Some files were not shown because too many files have changed in this diff Show More