mirror of https://github.com/openai/codex.git
synced 2026-02-18 06:43:47 +00:00

Compare commits: remove_pre ... codex/remo (111 commits)
Commits in this comparison, by abbreviated SHA1 (the author, date, and message columns were empty in the mirrored table):

aa28fcfb5c, cab607befb, 281b0eae8b, 4ab44e2c5c, 31d4bfdde0, 56cd85cd4b,
5ae84197b2, fcf16e97a6, 77f74a5c17, b994b52994, 846464e869, 0fbe10a807,
02e9006547, 08f689843f, b37555dd75, 19afbc35c1, 5b421bba34, beb5cb4f48,
af434b4f71, cef7fbc494, e47045c806, 50aea4b0dc, e41536944e, b3095679ed,
825a4af42f, 1d95656149, bdea9974d9, 02abd9a8ea, 060a320e7d, 85034b189e,
fce4ad9cf4, db6aa80195, ebceb71db6, 3164670101, b527ee2890, 854e91e422,
67e577da53, 8156c57234, de93cef5b7, 5b6911cb1b, 0d76d029b7, 6cbb489e6e,
067f8b1be0, 6b466df146, fb0aaf94de, e4f8263798, a5e8e69d18, 26a7cd21e2,
395729910c, 6c0a924203, a4bb59884b, ffef5ce5de, e71760fc64, a02342c9e1,
8468871e2b, c54a4ec078, b934ffcaaa, b98c810328, 32da5eb358, 851fcc377b,
12f69b893f, 99466f1f90, 2383978a2c, f687b074ca, 38c442ca7f, c0749c349f,
561fc14045, db66d827be, bc80a4a8ed, e00080cea3, 36541876f4, feae389942,
e5e40e2d4b, e6eb6be683, 2bced810da, fca5629e34, e6e4c5fa3a, 1e75173ebd,
abeafbdca1, f93037f55d, 35692e99c1, ebe359b876, 537102e657, 9cf7a07281,
fc073c9c5b, f24669d444, 46b2da35d5, c37560069a, 8d97b5c246, 2825ac85a8,
aef4af1079, 0dcfc59171, a7ce2a1c31, dfd1e199a0, 76256a8cec, 75e79cf09a,
a4cc1a4a85, d7cb70ed26, 4668feb43a, 5c3ca73914, 466be55abc, efc8d45750,
22fa283511, 66e0c3aaa3, 545b266839, b3674dcce0, 88c5ca2573, 82acd815e4,
f39f506700, f741fad5c0, ba6f7a9e15
(codespell ignore-words file; file name not captured in the mirror)
@@ -1,3 +1,5 @@
 iTerm
 iTerm2
 psuedo
+te
+TE

.codespellrc
@@ -3,4 +3,4 @@
 skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt,*.snap,*.snap.new,*meriyah.umd.min.js
 check-hidden = true
 ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b
-ignore-words-list = ratatui,ser,iTerm,iterm2,iterm
+ignore-words-list = ratatui,ser,iTerm,iterm2,iterm,te,TE
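The new `te`/`TE` entries keep codespell from "correcting" those tokens (its dictionary maps `te` to `the`/`be`), presumably because they appear legitimately in the terminal-related sources this PR vendors; `te`/`TE` are, for example, classic termcap capability names. A quick local check that the ignore list takes effect (assumes codespell is installed and, as is its default, picks up `.codespellrc` from the current directory):

```bash
# Run codespell from the repo root so .codespellrc is discovered, against a
# sample file containing only the newly ignored tokens.
printf 'te\nTE\n' > /tmp/codespell-sample.txt
codespell /tmp/codespell-sample.txt && echo "no findings: ignore list active"
```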
.github/ISSUE_TEMPLATE/1-codex-app.yml (vendored, 7 changed lines)
@@ -21,6 +21,13 @@ body:
       label: What subscription do you have?
     validations:
       required: true
+  - type: input
+    id: platform
+    attributes:
+      label: What platform is your computer?
+      description: |
+        For macOS and Linux: copy the output of `uname -mprs`
+        For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
   - type: textarea
     id: actual
     attributes:
.github/prompts/issue-deduplicator.txt (vendored, 18 changed lines; file deleted)
@@ -1,18 +0,0 @@
-You are an assistant that triages new GitHub issues by identifying potential duplicates.
-
-You will receive the following JSON files located in the current working directory:
-- `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body).
-- `codex-existing-issues.json`: JSON array of recent issues (each element includes number, title, body, createdAt).
-
-Instructions:
-- Load both files as JSON and review their contents carefully. The codex-existing-issues.json file is large, ensure you explore all of it.
-- Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request.
-- Only consider an issue a potential duplicate if there is a clear overlap in symptoms, feature requests, reproduction steps, or error messages.
-- Prioritize newer issues when similarity is comparable.
-- Ignore pull requests and issues whose similarity is tenuous.
-- When unsure, prefer returning fewer matches.
-
-Output requirements:
-- Respond with a JSON array of issue numbers (integers), ordered from most likely duplicate to least.
-- Include at most five numbers.
-- If you find no plausible duplicates, respond with `[]`.
.github/workflows/bazel.yml (vendored, 5 changed lines)
@@ -65,6 +65,11 @@ jobs:
       - name: Set up Bazel
         uses: bazelbuild/setup-bazelisk@v3

+      - name: Check MODULE.bazel.lock is up to date
+        if: matrix.os == 'ubuntu-24.04' && matrix.target == 'x86_64-unknown-linux-gnu'
+        shell: bash
+        run: ./scripts/check-module-bazel-lock.sh
+
       # TODO(mbolin): Bring this back once we have caching working. Currently,
       # we never seem to get a cache hit but we still end up paying the cost of
       # uploading at the end of the build, which takes over a minute!
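The body of `scripts/check-module-bazel-lock.sh` is not shown in this diff. A minimal sketch of what such a drift check typically does, assuming a bzlmod-era Bazel (the commands below are standard Bazel/git, but the real script may differ):

```bash
#!/usr/bin/env bash
# Hypothetical sketch of a lockfile-drift check: regenerate the lockfile in
# place, then fail the job if that produced any change.
set -euo pipefail
bazel mod deps --lockfile_mode=update        # rewrites MODULE.bazel.lock
git diff --exit-code -- MODULE.bazel.lock    # stale lockfile => nonzero exit
```

This pairs with the `just bazel-lock-update` / `just bazel-lock-check` guidance added to AGENTS.md later in this PR.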
.github/workflows/issue-deduplicator.yml (vendored, 248 changed lines)
@@ -15,34 +15,68 @@ jobs:
     permissions:
       contents: read
     outputs:
-      codex_output: ${{ steps.codex.outputs.final-message }}
+      codex_output: ${{ steps.select-final.outputs.codex_output }}
     steps:
       - uses: actions/checkout@v6

       - name: Prepare Codex inputs
         env:
           GH_TOKEN: ${{ github.token }}
+          REPO: ${{ github.repository }}
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
         run: |
           set -eo pipefail

           CURRENT_ISSUE_FILE=codex-current-issue.json
-          EXISTING_ISSUES_FILE=codex-existing-issues.json
+          EXISTING_ALL_FILE=codex-existing-issues-all.json
+          EXISTING_OPEN_FILE=codex-existing-issues-open.json

-          gh issue list --repo "${{ github.repository }}" \
-            --json number,title,body,createdAt \
+          gh issue list --repo "$REPO" \
+            --json number,title,body,createdAt,updatedAt,state,labels \
             --limit 1000 \
+            --state all \
             --search "sort:created-desc" \
-            | jq '.' \
-            > "$EXISTING_ISSUES_FILE"
+            | jq '[.[] | {
+                number,
+                title,
+                body: ((.body // "")[0:4000]),
+                createdAt,
+                updatedAt,
+                state,
+                labels: ((.labels // []) | map(.name))
+              }]' \
+            > "$EXISTING_ALL_FILE"
+
+          gh issue list --repo "$REPO" \
+            --json number,title,body,createdAt,updatedAt,state,labels \
+            --limit 1000 \
+            --state open \
+            --search "sort:created-desc" \
+            | jq '[.[] | {
+                number,
+                title,
+                body: ((.body // "")[0:4000]),
+                createdAt,
+                updatedAt,
+                state,
+                labels: ((.labels // []) | map(.name))
+              }]' \
+            > "$EXISTING_OPEN_FILE"

-          gh issue view "${{ github.event.issue.number }}" \
-            --repo "${{ github.repository }}" \
+          gh issue view "$ISSUE_NUMBER" \
+            --repo "$REPO" \
             --json number,title,body \
-            | jq '.' \
+            | jq '{number, title, body: ((.body // "")[0:4000])}' \
            > "$CURRENT_ISSUE_FILE"

-      - id: codex
+          echo "Prepared duplicate detection input files."
+          echo "all_issue_count=$(jq 'length' "$EXISTING_ALL_FILE")"
+          echo "open_issue_count=$(jq 'length' "$EXISTING_OPEN_FILE")"
+
+      # Prompt instructions are intentionally inline in this workflow. The old
+      # .github/prompts/issue-deduplicator.txt file is obsolete and removed.
+      - id: codex-all
+        name: Find duplicates (pass 1, all issues)
         uses: openai/codex-action@main
         with:
           openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
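The jq projection above keeps the model's input files small: each issue body is truncated to 4,000 characters (with `null` bodies coerced to `""` first) and label objects are flattened to bare names. The same filter can be exercised standalone against hand-written sample data (the issue content below is invented for illustration):

```bash
# Feed one fake issue through the workflow's projection to see the shape
# Codex receives in codex-existing-issues-*.json.
echo '[{"number": 7, "title": "crash on resize", "body": "long text...",
        "createdAt": "2026-01-01T00:00:00Z", "updatedAt": "2026-01-02T00:00:00Z",
        "state": "OPEN", "labels": [{"name": "bug"}, {"name": "tui"}]}]' |
jq '[.[] | {number, title, body: ((.body // "")[0:4000]),
            createdAt, updatedAt, state,
            labels: ((.labels // []) | map(.name))}]'
# => labels become ["bug","tui"]; a null body becomes "" before slicing
```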
@@ -52,14 +86,17 @@ jobs:

           You will receive the following JSON files located in the current working directory:
           - `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body).
-          - `codex-existing-issues.json`: JSON array of recent issues (each element includes number, title, body, createdAt).
+          - `codex-existing-issues-all.json`: JSON array of recent issues with states, timestamps, and labels.

           Instructions:
           - Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request.
           - Focus on the underlying intent and context of each issue—such as reported symptoms, feature requests, reproduction steps, or error messages—rather than relying solely on string similarity or synthetic metrics.
-          - After your analysis, validate your results in 1-2 lines explaining your decision to return the selected matches.
-          - When unsure, prefer returning fewer matches.
-          - Include at most five numbers.
+          - Prioritize concrete overlap in symptoms, reproduction details, error signatures, and user intent.
+          - Prefer active unresolved issues when confidence is similar.
+          - Closed issues can still be valid duplicates if they clearly match.
+          - Return fewer matches rather than speculative ones.
+          - If confidence is low, return an empty list.
+          - Include at most five issue numbers.
+          - After analysis, provide a short reason for your decision.

         output-schema: |
           {
@@ -77,6 +114,179 @@ jobs:
             "additionalProperties": false
           }

+      - id: normalize-all
+        name: Normalize pass 1 output
+        env:
+          CODEX_OUTPUT: ${{ steps.codex-all.outputs.final-message }}
+          CURRENT_ISSUE_NUMBER: ${{ github.event.issue.number }}
+        run: |
+          set -eo pipefail
+
+          raw=${CODEX_OUTPUT//$'\r'/}
+          parsed=false
+          issues='[]'
+          reason=''
+
+          if [ -n "$raw" ] && printf '%s' "$raw" | jq -e 'type == "object" and (.issues | type == "array")' >/dev/null 2>&1; then
+            parsed=true
+            issues=$(printf '%s' "$raw" | jq -c '[.issues[] | tostring]')
+            reason=$(printf '%s' "$raw" | jq -r '.reason // ""')
+          else
+            reason='Pass 1 output was empty or invalid JSON.'
+          fi
+
+          filtered=$(jq -cn --argjson issues "$issues" --arg current "$CURRENT_ISSUE_NUMBER" '[
+            $issues[]
+            | tostring
+            | select(. != $current)
+          ] | reduce .[] as $issue ([]; if index($issue) then . else . + [$issue] end) | .[:5]')
+
+          has_matches=false
+          if [ "$(jq 'length' <<< "$filtered")" -gt 0 ]; then
+            has_matches=true
+          fi
+
+          echo "Pass 1 parsed: $parsed"
+          echo "Pass 1 matches after filtering: $(jq 'length' <<< "$filtered")"
+          echo "Pass 1 reason: $reason"
+
+          {
+            echo "issues_json=$filtered"
+            echo "reason<<EOF"
+            echo "$reason"
+            echo "EOF"
+            echo "has_matches=$has_matches"
+          } >> "$GITHUB_OUTPUT"
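The `reduce` pipeline in that step is an order-preserving de-duplication: plain jq `unique` would re-sort the list and lose the model's most-likely-first ordering, while `reduce`/`index` keeps first occurrences in place before capping at five. It can be run on its own:

```bash
# Standalone run of the workflow's filter: stringify every entry, drop the
# current issue, de-duplicate preserving order, keep at most five.
jq -cn --argjson issues '[12, "12", 7, 99, 7, 3, 4, 5, 6]' --arg current "99" '[
  $issues[]
  | tostring
  | select(. != $current)
] | reduce .[] as $issue ([]; if index($issue) then . else . + [$issue] end) | .[:5]'
# => ["12","7","3","4","5"]
```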
+      - id: codex-open
+        name: Find duplicates (pass 2, open issues)
+        if: ${{ steps.normalize-all.outputs.has_matches != 'true' }}
+        uses: openai/codex-action@main
+        with:
+          openai-api-key: ${{ secrets.CODEX_OPENAI_API_KEY }}
+          allow-users: "*"
+          prompt: |
+            You are an assistant that triages new GitHub issues by identifying potential duplicates.
+
+            This is a fallback pass because a broad search did not find convincing matches.
+
+            You will receive the following JSON files located in the current working directory:
+            - `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body).
+            - `codex-existing-issues-open.json`: JSON array of open issues only.
+
+            Instructions:
+            - Search only these active unresolved issues for duplicates of the current issue.
+            - Prioritize concrete overlap in symptoms, reproduction details, error signatures, and user intent.
+            - Prefer fewer, higher-confidence matches.
+            - If confidence is low, return an empty list.
+            - Include at most five issue numbers.
+            - After analysis, provide a short reason for your decision.
+
+          output-schema: |
+            {
+              "type": "object",
+              "properties": {
+                "issues": {
+                  "type": "array",
+                  "items": {
+                    "type": "string"
+                  }
+                },
+                "reason": { "type": "string" }
+              },
+              "required": ["issues", "reason"],
+              "additionalProperties": false
+            }
+
+      - id: normalize-open
+        name: Normalize pass 2 output
+        if: ${{ steps.normalize-all.outputs.has_matches != 'true' }}
+        env:
+          CODEX_OUTPUT: ${{ steps.codex-open.outputs.final-message }}
+          CURRENT_ISSUE_NUMBER: ${{ github.event.issue.number }}
+        run: |
+          set -eo pipefail
+
+          raw=${CODEX_OUTPUT//$'\r'/}
+          parsed=false
+          issues='[]'
+          reason=''
+
+          if [ -n "$raw" ] && printf '%s' "$raw" | jq -e 'type == "object" and (.issues | type == "array")' >/dev/null 2>&1; then
+            parsed=true
+            issues=$(printf '%s' "$raw" | jq -c '[.issues[] | tostring]')
+            reason=$(printf '%s' "$raw" | jq -r '.reason // ""')
+          else
+            reason='Pass 2 output was empty or invalid JSON.'
+          fi
+
+          filtered=$(jq -cn --argjson issues "$issues" --arg current "$CURRENT_ISSUE_NUMBER" '[
+            $issues[]
+            | tostring
+            | select(. != $current)
+          ] | reduce .[] as $issue ([]; if index($issue) then . else . + [$issue] end) | .[:5]')
+
+          has_matches=false
+          if [ "$(jq 'length' <<< "$filtered")" -gt 0 ]; then
+            has_matches=true
+          fi
+
+          echo "Pass 2 parsed: $parsed"
+          echo "Pass 2 matches after filtering: $(jq 'length' <<< "$filtered")"
+          echo "Pass 2 reason: $reason"
+
+          {
+            echo "issues_json=$filtered"
+            echo "reason<<EOF"
+            echo "$reason"
+            echo "EOF"
+            echo "has_matches=$has_matches"
+          } >> "$GITHUB_OUTPUT"
+      - id: select-final
+        name: Select final duplicate set
+        env:
+          PASS1_ISSUES: ${{ steps.normalize-all.outputs.issues_json }}
+          PASS1_REASON: ${{ steps.normalize-all.outputs.reason }}
+          PASS2_ISSUES: ${{ steps.normalize-open.outputs.issues_json }}
+          PASS2_REASON: ${{ steps.normalize-open.outputs.reason }}
+          PASS1_HAS_MATCHES: ${{ steps.normalize-all.outputs.has_matches }}
+          PASS2_HAS_MATCHES: ${{ steps.normalize-open.outputs.has_matches }}
+        run: |
+          set -eo pipefail
+
+          selected_issues='[]'
+          selected_reason='No plausible duplicates found.'
+          selected_pass='none'
+
+          if [ "$PASS1_HAS_MATCHES" = "true" ]; then
+            selected_issues=${PASS1_ISSUES:-'[]'}
+            selected_reason=${PASS1_REASON:-'Pass 1 found duplicates.'}
+            selected_pass='all'
+          fi
+
+          if [ "$PASS2_HAS_MATCHES" = "true" ]; then
+            selected_issues=${PASS2_ISSUES:-'[]'}
+            selected_reason=${PASS2_REASON:-'Pass 2 found duplicates.'}
+            selected_pass='open-fallback'
+          fi
+
+          final_json=$(jq -cn \
+            --argjson issues "$selected_issues" \
+            --arg reason "$selected_reason" \
+            --arg pass "$selected_pass" \
+            '{issues: $issues, reason: $reason, pass: $pass}')
+
+          echo "Final pass used: $selected_pass"
+          echo "Final duplicate count: $(jq '.issues | length' <<< "$final_json")"
+          echo "Final reason: $(jq -r '.reason' <<< "$final_json")"
+
+          {
+            echo "codex_output<<EOF"
+            echo "$final_json"
+            echo "EOF"
+          } >> "$GITHUB_OUTPUT"
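Note the control flow: pass 2 only runs when pass 1 found nothing, so in `select-final` at most one of the two branches fires even though pass 2 is written to take precedence. All three steps publish outputs with the `name<<EOF` delimiter form, which is what GitHub Actions requires for multi-line values written to `$GITHUB_OUTPUT`. The mechanism can be simulated outside a runner:

```bash
# On a real runner GITHUB_OUTPUT is provided by GitHub Actions; here we
# point it at a temp file to inspect what the step would publish.
export GITHUB_OUTPUT="$(mktemp)"
reason=$'line one\nline two'
{
  echo "has_matches=true"   # single-line value: plain key=value
  echo "reason<<EOF"        # multi-line value: heredoc-style delimiter
  echo "$reason"
  echo "EOF"
} >> "$GITHUB_OUTPUT"
cat "$GITHUB_OUTPUT"
```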
   comment-on-issue:
     name: Comment with potential duplicates
     needs: gather-duplicates
@@ -105,11 +315,17 @@ jobs:

             const issues = Array.isArray(parsed?.issues) ? parsed.issues : [];
             const currentIssueNumber = String(context.payload.issue.number);
+            const passUsed = typeof parsed?.pass === 'string' ? parsed.pass : 'unknown';
+            const reason = typeof parsed?.reason === 'string' ? parsed.reason : '';

             console.log(`Current issue number: ${currentIssueNumber}`);
+            console.log(`Pass used: ${passUsed}`);
+            if (reason) {
+              console.log(`Reason: ${reason}`);
+            }
             console.log(issues);

-            const filteredIssues = issues.filter((value) => String(value) !== currentIssueNumber);
+            const filteredIssues = [...new Set(issues.map((value) => String(value)))].filter((value) => value !== currentIssueNumber).slice(0, 5);

             if (filteredIssues.length === 0) {
               core.info('Codex reported no potential duplicates.');
.github/workflows/rust-ci.yml (vendored, 8 changed lines)
@@ -59,7 +59,7 @@ jobs:
         working-directory: codex-rs
     steps:
       - uses: actions/checkout@v6
-      - uses: dtolnay/rust-toolchain@1.93
+      - uses: dtolnay/rust-toolchain@1.93.0
        with:
          components: rustfmt
      - name: cargo fmt
@@ -75,7 +75,7 @@ jobs:
         working-directory: codex-rs
     steps:
       - uses: actions/checkout@v6
-      - uses: dtolnay/rust-toolchain@1.93
+      - uses: dtolnay/rust-toolchain@1.93.0
      - uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2
        with:
          tool: cargo-shear
@@ -196,7 +196,7 @@ jobs:
             fi
             sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends "${packages[@]}"
           fi
-      - uses: dtolnay/rust-toolchain@1.93
+      - uses: dtolnay/rust-toolchain@1.93.0
        with:
          targets: ${{ matrix.target }}
          components: clippy
@@ -513,7 +513,7 @@ jobs:
      - name: Install DotSlash
        uses: facebook/install-dotslash@v2

-      - uses: dtolnay/rust-toolchain@1.93
+      - uses: dtolnay/rust-toolchain@1.93.0
        with:
          targets: ${{ matrix.target }}
.github/workflows/rust-release-windows.yml (vendored, 2 changed lines)
@@ -82,7 +82,7 @@ jobs:
           Write-Host "Total RAM: $ramGiB GiB"
           Write-Host "Disk usage:"
           Get-PSDrive -PSProvider FileSystem | Format-Table -AutoSize Name, @{Name='Size(GB)';Expression={[math]::Round(($_.Used + $_.Free) / 1GB, 1)}}, @{Name='Free(GB)';Expression={[math]::Round($_.Free / 1GB, 1)}}
-      - uses: dtolnay/rust-toolchain@1.93
+      - uses: dtolnay/rust-toolchain@1.93.0
        with:
          targets: ${{ matrix.target }}
.github/workflows/rust-release.yml (vendored, 2 changed lines)
@@ -123,7 +123,7 @@ jobs:
           sudo apt-get update -y
           sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
         fi
-      - uses: dtolnay/rust-toolchain@1.93
+      - uses: dtolnay/rust-toolchain@1.93.0
        with:
          targets: ${{ matrix.target }}
.github/workflows/sdk.yml (vendored, 2 changed lines)
@@ -31,7 +31,7 @@ jobs:
           node-version: 22
           cache: pnpm

-      - uses: dtolnay/rust-toolchain@1.93
+      - uses: dtolnay/rust-toolchain@1.93.0

      - name: build codex
        run: cargo build --bin codex
.github/workflows/shell-tool-mcp.yml (vendored, 217 changed lines)
@@ -105,7 +105,7 @@ jobs:
           sudo DEBIAN_FRONTEND=noninteractive apt-get install -y libubsan1
         fi

-      - uses: dtolnay/rust-toolchain@1.93
+      - uses: dtolnay/rust-toolchain@1.93.0
        with:
          targets: ${{ matrix.target }}

@@ -251,11 +251,11 @@ jobs:
           set -euo pipefail
           if command -v apt-get >/dev/null 2>&1; then
             apt-get update
-            DEBIAN_FRONTEND=noninteractive apt-get install -y git build-essential bison autoconf gettext
+            DEBIAN_FRONTEND=noninteractive apt-get install -y git build-essential bison autoconf gettext libncursesw5-dev
           elif command -v dnf >/dev/null 2>&1; then
-            dnf install -y git gcc gcc-c++ make bison autoconf gettext
+            dnf install -y git gcc gcc-c++ make bison autoconf gettext ncurses-devel
           elif command -v yum >/dev/null 2>&1; then
-            yum install -y git gcc gcc-c++ make bison autoconf gettext
+            yum install -y git gcc gcc-c++ make bison autoconf gettext ncurses-devel
           else
             echo "Unsupported package manager in container"
             exit 1
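The ncurses development packages are presumably needed so the zsh build added below can compile its terminal handling against a curses library. One way to confirm what a resulting binary actually linked, assuming dynamic linking on a glibc container (a static build would print nothing):

```bash
# Inspect the curses/terminfo dependency of a freshly built zsh binary.
ldd /tmp/zsh/Src/zsh | grep -Ei 'ncurses|tinfo' || echo "no dynamic curses dep"
```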
@@ -329,6 +329,210 @@ jobs:
           path: artifacts/**
           if-no-files-found: error

+  zsh-linux:
+    name: Build zsh (Linux) - ${{ matrix.variant }} - ${{ matrix.target }}
+    needs: metadata
+    runs-on: ${{ matrix.runner }}
+    timeout-minutes: 30
+    container:
+      image: ${{ matrix.image }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - runner: ubuntu-24.04
+            target: x86_64-unknown-linux-musl
+            variant: ubuntu-24.04
+            image: ubuntu:24.04
+          - runner: ubuntu-24.04
+            target: x86_64-unknown-linux-musl
+            variant: ubuntu-22.04
+            image: ubuntu:22.04
+          - runner: ubuntu-24.04
+            target: x86_64-unknown-linux-musl
+            variant: debian-12
+            image: debian:12
+          - runner: ubuntu-24.04
+            target: x86_64-unknown-linux-musl
+            variant: debian-11
+            image: debian:11
+          - runner: ubuntu-24.04
+            target: x86_64-unknown-linux-musl
+            variant: centos-9
+            image: quay.io/centos/centos:stream9
+          - runner: ubuntu-24.04-arm
+            target: aarch64-unknown-linux-musl
+            variant: ubuntu-24.04
+            image: arm64v8/ubuntu:24.04
+          - runner: ubuntu-24.04-arm
+            target: aarch64-unknown-linux-musl
+            variant: ubuntu-22.04
+            image: arm64v8/ubuntu:22.04
+          - runner: ubuntu-24.04-arm
+            target: aarch64-unknown-linux-musl
+            variant: ubuntu-20.04
+            image: arm64v8/ubuntu:20.04
+          - runner: ubuntu-24.04-arm
+            target: aarch64-unknown-linux-musl
+            variant: debian-12
+            image: arm64v8/debian:12
+          - runner: ubuntu-24.04-arm
+            target: aarch64-unknown-linux-musl
+            variant: debian-11
+            image: arm64v8/debian:11
+          - runner: ubuntu-24.04-arm
+            target: aarch64-unknown-linux-musl
+            variant: centos-9
+            image: quay.io/centos/centos:stream9
+    steps:
+      - name: Install build prerequisites
+        shell: bash
+        run: |
+          set -euo pipefail
+          if command -v apt-get >/dev/null 2>&1; then
+            apt-get update
+            DEBIAN_FRONTEND=noninteractive apt-get install -y git build-essential bison autoconf gettext libncursesw5-dev
+          elif command -v dnf >/dev/null 2>&1; then
+            dnf install -y git gcc gcc-c++ make bison autoconf gettext ncurses-devel
+          elif command -v yum >/dev/null 2>&1; then
+            yum install -y git gcc gcc-c++ make bison autoconf gettext ncurses-devel
+          else
+            echo "Unsupported package manager in container"
+            exit 1
+          fi
+
+      - name: Checkout repository
+        uses: actions/checkout@v6
+
+      - name: Build patched zsh
+        shell: bash
+        run: |
+          set -euo pipefail
+          git clone https://git.code.sf.net/p/zsh/code /tmp/zsh
+          cd /tmp/zsh
+          git checkout 77045ef899e53b9598bebc5a41db93a548a40ca6
+          git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/zsh-exec-wrapper.patch"
+          ./Util/preconfig
+          ./configure
+          cores="$(command -v nproc >/dev/null 2>&1 && nproc || getconf _NPROCESSORS_ONLN)"
+          make -j"${cores}"
+
+          dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/zsh/${{ matrix.variant }}"
+          mkdir -p "$dest"
+          cp Src/zsh "$dest/zsh"
+
+      - name: Smoke test zsh exec wrapper
+        shell: bash
+        run: |
+          set -euo pipefail
+          tmpdir="$(mktemp -d)"
+          cat > "$tmpdir/exec-wrapper" <<'EOF'
+          #!/usr/bin/env bash
+          set -euo pipefail
+          : "${CODEX_WRAPPER_LOG:?missing CODEX_WRAPPER_LOG}"
+          printf '%s\n' "$@" > "$CODEX_WRAPPER_LOG"
+          file="$1"
+          shift
+          if [[ "$#" -eq 0 ]]; then
+            exec "$file"
+          fi
+          arg0="$1"
+          shift
+          exec -a "$arg0" "$file" "$@"
+          EOF
+          chmod +x "$tmpdir/exec-wrapper"
+
+          CODEX_WRAPPER_LOG="$tmpdir/wrapper.log" \
+          EXEC_WRAPPER="$tmpdir/exec-wrapper" \
+          /tmp/zsh/Src/zsh -fc '/bin/echo smoke-zsh' > "$tmpdir/stdout.txt"
+
+          grep -Fx "smoke-zsh" "$tmpdir/stdout.txt"
+          grep -Fx "/bin/echo" "$tmpdir/wrapper.log"
+
+      - uses: actions/upload-artifact@v6
+        with:
+          name: shell-tool-mcp-zsh-${{ matrix.target }}-${{ matrix.variant }}
+          path: artifacts/**
+          if-no-files-found: error
+  zsh-darwin:
+    name: Build zsh (macOS) - ${{ matrix.variant }} - ${{ matrix.target }}
+    needs: metadata
+    runs-on: ${{ matrix.runner }}
+    timeout-minutes: 30
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - runner: macos-15-xlarge
+            target: aarch64-apple-darwin
+            variant: macos-15
+          - runner: macos-14
+            target: aarch64-apple-darwin
+            variant: macos-14
+    steps:
+      - name: Install build prerequisites
+        shell: bash
+        run: |
+          set -euo pipefail
+          if ! command -v autoconf >/dev/null 2>&1; then
+            brew install autoconf
+          fi
+
+      - name: Checkout repository
+        uses: actions/checkout@v6
+
+      - name: Build patched zsh
+        shell: bash
+        run: |
+          set -euo pipefail
+          git clone https://git.code.sf.net/p/zsh/code /tmp/zsh
+          cd /tmp/zsh
+          git checkout 77045ef899e53b9598bebc5a41db93a548a40ca6
+          git apply "${GITHUB_WORKSPACE}/shell-tool-mcp/patches/zsh-exec-wrapper.patch"
+          ./Util/preconfig
+          ./configure
+          cores="$(getconf _NPROCESSORS_ONLN)"
+          make -j"${cores}"
+
+          dest="${GITHUB_WORKSPACE}/artifacts/vendor/${{ matrix.target }}/zsh/${{ matrix.variant }}"
+          mkdir -p "$dest"
+          cp Src/zsh "$dest/zsh"
+
+      - name: Smoke test zsh exec wrapper
+        shell: bash
+        run: |
+          set -euo pipefail
+          tmpdir="$(mktemp -d)"
+          cat > "$tmpdir/exec-wrapper" <<'EOF'
+          #!/usr/bin/env bash
+          set -euo pipefail
+          : "${CODEX_WRAPPER_LOG:?missing CODEX_WRAPPER_LOG}"
+          printf '%s\n' "$@" > "$CODEX_WRAPPER_LOG"
+          file="$1"
+          shift
+          if [[ "$#" -eq 0 ]]; then
+            exec "$file"
+          fi
+          arg0="$1"
+          shift
+          exec -a "$arg0" "$file" "$@"
+          EOF
+          chmod +x "$tmpdir/exec-wrapper"
+
+          CODEX_WRAPPER_LOG="$tmpdir/wrapper.log" \
+          EXEC_WRAPPER="$tmpdir/exec-wrapper" \
+          /tmp/zsh/Src/zsh -fc '/bin/echo smoke-zsh' > "$tmpdir/stdout.txt"
+
+          grep -Fx "smoke-zsh" "$tmpdir/stdout.txt"
+          grep -Fx "/bin/echo" "$tmpdir/wrapper.log"
+
+      - uses: actions/upload-artifact@v6
+        with:
+          name: shell-tool-mcp-zsh-${{ matrix.target }}-${{ matrix.variant }}
+          path: artifacts/**
+          if-no-files-found: error
   package:
     name: Package npm module
     needs:
@@ -336,6 +540,8 @@ jobs:
       - rust-binaries
       - bash-linux
      - bash-darwin
+      - zsh-linux
+      - zsh-darwin
     runs-on: ubuntu-latest
     env:
       PACKAGE_VERSION: ${{ needs.metadata.outputs.version }}
@@ -409,7 +615,8 @@ jobs:
           chmod +x \
             "$staging"/vendor/*/codex-exec-mcp-server \
             "$staging"/vendor/*/codex-execve-wrapper \
-            "$staging"/vendor/*/bash/*/bash
+            "$staging"/vendor/*/bash/*/bash \
+            "$staging"/vendor/*/zsh/*/zsh

      - name: Create npm tarball
        shell: bash
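The smoke test in both zsh jobs hinges on bash's `exec -a`, which lets the wrapper hand control to the real binary while preserving the argv[0] the shell intended, so the wrapper stays invisible to the program it launches. A minimal standalone illustration of that mechanism, independent of the zsh patch:

```bash
# Demonstrate exec -a: run /bin/echo but present a custom argv[0] to it.
cat > /tmp/demo-wrapper <<'EOF'
#!/usr/bin/env bash
file="$1"; shift          # real executable path
arg0="$1"; shift          # argv[0] the caller wanted the program to see
exec -a "$arg0" "$file" "$@"
EOF
chmod +x /tmp/demo-wrapper
/tmp/demo-wrapper /bin/echo fancy-echo hello world   # prints: hello world
```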
AGENTS.md (15 changed lines)
@@ -15,6 +15,10 @@ In the codex-rs folder where the rust code lives:

 - When writing tests, prefer comparing the equality of entire objects over fields one by one.
 - When making a change that adds or changes an API, ensure that the documentation in the `docs/` folder is up to date if applicable.
 - If you change `ConfigToml` or nested config types, run `just write-config-schema` to update `codex-rs/core/config.schema.json`.
+- If you change Rust dependencies (`Cargo.toml` or `Cargo.lock`), run `just bazel-lock-update` from the
+  repo root to refresh `MODULE.bazel.lock`, and include that lockfile update in the same change.
+- After dependency changes, run `just bazel-lock-check` from the repo root so lockfile drift is caught
+  locally before CI.
 - Do not create small helper methods that are referenced only once.

 Run `just fmt` (in `codex-rs` directory) automatically after you have finished making Rust code changes; do not ask for approval to run it. Additionally, run the tests:

@@ -60,7 +64,14 @@ See `codex-rs/tui/styles.md`.

 ### Snapshot tests

-This repo uses snapshot tests (via `insta`), especially in `codex-rs/tui`, to validate rendered output. When UI or text output changes intentionally, update the snapshots as follows:
+This repo uses snapshot tests (via `insta`), especially in `codex-rs/tui`, to validate rendered output.
+
+**Requirement:** any change that affects user-visible UI (including adding new UI) must include
+corresponding `insta` snapshot coverage (add a new snapshot test if one doesn't exist yet, or
+update the existing snapshot). Review and accept snapshot updates as part of the PR so UI impact
+is easy to review and future diffs stay visual.
+
+When UI or text output changes intentionally, update the snapshots as follows:

 - Run tests to generate any updated snapshots:
   - `cargo test -p codex-tui`

@@ -158,3 +169,5 @@ These guidelines apply to app-server protocol work in `codex-rs`, especially:
   `just write-app-server-schema`
   (and `just write-app-server-schema --experimental` when experimental API fixtures are affected).
 - Validate with `cargo test -p codex-app-server-protocol`.
+- Avoid boilerplate tests that only assert experimental field markers for individual
+  request fields in `common.rs`; rely on schema generation/tests and behavioral coverage instead.
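For reference, the snapshot-update loop that section describes typically looks like this with `insta` (assumes the `cargo-insta` CLI is installed; the commands beyond `cargo test -p codex-tui` are the standard insta workflow rather than text quoted from AGENTS.md):

```bash
cd codex-rs
cargo test -p codex-tui    # failing snapshot tests write *.snap.new files
cargo insta review         # interactively accept/reject pending snapshots
# or, non-interactively:
cargo insta accept         # accept all pending snapshots
cargo test -p codex-tui    # re-run to confirm everything is green
```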
MODULE.bazel.lock (generated, 4 changed lines)
File diff suppressed because one or more lines are too long
README.md
@@ -5,6 +5,7 @@
 </p>
 </br>
 If you want Codex in your code editor (VS Code, Cursor, Windsurf), <a href="https://developers.openai.com/codex/ide">install in your IDE.</a>
+</br>If you want the desktop app experience, run <code>codex app</code> or visit <a href="https://chatgpt.com/codex?app-landing-page=true">the Codex App page</a>.
 </br>If you are looking for the <em>cloud-based agent</em> from OpenAI, <strong>Codex Web</strong>, go to <a href="https://chatgpt.com/codex">chatgpt.com/codex</a>.</p>

 ---
codex-rs/Cargo.lock (generated, 17 changed lines)
@@ -1401,6 +1401,7 @@ dependencies = [
  "schemars 0.8.22",
  "serde",
  "serde_json",
+ "shlex",
  "similar",
  "strum_macros 0.27.2",
  "tempfile",
@@ -1419,6 +1420,8 @@ dependencies = [
  "codex-protocol",
  "serde",
  "serde_json",
+ "tungstenite",
+ "url",
  "uuid",
 ]
@@ -1692,6 +1695,7 @@ dependencies = [
  "codex-utils-home-dir",
  "codex-utils-pty",
  "codex-utils-readiness",
+ "codex-utils-sanitizer",
  "codex-utils-string",
  "codex-windows-sandbox",
  "core-foundation 0.9.4",
@@ -1708,6 +1712,7 @@ dependencies = [ (the single added line in this hunk is not identifiable from the mirror)
  "include_dir",
  "indexmap 2.13.0",
  "indoc",
  "insta",
  "keyring",
  "landlock",
  "libc",
@@ -1720,7 +1725,6 @@ dependencies = [ (the single removed line in this hunk is not identifiable from the mirror)
  "predicates",
  "pretty_assertions",
  "rand 0.9.2",
  "regex",
  "regex-lite",
  "reqwest",
  "rmcp",
@@ -2034,6 +2038,7 @@ version = "0.0.0" (the single added line in this hunk is not identifiable from the mirror)
 dependencies = [
  "anyhow",
  "async-trait",
  "base64 0.22.1",
  "clap",
  "codex-utils-absolute-path",
  "codex-utils-rustls-provider",
@@ -2293,6 +2298,7 @@ dependencies = [
  "codex-utils-oss",
  "codex-utils-pty",
  "codex-utils-sandbox-summary",
+ "codex-utils-sleep-inhibitor",
  "codex-windows-sandbox",
  "color-eyre",
  "crossterm",
@@ -2492,6 +2498,15 @@ dependencies = [
  "regex",
 ]

+[[package]]
+name = "codex-utils-sleep-inhibitor"
+version = "0.0.0"
+dependencies = [
+ "core-foundation 0.9.4",
+ "libc",
+ "tracing",
+]
+
 [[package]]
 name = "codex-utils-string"
 version = "0.0.0"
codex-rs/Cargo.toml
@@ -54,6 +54,7 @@ members = [
     "utils/elapsed",
     "utils/sandbox-summary",
     "utils/sanitizer",
+    "utils/sleep-inhibitor",
     "utils/approval-presets",
     "utils/oss",
     "utils/fuzzy-match",
@@ -131,6 +132,7 @@ codex-utils-readiness = { path = "utils/readiness" }
 codex-utils-rustls-provider = { path = "utils/rustls-provider" }
 codex-utils-sandbox-summary = { path = "utils/sandbox-summary" }
 codex-utils-sanitizer = { path = "utils/sanitizer" }
+codex-utils-sleep-inhibitor = { path = "utils/sleep-inhibitor" }
 codex-utils-string = { path = "utils/string" }
 codex-windows-sandbox = { path = "windows-sandbox-rs" }
 core_test_support = { path = "core/tests/common" }
@@ -340,7 +342,6 @@ ignored = [
     "icu_provider",
     "openssl-sys",
     "codex-utils-readiness",
-    "codex-utils-sanitizer",
     "codex-secrets",
 ]
(crate Cargo.toml; exact path not captured in the mirror)
@@ -20,6 +20,7 @@ codex-utils-absolute-path = { workspace = true }
 schemars = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true }
+shlex = { workspace = true }
 strum_macros = { workspace = true }
 thiserror = { workspace = true }
 ts-rs = { workspace = true }
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
|
||||
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
|
||||
"enum": [
|
||||
"on-failure"
|
||||
],
|
||||
@@ -1128,6 +1128,13 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"includeHidden": {
|
||||
"description": "When true, include models that are hidden from the default picker list.",
|
||||
"type": [
|
||||
"boolean",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"limit": {
|
||||
"description": "Optional page size; defaults to a reasonable server-side value.",
|
||||
"format": "uint32",
|
||||
@@ -2565,6 +2572,13 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"cwd": {
|
||||
"description": "Optional cwd filter; when set, only threads whose session cwd exactly matches this path are returned.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"limit": {
|
||||
"description": "Optional page size; defaults to a reasonable server-side value.",
|
||||
"format": "uint32",
|
||||
|
||||
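Across every schema file in this PR, the `on-failure` approval policy is re-documented as deprecated in favor of `on-request` (interactive) and `never` (non-interactive). Assuming the codex CLI exposes this setting through its usual approval flag (the flag spelling below is an assumption; this mirror only shows the schema enum values):

```bash
# Preferred replacements for the deprecated on-failure approval policy.
codex --ask-for-approval on-request    # interactive runs: escalate on demand
codex exec --ask-for-approval never    # non-interactive runs: never escalate
```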
@@ -104,7 +104,7 @@
       "type": "string"
     },
     {
-      "description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
+      "description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
       "enum": [
         "on-failure"
       ],
@@ -1349,6 +1349,14 @@
       "default": "agent",
       "description": "Where the command originated. Defaults to Agent for backward compatibility."
     },
+    "status": {
+      "allOf": [
+        {
+          "$ref": "#/definitions/ExecCommandStatus"
+        }
+      ],
+      "description": "Completion status for this command execution."
+    },
     "stderr": {
       "description": "Captured stderr",
       "type": "string"
@@ -1377,6 +1385,7 @@
     "exit_code",
     "formatted_output",
     "parsed_cmd",
+    "status",
     "stderr",
     "stdout",
     "turn_id",
@@ -1429,6 +1438,17 @@
       "description": "The command's working directory.",
       "type": "string"
     },
+    "network_approval_context": {
+      "anyOf": [
+        {
+          "$ref": "#/definitions/NetworkApprovalContext"
+        },
+        {
+          "type": "null"
+        }
+      ],
+      "description": "Optional network context for a blocked request that can be approved."
+    },
     "parsed_cmd": {
       "items": {
         "$ref": "#/definitions/ParsedCommand"
@@ -1805,6 +1825,14 @@
       "description": "The changes that were applied (mirrors PatchApplyBeginEvent::changes).",
       "type": "object"
     },
+    "status": {
+      "allOf": [
+        {
+          "$ref": "#/definitions/PatchApplyStatus"
+        }
+      ],
+      "description": "Completion status for this patch application."
+    },
     "stderr": {
       "description": "Captured stderr (parser errors, IO failures, etc.).",
       "type": "string"
@@ -1832,6 +1860,7 @@
     },
     "required": [
       "call_id",
+      "status",
       "stderr",
       "stdout",
       "success",
@@ -2873,6 +2902,14 @@
       ],
       "type": "string"
     },
+    "ExecCommandStatus": {
+      "enum": [
+        "completed",
+        "failed",
+        "declined"
+      ],
+      "type": "string"
+    },
     "ExecOutputStream": {
       "enum": [
         "stdout",
@@ -3289,6 +3326,30 @@
       ],
       "type": "string"
     },
+    "NetworkApprovalContext": {
+      "properties": {
+        "host": {
+          "type": "string"
+        },
+        "protocol": {
+          "$ref": "#/definitions/NetworkApprovalProtocol"
+        }
+      },
+      "required": [
+        "host",
+        "protocol"
+      ],
+      "type": "object"
+    },
+    "NetworkApprovalProtocol": {
+      "enum": [
+        "http",
+        "https",
+        "socks5_tcp",
+        "socks5_udp"
+      ],
+      "type": "string"
+    },
     "ParsedCommand": {
       "oneOf": [
         {
@@ -3400,6 +3461,14 @@
         }
       ]
     },
+    "PatchApplyStatus": {
+      "enum": [
+        "completed",
+        "failed",
+        "declined"
+      ],
+      "type": "string"
+    },
     "PlanItemArg": {
       "additionalProperties": false,
       "properties": {
@@ -6185,6 +6254,14 @@
       "default": "agent",
       "description": "Where the command originated. Defaults to Agent for backward compatibility."
     },
+    "status": {
+      "allOf": [
+        {
+          "$ref": "#/definitions/ExecCommandStatus"
+        }
+      ],
+      "description": "Completion status for this command execution."
+    },
     "stderr": {
       "description": "Captured stderr",
       "type": "string"
@@ -6213,6 +6290,7 @@
     "exit_code",
     "formatted_output",
     "parsed_cmd",
+    "status",
     "stderr",
     "stdout",
     "turn_id",
@@ -6265,6 +6343,17 @@
       "description": "The command's working directory.",
       "type": "string"
     },
+    "network_approval_context": {
+      "anyOf": [
+        {
+          "$ref": "#/definitions/NetworkApprovalContext"
+        },
+        {
+          "type": "null"
+        }
+      ],
+      "description": "Optional network context for a blocked request that can be approved."
+    },
     "parsed_cmd": {
       "items": {
         "$ref": "#/definitions/ParsedCommand"
@@ -6641,6 +6730,14 @@
       "description": "The changes that were applied (mirrors PatchApplyBeginEvent::changes).",
       "type": "object"
     },
+    "status": {
+      "allOf": [
+        {
+          "$ref": "#/definitions/PatchApplyStatus"
+        }
+      ],
+      "description": "Completion status for this patch application."
+    },
     "stderr": {
       "description": "Captured stderr (parser errors, IO failures, etc.).",
       "type": "string"
@@ -6668,6 +6765,7 @@
     },
     "required": [
       "call_id",
+      "status",
       "stderr",
       "stdout",
       "success",
(new schema file)
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "properties": {
+    "sessionId": {
+      "type": "string"
+    }
+  },
+  "required": [
+    "sessionId"
+  ],
+  "title": "FuzzyFileSearchSessionCompletedNotification",
+  "type": "object"
+}
(new schema file)
@@ -0,0 +1,63 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "FuzzyFileSearchResult": {
+      "description": "Superset of [`codex_file_search::FileMatch`]",
+      "properties": {
+        "file_name": {
+          "type": "string"
+        },
+        "indices": {
+          "items": {
+            "format": "uint32",
+            "minimum": 0.0,
+            "type": "integer"
+          },
+          "type": [
+            "array",
+            "null"
+          ]
+        },
+        "path": {
+          "type": "string"
+        },
+        "root": {
+          "type": "string"
+        },
+        "score": {
+          "format": "uint32",
+          "minimum": 0.0,
+          "type": "integer"
+        }
+      },
+      "required": [
+        "file_name",
+        "path",
+        "root",
+        "score"
+      ],
+      "type": "object"
+    }
+  },
+  "properties": {
+    "files": {
+      "items": {
+        "$ref": "#/definitions/FuzzyFileSearchResult"
+      },
+      "type": "array"
+    },
+    "query": {
+      "type": "string"
+    },
+    "sessionId": {
+      "type": "string"
+    }
+  },
+  "required": [
+    "files",
+    "query",
+    "sessionId"
+  ],
+  "title": "FuzzyFileSearchSessionUpdatedNotification",
+  "type": "object"
+}
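These two new fixtures describe the streaming fuzzy-file-search notifications: `fuzzyFileSearch/sessionUpdated` carries the matched files plus the query and session id, while `sessionCompleted` carries only the session id. A quick jq check that a sample payload satisfies the updated notification's required fields (all the sample values below are invented):

```bash
# Validate a hand-written sessionUpdated payload against the schema's
# required fields (files, query, sessionId) and per-file requirements.
cat <<'EOF' | jq -e '
  (has("files") and has("query") and has("sessionId"))
  and (.files | all(has("file_name") and has("path") and has("root") and has("score")))
' && echo "payload shape OK"
{
  "sessionId": "sess-123",
  "query": "main.rs",
  "files": [
    { "file_name": "main.rs", "path": "src/main.rs", "root": "/repo",
      "score": 42, "indices": [0, 1, 2] }
  ]
}
EOF
```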
@@ -193,6 +193,11 @@
       "default": false,
       "type": "boolean"
     },
+    "isEnabled": {
+      "default": true,
+      "description": "Whether this app is enabled in config.toml. Example: ```toml [apps.bad_app] enabled = false ```",
+      "type": "boolean"
+    },
     "logoUrl": {
       "type": [
         "string",
@@ -241,7 +246,7 @@
       "type": "string"
     },
     {
-      "description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
+      "description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
       "enum": [
         "on-failure"
       ],
@@ -1965,6 +1970,14 @@
       "default": "agent",
       "description": "Where the command originated. Defaults to Agent for backward compatibility."
     },
+    "status": {
+      "allOf": [
+        {
+          "$ref": "#/definitions/ExecCommandStatus"
+        }
+      ],
+      "description": "Completion status for this command execution."
+    },
     "stderr": {
       "description": "Captured stderr",
       "type": "string"
@@ -1993,6 +2006,7 @@
     "exit_code",
     "formatted_output",
     "parsed_cmd",
+    "status",
     "stderr",
     "stdout",
     "turn_id",
@@ -2045,6 +2059,17 @@
       "description": "The command's working directory.",
       "type": "string"
     },
+    "network_approval_context": {
+      "anyOf": [
+        {
+          "$ref": "#/definitions/NetworkApprovalContext"
+        },
+        {
+          "type": "null"
+        }
+      ],
+      "description": "Optional network context for a blocked request that can be approved."
+    },
     "parsed_cmd": {
       "items": {
         "$ref": "#/definitions/ParsedCommand"
@@ -2421,6 +2446,14 @@
       "description": "The changes that were applied (mirrors PatchApplyBeginEvent::changes).",
       "type": "object"
     },
+    "status": {
+      "allOf": [
+        {
+          "$ref": "#/definitions/PatchApplyStatus2"
+        }
+      ],
+      "description": "Completion status for this patch application."
+    },
     "stderr": {
       "description": "Captured stderr (parser errors, IO failures, etc.).",
       "type": "string"
@@ -2448,6 +2481,7 @@
     },
     "required": [
       "call_id",
+      "status",
       "stderr",
       "stdout",
       "success",
@@ -3489,6 +3523,14 @@
       ],
       "type": "string"
     },
+    "ExecCommandStatus": {
+      "enum": [
+        "completed",
+        "failed",
+        "declined"
+      ],
+      "type": "string"
+    },
     "ExecOutputStream": {
       "enum": [
         "stdout",
@@ -3684,6 +3726,76 @@
       ],
       "type": "object"
     },
+    "FuzzyFileSearchResult": {
+      "description": "Superset of [`codex_file_search::FileMatch`]",
+      "properties": {
+        "file_name": {
+          "type": "string"
+        },
+        "indices": {
+          "items": {
+            "format": "uint32",
+            "minimum": 0.0,
+            "type": "integer"
+          },
+          "type": [
+            "array",
+            "null"
+          ]
+        },
+        "path": {
+          "type": "string"
+        },
+        "root": {
+          "type": "string"
+        },
+        "score": {
+          "format": "uint32",
+          "minimum": 0.0,
+          "type": "integer"
+        }
+      },
+      "required": [
+        "file_name",
+        "path",
+        "root",
+        "score"
+      ],
+      "type": "object"
+    },
+    "FuzzyFileSearchSessionCompletedNotification": {
+      "properties": {
+        "sessionId": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "sessionId"
+      ],
+      "type": "object"
+    },
+    "FuzzyFileSearchSessionUpdatedNotification": {
+      "properties": {
+        "files": {
+          "items": {
+            "$ref": "#/definitions/FuzzyFileSearchResult"
+          },
+          "type": "array"
+        },
+        "query": {
+          "type": "string"
+        },
+        "sessionId": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "files",
+        "query",
+        "sessionId"
+      ],
+      "type": "object"
+    },
     "GhostCommit": {
       "description": "Details of a ghost commit created from a repository state.",
       "properties": {
@@ -4106,6 +4218,30 @@
       ],
       "type": "string"
     },
+    "NetworkApprovalContext": {
+      "properties": {
+        "host": {
+          "type": "string"
+        },
+        "protocol": {
+          "$ref": "#/definitions/NetworkApprovalProtocol"
+        }
+      },
+      "required": [
+        "host",
+        "protocol"
+      ],
+      "type": "object"
+    },
+    "NetworkApprovalProtocol": {
+      "enum": [
+        "http",
+        "https",
+        "socks5_tcp",
+        "socks5_udp"
+      ],
+      "type": "string"
+    },
     "ParsedCommand": {
       "oneOf": [
         {
@@ -4226,6 +4362,14 @@
       ],
       "type": "string"
     },
+    "PatchApplyStatus2": {
+      "enum": [
+        "completed",
+        "failed",
+        "declined"
+      ],
+      "type": "string"
+    },
     "PatchChangeKind": {
       "oneOf": [
         {
@@ -5899,7 +6043,8 @@
     {
       "enum": [
         "review",
-        "compact"
+        "compact",
+        "memory_consolidation"
       ],
       "type": "string"
     },
@@ -8171,6 +8316,46 @@
       "title": "ConfigWarningNotification",
       "type": "object"
     },
+    {
+      "properties": {
+        "method": {
+          "enum": [
+            "fuzzyFileSearch/sessionUpdated"
+          ],
+          "title": "FuzzyFileSearch/sessionUpdatedNotificationMethod",
+          "type": "string"
+        },
+        "params": {
+          "$ref": "#/definitions/FuzzyFileSearchSessionUpdatedNotification"
+        }
+      },
+      "required": [
+        "method",
+        "params"
+      ],
+      "title": "FuzzyFileSearch/sessionUpdatedNotification",
+      "type": "object"
+    },
+    {
+      "properties": {
+        "method": {
+          "enum": [
+            "fuzzyFileSearch/sessionCompleted"
+          ],
+          "title": "FuzzyFileSearch/sessionCompletedNotificationMethod",
+          "type": "string"
+        },
+        "params": {
+          "$ref": "#/definitions/FuzzyFileSearchSessionCompletedNotification"
+        }
+      },
+      "required": [
+        "method",
+        "params"
+      ],
+      "title": "FuzzyFileSearch/sessionCompletedNotification",
+      "type": "object"
+    },
     {
       "description": "Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.",
       "properties": {
@@ -208,7 +208,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
|
||||
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
|
||||
"enum": [
|
||||
"on-failure"
|
||||
],
|
||||
@@ -3362,6 +3362,14 @@
|
||||
"default": "agent",
|
||||
"description": "Where the command originated. Defaults to Agent for backward compatibility."
|
||||
},
|
||||
"status": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/ExecCommandStatus"
|
||||
}
|
||||
],
|
||||
"description": "Completion status for this command execution."
|
||||
},
|
||||
"stderr": {
|
||||
"description": "Captured stderr",
|
||||
"type": "string"
|
||||
@@ -3390,6 +3398,7 @@
|
||||
"exit_code",
|
||||
"formatted_output",
|
||||
"parsed_cmd",
|
||||
"status",
|
||||
"stderr",
|
||||
"stdout",
|
||||
"turn_id",
|
||||
@@ -3442,6 +3451,17 @@
|
||||
"description": "The command's working directory.",
|
||||
"type": "string"
|
||||
},
|
||||
"network_approval_context": {
|
||||
"anyOf": [
|
||||
{
|
||||
"$ref": "#/definitions/NetworkApprovalContext"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Optional network context for a blocked request that can be approved."
|
||||
},
|
||||
"parsed_cmd": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/ParsedCommand"
|
||||
@@ -3818,6 +3838,14 @@
|
||||
"description": "The changes that were applied (mirrors PatchApplyBeginEvent::changes).",
|
||||
"type": "object"
|
||||
},
|
||||
"status": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/v2/PatchApplyStatus"
|
||||
}
|
||||
],
|
||||
"description": "Completion status for this patch application."
|
||||
},
|
||||
"stderr": {
|
||||
"description": "Captured stderr (parser errors, IO failures, etc.).",
|
||||
"type": "string"
|
||||
@@ -3845,6 +3873,7 @@
|
||||
},
|
||||
"required": [
|
||||
"call_id",
|
||||
"status",
|
||||
"stderr",
|
||||
"stdout",
|
||||
"success",
|
||||
@@ -4942,6 +4971,14 @@
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"ExecCommandStatus": {
|
||||
"enum": [
|
||||
"completed",
|
||||
"failed",
|
||||
"declined"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"ExecOneOffCommandParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -5386,6 +5423,43 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"FuzzyFileSearchSessionCompletedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"sessionId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"sessionId"
|
||||
],
|
||||
"title": "FuzzyFileSearchSessionCompletedNotification",
|
||||
"type": "object"
|
||||
},
|
||||
"FuzzyFileSearchSessionUpdatedNotification": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
"files": {
|
||||
"items": {
|
||||
"$ref": "#/definitions/FuzzyFileSearchResult"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"query": {
|
||||
"type": "string"
|
||||
},
|
||||
"sessionId": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"files",
|
||||
"query",
|
||||
"sessionId"
|
||||
],
|
||||
"title": "FuzzyFileSearchSessionUpdatedNotification",
|
||||
"type": "object"
|
||||
},
|
||||
"GetAuthStatusParams": {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"properties": {
|
||||
@@ -6200,6 +6274,30 @@
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"NetworkApprovalContext": {
|
||||
"properties": {
|
||||
"host": {
|
||||
"type": "string"
|
||||
},
|
||||
"protocol": {
|
||||
"$ref": "#/definitions/NetworkApprovalProtocol"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"host",
|
||||
"protocol"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"NetworkApprovalProtocol": {
|
||||
"enum": [
|
||||
"http",
|
||||
"https",
|
||||
"socks5_tcp",
|
||||
"socks5_udp"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"NewConversationParams": {
|
||||
"properties": {
|
||||
"approvalPolicy": {
|
||||
@@ -6422,6 +6520,14 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"PatchApplyStatus": {
|
||||
"enum": [
|
||||
"completed",
|
||||
"failed",
|
||||
"declined"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"PlanItemArg": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
@@ -8425,6 +8531,46 @@
|
||||
"title": "ConfigWarningNotification",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"method": {
|
||||
"enum": [
|
||||
"fuzzyFileSearch/sessionUpdated"
|
||||
],
|
||||
"title": "FuzzyFileSearch/sessionUpdatedNotificationMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/FuzzyFileSearchSessionUpdatedNotification"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "FuzzyFileSearch/sessionUpdatedNotification",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"method": {
|
||||
"enum": [
|
||||
"fuzzyFileSearch/sessionCompleted"
|
||||
],
|
||||
"title": "FuzzyFileSearch/sessionCompletedNotificationMethod",
|
||||
"type": "string"
|
||||
},
|
||||
"params": {
|
||||
"$ref": "#/definitions/FuzzyFileSearchSessionCompletedNotification"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"method",
|
||||
"params"
|
||||
],
|
||||
"title": "FuzzyFileSearch/sessionCompletedNotification",
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.",
|
||||
"properties": {
|
||||
@@ -9048,7 +9194,8 @@
|
||||
{
|
||||
"enum": [
|
||||
"review",
|
||||
"compact"
|
||||
"compact",
|
||||
"memory_consolidation"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
@@ -10123,6 +10270,11 @@
|
||||
"default": false,
|
||||
"type": "boolean"
|
||||
},
|
||||
"isEnabled": {
|
||||
"default": true,
|
||||
"description": "Whether this app is enabled in config.toml. Example: ```toml [apps.bad_app] enabled = false ```",
|
||||
"type": "boolean"
|
||||
},
|
||||
"logoUrl": {
|
||||
"type": [
|
||||
"string",
|
||||
@@ -12494,6 +12646,9 @@
|
||||
"displayName": {
|
||||
"type": "string"
|
||||
},
|
||||
"hidden": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
@@ -12534,6 +12689,7 @@
|
||||
"defaultReasoningEffort",
|
||||
"description",
|
||||
"displayName",
|
||||
"hidden",
|
||||
"id",
|
||||
"isDefault",
|
||||
"model",
|
||||
@@ -12551,6 +12707,13 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"includeHidden": {
|
||||
"description": "When true, include models that are hidden from the default picker list.",
|
||||
"type": [
|
||||
"boolean",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"limit": {
|
||||
"description": "Optional page size; defaults to a reasonable server-side value.",
|
||||
"format": "uint32",
|
||||
@@ -14397,7 +14560,8 @@
|
||||
{
|
||||
"enum": [
|
||||
"review",
|
||||
"compact"
|
||||
"compact",
|
||||
"memory_consolidation"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
@@ -15251,6 +15415,13 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"cwd": {
|
||||
"description": "Optional cwd filter; when set, only threads whose session cwd exactly matches this path are returned.",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"limit": {
|
||||
"description": "Optional page size; defaults to a reasonable server-side value.",
|
||||
"format": "uint32",
|
||||
|
||||
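The two fuzzy-file-search notification schemas above only require `sessionId` (plus `query` and `files` for the updated variant). As a minimal sketch, payloads like the following would validate against them; the shape of `FuzzyFileSearchResult` is an assumption here, since its definition sits outside this hunk:

```ts
// Assumed result shape for illustration only; the real
// FuzzyFileSearchResult definition lives elsewhere in the schema.
type FuzzyFileSearchResult = { path: string };

// Conforms to FuzzyFileSearchSessionUpdatedNotification:
// all three fields are required.
const updated = {
  sessionId: "s-1",
  query: "main.rs",
  files: [{ path: "core/src/main.rs" }] as FuzzyFileSearchResult[],
};

// Conforms to FuzzyFileSearchSessionCompletedNotification:
// sessionId is the only required field.
const completed = { sessionId: "s-1" };
```
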
@@ -12,7 +12,7 @@
"type": "string"
},
{
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
"enum": [
"on-failure"
],

@@ -104,7 +104,7 @@
"type": "string"
},
{
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
"enum": [
"on-failure"
],
@@ -1349,6 +1349,14 @@
"default": "agent",
"description": "Where the command originated. Defaults to Agent for backward compatibility."
},
"status": {
"allOf": [
{
"$ref": "#/definitions/ExecCommandStatus"
}
],
"description": "Completion status for this command execution."
},
"stderr": {
"description": "Captured stderr",
"type": "string"
@@ -1377,6 +1385,7 @@
"exit_code",
"formatted_output",
"parsed_cmd",
"status",
"stderr",
"stdout",
"turn_id",
@@ -1429,6 +1438,17 @@
"description": "The command's working directory.",
"type": "string"
},
"network_approval_context": {
"anyOf": [
{
"$ref": "#/definitions/NetworkApprovalContext"
},
{
"type": "null"
}
],
"description": "Optional network context for a blocked request that can be approved."
},
"parsed_cmd": {
"items": {
"$ref": "#/definitions/ParsedCommand"
@@ -1805,6 +1825,14 @@
"description": "The changes that were applied (mirrors PatchApplyBeginEvent::changes).",
"type": "object"
},
"status": {
"allOf": [
{
"$ref": "#/definitions/PatchApplyStatus"
}
],
"description": "Completion status for this patch application."
},
"stderr": {
"description": "Captured stderr (parser errors, IO failures, etc.).",
"type": "string"
@@ -1832,6 +1860,7 @@
},
"required": [
"call_id",
"status",
"stderr",
"stdout",
"success",
@@ -2873,6 +2902,14 @@
],
"type": "string"
},
"ExecCommandStatus": {
"enum": [
"completed",
"failed",
"declined"
],
"type": "string"
},
"ExecOutputStream": {
"enum": [
"stdout",
@@ -3289,6 +3326,30 @@
],
"type": "string"
},
"NetworkApprovalContext": {
"properties": {
"host": {
"type": "string"
},
"protocol": {
"$ref": "#/definitions/NetworkApprovalProtocol"
}
},
"required": [
"host",
"protocol"
],
"type": "object"
},
"NetworkApprovalProtocol": {
"enum": [
"http",
"https",
"socks5_tcp",
"socks5_udp"
],
"type": "string"
},
"ParsedCommand": {
"oneOf": [
{
@@ -3400,6 +3461,14 @@
}
]
},
"PatchApplyStatus": {
"enum": [
"completed",
"failed",
"declined"
],
"type": "string"
},
"PlanItemArg": {
"additionalProperties": false,
"properties": {

@@ -113,7 +113,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -16,7 +16,7 @@
"type": "string"
},
{
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
"enum": [
"on-failure"
],

@@ -113,7 +113,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -12,7 +12,7 @@
"type": "string"
},
{
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
"enum": [
"on-failure"
],

@@ -12,7 +12,7 @@
"type": "string"
},
{
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
"enum": [
"on-failure"
],
@@ -104,7 +104,7 @@
"type": "string"
},
{
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
"enum": [
"on-failure"
],
@@ -1349,6 +1349,14 @@
"default": "agent",
"description": "Where the command originated. Defaults to Agent for backward compatibility."
},
"status": {
"allOf": [
{
"$ref": "#/definitions/ExecCommandStatus"
}
],
"description": "Completion status for this command execution."
},
"stderr": {
"description": "Captured stderr",
"type": "string"
@@ -1377,6 +1385,7 @@
"exit_code",
"formatted_output",
"parsed_cmd",
"status",
"stderr",
"stdout",
"turn_id",
@@ -1429,6 +1438,17 @@
"description": "The command's working directory.",
"type": "string"
},
"network_approval_context": {
"anyOf": [
{
"$ref": "#/definitions/NetworkApprovalContext"
},
{
"type": "null"
}
],
"description": "Optional network context for a blocked request that can be approved."
},
"parsed_cmd": {
"items": {
"$ref": "#/definitions/ParsedCommand"
@@ -1805,6 +1825,14 @@
"description": "The changes that were applied (mirrors PatchApplyBeginEvent::changes).",
"type": "object"
},
"status": {
"allOf": [
{
"$ref": "#/definitions/PatchApplyStatus"
}
],
"description": "Completion status for this patch application."
},
"stderr": {
"description": "Captured stderr (parser errors, IO failures, etc.).",
"type": "string"
@@ -1832,6 +1860,7 @@
},
"required": [
"call_id",
"status",
"stderr",
"stdout",
"success",
@@ -2873,6 +2902,14 @@
],
"type": "string"
},
"ExecCommandStatus": {
"enum": [
"completed",
"failed",
"declined"
],
"type": "string"
},
"ExecOutputStream": {
"enum": [
"stdout",
@@ -3289,6 +3326,30 @@
],
"type": "string"
},
"NetworkApprovalContext": {
"properties": {
"host": {
"type": "string"
},
"protocol": {
"$ref": "#/definitions/NetworkApprovalProtocol"
}
},
"required": [
"host",
"protocol"
],
"type": "object"
},
"NetworkApprovalProtocol": {
"enum": [
"http",
"https",
"socks5_tcp",
"socks5_udp"
],
"type": "string"
},
"ParsedCommand": {
"oneOf": [
{
@@ -3400,6 +3461,14 @@
}
]
},
"PatchApplyStatus": {
"enum": [
"completed",
"failed",
"declined"
],
"type": "string"
},
"PlanItemArg": {
"additionalProperties": false,
"properties": {

@@ -16,7 +16,7 @@
"type": "string"
},
{
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
"enum": [
"on-failure"
],
@@ -104,7 +104,7 @@
"type": "string"
},
{
"description": "*All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox.",
"description": "DEPRECATED: *All* commands are auto‑approved, but they are expected to run inside a sandbox where network access is disabled and writes are confined to a specific set of paths. If the command fails, it will be escalated to the user to approve execution without a sandbox. Prefer `OnRequest` for interactive runs or `Never` for non-interactive runs.",
"enum": [
"on-failure"
],
@@ -1349,6 +1349,14 @@
"default": "agent",
"description": "Where the command originated. Defaults to Agent for backward compatibility."
},
"status": {
"allOf": [
{
"$ref": "#/definitions/ExecCommandStatus"
}
],
"description": "Completion status for this command execution."
},
"stderr": {
"description": "Captured stderr",
"type": "string"
@@ -1377,6 +1385,7 @@
"exit_code",
"formatted_output",
"parsed_cmd",
"status",
"stderr",
"stdout",
"turn_id",
@@ -1429,6 +1438,17 @@
"description": "The command's working directory.",
"type": "string"
},
"network_approval_context": {
"anyOf": [
{
"$ref": "#/definitions/NetworkApprovalContext"
},
{
"type": "null"
}
],
"description": "Optional network context for a blocked request that can be approved."
},
"parsed_cmd": {
"items": {
"$ref": "#/definitions/ParsedCommand"
@@ -1805,6 +1825,14 @@
"description": "The changes that were applied (mirrors PatchApplyBeginEvent::changes).",
"type": "object"
},
"status": {
"allOf": [
{
"$ref": "#/definitions/PatchApplyStatus"
}
],
"description": "Completion status for this patch application."
},
"stderr": {
"description": "Captured stderr (parser errors, IO failures, etc.).",
"type": "string"
@@ -1832,6 +1860,7 @@
},
"required": [
"call_id",
"status",
"stderr",
"stdout",
"success",
@@ -2873,6 +2902,14 @@
],
"type": "string"
},
"ExecCommandStatus": {
"enum": [
"completed",
"failed",
"declined"
],
"type": "string"
},
"ExecOutputStream": {
"enum": [
"stdout",
@@ -3289,6 +3326,30 @@
],
"type": "string"
},
"NetworkApprovalContext": {
"properties": {
"host": {
"type": "string"
},
"protocol": {
"$ref": "#/definitions/NetworkApprovalProtocol"
}
},
"required": [
"host",
"protocol"
],
"type": "object"
},
"NetworkApprovalProtocol": {
"enum": [
"http",
"https",
"socks5_tcp",
"socks5_udp"
],
"type": "string"
},
"ParsedCommand": {
"oneOf": [
{
@@ -3400,6 +3461,14 @@
}
]
},
"PatchApplyStatus": {
"enum": [
"completed",
"failed",
"declined"
],
"type": "string"
},
"PlanItemArg": {
"additionalProperties": false,
"properties": {

@@ -29,6 +29,11 @@
"default": false,
"type": "boolean"
},
"isEnabled": {
"default": true,
"description": "Whether this app is enabled in config.toml. Example: ```toml [apps.bad_app] enabled = false ```",
"type": "boolean"
},
"logoUrl": {
"type": [
"string",

@@ -29,6 +29,11 @@
"default": false,
"type": "boolean"
},
"isEnabled": {
"default": true,
"description": "Whether this app is enabled in config.toml. Example: ```toml [apps.bad_app] enabled = false ```",
"type": "boolean"
},
"logoUrl": {
"type": [
"string",

@@ -8,6 +8,13 @@
"null"
]
},
"includeHidden": {
"description": "When true, include models that are hidden from the default picker list.",
"type": [
"boolean",
"null"
]
},
"limit": {
"description": "Optional page size; defaults to a reasonable server-side value.",
"format": "uint32",

@@ -31,6 +31,9 @@
"displayName": {
"type": "string"
},
"hidden": {
"type": "boolean"
},
"id": {
"type": "string"
},
@@ -71,6 +74,7 @@
"defaultReasoningEffort",
"description",
"displayName",
"hidden",
"id",
"isDefault",
"model",

@@ -666,7 +666,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -39,6 +39,13 @@
"null"
]
},
"cwd": {
"description": "Optional cwd filter; when set, only threads whose session cwd exactly matches this path are returned.",
"type": [
"string",
"null"
]
},
"limit": {
"description": "Optional page size; defaults to a reasonable server-side value.",
"format": "uint32",

@@ -472,7 +472,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -472,7 +472,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -666,7 +666,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -472,7 +472,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -666,7 +666,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -472,7 +472,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -472,7 +472,8 @@
{
"enum": [
"review",
"compact"
"compact",
"memory_consolidation"
],
"type": "string"
},

@@ -2,6 +2,7 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ExecPolicyAmendment } from "./ExecPolicyAmendment";
import type { NetworkApprovalContext } from "./NetworkApprovalContext";
import type { ParsedCommand } from "./ParsedCommand";

export type ExecApprovalRequestEvent = {
@@ -26,6 +27,10 @@ cwd: string,
* Optional human-readable reason for the approval (e.g. retry without sandbox).
*/
reason: string | null,
/**
* Optional network context for a blocked request that can be approved.
*/
network_approval_context?: NetworkApprovalContext,
/**
* Proposed execpolicy amendment that can be applied to allow future runs.
*/

@@ -2,6 +2,7 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ExecCommandSource } from "./ExecCommandSource";
import type { ExecCommandStatus } from "./ExecCommandStatus";
import type { ParsedCommand } from "./ParsedCommand";

export type ExecCommandEndEvent = {
@@ -56,4 +57,8 @@ duration: string,
/**
* Formatted output from the command, as seen by the model.
*/
formatted_output: string, };
formatted_output: string,
/**
* Completion status for this command execution.
*/
status: ExecCommandStatus, };

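With the new `status` field on `ExecCommandEndEvent`, a client can distinguish a failed run from one the user declined. A minimal sketch of such a handler, assuming only the generated types above (the logging is illustrative):

```ts
import type { ExecCommandEndEvent } from "./ExecCommandEndEvent";

// Dispatch on the new completion status; the `never` check keeps the
// switch exhaustive if the union ever grows.
function onExecCommandEnd(event: ExecCommandEndEvent): void {
  switch (event.status) {
    case "completed":
      console.log("command finished, exit code", event.exit_code);
      break;
    case "failed":
      console.error("command failed:", event.stderr);
      break;
    case "declined":
      console.warn("user declined to run the command");
      break;
    default: {
      const unreachable: never = event.status;
      throw new Error(`unknown status: ${String(unreachable)}`);
    }
  }
}
```
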
@@ -0,0 +1,5 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type ExecCommandStatus = "completed" | "failed" | "declined";
@@ -0,0 +1,5 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type FuzzyFileSearchSessionCompletedNotification = { sessionId: string, };
@@ -0,0 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { FuzzyFileSearchResult } from "./FuzzyFileSearchResult";

export type FuzzyFileSearchSessionUpdatedNotification = { sessionId: string, query: string, files: Array<FuzzyFileSearchResult>, };
@@ -0,0 +1,6 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { NetworkApprovalProtocol } from "./NetworkApprovalProtocol";

export type NetworkApprovalContext = { host: string, protocol: NetworkApprovalProtocol, };
@@ -0,0 +1,5 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type NetworkApprovalProtocol = "http" | "https" | "socks5_tcp" | "socks5_udp";
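A client surfacing a network approval can render the two fields of `NetworkApprovalContext` directly. An illustrative helper, using only the generated types above:

```ts
import type { NetworkApprovalContext } from "./NetworkApprovalContext";

// Format a human-readable prompt for a blocked network request.
function describeBlockedRequest(ctx: NetworkApprovalContext): string {
  const scheme =
    ctx.protocol === "socks5_tcp" || ctx.protocol === "socks5_udp"
      ? "SOCKS5"
      : ctx.protocol.toUpperCase(); // "http" -> "HTTP", "https" -> "HTTPS"
  return `Allow ${scheme} traffic to ${ctx.host}?`;
}
```
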
@@ -2,6 +2,7 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { FileChange } from "./FileChange";
import type { PatchApplyStatus } from "./PatchApplyStatus";

export type PatchApplyEndEvent = {
/**
@@ -28,4 +29,8 @@ success: boolean,
/**
* The changes that were applied (mirrors PatchApplyBeginEvent::changes).
*/
changes: { [key in string]?: FileChange }, };
changes: { [key in string]?: FileChange },
/**
* Completion status for this patch application.
*/
status: PatchApplyStatus, };

@@ -0,0 +1,5 @@
// GENERATED CODE! DO NOT MODIFY BY HAND!

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type PatchApplyStatus = "completed" | "failed" | "declined";
@@ -2,6 +2,8 @@

// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { AuthStatusChangeNotification } from "./AuthStatusChangeNotification";
import type { FuzzyFileSearchSessionCompletedNotification } from "./FuzzyFileSearchSessionCompletedNotification";
import type { FuzzyFileSearchSessionUpdatedNotification } from "./FuzzyFileSearchSessionUpdatedNotification";
import type { LoginChatGptCompleteNotification } from "./LoginChatGptCompleteNotification";
import type { SessionConfiguredNotification } from "./SessionConfiguredNotification";
import type { AccountLoginCompletedNotification } from "./v2/AccountLoginCompletedNotification";
@@ -37,4 +39,4 @@ import type { WindowsWorldWritableWarningNotification } from "./v2/WindowsWorldW
/**
* Notification sent from the server to the client.
*/
export type ServerNotification = { "method": "error", "params": ErrorNotification } | { "method": "thread/started", "params": ThreadStartedNotification } | { "method": "thread/name/updated", "params": ThreadNameUpdatedNotification } | { "method": "thread/tokenUsage/updated", "params": ThreadTokenUsageUpdatedNotification } | { "method": "turn/started", "params": TurnStartedNotification } | { "method": "turn/completed", "params": TurnCompletedNotification } | { "method": "turn/diff/updated", "params": TurnDiffUpdatedNotification } | { "method": "turn/plan/updated", "params": TurnPlanUpdatedNotification } | { "method": "item/started", "params": ItemStartedNotification } | { "method": "item/completed", "params": ItemCompletedNotification } | { "method": "rawResponseItem/completed", "params": RawResponseItemCompletedNotification } | { "method": "item/agentMessage/delta", "params": AgentMessageDeltaNotification } | { "method": "item/plan/delta", "params": PlanDeltaNotification } | { "method": "item/commandExecution/outputDelta", "params": CommandExecutionOutputDeltaNotification } | { "method": "item/commandExecution/terminalInteraction", "params": TerminalInteractionNotification } | { "method": "item/fileChange/outputDelta", "params": FileChangeOutputDeltaNotification } | { "method": "item/mcpToolCall/progress", "params": McpToolCallProgressNotification } | { "method": "mcpServer/oauthLogin/completed", "params": McpServerOauthLoginCompletedNotification } | { "method": "account/updated", "params": AccountUpdatedNotification } | { "method": "account/rateLimits/updated", "params": AccountRateLimitsUpdatedNotification } | { "method": "app/list/updated", "params": AppListUpdatedNotification } | { "method": "item/reasoning/summaryTextDelta", "params": ReasoningSummaryTextDeltaNotification } | { "method": "item/reasoning/summaryPartAdded", "params": ReasoningSummaryPartAddedNotification } | { "method": "item/reasoning/textDelta", "params": ReasoningTextDeltaNotification } | { "method": "thread/compacted", "params": ContextCompactedNotification } | { "method": "deprecationNotice", "params": DeprecationNoticeNotification } | { "method": "configWarning", "params": ConfigWarningNotification } | { "method": "windows/worldWritableWarning", "params": WindowsWorldWritableWarningNotification } | { "method": "account/login/completed", "params": AccountLoginCompletedNotification } | { "method": "authStatusChange", "params": AuthStatusChangeNotification } | { "method": "loginChatGptComplete", "params": LoginChatGptCompleteNotification } | { "method": "sessionConfigured", "params": SessionConfiguredNotification };
export type ServerNotification = { "method": "error", "params": ErrorNotification } | { "method": "thread/started", "params": ThreadStartedNotification } | { "method": "thread/name/updated", "params": ThreadNameUpdatedNotification } | { "method": "thread/tokenUsage/updated", "params": ThreadTokenUsageUpdatedNotification } | { "method": "turn/started", "params": TurnStartedNotification } | { "method": "turn/completed", "params": TurnCompletedNotification } | { "method": "turn/diff/updated", "params": TurnDiffUpdatedNotification } | { "method": "turn/plan/updated", "params": TurnPlanUpdatedNotification } | { "method": "item/started", "params": ItemStartedNotification } | { "method": "item/completed", "params": ItemCompletedNotification } | { "method": "rawResponseItem/completed", "params": RawResponseItemCompletedNotification } | { "method": "item/agentMessage/delta", "params": AgentMessageDeltaNotification } | { "method": "item/plan/delta", "params": PlanDeltaNotification } | { "method": "item/commandExecution/outputDelta", "params": CommandExecutionOutputDeltaNotification } | { "method": "item/commandExecution/terminalInteraction", "params": TerminalInteractionNotification } | { "method": "item/fileChange/outputDelta", "params": FileChangeOutputDeltaNotification } | { "method": "item/mcpToolCall/progress", "params": McpToolCallProgressNotification } | { "method": "mcpServer/oauthLogin/completed", "params": McpServerOauthLoginCompletedNotification } | { "method": "account/updated", "params": AccountUpdatedNotification } | { "method": "account/rateLimits/updated", "params": AccountRateLimitsUpdatedNotification } | { "method": "app/list/updated", "params": AppListUpdatedNotification } | { "method": "item/reasoning/summaryTextDelta", "params": ReasoningSummaryTextDeltaNotification } | { "method": "item/reasoning/summaryPartAdded", "params": ReasoningSummaryPartAddedNotification } | { "method": "item/reasoning/textDelta", "params": ReasoningTextDeltaNotification } | { "method": "thread/compacted", "params": ContextCompactedNotification } | { "method": "deprecationNotice", "params": DeprecationNoticeNotification } | { "method": "configWarning", "params": ConfigWarningNotification } | { "method": "fuzzyFileSearch/sessionUpdated", "params": FuzzyFileSearchSessionUpdatedNotification } | { "method": "fuzzyFileSearch/sessionCompleted", "params": FuzzyFileSearchSessionCompletedNotification } | { "method": "windows/worldWritableWarning", "params": WindowsWorldWritableWarningNotification } | { "method": "account/login/completed", "params": AccountLoginCompletedNotification } | { "method": "authStatusChange", "params": AuthStatusChangeNotification } | { "method": "loginChatGptComplete", "params": LoginChatGptCompleteNotification } | { "method": "sessionConfigured", "params": SessionConfiguredNotification };

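Because `ServerNotification` is a discriminated union on `method`, the two new fuzzy-file-search methods narrow `params` automatically. A sketch of a client dispatcher (the logging is illustrative):

```ts
import type { ServerNotification } from "./ServerNotification";

// Handle the two notification methods this diff adds; all other
// methods are unchanged and fall through.
function handleNotification(n: ServerNotification): void {
  switch (n.method) {
    case "fuzzyFileSearch/sessionUpdated":
      // n.params is FuzzyFileSearchSessionUpdatedNotification here.
      console.log(
        `${n.params.sessionId}: ${n.params.files.length} hits for "${n.params.query}"`,
      );
      break;
    case "fuzzyFileSearch/sessionCompleted":
      // n.params is FuzzyFileSearchSessionCompletedNotification here.
      console.log(`search session ${n.params.sessionId} completed`);
      break;
    default:
      break;
  }
}
```
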
@@ -3,4 +3,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { ThreadId } from "./ThreadId";

export type SubAgentSource = "review" | "compact" | { "thread_spawn": { parent_thread_id: ThreadId, depth: number, } } | { "other": string };
export type SubAgentSource = "review" | "compact" | { "thread_spawn": { parent_thread_id: ThreadId, depth: number, } } | "memory_consolidation" | { "other": string };

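`"memory_consolidation"` joins the two existing string variants, so exhaustive handling needs one more literal case. An illustrative sketch against the widened union:

```ts
import type { SubAgentSource } from "./SubAgentSource";

// Produce a short label for each variant; "memory_consolidation" is
// the case this diff adds.
function labelSource(source: SubAgentSource): string {
  if (
    source === "review" ||
    source === "compact" ||
    source === "memory_consolidation"
  ) {
    return source;
  }
  if ("thread_spawn" in source) {
    return `thread_spawn at depth ${source.thread_spawn.depth}`;
  }
  return source.other;
}
```
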
@@ -62,6 +62,7 @@ export type { ExecCommandBeginEvent } from "./ExecCommandBeginEvent";
export type { ExecCommandEndEvent } from "./ExecCommandEndEvent";
export type { ExecCommandOutputDeltaEvent } from "./ExecCommandOutputDeltaEvent";
export type { ExecCommandSource } from "./ExecCommandSource";
export type { ExecCommandStatus } from "./ExecCommandStatus";
export type { ExecOneOffCommandParams } from "./ExecOneOffCommandParams";
export type { ExecOneOffCommandResponse } from "./ExecOneOffCommandResponse";
export type { ExecOutputStream } from "./ExecOutputStream";
@@ -77,6 +78,8 @@ export type { FunctionCallOutputPayload } from "./FunctionCallOutputPayload";
export type { FuzzyFileSearchParams } from "./FuzzyFileSearchParams";
export type { FuzzyFileSearchResponse } from "./FuzzyFileSearchResponse";
export type { FuzzyFileSearchResult } from "./FuzzyFileSearchResult";
export type { FuzzyFileSearchSessionCompletedNotification } from "./FuzzyFileSearchSessionCompletedNotification";
export type { FuzzyFileSearchSessionUpdatedNotification } from "./FuzzyFileSearchSessionUpdatedNotification";
export type { GetAuthStatusParams } from "./GetAuthStatusParams";
export type { GetAuthStatusResponse } from "./GetAuthStatusResponse";
export type { GetConversationSummaryParams } from "./GetConversationSummaryParams";
@@ -123,11 +126,14 @@ export type { McpToolCallEndEvent } from "./McpToolCallEndEvent";
export type { MessagePhase } from "./MessagePhase";
export type { ModeKind } from "./ModeKind";
export type { NetworkAccess } from "./NetworkAccess";
export type { NetworkApprovalContext } from "./NetworkApprovalContext";
export type { NetworkApprovalProtocol } from "./NetworkApprovalProtocol";
export type { NewConversationParams } from "./NewConversationParams";
export type { NewConversationResponse } from "./NewConversationResponse";
export type { ParsedCommand } from "./ParsedCommand";
export type { PatchApplyBeginEvent } from "./PatchApplyBeginEvent";
export type { PatchApplyEndEvent } from "./PatchApplyEndEvent";
export type { PatchApplyStatus } from "./PatchApplyStatus";
export type { Personality } from "./Personality";
export type { PlanDeltaEvent } from "./PlanDeltaEvent";
export type { PlanItem } from "./PlanItem";

@@ -5,4 +5,13 @@
/**
* EXPERIMENTAL - app metadata returned by app-list APIs.
*/
export type AppInfo = { id: string, name: string, description: string | null, logoUrl: string | null, logoUrlDark: string | null, distributionChannel: string | null, installUrl: string | null, isAccessible: boolean, };
export type AppInfo = { id: string, name: string, description: string | null, logoUrl: string | null, logoUrlDark: string | null, distributionChannel: string | null, installUrl: string | null, isAccessible: boolean,
/**
* Whether this app is enabled in config.toml.
* Example:
* ```toml
* [apps.bad_app]
* enabled = false
* ```
*/
isEnabled: boolean, };

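A client-side sketch of how the new `isEnabled` flag might be consumed alongside the existing `isAccessible` field (the server applies a default of `true` during deserialization, so the field is always present):

```ts
import type { AppInfo } from "./AppInfo";

// Keep only apps that are both reachable for this account and not
// disabled via config.toml.
function visibleApps(apps: AppInfo[]): AppInfo[] {
  return apps.filter((app) => app.isEnabled && app.isAccessible);
}
```
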
@@ -5,4 +5,4 @@ import type { InputModality } from "../InputModality";
import type { ReasoningEffort } from "../ReasoningEffort";
import type { ReasoningEffortOption } from "./ReasoningEffortOption";

export type Model = { id: string, model: string, upgrade: string | null, displayName: string, description: string, supportedReasoningEfforts: Array<ReasoningEffortOption>, defaultReasoningEffort: ReasoningEffort, inputModalities: Array<InputModality>, supportsPersonality: boolean, isDefault: boolean, };
export type Model = { id: string, model: string, upgrade: string | null, displayName: string, description: string, hidden: boolean, supportedReasoningEfforts: Array<ReasoningEffortOption>, defaultReasoningEffort: ReasoningEffort, inputModalities: Array<InputModality>, supportsPersonality: boolean, isDefault: boolean, };

@@ -10,4 +10,8 @@ cursor?: string | null,
/**
* Optional page size; defaults to a reasonable server-side value.
*/
limit?: number | null, };
limit?: number | null,
/**
* When true, include models that are hidden from the default picker list.
*/
includeHidden?: boolean | null, };
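A sketch of request params using the new `includeHidden` flag. All three fields are optional, so `Partial` is strictly redundant but guards against fields outside this hunk; the `model/list` method name is an assumption based on the test-client command elsewhere in this diff:

```ts
import type { ModelListParams } from "./ModelListParams";

// Ask for up to 50 models, including ones hidden from the default
// picker list.
const params = {
  limit: 50,
  includeHidden: true,
} satisfies Partial<ModelListParams>;

// Hypothetical JSON-RPC envelope around the params.
const request = { jsonrpc: "2.0", id: 1, method: "model/list", params };
```
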
@@ -21,4 +21,8 @@ export type ThreadForkParams = {threadId: string, /**
path?: string | null, /**
* Configuration overrides for the forked thread, if any.
*/
model?: string | null, modelProvider?: string | null, cwd?: string | null, approvalPolicy?: AskForApproval | null, sandbox?: SandboxMode | null, config?: { [key in string]?: JsonValue } | null, baseInstructions?: string | null, developerInstructions?: string | null};
model?: string | null, modelProvider?: string | null, cwd?: string | null, approvalPolicy?: AskForApproval | null, sandbox?: SandboxMode | null, config?: { [key in string]?: JsonValue } | null, baseInstructions?: string | null, developerInstructions?: string | null, /**
* If true, persist additional rollout EventMsg variants required to
* reconstruct a richer thread history on subsequent resume/fork/read.
*/
persistExtendedHistory: boolean};

@@ -31,4 +31,9 @@ sourceKinds?: Array<ThreadSourceKind> | null,
* Optional archived filter; when set to true, only archived threads are returned.
* If false or null, only non-archived threads are returned.
*/
archived?: boolean | null, };
archived?: boolean | null,
/**
* Optional cwd filter; when set, only threads whose session cwd exactly
* matches this path are returned.
*/
cwd?: string | null, };
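The new `cwd` filter composes with the existing `archived` filter. A minimal sketch of params for listing the non-archived threads of one checkout (the path is hypothetical, and `Partial` is used so the sketch stays valid regardless of fields outside this hunk):

```ts
import type { ThreadListParams } from "./ThreadListParams";

// Only threads whose session cwd exactly matches this path, and only
// non-archived ones.
const params = {
  cwd: "/home/user/src/codex", // hypothetical checkout path
  archived: false,
} satisfies Partial<ThreadListParams>;
```
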
@@ -30,4 +30,8 @@ history?: Array<ResponseItem> | null, /**
path?: string | null, /**
* Configuration overrides for the resumed thread, if any.
*/
model?: string | null, modelProvider?: string | null, cwd?: string | null, approvalPolicy?: AskForApproval | null, sandbox?: SandboxMode | null, config?: { [key in string]?: JsonValue } | null, baseInstructions?: string | null, developerInstructions?: string | null, personality?: Personality | null};
model?: string | null, modelProvider?: string | null, cwd?: string | null, approvalPolicy?: AskForApproval | null, sandbox?: SandboxMode | null, config?: { [key in string]?: JsonValue } | null, baseInstructions?: string | null, developerInstructions?: string | null, personality?: Personality | null, /**
* If true, persist additional rollout EventMsg variants required to
* reconstruct a richer thread history on subsequent resume/fork/read.
*/
persistExtendedHistory: boolean};

@@ -10,4 +10,8 @@ export type ThreadStartParams = {model?: string | null, modelProvider?: string |
* If true, opt into emitting raw Responses API items on the event stream.
* This is for internal use only (e.g. Codex Cloud).
*/
experimentalRawEvents: boolean};
experimentalRawEvents: boolean, /**
* If true, persist additional rollout EventMsg variants required to
* reconstruct a richer thread history on resume/fork/read.
*/
persistExtendedHistory: boolean};

@@ -458,6 +458,21 @@ client_request_definitions! {
params: FuzzyFileSearchParams,
response: FuzzyFileSearchResponse,
},
#[experimental("fuzzyFileSearch/sessionStart")]
FuzzyFileSearchSessionStart => "fuzzyFileSearch/sessionStart" {
params: FuzzyFileSearchSessionStartParams,
response: FuzzyFileSearchSessionStartResponse,
},
#[experimental("fuzzyFileSearch/sessionUpdate")]
FuzzyFileSearchSessionUpdate => "fuzzyFileSearch/sessionUpdate" {
params: FuzzyFileSearchSessionUpdateParams,
response: FuzzyFileSearchSessionUpdateResponse,
},
#[experimental("fuzzyFileSearch/sessionStop")]
FuzzyFileSearchSessionStop => "fuzzyFileSearch/sessionStop" {
params: FuzzyFileSearchSessionStopParams,
response: FuzzyFileSearchSessionStopResponse,
},
/// Execute a command (argv vector) under the server's sandbox.
ExecOneOffCommand {
params: v1::ExecOneOffCommandParams,
@@ -702,6 +717,54 @@ pub struct FuzzyFileSearchResponse {
pub files: Vec<FuzzyFileSearchResult>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchSessionStartParams {
pub session_id: String,
pub roots: Vec<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, Default)]
pub struct FuzzyFileSearchSessionStartResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchSessionUpdateParams {
pub session_id: String,
pub query: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, Default)]
pub struct FuzzyFileSearchSessionUpdateResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchSessionStopParams {
pub session_id: String,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS, Default)]
pub struct FuzzyFileSearchSessionStopResponse {}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchSessionUpdatedNotification {
pub session_id: String,
pub query: String,
pub files: Vec<FuzzyFileSearchResult>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(rename_all = "camelCase")]
pub struct FuzzyFileSearchSessionCompletedNotification {
pub session_id: String,
}

server_notification_definitions! {
/// NEW NOTIFICATIONS
Error => "error" (v2::ErrorNotification),
@@ -734,6 +797,8 @@ server_notification_definitions! {
ContextCompacted => "thread/compacted" (v2::ContextCompactedNotification),
DeprecationNotice => "deprecationNotice" (v2::DeprecationNoticeNotification),
ConfigWarning => "configWarning" (v2::ConfigWarningNotification),
FuzzyFileSearchSessionUpdated => "fuzzyFileSearch/sessionUpdated" (FuzzyFileSearchSessionUpdatedNotification),
FuzzyFileSearchSessionCompleted => "fuzzyFileSearch/sessionCompleted" (FuzzyFileSearchSessionCompletedNotification),

/// Notifies the user of world-writable directories on Windows, which cannot be protected by the sandbox.
WindowsWorldWritableWarning => "windows/worldWritableWarning" (v2::WindowsWorldWritableWarningNotification),
@@ -1170,7 +1235,8 @@ mod tests {
"id": 6,
"params": {
"limit": null,
"cursor": null
"cursor": null,
"includeHidden": null
}
}),
serde_json::to_value(&request)?,
@@ -1266,17 +1332,4 @@ mod tests {
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&request);
assert_eq!(reason, Some("mock/experimentalMethod"));
}

#[test]
fn thread_start_mock_field_is_marked_experimental() {
let request = ClientRequest::ThreadStart {
request_id: RequestId::Integer(1),
params: v2::ThreadStartParams {
mock_experimental_field: Some("mock".to_string()),
..Default::default()
},
};
let reason = crate::experimental_api::ExperimentalApi::experimental_reason(&request);
assert_eq!(reason, Some("thread/start.mockExperimentalField"));
}
}

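Taken together, the three experimental requests and two notifications above form a session lifecycle: start a session over some roots, stream query updates, receive `sessionUpdated` notifications, then stop. A client-side sketch, in which the `send` wire helper is hypothetical and the timing of `sessionCompleted` relative to `sessionStop` is assumed:

```ts
// Hypothetical JSON-RPC transport; only the method strings come from
// the definitions above.
declare function send(method: string, params: unknown): Promise<unknown>;

async function searchFiles(): Promise<void> {
  const sessionId = "s-1";
  // 1) Open a session over one or more root directories.
  await send("fuzzyFileSearch/sessionStart", { sessionId, roots: ["/repo"] });
  // 2) Push query updates; matching files arrive asynchronously as
  //    "fuzzyFileSearch/sessionUpdated" notifications.
  await send("fuzzyFileSearch/sessionUpdate", { sessionId, query: "main.rs" });
  // 3) Tear the session down; the server acknowledges with an empty
  //    response, and a "fuzzyFileSearch/sessionCompleted" notification
  //    marks the end of the session.
  await send("fuzzyFileSearch/sessionStop", { sessionId });
}
```
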
File diff suppressed because it is too large
@@ -29,7 +29,9 @@ use codex_protocol::protocol::AgentStatus as CoreAgentStatus;
use codex_protocol::protocol::AskForApproval as CoreAskForApproval;
use codex_protocol::protocol::CodexErrorInfo as CoreCodexErrorInfo;
use codex_protocol::protocol::CreditsSnapshot as CoreCreditsSnapshot;
use codex_protocol::protocol::ExecCommandStatus as CoreExecCommandStatus;
use codex_protocol::protocol::NetworkAccess as CoreNetworkAccess;
use codex_protocol::protocol::PatchApplyStatus as CorePatchApplyStatus;
use codex_protocol::protocol::RateLimitSnapshot as CoreRateLimitSnapshot;
use codex_protocol::protocol::RateLimitWindow as CoreRateLimitWindow;
use codex_protocol::protocol::ReadOnlyAccess as CoreReadOnlyAccess;
@@ -1108,6 +1110,9 @@ pub struct ModelListParams {
/// Optional page size; defaults to a reasonable server-side value.
#[ts(optional = nullable)]
pub limit: Option<u32>,
/// When true, include models that are hidden from the default picker list.
#[ts(optional = nullable)]
pub include_hidden: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -1119,6 +1124,7 @@ pub struct Model {
pub upgrade: Option<String>,
pub display_name: String,
pub description: String,
pub hidden: bool,
pub supported_reasoning_efforts: Vec<ReasoningEffortOption>,
pub default_reasoning_effort: ReasoningEffort,
#[serde(default = "default_input_modalities")]
@@ -1288,6 +1294,14 @@ pub struct AppInfo {
pub install_url: Option<String>,
#[serde(default)]
pub is_accessible: bool,
/// Whether this app is enabled in config.toml.
/// Example:
/// ```toml
/// [apps.bad_app]
/// enabled = false
/// ```
#[serde(default = "default_enabled")]
pub is_enabled: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -1422,6 +1436,11 @@ pub struct ThreadStartParams {
#[experimental("thread/start.experimentalRawEvents")]
#[serde(default)]
pub experimental_raw_events: bool,
/// If true, persist additional rollout EventMsg variants required to
/// reconstruct a richer thread history on resume/fork/read.
#[experimental("thread/start.persistFullHistory")]
#[serde(default)]
pub persist_extended_history: bool,
}

#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq, JsonSchema, TS)]
@@ -1503,6 +1522,11 @@ pub struct ThreadResumeParams {
pub developer_instructions: Option<String>,
#[ts(optional = nullable)]
pub personality: Option<Personality>,
/// If true, persist additional rollout EventMsg variants required to
/// reconstruct a richer thread history on subsequent resume/fork/read.
#[experimental("thread/resume.persistFullHistory")]
#[serde(default)]
pub persist_extended_history: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -1556,6 +1580,11 @@ pub struct ThreadForkParams {
pub base_instructions: Option<String>,
#[ts(optional = nullable)]
pub developer_instructions: Option<String>,
/// If true, persist additional rollout EventMsg variants required to
/// reconstruct a richer thread history on subsequent resume/fork/read.
#[experimental("thread/fork.persistFullHistory")]
#[serde(default)]
pub persist_extended_history: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
@@ -1683,6 +1712,10 @@ pub struct ThreadListParams {
/// If false or null, only non-archived threads are returned.
#[ts(optional = nullable)]
pub archived: Option<bool>,
/// Optional cwd filter; when set, only threads whose session cwd exactly
/// matches this path are returned.
#[ts(optional = nullable)]
pub cwd: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema, TS)]
@@ -2622,6 +2655,22 @@ pub enum CommandExecutionStatus {
Declined,
}

impl From<CoreExecCommandStatus> for CommandExecutionStatus {
fn from(value: CoreExecCommandStatus) -> Self {
Self::from(&value)
}
}

impl From<&CoreExecCommandStatus> for CommandExecutionStatus {
fn from(value: &CoreExecCommandStatus) -> Self {
match value {
CoreExecCommandStatus::Completed => CommandExecutionStatus::Completed,
CoreExecCommandStatus::Failed => CommandExecutionStatus::Failed,
CoreExecCommandStatus::Declined => CommandExecutionStatus::Declined,
}
}
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]
@@ -2662,6 +2711,22 @@ pub enum PatchApplyStatus {
Declined,
}

impl From<CorePatchApplyStatus> for PatchApplyStatus {
fn from(value: CorePatchApplyStatus) -> Self {
Self::from(&value)
}
}

impl From<&CorePatchApplyStatus> for PatchApplyStatus {
fn from(value: &CorePatchApplyStatus) -> Self {
match value {
CorePatchApplyStatus::Completed => PatchApplyStatus::Completed,
CorePatchApplyStatus::Failed => PatchApplyStatus::Failed,
CorePatchApplyStatus::Declined => PatchApplyStatus::Declined,
}
}
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export_to = "v2/")]

@@ -14,4 +14,6 @@ codex-app-server-protocol = { workspace = true }
codex-protocol = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tungstenite = { workspace = true }
url = { workspace = true }
uuid = { workspace = true, features = ["v4"] }

@@ -1,2 +1,49 @@
# App Server Test Client
Exercises simple `codex app-server` flows end-to-end, logging JSON-RPC messages sent between client and server to stdout.
Quickstart for running and hitting `codex app-server`.

## Quickstart

Run from `<reporoot>/codex-rs`.

```bash
# 1) Build debug codex binary
cargo build -p codex-cli --bin codex

# 2) Start websocket app-server in background
cargo run -p codex-app-server-test-client -- \
--codex-bin ./target/debug/codex \
serve --listen ws://127.0.0.1:4222 --kill

# 3) Call app-server (defaults to ws://127.0.0.1:4222)
cargo run -p codex-app-server-test-client -- model-list
```

## Testing Thread Rejoin Behavior

Build and start an app server using the commands above. The app-server log is written to `/tmp/codex-app-server-test-client/app-server.log`.

### 1) Get a thread id

Create at least one thread, then list threads:

```bash
cargo run -p codex-app-server-test-client -- send-message-v2 "seed thread for rejoin test"
cargo run -p codex-app-server-test-client -- thread-list --limit 5
```

Copy a thread id from the `thread-list` output.

### 2) Rejoin while a turn is in progress (two terminals)

Terminal A:

```bash
cargo run --bin codex-app-server-test-client -- \
resume-message-v2 <THREAD_ID> "respond with thorough docs on the rust core"
```

Terminal B (while Terminal A is still streaming):

```bash
cargo run --bin codex-app-server-test-client -- thread-resume <THREAD_ID>
```

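On the wire, the Quickstart amounts to opening a websocket to the listen URL and exchanging JSON-RPC messages. A rough sketch of the same handshake from TypeScript, using the npm `ws` package; the `model/list` method name is assumed from the `model-list` command above:

```ts
import WebSocket from "ws";

// Connect to the endpoint started by `serve --listen ws://127.0.0.1:4222`.
const ws = new WebSocket("ws://127.0.0.1:4222");

ws.on("open", () => {
  // Issue one JSON-RPC request; responses and server notifications
  // arrive interleaved on the same socket.
  ws.send(
    JSON.stringify({ jsonrpc: "2.0", id: 1, method: "model/list", params: {} }),
  );
});

ws.on("message", (data) => {
  console.log("<-", data.toString());
});
```
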
@@ -1,8 +1,10 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::fs;
|
||||
use std::fs::OpenOptions;
|
||||
use std::io::BufRead;
|
||||
use std::io::BufReader;
|
||||
use std::io::Write;
|
||||
use std::net::TcpStream;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Child;
|
||||
@@ -53,6 +55,8 @@ use codex_app_server_protocol::SendUserMessageParams;
|
||||
use codex_app_server_protocol::SendUserMessageResponse;
|
||||
use codex_app_server_protocol::ServerNotification;
|
||||
use codex_app_server_protocol::ServerRequest;
|
||||
use codex_app_server_protocol::ThreadListParams;
|
||||
use codex_app_server_protocol::ThreadListResponse;
|
||||
use codex_app_server_protocol::ThreadResumeParams;
|
||||
use codex_app_server_protocol::ThreadResumeResponse;
|
||||
use codex_app_server_protocol::ThreadStartParams;
|
||||
@@ -67,15 +71,28 @@ use codex_protocol::protocol::EventMsg;
|
||||
use serde::Serialize;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde_json::Value;
|
||||
use tungstenite::Message;
|
||||
use tungstenite::WebSocket;
|
||||
use tungstenite::connect;
|
||||
use tungstenite::stream::MaybeTlsStream;
|
||||
use url::Url;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Minimal launcher that initializes the Codex app-server and logs the handshake.
|
||||
#[derive(Parser)]
|
||||
#[command(author = "Codex", version, about = "Bootstrap Codex app-server", long_about = None)]
|
||||
struct Cli {
|
||||
/// Path to the `codex` CLI binary.
|
||||
#[arg(long, env = "CODEX_BIN", default_value = "codex")]
|
||||
codex_bin: PathBuf,
|
||||
/// Path to the `codex` CLI binary. When set, requests use stdio by
|
||||
/// spawning `codex app-server` as a child process.
|
||||
#[arg(long, env = "CODEX_BIN", global = true)]
|
||||
codex_bin: Option<PathBuf>,
|
||||
|
||||
/// Existing websocket server URL to connect to.
|
||||
///
|
||||
/// If neither `--codex-bin` nor `--url` is provided, defaults to
|
||||
/// `ws://127.0.0.1:4222`.
|
||||
#[arg(long, env = "CODEX_APP_SERVER_URL", global = true)]
|
||||
url: Option<String>,
|
||||
|
||||
/// Forwarded to the `codex` CLI as `--config key=value`. Repeatable.
|
||||
///
|
||||
@@ -105,6 +122,18 @@ struct Cli {
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum CliCommand {
|
||||
/// Start `codex app-server` on a websocket endpoint in the background.
|
||||
///
|
||||
/// Logs are written to:
|
||||
/// `/tmp/codex-app-server-test-client/`
|
||||
Serve {
|
||||
/// WebSocket listen URL passed to `codex app-server --listen`.
|
||||
#[arg(long, default_value = "ws://127.0.0.1:4222")]
|
||||
listen: String,
|
||||
/// Kill any process listening on the same port before starting.
|
||||
#[arg(long, default_value_t = false)]
|
||||
kill: bool,
|
||||
},
|
||||
/// Send a user message through the Codex app-server.
|
||||
SendMessage {
|
||||
/// User message to send to Codex.
|
||||
@@ -122,6 +151,13 @@ enum CliCommand {
|
||||
/// User message to send to Codex.
|
||||
user_message: String,
|
||||
},
|
||||
/// Resume a V2 thread and continuously stream notifications/events.
|
||||
///
|
||||
/// This command does not auto-exit; stop it with SIGINT/SIGTERM/SIGKILL.
|
||||
ThreadResume {
|
||||
/// Existing thread id to resume.
|
||||
thread_id: String,
|
||||
},
|
||||
/// Start a V2 turn that elicits an ExecCommand approval.
|
||||
#[command(name = "trigger-cmd-approval")]
|
||||
TriggerCmdApproval {
|
||||
@@ -151,11 +187,19 @@ enum CliCommand {
|
||||
/// List the available models from the Codex app-server.
|
||||
#[command(name = "model-list")]
|
||||
ModelList,
|
||||
/// List stored threads from the Codex app-server.
|
||||
#[command(name = "thread-list")]
|
||||
ThreadList {
|
||||
/// Number of threads to return.
|
||||
#[arg(long, default_value_t = 20)]
|
||||
limit: u32,
|
||||
},
|
||||
}
|
||||
|
||||
pub fn run() -> Result<()> {
    let Cli {
        codex_bin,
        url,
        config_overrides,
        dynamic_tools,
        command,
@@ -164,59 +208,222 @@ pub fn run() -> Result<()> {
    let dynamic_tools = parse_dynamic_tools_arg(&dynamic_tools)?;

    match command {
        CliCommand::Serve { listen, kill } => {
            ensure_dynamic_tools_unused(&dynamic_tools, "serve")?;
            let codex_bin = codex_bin.unwrap_or_else(|| PathBuf::from("codex"));
            serve(&codex_bin, &config_overrides, &listen, kill)
        }
        CliCommand::SendMessage { user_message } => {
            ensure_dynamic_tools_unused(&dynamic_tools, "send-message")?;
            send_message(&codex_bin, &config_overrides, user_message)
            let endpoint = resolve_endpoint(codex_bin, url)?;
            send_message(&endpoint, &config_overrides, user_message)
        }
        CliCommand::SendMessageV2 { user_message } => {
            send_message_v2(&codex_bin, &config_overrides, user_message, &dynamic_tools)
            let endpoint = resolve_endpoint(codex_bin, url)?;
            send_message_v2_endpoint(&endpoint, &config_overrides, user_message, &dynamic_tools)
        }
        CliCommand::ResumeMessageV2 {
            thread_id,
            user_message,
        } => resume_message_v2(
            &codex_bin,
            &config_overrides,
            thread_id,
            user_message,
            &dynamic_tools,
        ),
        } => {
            let endpoint = resolve_endpoint(codex_bin, url)?;
            resume_message_v2(
                &endpoint,
                &config_overrides,
                thread_id,
                user_message,
                &dynamic_tools,
            )
        }
        CliCommand::ThreadResume { thread_id } => {
            ensure_dynamic_tools_unused(&dynamic_tools, "thread-resume")?;
            let endpoint = resolve_endpoint(codex_bin, url)?;
            thread_resume_follow(&endpoint, &config_overrides, thread_id)
        }
        CliCommand::TriggerCmdApproval { user_message } => {
            trigger_cmd_approval(&codex_bin, &config_overrides, user_message, &dynamic_tools)
            let endpoint = resolve_endpoint(codex_bin, url)?;
            trigger_cmd_approval(&endpoint, &config_overrides, user_message, &dynamic_tools)
        }
        CliCommand::TriggerPatchApproval { user_message } => {
            trigger_patch_approval(&codex_bin, &config_overrides, user_message, &dynamic_tools)
            let endpoint = resolve_endpoint(codex_bin, url)?;
            trigger_patch_approval(&endpoint, &config_overrides, user_message, &dynamic_tools)
        }
        CliCommand::NoTriggerCmdApproval => {
            no_trigger_cmd_approval(&codex_bin, &config_overrides, &dynamic_tools)
            let endpoint = resolve_endpoint(codex_bin, url)?;
            no_trigger_cmd_approval(&endpoint, &config_overrides, &dynamic_tools)
        }
        CliCommand::SendFollowUpV2 {
            first_message,
            follow_up_message,
        } => send_follow_up_v2(
            &codex_bin,
            &config_overrides,
            first_message,
            follow_up_message,
            &dynamic_tools,
        ),
        } => {
            let endpoint = resolve_endpoint(codex_bin, url)?;
            send_follow_up_v2(
                &endpoint,
                &config_overrides,
                first_message,
                follow_up_message,
                &dynamic_tools,
            )
        }
        CliCommand::TestLogin => {
            ensure_dynamic_tools_unused(&dynamic_tools, "test-login")?;
            test_login(&codex_bin, &config_overrides)
            let endpoint = resolve_endpoint(codex_bin, url)?;
            test_login(&endpoint, &config_overrides)
        }
        CliCommand::GetAccountRateLimits => {
            ensure_dynamic_tools_unused(&dynamic_tools, "get-account-rate-limits")?;
            get_account_rate_limits(&codex_bin, &config_overrides)
            let endpoint = resolve_endpoint(codex_bin, url)?;
            get_account_rate_limits(&endpoint, &config_overrides)
        }
        CliCommand::ModelList => {
            ensure_dynamic_tools_unused(&dynamic_tools, "model-list")?;
            model_list(&codex_bin, &config_overrides)
            let endpoint = resolve_endpoint(codex_bin, url)?;
            model_list(&endpoint, &config_overrides)
        }
        CliCommand::ThreadList { limit } => {
            ensure_dynamic_tools_unused(&dynamic_tools, "thread-list")?;
            let endpoint = resolve_endpoint(codex_bin, url)?;
            thread_list(&endpoint, &config_overrides, limit)
        }
    }
}

fn send_message(codex_bin: &Path, config_overrides: &[String], user_message: String) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
enum Endpoint {
    SpawnCodex(PathBuf),
    ConnectWs(String),
}

fn resolve_endpoint(codex_bin: Option<PathBuf>, url: Option<String>) -> Result<Endpoint> {
    if codex_bin.is_some() && url.is_some() {
        bail!("--codex-bin and --url are mutually exclusive");
    }
    if let Some(codex_bin) = codex_bin {
        return Ok(Endpoint::SpawnCodex(codex_bin));
    }
    if let Some(url) = url {
        return Ok(Endpoint::ConnectWs(url));
    }
    Ok(Endpoint::ConnectWs("ws://127.0.0.1:4222".to_string()))
}
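
// A sketch (not part of the original change) pinning down the precedence above:
// an explicit --codex-bin wins, then an explicit --url, then the default URL,
// and passing both is rejected.
#[cfg(test)]
mod resolve_endpoint_sketch {
    use super::*;

    #[test]
    fn precedence() -> Result<()> {
        assert!(matches!(
            resolve_endpoint(Some(PathBuf::from("codex")), None)?,
            Endpoint::SpawnCodex(_)
        ));
        assert!(matches!(
            resolve_endpoint(None, Some("ws://127.0.0.1:9999".to_string()))?,
            Endpoint::ConnectWs(url) if url == "ws://127.0.0.1:9999"
        ));
        assert!(matches!(
            resolve_endpoint(None, None)?,
            Endpoint::ConnectWs(url) if url == "ws://127.0.0.1:4222"
        ));
        assert!(
            resolve_endpoint(Some(PathBuf::from("codex")), Some("ws://x".to_string())).is_err()
        );
        Ok(())
    }
}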

fn serve(codex_bin: &Path, config_overrides: &[String], listen: &str, kill: bool) -> Result<()> {
    let runtime_dir = PathBuf::from("/tmp/codex-app-server-test-client");
    fs::create_dir_all(&runtime_dir)
        .with_context(|| format!("failed to create runtime dir {}", runtime_dir.display()))?;
    let log_path = runtime_dir.join("app-server.log");
    if kill {
        kill_listeners_on_same_port(listen)?;
    }

    let log_file = OpenOptions::new()
        .create(true)
        .append(true)
        .open(&log_path)
        .with_context(|| format!("failed to open log file {}", log_path.display()))?;
    let log_file_stderr = log_file
        .try_clone()
        .with_context(|| format!("failed to clone log file handle {}", log_path.display()))?;

    let mut cmdline = format!(
        "tail -f /dev/null | RUST_BACKTRACE=full RUST_LOG=warn,codex_=trace {}",
        shell_quote(&codex_bin.display().to_string())
    );
    for override_kv in config_overrides {
        cmdline.push_str(&format!(" --config {}", shell_quote(override_kv)));
    }
    cmdline.push_str(&format!(" app-server --listen {}", shell_quote(listen)));

    let child = Command::new("nohup")
        .arg("sh")
        .arg("-c")
        .arg(cmdline)
        .stdin(Stdio::null())
        .stdout(Stdio::from(log_file))
        .stderr(Stdio::from(log_file_stderr))
        .spawn()
        .with_context(|| format!("failed to start `{}` app-server", codex_bin.display()))?;

    let pid = child.id();

    println!("started codex app-server");
    println!("listen: {listen}");
    println!("pid: {pid} (launcher process)");
    println!("log: {}", log_path.display());

    Ok(())
}

fn kill_listeners_on_same_port(listen: &str) -> Result<()> {
    let url = Url::parse(listen).with_context(|| format!("invalid --listen URL `{listen}`"))?;
    let port = url
        .port_or_known_default()
        .with_context(|| format!("unable to infer port from --listen URL `{listen}`"))?;

    let output = Command::new("lsof")
        .arg("-nP")
        .arg(format!("-tiTCP:{port}"))
        .arg("-sTCP:LISTEN")
        .output()
        .with_context(|| format!("failed to run lsof for port {port}"))?;

    if !output.status.success() {
        return Ok(());
    }

    let pids: Vec<u32> = String::from_utf8_lossy(&output.stdout)
        .lines()
        .filter_map(|line| line.trim().parse::<u32>().ok())
        .collect();

    if pids.is_empty() {
        return Ok(());
    }

    for pid in pids {
        println!("killing listener pid {pid} on port {port}");
        let pid_str = pid.to_string();
        let term_status = Command::new("kill")
            .arg(&pid_str)
            .status()
            .with_context(|| format!("failed to send SIGTERM to pid {pid}"))?;
        if !term_status.success() {
            continue;
        }
    }

    thread::sleep(Duration::from_millis(300));

    let output = Command::new("lsof")
        .arg("-nP")
        .arg(format!("-tiTCP:{port}"))
        .arg("-sTCP:LISTEN")
        .output()
        .with_context(|| format!("failed to re-check listeners on port {port}"))?;
    if !output.status.success() {
        return Ok(());
    }
    let remaining: Vec<u32> = String::from_utf8_lossy(&output.stdout)
        .lines()
        .filter_map(|line| line.trim().parse::<u32>().ok())
        .collect();
    for pid in remaining {
        println!("force killing remaining listener pid {pid} on port {port}");
        let _ = Command::new("kill").arg("-9").arg(pid.to_string()).status();
    }

    Ok(())
}

fn shell_quote(input: &str) -> String {
    format!("'{}'", input.replace('\'', "'\\''"))
}
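
// Sketch: this is the standard POSIX single-quote escape — wrap the whole
// string in single quotes and replace each embedded ' with '\'' (close quote,
// escaped quote, reopen quote). E.g.:
//
//     shell_quote("plain") == "'plain'"
//     shell_quote("it's")  == r#"'it'\''s'"#
//     shell_quote("a b")   == "'a b'"   // survives `sh -c` as one argument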

fn send_message(
    endpoint: &Endpoint,
    config_overrides: &[String],
    user_message: String,
) -> Result<()> {
    let mut client = CodexClient::connect(endpoint, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");
@@ -242,9 +449,19 @@ pub fn send_message_v2(
    config_overrides: &[String],
    user_message: String,
    dynamic_tools: &Option<Vec<DynamicToolSpec>>,
) -> Result<()> {
    let endpoint = Endpoint::SpawnCodex(codex_bin.to_path_buf());
    send_message_v2_endpoint(&endpoint, config_overrides, user_message, dynamic_tools)
}

fn send_message_v2_endpoint(
    endpoint: &Endpoint,
    config_overrides: &[String],
    user_message: String,
    dynamic_tools: &Option<Vec<DynamicToolSpec>>,
) -> Result<()> {
    send_message_v2_with_policies(
        codex_bin,
        endpoint,
        config_overrides,
        user_message,
        None,
@@ -254,7 +471,7 @@ pub fn send_message_v2(
}

fn resume_message_v2(
    codex_bin: &Path,
    endpoint: &Endpoint,
    config_overrides: &[String],
    thread_id: String,
    user_message: String,
@@ -262,7 +479,7 @@ fn resume_message_v2(
) -> Result<()> {
    ensure_dynamic_tools_unused(dynamic_tools, "resume-message-v2")?;

    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
    let mut client = CodexClient::connect(endpoint, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");
@@ -288,8 +505,28 @@ fn resume_message_v2(
    Ok(())
}

fn thread_resume_follow(
    endpoint: &Endpoint,
    config_overrides: &[String],
    thread_id: String,
) -> Result<()> {
    let mut client = CodexClient::connect(endpoint, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let resume_response = client.thread_resume(ThreadResumeParams {
        thread_id,
        ..Default::default()
    })?;
    println!("< thread/resume response: {resume_response:?}");
    println!("< streaming notifications until process is terminated");

    client.stream_notifications_forever()
}
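
// On the wire, the helper above amounts to a `thread/resume` request followed
// by an open-ended notification stream. Hypothetical wire shape (the exact
// param casing is an assumption; consult the generated protocol types):
//
//     > { "jsonrpc": "2.0", "id": 1, "method": "thread/resume",
//         "params": { "threadId": "thr_123" } }
//     < { "jsonrpc": "2.0", "id": 1, "result": { ... } }
//     < { "method": "turn/started", "params": { ... } }   // streamed until killed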

fn trigger_cmd_approval(
    codex_bin: &Path,
    endpoint: &Endpoint,
    config_overrides: &[String],
    user_message: Option<String>,
    dynamic_tools: &Option<Vec<DynamicToolSpec>>,
@@ -298,7 +535,7 @@ fn trigger_cmd_approval(
        "Run `touch /tmp/should-trigger-approval` so I can confirm the file exists.";
    let message = user_message.unwrap_or_else(|| default_prompt.to_string());
    send_message_v2_with_policies(
        codex_bin,
        endpoint,
        config_overrides,
        message,
        Some(AskForApproval::OnRequest),
@@ -310,7 +547,7 @@ fn trigger_cmd_approval(
}

fn trigger_patch_approval(
    codex_bin: &Path,
    endpoint: &Endpoint,
    config_overrides: &[String],
    user_message: Option<String>,
    dynamic_tools: &Option<Vec<DynamicToolSpec>>,
@@ -319,7 +556,7 @@ fn trigger_patch_approval(
        "Create a file named APPROVAL_DEMO.txt containing a short hello message using apply_patch.";
    let message = user_message.unwrap_or_else(|| default_prompt.to_string());
    send_message_v2_with_policies(
        codex_bin,
        endpoint,
        config_overrides,
        message,
        Some(AskForApproval::OnRequest),
@@ -331,13 +568,13 @@ fn trigger_patch_approval(
}

fn no_trigger_cmd_approval(
    codex_bin: &Path,
    endpoint: &Endpoint,
    config_overrides: &[String],
    dynamic_tools: &Option<Vec<DynamicToolSpec>>,
) -> Result<()> {
    let prompt = "Run `touch should_not_trigger_approval.txt`";
    send_message_v2_with_policies(
        codex_bin,
        endpoint,
        config_overrides,
        prompt.to_string(),
        None,
@@ -347,14 +584,14 @@ fn no_trigger_cmd_approval(
}

fn send_message_v2_with_policies(
    codex_bin: &Path,
    endpoint: &Endpoint,
    config_overrides: &[String],
    user_message: String,
    approval_policy: Option<AskForApproval>,
    sandbox_policy: Option<SandboxPolicy>,
    dynamic_tools: &Option<Vec<DynamicToolSpec>>,
) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
    let mut client = CodexClient::connect(endpoint, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");
@@ -385,13 +622,13 @@ fn send_message_v2_with_policies(
}

fn send_follow_up_v2(
    codex_bin: &Path,
    endpoint: &Endpoint,
    config_overrides: &[String],
    first_message: String,
    follow_up_message: String,
    dynamic_tools: &Option<Vec<DynamicToolSpec>>,
) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
    let mut client = CodexClient::connect(endpoint, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");
@@ -431,8 +668,8 @@ fn send_follow_up_v2(
    Ok(())
}

fn test_login(codex_bin: &Path, config_overrides: &[String]) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
fn test_login(endpoint: &Endpoint, config_overrides: &[String]) -> Result<()> {
    let mut client = CodexClient::connect(endpoint, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");
@@ -461,8 +698,8 @@ fn test_login(codex_bin: &Path, config_overrides: &[String]) -> Result<()> {
    }
}

fn get_account_rate_limits(codex_bin: &Path, config_overrides: &[String]) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
fn get_account_rate_limits(endpoint: &Endpoint, config_overrides: &[String]) -> Result<()> {
    let mut client = CodexClient::connect(endpoint, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");
@@ -473,8 +710,8 @@ fn get_account_rate_limits(codex_bin: &Path, config_overrides: &[String]) -> Res
    Ok(())
}

fn model_list(codex_bin: &Path, config_overrides: &[String]) -> Result<()> {
    let mut client = CodexClient::spawn(codex_bin, config_overrides)?;
fn model_list(endpoint: &Endpoint, config_overrides: &[String]) -> Result<()> {
    let mut client = CodexClient::connect(endpoint, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");
@@ -485,6 +722,26 @@ fn model_list(codex_bin: &Path, config_overrides: &[String]) -> Result<()> {
    Ok(())
}

fn thread_list(endpoint: &Endpoint, config_overrides: &[String], limit: u32) -> Result<()> {
    let mut client = CodexClient::connect(endpoint, config_overrides)?;

    let initialize = client.initialize()?;
    println!("< initialize response: {initialize:?}");

    let response = client.thread_list(ThreadListParams {
        cursor: None,
        limit: Some(limit),
        sort_key: None,
        model_providers: None,
        source_kinds: None,
        archived: None,
        cwd: None,
    })?;
    println!("< thread/list response: {response:?}");

    Ok(())
}
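
// Sketch (not part of the original change): the all-None fields above mean
// "first `limit` threads, default sort"; narrowing is just populating the
// options, e.g. up to 10 archived threads under one cwd (field names per the
// constructor above, values illustrative):
//
//     let params = ThreadListParams {
//         cursor: None,
//         limit: Some(10),
//         sort_key: None,
//         model_providers: None,
//         source_kinds: None,
//         archived: Some(true),
//         cwd: Some("/home/user/project".into()),
//     };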

fn ensure_dynamic_tools_unused(
    dynamic_tools: &Option<Vec<DynamicToolSpec>>,
    command: &str,
@@ -519,15 +776,32 @@ fn parse_dynamic_tools_arg(dynamic_tools: &Option<String>) -> Result<Option<Vec<
    Ok(Some(tools))
}

enum ClientTransport {
    Stdio {
        child: Child,
        stdin: Option<ChildStdin>,
        stdout: BufReader<ChildStdout>,
    },
    WebSocket {
        url: String,
        socket: Box<WebSocket<MaybeTlsStream<TcpStream>>>,
    },
}

struct CodexClient {
    child: Child,
    stdin: Option<ChildStdin>,
    stdout: BufReader<ChildStdout>,
    transport: ClientTransport,
    pending_notifications: VecDeque<JSONRPCNotification>,
}

impl CodexClient {
    fn spawn(codex_bin: &Path, config_overrides: &[String]) -> Result<Self> {
    fn connect(endpoint: &Endpoint, config_overrides: &[String]) -> Result<Self> {
        match endpoint {
            Endpoint::SpawnCodex(codex_bin) => Self::spawn_stdio(codex_bin, config_overrides),
            Endpoint::ConnectWs(url) => Self::connect_websocket(url),
        }
    }

    fn spawn_stdio(codex_bin: &Path, config_overrides: &[String]) -> Result<Self> {
        let codex_bin_display = codex_bin.display();
        let mut cmd = Command::new(codex_bin);
        for override_kv in config_overrides {
@@ -551,9 +825,27 @@ impl CodexClient {
            .context("codex app-server stdout unavailable")?;

        Ok(Self {
            child: codex_app_server,
            stdin: Some(stdin),
            stdout: BufReader::new(stdout),
            transport: ClientTransport::Stdio {
                child: codex_app_server,
                stdin: Some(stdin),
                stdout: BufReader::new(stdout),
            },
            pending_notifications: VecDeque::new(),
        })
    }

    fn connect_websocket(url: &str) -> Result<Self> {
        let parsed = Url::parse(url).with_context(|| format!("invalid websocket URL `{url}`"))?;
        let (socket, _response) = connect(parsed.as_str()).with_context(|| {
            format!(
                "failed to connect to websocket app-server at `{url}`; if no server is running, start one with `codex-app-server-test-client serve --listen {url}`"
            )
        })?;
        Ok(Self {
            transport: ClientTransport::WebSocket {
                url: url.to_string(),
                socket: Box::new(socket),
            },
            pending_notifications: VecDeque::new(),
        })
    }
@@ -575,7 +867,16 @@ impl CodexClient {
            },
        };

        self.send_request(request, request_id, "initialize")
        let response: InitializeResponse = self.send_request(request, request_id, "initialize")?;

        // Complete the initialize handshake.
        let initialized = JSONRPCMessage::Notification(JSONRPCNotification {
            method: "initialized".to_string(),
            params: None,
        });
        self.write_jsonrpc_message(initialized)?;

        Ok(response)
    }
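
    // The handshake above is two wire messages: an `initialize` request that
    // expects a response, then a fire-and-forget `initialized` notification
    // (no id). Roughly, with the request params elided as in the hunk above:
    //
    //     > { "jsonrpc": "2.0", "id": 1, "method": "initialize", "params": { ... } }
    //     < { "jsonrpc": "2.0", "id": 1, "result": { ... } }
    //     > { "jsonrpc": "2.0", "method": "initialized" }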

    fn start_thread(&mut self) -> Result<NewConversationResponse> {
@@ -701,6 +1002,16 @@ impl CodexClient {
        self.send_request(request, request_id, "model/list")
    }

    fn thread_list(&mut self, params: ThreadListParams) -> Result<ThreadListResponse> {
        let request_id = self.request_id();
        let request = ClientRequest::ThreadList {
            request_id: request_id.clone(),
            params,
        };

        self.send_request(request, request_id, "thread/list")
    }

    fn stream_conversation(&mut self, conversation_id: &ThreadId) -> Result<()> {
        loop {
            let notification = self.next_notification()?;
@@ -835,6 +1146,12 @@ impl CodexClient {
        Ok(())
    }

    fn stream_notifications_forever(&mut self) -> Result<()> {
        loop {
            let _ = self.next_notification()?;
        }
    }

    fn extract_event(
        &self,
        notification: JSONRPCNotification,
@@ -882,17 +1199,7 @@ impl CodexClient {
        let request_json = serde_json::to_string(request)?;
        let request_pretty = serde_json::to_string_pretty(request)?;
        print_multiline_with_prefix("> ", &request_pretty);

        if let Some(stdin) = self.stdin.as_mut() {
            writeln!(stdin, "{request_json}")?;
            stdin
                .flush()
                .context("failed to flush request to codex app-server")?;
        } else {
            bail!("codex app-server stdin closed");
        }

        Ok(())
        self.write_payload(&request_json)
    }

    fn wait_for_response<T>(&mut self, request_id: RequestId, method: &str) -> Result<T>
@@ -947,17 +1254,8 @@ impl CodexClient {

    fn read_jsonrpc_message(&mut self) -> Result<JSONRPCMessage> {
        loop {
            let mut response_line = String::new();
            let bytes = self
                .stdout
                .read_line(&mut response_line)
                .context("failed to read from codex app-server")?;

            if bytes == 0 {
                bail!("codex app-server closed stdout");
            }

            let trimmed = response_line.trim();
            let raw = self.read_payload()?;
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                continue;
            }
@@ -1086,16 +1384,56 @@ impl CodexClient {
        let payload = serde_json::to_string(&message)?;
        let pretty = serde_json::to_string_pretty(&message)?;
        print_multiline_with_prefix("> ", &pretty);
        self.write_payload(&payload)
    }

        if let Some(stdin) = self.stdin.as_mut() {
            writeln!(stdin, "{payload}")?;
            stdin
                .flush()
                .context("failed to flush response to codex app-server")?;
            return Ok(());
    fn write_payload(&mut self, payload: &str) -> Result<()> {
        match &mut self.transport {
            ClientTransport::Stdio { stdin, .. } => {
                if let Some(stdin) = stdin.as_mut() {
                    writeln!(stdin, "{payload}")?;
                    stdin
                        .flush()
                        .context("failed to flush payload to codex app-server")?;
                    return Ok(());
                }
                bail!("codex app-server stdin closed")
            }
            ClientTransport::WebSocket { socket, url } => {
                socket
                    .send(Message::Text(payload.to_string().into()))
                    .with_context(|| format!("failed to write websocket message to `{url}`"))?;
                Ok(())
            }
        }
    }

        bail!("codex app-server stdin closed")
    fn read_payload(&mut self) -> Result<String> {
        match &mut self.transport {
            ClientTransport::Stdio { stdout, .. } => {
                let mut response_line = String::new();
                let bytes = stdout
                    .read_line(&mut response_line)
                    .context("failed to read from codex app-server")?;
                if bytes == 0 {
                    bail!("codex app-server closed stdout");
                }
                Ok(response_line)
            }
            ClientTransport::WebSocket { socket, url } => loop {
                let frame = socket
                    .read()
                    .with_context(|| format!("failed to read websocket message from `{url}`"))?;
                match frame {
                    Message::Text(text) => return Ok(text.to_string()),
                    Message::Binary(_) | Message::Ping(_) | Message::Pong(_) => continue,
                    Message::Close(_) => {
                        bail!("websocket app-server at `{url}` closed the connection")
                    }
                    Message::Frame(_) => continue,
                }
            },
        }
    }
}
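
// A stripped-down, standalone sketch of the websocket arm above (assuming the
// blocking `tungstenite` API these types come from, and the same imports as
// this file): `connect` yields the socket, writes go through
// `send(Message::Text(..))`, and reads must skip control frames.
#[allow(dead_code)]
fn websocket_roundtrip_sketch(url: &str) -> Result<String> {
    let (mut socket, _response) = connect(url)?;
    socket.send(Message::Text(
        r#"{"jsonrpc":"2.0","id":1,"method":"initialize"}"#.to_string().into(),
    ))?;
    loop {
        match socket.read()? {
            Message::Text(text) => return Ok(text.to_string()),
            // Ping/Pong/Binary/Frame are transport noise for a JSON-RPC client.
            _ => continue,
        }
    }
}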

@@ -1107,21 +1445,25 @@ fn print_multiline_with_prefix(prefix: &str, payload: &str) {

impl Drop for CodexClient {
    fn drop(&mut self) {
        let _ = self.stdin.take();
        let ClientTransport::Stdio { child, stdin, .. } = &mut self.transport else {
            return;
        };

        if let Ok(Some(status)) = self.child.try_wait() {
        let _ = stdin.take();

        if let Ok(Some(status)) = child.try_wait() {
            println!("[codex app-server exited: {status}]");
            return;
        }

        thread::sleep(Duration::from_millis(100));

        if let Ok(Some(status)) = self.child.try_wait() {
        if let Ok(Some(status)) = child.try_wait() {
            println!("[codex app-server exited: {status}]");
            return;
        }

        let _ = self.child.kill();
        let _ = self.child.wait();
        let _ = child.kill();
        let _ = child.wait();
    }
}

@@ -117,7 +117,7 @@ Example with notification opt-out:
- `thread/start` — create a new thread; emits `thread/started` and auto-subscribes you to turn/item events for that thread.
- `thread/resume` — reopen an existing thread by id so subsequent `turn/start` calls append to it.
- `thread/fork` — fork an existing thread into a new thread id by copying the stored history; emits `thread/started` and auto-subscribes you to turn/item events for the new thread.
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders` filtering.
- `thread/list` — page through stored rollouts; supports cursor-based pagination and optional `modelProviders`, `sourceKinds`, `archived`, and `cwd` filters.
- `thread/loaded/list` — list the thread ids currently loaded in memory.
- `thread/read` — read a stored thread by id without resuming it; optionally include turns via `includeTurns`.
- `thread/archive` — move a thread’s rollout file into the archived directory; returns `{}` on success.
@@ -131,7 +131,7 @@ Example with notification opt-out:
- `turn/interrupt` — request cancellation of an in-flight turn by `(thread_id, turn_id)`; success is an empty `{}` response and the turn finishes with `status: "interrupted"`.
- `review/start` — kick off Codex’s automated reviewer for a thread; responds like `turn/start` and emits `item/started`/`item/completed` notifications with `enteredReviewMode` and `exitedReviewMode` items, plus a final assistant `agentMessage` containing the review.
- `command/exec` — run a single command under the server sandbox without starting a thread/turn (handy for utilities and validation).
- `model/list` — list available models (with reasoning effort options and optional `upgrade` model ids).
- `model/list` — list available models (set `includeHidden: true` to include entries with `hidden: true`), with reasoning effort options and optional `upgrade` model ids.
- `experimentalFeature/list` — list feature flags with stage metadata (`beta`, `underDevelopment`, `stable`, etc.), enabled/default-enabled state, and cursor pagination. For non-beta flags, `displayName`/`description`/`announcement` are `null`.
- `collaborationMode/list` — list available collaboration mode presets (experimental, no pagination).
- `skills/list` — list skills for one or more `cwd` values (optional `forceReload`).
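
Putting the updated `thread/list` filters together, a request that pages archived VS Code sessions for one provider could look like this (a sketch with illustrative values, shaped per the bullet above):

```json
{ "method": "thread/list", "id": 42, "params": {
    "limit": 20,
    "modelProviders": ["openai"],
    "sourceKinds": ["vscode"],
    "archived": true,
    "cwd": "/home/user/project"
} }
```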
@@ -209,6 +209,8 @@ To branch from a stored session, call `thread/fork` with the `thread.id`. This c
{ "method": "thread/started", "params": { "thread": { … } } }
```

Experimental API: `thread/start`, `thread/resume`, and `thread/fork` accept `persistExtendedHistory: true` to persist a richer subset of ThreadItems for non-lossy history when calling `thread/read`, `thread/resume`, and `thread/fork` later. This does not backfill events that were not persisted previously.

### Example: List threads (with pagination & filters)

`thread/list` lets you render a history UI. Results default to `createdAt` descending (newest first). Pass any combination of:
@@ -219,6 +221,7 @@ To branch from a stored session, call `thread/fork` with the `thread.id`. This c
- `modelProviders` — restrict results to specific providers; unset, null, or an empty array will include all providers.
- `sourceKinds` — restrict results to specific sources; omit or pass `[]` for interactive sessions only (`cli`, `vscode`).
- `archived` — when `true`, list archived threads only. When `false` or `null`, list non-archived threads (default).
- `cwd` — restrict results to threads whose session cwd exactly matches this path.

Example:

@@ -532,6 +535,13 @@ Examples:
- Opt out of legacy session setup event: `codex/event/session_configured`
- Opt out of streamed agent text deltas: `item/agentMessage/delta`

### Fuzzy file search events (experimental)

The fuzzy file search session API emits per-query notifications:

- `fuzzyFileSearch/sessionUpdated` — `{ sessionId, query, files }` with the current matching files for the active query.
- `fuzzyFileSearch/sessionCompleted` — `{ sessionId, query }` once indexing/matching for that query has completed.

### Turn events

The app-server streams JSON-RPC notifications while a turn is running. Each turn starts with `turn/started` (initial `turn`) and ends with `turn/completed` (final `turn` status). Token usage events stream separately via `thread/tokenUsage/updated`. Clients subscribe to the events they care about, rendering each item incrementally as updates arrive. The per-item lifecycle is always: `item/started` → zero or more item-specific deltas → `item/completed`.
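
Concretely, a single streamed agent message observed by a subscriber arrives as a sequence like the following (a sketch; payloads abbreviated and field names illustrative):

```json
{ "method": "item/started", "params": { "item": { "type": "agentMessage", "id": "item_1", "text": "" } } }
{ "method": "item/agentMessage/delta", "params": { "itemId": "item_1", "delta": "Hel" } }
{ "method": "item/completed", "params": { "item": { "type": "agentMessage", "id": "item_1", "text": "Hello." } } }
```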
@@ -547,7 +557,7 @@ Today both notifications carry an empty `items` array even when item events were

`ThreadItem` is the tagged union carried in turn responses and `item/*` notifications. Currently we support events for the following items:

- `userMessage` — `{id, content}` where `content` is a list of user inputs (`text`, `image`, or `localImage`).
- `userMessage` — `{id, content}` where `content` is a list of user inputs (`text`, `image`, or `localImage`). Cyber model-routing warnings are surfaced as synthetic `userMessage` items with `text` prefixed by `Warning:`.
- `agentMessage` — `{id, text}` containing the accumulated agent reply.
- `plan` — `{id, text}` emitted for plan-mode turns; plan text can stream via `item/plan/delta` (experimental).
- `reasoning` — `{id, summary, content}` where `summary` holds streamed reasoning summaries (applicable for most OpenAI models) and `content` holds raw reasoning blocks (applicable for e.g. open source models).
@@ -761,7 +771,7 @@ To enable or disable a skill by path:

## Apps

Use `app/list` to fetch available apps (connectors). Each entry includes metadata like the app `id`, display `name`, `installUrl`, and whether it is currently accessible.
Use `app/list` to fetch available apps (connectors). Each entry includes metadata like the app `id`, display `name`, `installUrl`, whether it is currently accessible, and whether it is enabled in config.

```json
{ "method": "app/list", "id": 50, "params": {
@@ -780,7 +790,8 @@ Use `app/list` to fetch available apps (connectors). Each entry includes metadat
        "logoUrlDark": null,
        "distributionChannel": null,
        "installUrl": "https://chatgpt.com/apps/demo-app/demo-app",
        "isAccessible": true
        "isAccessible": true,
        "isEnabled": true
      }
    ],
    "nextCursor": null
@@ -806,7 +817,8 @@ The server also emits `app/list/updated` notifications whenever either source (a
        "logoUrlDark": null,
        "distributionChannel": null,
        "installUrl": "https://chatgpt.com/apps/demo-app/demo-app",
        "isAccessible": true
        "isAccessible": true,
        "isEnabled": true
      }
    ]
}

@@ -4,6 +4,7 @@ use crate::codex_message_processor::read_summary_from_rollout;
use crate::codex_message_processor::summary_to_thread;
use crate::error_code::INTERNAL_ERROR_CODE;
use crate::error_code::INVALID_REQUEST_ERROR_CODE;
use crate::outgoing_message::ClientRequestResult;
use crate::outgoing_message::ThreadScopedOutgoingMessageSender;
use crate::thread_state::ThreadState;
use crate::thread_state::TurnSummary;
@@ -67,6 +68,7 @@ use codex_app_server_protocol::TurnInterruptResponse;
use codex_app_server_protocol::TurnPlanStep;
use codex_app_server_protocol::TurnPlanUpdatedNotification;
use codex_app_server_protocol::TurnStatus;
use codex_app_server_protocol::UserInput as V2UserInput;
use codex_app_server_protocol::build_turns_from_rollout_items;
use codex_core::CodexThread;
use codex_core::parse_command::shlex_join;
@@ -94,6 +96,8 @@ use codex_protocol::request_user_input::RequestUserInputAnswer as CoreRequestUse
use codex_protocol::request_user_input::RequestUserInputResponse as CoreRequestUserInputResponse;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::hash::Hash;
use std::hash::Hasher;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::sync::Mutex;
@@ -121,6 +125,35 @@ pub(crate) async fn apply_bespoke_event_handling(
        EventMsg::TurnComplete(_ev) => {
            handle_turn_complete(conversation_id, event_turn_id, &outgoing, &thread_state).await;
        }
        EventMsg::Warning(warning_event) => {
            if matches!(api_version, ApiVersion::V2)
                && is_safety_check_downgrade_warning(&warning_event.message)
            {
                let item = ThreadItem::UserMessage {
                    id: warning_item_id(&event_turn_id, &warning_event.message),
                    content: vec![V2UserInput::Text {
                        text: format!("Warning: {}", warning_event.message),
                        text_elements: Vec::new(),
                    }],
                };
                let started = ItemStartedNotification {
                    thread_id: conversation_id.to_string(),
                    turn_id: event_turn_id.clone(),
                    item: item.clone(),
                };
                outgoing
                    .send_server_notification(ServerNotification::ItemStarted(started))
                    .await;
                let completed = ItemCompletedNotification {
                    thread_id: conversation_id.to_string(),
                    turn_id: event_turn_id.clone(),
                    item,
                };
                outgoing
                    .send_server_notification(ServerNotification::ItemCompleted(completed))
                    .await;
            }
        }
        EventMsg::ApplyPatchApprovalRequest(ApplyPatchApprovalRequestEvent {
            call_id,
            turn_id,
@@ -205,6 +238,7 @@ pub(crate) async fn apply_bespoke_event_handling(
            reason,
            proposed_execpolicy_amendment,
            parsed_cmd,
            ..
        }) => match api_version {
            ApiVersion::V1 => {
                let params = ExecCommandApprovalParams {
@@ -717,6 +751,10 @@ pub(crate) async fn apply_bespoke_event_handling(
                .await;
            };

            if !ev.affects_turn_status() {
                return;
            }

            let turn_error = TurnError {
                message: ev.message,
                codex_error_info: ev.codex_error_info.map(V2CodexErrorInfo::from),
@@ -887,11 +925,7 @@ pub(crate) async fn apply_bespoke_event_handling(
            // and emit the corresponding EventMsg, we repurpose the call_id as the item_id.
            let item_id = patch_end_event.call_id.clone();

            let status = if patch_end_event.success {
                PatchApplyStatus::Completed
            } else {
                PatchApplyStatus::Failed
            };
            let status: PatchApplyStatus = (&patch_end_event.status).into();
            let changes = convert_patch_changes(&patch_end_event.changes);
            complete_file_change_item(
                conversation_id,
@@ -998,14 +1032,11 @@ pub(crate) async fn apply_bespoke_event_handling(
                aggregated_output,
                exit_code,
                duration,
                status,
                ..
            } = exec_command_end_event;

            let status = if exit_code == 0 {
                CommandExecutionStatus::Completed
            } else {
                CommandExecutionStatus::Failed
            };
            let status: CommandExecutionStatus = (&status).into();
            let command_actions = parsed_cmd
                .into_iter()
                .map(V2ParsedCommand::from)
@@ -1287,6 +1318,18 @@ async fn complete_command_execution_item(
        .await;
}

fn is_safety_check_downgrade_warning(message: &str) -> bool {
    message.contains("Your account was flagged for potentially high-risk cyber activity")
        && message.contains("apply for trusted access: https://chatgpt.com/cyber")
}

fn warning_item_id(turn_id: &str, message: &str) -> String {
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    message.hash(&mut hasher);
    let digest = hasher.finish();
    format!("{turn_id}-warning-{digest:x}")
}
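
// Sketch: because DefaultHasher starts from fixed keys, the id is deterministic
// for a given message within a process, which is what lets the item/started and
// item/completed notifications above correlate:
//
//     let a = warning_item_id("turn_1", "Warning text");
//     let b = warning_item_id("turn_1", "Warning text");
//     assert_eq!(a, b);
//     assert!(a.starts_with("turn_1-warning-"));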

async fn maybe_emit_raw_response_item_completed(
    api_version: ApiVersion,
    conversation_id: ThreadId,
@@ -1411,12 +1454,25 @@ async fn handle_error(

async fn on_patch_approval_response(
    call_id: String,
    receiver: oneshot::Receiver<JsonValue>,
    receiver: oneshot::Receiver<ClientRequestResult>,
    codex: Arc<CodexThread>,
) {
    let response = receiver.await;
    let value = match response {
        Ok(value) => value,
        Ok(Ok(value)) => value,
        Ok(Err(err)) => {
            error!("request failed with client error: {err:?}");
            if let Err(submit_err) = codex
                .submit(Op::PatchApproval {
                    id: call_id.clone(),
                    decision: ReviewDecision::Denied,
                })
                .await
            {
                error!("failed to submit denied PatchApproval after request failure: {submit_err}");
            }
            return;
        }
        Err(err) => {
            error!("request failed: {err:?}");
            if let Err(submit_err) = codex
@@ -1454,12 +1510,16 @@ async fn on_patch_approval_response(
async fn on_exec_approval_response(
    call_id: String,
    turn_id: String,
    receiver: oneshot::Receiver<JsonValue>,
    receiver: oneshot::Receiver<ClientRequestResult>,
    conversation: Arc<CodexThread>,
) {
    let response = receiver.await;
    let value = match response {
        Ok(value) => value,
        Ok(Ok(value)) => value,
        Ok(Err(err)) => {
            error!("request failed with client error: {err:?}");
            return;
        }
        Err(err) => {
            error!("request failed: {err:?}");
            return;
@@ -1491,12 +1551,28 @@ async fn on_exec_approval_response(

async fn on_request_user_input_response(
    event_turn_id: String,
    receiver: oneshot::Receiver<JsonValue>,
    receiver: oneshot::Receiver<ClientRequestResult>,
    conversation: Arc<CodexThread>,
) {
    let response = receiver.await;
    let value = match response {
        Ok(value) => value,
        Ok(Ok(value)) => value,
        Ok(Err(err)) => {
            error!("request failed with client error: {err:?}");
            let empty = CoreRequestUserInputResponse {
                answers: HashMap::new(),
            };
            if let Err(err) = conversation
                .submit(Op::UserInputAnswer {
                    id: event_turn_id,
                    response: empty,
                })
                .await
            {
                error!("failed to submit UserInputAnswer: {err}");
            }
            return;
        }
        Err(err) => {
            error!("request failed: {err:?}");
            let empty = CoreRequestUserInputResponse {
@@ -1631,14 +1707,14 @@ async fn on_file_change_request_approval_response(
    conversation_id: ThreadId,
    item_id: String,
    changes: Vec<FileUpdateChange>,
    receiver: oneshot::Receiver<JsonValue>,
    receiver: oneshot::Receiver<ClientRequestResult>,
    codex: Arc<CodexThread>,
    outgoing: ThreadScopedOutgoingMessageSender,
    thread_state: Arc<Mutex<ThreadState>>,
) {
    let response = receiver.await;
    let (decision, completion_status) = match response {
        Ok(value) => {
        Ok(Ok(value)) => {
            let response = serde_json::from_value::<FileChangeRequestApprovalResponse>(value)
                .unwrap_or_else(|err| {
                    error!("failed to deserialize FileChangeRequestApprovalResponse: {err}");
@@ -1653,6 +1729,10 @@ async fn on_file_change_request_approval_response(
            // Only short-circuit on declines/cancels/failures.
            (decision, completion_status)
        }
        Ok(Err(err)) => {
            error!("request failed with client error: {err:?}");
            (ReviewDecision::Denied, Some(PatchApplyStatus::Failed))
        }
        Err(err) => {
            error!("request failed: {err:?}");
            (ReviewDecision::Denied, Some(PatchApplyStatus::Failed))
@@ -1691,13 +1771,13 @@ async fn on_command_execution_request_approval_response(
    command: String,
    cwd: PathBuf,
    command_actions: Vec<V2ParsedCommand>,
    receiver: oneshot::Receiver<JsonValue>,
    receiver: oneshot::Receiver<ClientRequestResult>,
    conversation: Arc<CodexThread>,
    outgoing: ThreadScopedOutgoingMessageSender,
) {
    let response = receiver.await;
    let (decision, completion_status) = match response {
        Ok(value) => {
        Ok(Ok(value)) => {
            let response = serde_json::from_value::<CommandExecutionRequestApprovalResponse>(value)
                .unwrap_or_else(|err| {
                    error!("failed to deserialize CommandExecutionRequestApprovalResponse: {err}");
@@ -1732,6 +1812,10 @@ async fn on_command_execution_request_approval_response(
            };
            (decision, completion_status)
        }
        Ok(Err(err)) => {
            error!("request failed with client error: {err:?}");
            (ReviewDecision::Denied, Some(CommandExecutionStatus::Failed))
        }
        Err(err) => {
            error!("request failed: {err:?}");
            (ReviewDecision::Denied, Some(CommandExecutionStatus::Failed))
@@ -1976,6 +2060,18 @@ mod tests {
        assert_eq!(item, expected);
    }

    #[test]
    fn safety_check_downgrade_warning_detection_matches_expected_message() {
        let warning = "Your account was flagged for potentially high-risk cyber activity and this request was routed to gpt-5.2 as a fallback. To regain access to gpt-5.3-codex, apply for trusted access: https://chatgpt.com/cyber\nLearn more: https://developers.openai.com/codex/concepts/cyber-safety";
        assert!(is_safety_check_downgrade_warning(warning));
    }

    #[test]
    fn safety_check_downgrade_warning_detection_ignores_other_warnings() {
        let warning = "Model metadata for `mock-model` not found. Defaulting to fallback metadata; this can degrade performance and cause issues.";
        assert!(!is_safety_check_downgrade_warning(warning));
    }

    #[tokio::test]
    async fn test_handle_error_records_message() -> Result<()> {
        let conversation_id = ThreadId::new();

File diff suppressed because it is too large
@@ -7,14 +7,35 @@ use std::sync::Arc;
use tokio::sync::oneshot;
use tracing::error;

use crate::outgoing_message::ClientRequestResult;

pub(crate) async fn on_call_response(
    call_id: String,
    receiver: oneshot::Receiver<serde_json::Value>,
    receiver: oneshot::Receiver<ClientRequestResult>,
    conversation: Arc<CodexThread>,
) {
    let response = receiver.await;
    let value = match response {
        Ok(value) => value,
        Ok(Ok(value)) => value,
        Ok(Err(err)) => {
            error!("request failed with client error: {err:?}");
            let fallback = CoreDynamicToolResponse {
                content_items: vec![CoreDynamicToolCallOutputContentItem::InputText {
                    text: "dynamic tool request failed".to_string(),
                }],
                success: false,
            };
            if let Err(err) = conversation
                .submit(Op::DynamicToolResponse {
                    id: call_id.clone(),
                    response: fallback,
                })
                .await
            {
                error!("failed to submit DynamicToolResponse: {err}");
            }
            return;
        }
        Err(err) => {
            error!("request failed: {err:?}");
            let fallback = CoreDynamicToolResponse {

@@ -1,12 +1,19 @@
use std::num::NonZero;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;

use codex_app_server_protocol::FuzzyFileSearchResult;
use codex_app_server_protocol::FuzzyFileSearchSessionCompletedNotification;
use codex_app_server_protocol::FuzzyFileSearchSessionUpdatedNotification;
use codex_app_server_protocol::ServerNotification;
use codex_file_search as file_search;
use tracing::warn;

use crate::outgoing_message::OutgoingMessageSender;

const MATCH_LIMIT: usize = 50;
const MAX_THREADS: usize = 12;

@@ -77,3 +84,164 @@ pub(crate) async fn run_fuzzy_file_search(

    files
}

pub(crate) struct FuzzyFileSearchSession {
    session: file_search::FileSearchSession,
    shared: Arc<SessionShared>,
}

impl FuzzyFileSearchSession {
    pub(crate) fn update_query(&self, query: String) {
        if self.shared.canceled.load(Ordering::Relaxed) {
            return;
        }
        {
            #[expect(clippy::unwrap_used)]
            let mut latest_query = self.shared.latest_query.lock().unwrap();
            *latest_query = query.clone();
        }
        self.session.update_query(&query);
    }
}

impl Drop for FuzzyFileSearchSession {
    fn drop(&mut self) {
        self.shared.canceled.store(true, Ordering::Relaxed);
    }
}
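
// The Drop impl above is the whole cancellation story: flipping the shared
// AtomicBool makes reporter callbacks and in-flight update_query calls become
// no-ops, and the same flag handed to create_session lets search threads bail.
// The pattern in isolation (illustrative):
//
//     let canceled = Arc::new(AtomicBool::new(false));
//     let for_worker = canceled.clone();
//     // worker side: check before doing work
//     if for_worker.load(Ordering::Relaxed) { return; }
//     // owner side (what Drop does):
//     canceled.store(true, Ordering::Relaxed);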

pub(crate) fn start_fuzzy_file_search_session(
    session_id: String,
    roots: Vec<String>,
    outgoing: Arc<OutgoingMessageSender>,
) -> anyhow::Result<FuzzyFileSearchSession> {
    #[expect(clippy::expect_used)]
    let limit = NonZero::new(MATCH_LIMIT).expect("MATCH_LIMIT should be a valid non-zero usize");
    let cores = std::thread::available_parallelism()
        .map(std::num::NonZero::get)
        .unwrap_or(1);
    let threads = cores.min(MAX_THREADS);
    #[expect(clippy::expect_used)]
    let threads = NonZero::new(threads.max(1)).expect("threads should be non-zero");
    let search_dirs: Vec<PathBuf> = roots.iter().map(PathBuf::from).collect();
    let canceled = Arc::new(AtomicBool::new(false));

    let shared = Arc::new(SessionShared {
        session_id,
        latest_query: Mutex::new(String::new()),
        outgoing,
        runtime: tokio::runtime::Handle::current(),
        canceled: canceled.clone(),
    });

    let reporter = Arc::new(SessionReporterImpl {
        shared: shared.clone(),
    });
    let session = file_search::create_session(
        search_dirs,
        file_search::FileSearchOptions {
            limit,
            threads,
            compute_indices: true,
            ..Default::default()
        },
        reporter,
        Some(canceled),
    )?;

    Ok(FuzzyFileSearchSession { session, shared })
}

struct SessionShared {
    session_id: String,
    latest_query: Mutex<String>,
    outgoing: Arc<OutgoingMessageSender>,
    runtime: tokio::runtime::Handle,
    canceled: Arc<AtomicBool>,
}

struct SessionReporterImpl {
    shared: Arc<SessionShared>,
}

impl SessionReporterImpl {
    fn send_snapshot(&self, snapshot: &file_search::FileSearchSnapshot) {
        if self.shared.canceled.load(Ordering::Relaxed) {
            return;
        }

        let query = {
            #[expect(clippy::unwrap_used)]
            self.shared.latest_query.lock().unwrap().clone()
        };
        if snapshot.query != query {
            return;
        }

        let files = if query.is_empty() {
            Vec::new()
        } else {
            collect_files(snapshot)
        };

        let notification = ServerNotification::FuzzyFileSearchSessionUpdated(
            FuzzyFileSearchSessionUpdatedNotification {
                session_id: self.shared.session_id.clone(),
                query,
                files,
            },
        );
        let outgoing = self.shared.outgoing.clone();
        self.shared.runtime.spawn(async move {
            outgoing.send_server_notification(notification).await;
        });
    }

    fn send_complete(&self) {
        if self.shared.canceled.load(Ordering::Relaxed) {
            return;
        }
        let session_id = self.shared.session_id.clone();
        let outgoing = self.shared.outgoing.clone();
        self.shared.runtime.spawn(async move {
            let notification = ServerNotification::FuzzyFileSearchSessionCompleted(
                FuzzyFileSearchSessionCompletedNotification { session_id },
            );
            outgoing.send_server_notification(notification).await;
        });
    }
}

impl file_search::SessionReporter for SessionReporterImpl {
    fn on_update(&self, snapshot: &file_search::FileSearchSnapshot) {
        self.send_snapshot(snapshot);
    }

    fn on_complete(&self) {
        self.send_complete();
    }
}

fn collect_files(snapshot: &file_search::FileSearchSnapshot) -> Vec<FuzzyFileSearchResult> {
    let mut files = snapshot
        .matches
        .iter()
        .map(|m| {
            let file_name = m.path.file_name().unwrap_or_default();
            FuzzyFileSearchResult {
                root: m.root.to_string_lossy().to_string(),
                path: m.path.to_string_lossy().to_string(),
                file_name: file_name.to_string_lossy().to_string(),
                score: m.score,
                indices: m.indices.clone(),
            }
        })
        .collect::<Vec<_>>();

    files.sort_by(file_search::cmp_by_score_desc_then_path_asc::<
        FuzzyFileSearchResult,
        _,
        _,
    >(|f| f.score, |f| f.path.as_str()));
    files
}

@@ -86,9 +86,20 @@ impl ExternalAuthRefresher for ExternalAuthRefreshBridge {
            .await;

        let result = match timeout(EXTERNAL_AUTH_REFRESH_TIMEOUT, rx).await {
            Ok(result) => result.map_err(|err| {
                std::io::Error::other(format!("auth refresh request canceled: {err}"))
            })?,
            Ok(result) => {
                // Two failure scenarios:
                // 1) `oneshot::Receiver` failed (sender dropped) => request canceled/channel closed.
                // 2) client answered with JSON-RPC error payload => propagate code/message.
                let result = result.map_err(|err| {
                    std::io::Error::other(format!("auth refresh request canceled: {err}"))
                })?;
                result.map_err(|err| {
                    std::io::Error::other(format!(
                        "auth refresh request failed: code={} message={}",
                        err.code, err.message
                    ))
                })?
            }
            Err(_) => {
                let _canceled = self.outgoing.cancel_request(&request_id).await;
                return Err(std::io::Error::other(format!(

@@ -8,12 +8,16 @@ use codex_core::models_manager::manager::RefreshStrategy;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ReasoningEffortPreset;

pub async fn supported_models(thread_manager: Arc<ThreadManager>, config: &Config) -> Vec<Model> {
pub async fn supported_models(
    thread_manager: Arc<ThreadManager>,
    config: &Config,
    include_hidden: bool,
) -> Vec<Model> {
    thread_manager
        .list_models(config, RefreshStrategy::OnlineIfUncached)
        .await
        .into_iter()
        .filter(|preset| preset.show_in_picker)
        .filter(|preset| include_hidden || preset.show_in_picker)
        .map(model_from_preset)
        .collect()
}
@@ -25,6 +29,7 @@ fn model_from_preset(preset: ModelPreset) -> Model {
        upgrade: preset.upgrade.map(|upgrade| upgrade.id),
        display_name: preset.display_name.to_string(),
        description: preset.description.to_string(),
        hidden: !preset.show_in_picker,
        supported_reasoning_efforts: reasoning_efforts_from_preset(
            preset.supported_reasoning_efforts,
        ),

@@ -20,6 +20,8 @@ use crate::error_code::INTERNAL_ERROR_CODE;
#[cfg(test)]
use codex_protocol::account::PlanType;

pub(crate) type ClientRequestResult = std::result::Result<Result, JSONRPCErrorError>;

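// Note the nesting: the outer std::result::Result distinguishes a JSON-RPC
// error reply from a successful one, while the oneshot::Receiver wrapping it
// can itself fail if the sender is dropped. Every waiter therefore performs
// the same three-way match (sketch, handler bodies elided):
//
//     match receiver.await {
//         Ok(Ok(result)) => { /* client answered with a result payload */ }
//         Ok(Err(rpc_error)) => { /* client answered with a JSON-RPC error */ }
//         Err(_) => { /* sender dropped: canceled or connection gone */ }
//     }
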
/// Stable identifier for a transport connection.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub(crate) struct ConnectionId(pub(crate) u64);
@@ -46,7 +48,7 @@ pub(crate) enum OutgoingEnvelope {
pub(crate) struct OutgoingMessageSender {
    next_server_request_id: AtomicI64,
    sender: mpsc::Sender<OutgoingEnvelope>,
    request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<Result>>>,
    request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<ClientRequestResult>>>,
}

#[derive(Clone)]
@@ -69,7 +71,7 @@ impl ThreadScopedOutgoingMessageSender {
    pub(crate) async fn send_request(
        &self,
        payload: ServerRequestPayload,
    ) -> oneshot::Receiver<Result> {
    ) -> oneshot::Receiver<ClientRequestResult> {
        if self.connection_ids.is_empty() {
            let (_tx, rx) = oneshot::channel();
            return rx;
@@ -118,7 +120,7 @@ impl OutgoingMessageSender {
        &self,
        connection_ids: &[ConnectionId],
        request: ServerRequestPayload,
    ) -> oneshot::Receiver<Result> {
    ) -> oneshot::Receiver<ClientRequestResult> {
        let (_id, rx) = self
            .send_request_with_id_to_connections(connection_ids, request)
            .await;
@@ -128,7 +130,7 @@ impl OutgoingMessageSender {
    pub(crate) async fn send_request_with_id(
        &self,
        request: ServerRequestPayload,
    ) -> (RequestId, oneshot::Receiver<Result>) {
    ) -> (RequestId, oneshot::Receiver<ClientRequestResult>) {
        self.send_request_with_id_to_connections(&[], request).await
    }

@@ -136,7 +138,7 @@ impl OutgoingMessageSender {
        &self,
        connection_ids: &[ConnectionId],
        request: ServerRequestPayload,
    ) -> (RequestId, oneshot::Receiver<Result>) {
    ) -> (RequestId, oneshot::Receiver<ClientRequestResult>) {
        let id = RequestId::Integer(self.next_server_request_id.fetch_add(1, Ordering::Relaxed));
        let outgoing_message_id = id.clone();
        let (tx_approve, rx_approve) = oneshot::channel();
@@ -190,7 +192,7 @@ impl OutgoingMessageSender {

        match entry {
            Some((id, sender)) => {
                if let Err(err) = sender.send(result) {
                if let Err(err) = sender.send(Ok(result)) {
                    warn!("could not notify callback for {id:?} due to: {err:?}");
                }
            }
@@ -207,8 +209,11 @@ impl OutgoingMessageSender {
        };

        match entry {
            Some((id, _sender)) => {
            Some((id, sender)) => {
                warn!("client responded with error for {id:?}: {error:?}");
                if let Err(err) = sender.send(Err(error)) {
                    warn!("could not notify callback for {id:?} due to: {err:?}");
                }
            }
            None => {
                warn!("could not find callback for {id:?}");
@@ -390,11 +395,13 @@ mod tests {
    use codex_app_server_protocol::AccountLoginCompletedNotification;
    use codex_app_server_protocol::AccountRateLimitsUpdatedNotification;
    use codex_app_server_protocol::AccountUpdatedNotification;
    use codex_app_server_protocol::ApplyPatchApprovalParams;
    use codex_app_server_protocol::AuthMode;
    use codex_app_server_protocol::ConfigWarningNotification;
    use codex_app_server_protocol::LoginChatGptCompleteNotification;
    use codex_app_server_protocol::RateLimitSnapshot;
    use codex_app_server_protocol::RateLimitWindow;
    use codex_protocol::ThreadId;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use tokio::time::timeout;
@@ -609,4 +616,38 @@ mod tests {
            other => panic!("expected targeted error envelope, got: {other:?}"),
        }
    }

    #[tokio::test]
    async fn notify_client_error_forwards_error_to_waiter() {
        let (tx, _rx) = mpsc::channel::<OutgoingEnvelope>(4);
        let outgoing = OutgoingMessageSender::new(tx);

        let (request_id, wait_for_result) = outgoing
            .send_request_with_id(ServerRequestPayload::ApplyPatchApproval(
                ApplyPatchApprovalParams {
                    conversation_id: ThreadId::new(),
                    call_id: "call-id".to_string(),
                    file_changes: HashMap::new(),
                    reason: None,
                    grant_root: None,
                },
            ))
            .await;

        let error = JSONRPCErrorError {
            code: INTERNAL_ERROR_CODE,
            message: "refresh failed".to_string(),
            data: None,
        };

        outgoing
            .notify_client_error(request_id, error.clone())
            .await;

        let result = timeout(Duration::from_secs(1), wait_for_result)
            .await
            .expect("wait should not time out")
            .expect("waiter should receive a callback");
        assert_eq!(result, Err(error));
    }
}

@@ -678,6 +678,78 @@ impl McpProcess {
        self.send_request("fuzzyFileSearch", Some(params)).await
    }

    pub async fn send_fuzzy_file_search_session_start_request(
        &mut self,
        session_id: &str,
        roots: Vec<String>,
    ) -> anyhow::Result<i64> {
        let params = serde_json::json!({
            "sessionId": session_id,
            "roots": roots,
        });
        self.send_request("fuzzyFileSearch/sessionStart", Some(params))
            .await
    }

    pub async fn start_fuzzy_file_search_session(
        &mut self,
        session_id: &str,
        roots: Vec<String>,
    ) -> anyhow::Result<JSONRPCResponse> {
        let request_id = self
            .send_fuzzy_file_search_session_start_request(session_id, roots)
            .await?;
        self.read_stream_until_response_message(RequestId::Integer(request_id))
            .await
    }

    pub async fn send_fuzzy_file_search_session_update_request(
        &mut self,
        session_id: &str,
        query: &str,
    ) -> anyhow::Result<i64> {
        let params = serde_json::json!({
            "sessionId": session_id,
            "query": query,
        });
        self.send_request("fuzzyFileSearch/sessionUpdate", Some(params))
            .await
    }

    pub async fn update_fuzzy_file_search_session(
        &mut self,
        session_id: &str,
        query: &str,
    ) -> anyhow::Result<JSONRPCResponse> {
        let request_id = self
            .send_fuzzy_file_search_session_update_request(session_id, query)
            .await?;
        self.read_stream_until_response_message(RequestId::Integer(request_id))
            .await
    }

    pub async fn send_fuzzy_file_search_session_stop_request(
        &mut self,
        session_id: &str,
    ) -> anyhow::Result<i64> {
        let params = serde_json::json!({
            "sessionId": session_id,
        });
        self.send_request("fuzzyFileSearch/sessionStop", Some(params))
            .await
    }

    pub async fn stop_fuzzy_file_search_session(
        &mut self,
        session_id: &str,
    ) -> anyhow::Result<JSONRPCResponse> {
        let request_id = self
            .send_fuzzy_file_search_session_stop_request(session_id)
            .await?;
        self.read_stream_until_response_message(RequestId::Integer(request_id))
            .await
    }
|
||||
|
||||
async fn send_request(
|
||||
&mut self,
|
||||
method: &str,
|
||||
|
||||
@@ -41,6 +41,7 @@ fn preset_to_info(preset: &ModelPreset, priority: i32) -> ModelInfo {
        experimental_supported_tools: Vec::new(),
        input_modalities: default_input_modalities(),
        prefer_websockets: false,
        used_fallback_model_metadata: false,
    }
}

@@ -1,6 +1,8 @@
use anyhow::Result;
use anyhow::anyhow;
use app_test_support::McpProcess;
use codex_app_server_protocol::FuzzyFileSearchSessionCompletedNotification;
use codex_app_server_protocol::FuzzyFileSearchSessionUpdatedNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use pretty_assertions::assert_eq;
@@ -9,6 +11,154 @@ use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const SHORT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_millis(500);
const STOP_GRACE_PERIOD: std::time::Duration = std::time::Duration::from_millis(250);
const SESSION_UPDATED_METHOD: &str = "fuzzyFileSearch/sessionUpdated";
const SESSION_COMPLETED_METHOD: &str = "fuzzyFileSearch/sessionCompleted";

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum FileExpectation {
    Any,
    Empty,
    NonEmpty,
}

async fn initialized_mcp(codex_home: &TempDir) -> Result<McpProcess> {
    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
    Ok(mcp)
}

async fn wait_for_session_updated(
    mcp: &mut McpProcess,
    session_id: &str,
    query: &str,
    file_expectation: FileExpectation,
) -> Result<FuzzyFileSearchSessionUpdatedNotification> {
    for _ in 0..20 {
        let notification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message(SESSION_UPDATED_METHOD),
        )
        .await??;
        let params = notification
            .params
            .ok_or_else(|| anyhow!("missing notification params"))?;
        let payload = serde_json::from_value::<FuzzyFileSearchSessionUpdatedNotification>(params)?;
        if payload.session_id != session_id || payload.query != query {
            continue;
        }
        let files_match = match file_expectation {
            FileExpectation::Any => true,
            FileExpectation::Empty => payload.files.is_empty(),
            FileExpectation::NonEmpty => !payload.files.is_empty(),
        };
        if files_match {
            return Ok(payload);
        }
    }
    anyhow::bail!(
        "did not receive expected session update for sessionId={session_id}, query={query}"
    );
}

async fn wait_for_session_completed(
    mcp: &mut McpProcess,
    session_id: &str,
) -> Result<FuzzyFileSearchSessionCompletedNotification> {
    for _ in 0..20 {
        let notification = timeout(
            DEFAULT_READ_TIMEOUT,
            mcp.read_stream_until_notification_message(SESSION_COMPLETED_METHOD),
        )
        .await??;
        let params = notification
            .params
            .ok_or_else(|| anyhow!("missing notification params"))?;
        let payload =
            serde_json::from_value::<FuzzyFileSearchSessionCompletedNotification>(params)?;
        if payload.session_id == session_id {
            return Ok(payload);
        }
    }

    anyhow::bail!("did not receive expected session completion for sessionId={session_id}");
}

async fn assert_update_request_fails_for_missing_session(
    mcp: &mut McpProcess,
    session_id: &str,
    query: &str,
) -> Result<()> {
    let request_id = mcp
        .send_fuzzy_file_search_session_update_request(session_id, query)
        .await?;
    let err = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_error_message(RequestId::Integer(request_id)),
    )
    .await??;
    assert_eq!(err.error.code, -32600);
    assert_eq!(
        err.error.message,
        format!("fuzzy file search session not found: {session_id}")
    );
    Ok(())
}

async fn assert_no_session_updates_for(
    mcp: &mut McpProcess,
    session_id: &str,
    grace_period: std::time::Duration,
    duration: std::time::Duration,
) -> Result<()> {
    let grace_deadline = tokio::time::Instant::now() + grace_period;
    loop {
        let now = tokio::time::Instant::now();
        if now >= grace_deadline {
            break;
        }
        let remaining = grace_deadline - now;
        match timeout(
            remaining,
            mcp.read_stream_until_notification_message(SESSION_UPDATED_METHOD),
        )
        .await
        {
            Err(_) => break,
            Ok(Err(err)) => return Err(err),
            Ok(Ok(_)) => {}
        }
    }

    let deadline = tokio::time::Instant::now() + duration;
    loop {
        let now = tokio::time::Instant::now();
        if now >= deadline {
            return Ok(());
        }
        let remaining = deadline - now;
        match timeout(
            remaining,
            mcp.read_stream_until_notification_message(SESSION_UPDATED_METHOD),
        )
        .await
        {
            Err(_) => return Ok(()),
            Ok(Err(err)) => return Err(err),
            Ok(Ok(notification)) => {
                let params = notification
                    .params
                    .ok_or_else(|| anyhow!("missing notification params"))?;
                let payload =
                    serde_json::from_value::<FuzzyFileSearchSessionUpdatedNotification>(params)?;
                if payload.session_id == session_id {
                    anyhow::bail!("received unexpected session update after stop: {payload:?}");
                }
            }
        }
    }
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> {
@@ -125,3 +275,246 @@ async fn test_fuzzy_file_search_accepts_cancellation_token() -> Result<()> {

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_session_streams_updates() -> Result<()> {
    let codex_home = TempDir::new()?;
    let root = TempDir::new()?;
    std::fs::write(root.path().join("alpha.txt"), "contents")?;
    let mut mcp = initialized_mcp(&codex_home).await?;

    let root_path = root.path().to_string_lossy().to_string();
    let session_id = "session-1";

    mcp.start_fuzzy_file_search_session(session_id, vec![root_path.clone()])
        .await?;
    mcp.update_fuzzy_file_search_session(session_id, "alp")
        .await?;

    let payload =
        wait_for_session_updated(&mut mcp, session_id, "alp", FileExpectation::NonEmpty).await?;
    assert_eq!(payload.files.len(), 1);
    assert_eq!(payload.files[0].root, root_path);
    assert_eq!(payload.files[0].path, "alpha.txt");
    let completed = wait_for_session_completed(&mut mcp, session_id).await?;
    assert_eq!(completed.session_id, session_id);

    mcp.stop_fuzzy_file_search_session(session_id).await?;

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_session_no_updates_after_complete_until_query_edited() -> Result<()>
{
    let codex_home = TempDir::new()?;
    let root = TempDir::new()?;
    std::fs::write(root.path().join("alpha.txt"), "contents")?;
    let mut mcp = initialized_mcp(&codex_home).await?;

    let root_path = root.path().to_string_lossy().to_string();
    let session_id = "session-complete-invariant";
    mcp.start_fuzzy_file_search_session(session_id, vec![root_path])
        .await?;

    mcp.update_fuzzy_file_search_session(session_id, "alp")
        .await?;
    wait_for_session_updated(&mut mcp, session_id, "alp", FileExpectation::NonEmpty).await?;
    wait_for_session_completed(&mut mcp, session_id).await?;
    assert_no_session_updates_for(&mut mcp, session_id, STOP_GRACE_PERIOD, SHORT_READ_TIMEOUT)
        .await?;

    mcp.update_fuzzy_file_search_session(session_id, "alpha")
        .await?;
    wait_for_session_updated(&mut mcp, session_id, "alpha", FileExpectation::NonEmpty).await?;

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_session_update_before_start_errors() -> Result<()> {
    let codex_home = TempDir::new()?;
    let mut mcp = initialized_mcp(&codex_home).await?;
    assert_update_request_fails_for_missing_session(&mut mcp, "missing", "alp").await?;

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_session_update_works_without_waiting_for_start_response()
-> Result<()> {
    let codex_home = TempDir::new()?;
    let root = TempDir::new()?;
    std::fs::write(root.path().join("alpha.txt"), "contents")?;
    let mut mcp = initialized_mcp(&codex_home).await?;

    let root_path = root.path().to_string_lossy().to_string();
    let session_id = "session-no-wait";

    let start_request_id = mcp
        .send_fuzzy_file_search_session_start_request(session_id, vec![root_path.clone()])
        .await?;
    let update_request_id = mcp
        .send_fuzzy_file_search_session_update_request(session_id, "alp")
        .await?;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(update_request_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_request_id)),
    )
    .await??;

    let payload =
        wait_for_session_updated(&mut mcp, session_id, "alp", FileExpectation::NonEmpty).await?;
    assert_eq!(payload.files.len(), 1);
    assert_eq!(payload.files[0].root, root_path);
    assert_eq!(payload.files[0].path, "alpha.txt");

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_session_multiple_query_updates_work() -> Result<()> {
    let codex_home = TempDir::new()?;
    let root = TempDir::new()?;
    std::fs::write(root.path().join("alpha.txt"), "contents")?;
    std::fs::write(root.path().join("alphabet.txt"), "contents")?;
    let mut mcp = initialized_mcp(&codex_home).await?;

    let root_path = root.path().to_string_lossy().to_string();
    let session_id = "session-multi-update";
    mcp.start_fuzzy_file_search_session(session_id, vec![root_path.clone()])
        .await?;

    mcp.update_fuzzy_file_search_session(session_id, "alp")
        .await?;
    let alp_payload =
        wait_for_session_updated(&mut mcp, session_id, "alp", FileExpectation::NonEmpty).await?;
    assert_eq!(
        alp_payload.files.iter().all(|file| file.root == root_path),
        true
    );
    wait_for_session_completed(&mut mcp, session_id).await?;

    mcp.update_fuzzy_file_search_session(session_id, "zzzz")
        .await?;
    let zzzz_payload =
        wait_for_session_updated(&mut mcp, session_id, "zzzz", FileExpectation::Any).await?;
    assert_eq!(zzzz_payload.query, "zzzz");
    assert_eq!(zzzz_payload.files.is_empty(), true);
    wait_for_session_completed(&mut mcp, session_id).await?;

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_session_update_after_stop_fails() -> Result<()> {
    let codex_home = TempDir::new()?;
    let root = TempDir::new()?;
    std::fs::write(root.path().join("alpha.txt"), "contents")?;
    let mut mcp = initialized_mcp(&codex_home).await?;

    let session_id = "session-stop-fail";
    let root_path = root.path().to_string_lossy().to_string();
    mcp.start_fuzzy_file_search_session(session_id, vec![root_path])
        .await?;
    mcp.stop_fuzzy_file_search_session(session_id).await?;

    assert_update_request_fails_for_missing_session(&mut mcp, session_id, "alp").await?;

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_session_stops_sending_updates_after_stop() -> Result<()> {
    let codex_home = TempDir::new()?;
    let root = TempDir::new()?;
    for i in 0..10_000 {
        let file_path = root.path().join(format!("file-{i:04}.txt"));
        std::fs::write(file_path, "contents")?;
    }
    let mut mcp = initialized_mcp(&codex_home).await?;

    let root_path = root.path().to_string_lossy().to_string();
    let session_id = "session-stop-no-updates";
    mcp.start_fuzzy_file_search_session(session_id, vec![root_path])
        .await?;
    mcp.update_fuzzy_file_search_session(session_id, "file-")
        .await?;
    wait_for_session_updated(&mut mcp, session_id, "file-", FileExpectation::NonEmpty).await?;

    mcp.stop_fuzzy_file_search_session(session_id).await?;

    assert_no_session_updates_for(&mut mcp, session_id, STOP_GRACE_PERIOD, SHORT_READ_TIMEOUT)
        .await?;

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_two_sessions_are_independent() -> Result<()> {
    let codex_home = TempDir::new()?;
    let root_a = TempDir::new()?;
    let root_b = TempDir::new()?;
    std::fs::write(root_a.path().join("alpha.txt"), "contents")?;
    std::fs::write(root_b.path().join("beta.txt"), "contents")?;
    let mut mcp = initialized_mcp(&codex_home).await?;

    let root_a_path = root_a.path().to_string_lossy().to_string();
    let root_b_path = root_b.path().to_string_lossy().to_string();
    let session_a = "session-a";
    let session_b = "session-b";

    mcp.start_fuzzy_file_search_session(session_a, vec![root_a_path.clone()])
        .await?;
    mcp.start_fuzzy_file_search_session(session_b, vec![root_b_path.clone()])
        .await?;

    mcp.update_fuzzy_file_search_session(session_a, "alp")
        .await?;

    let session_a_update =
        wait_for_session_updated(&mut mcp, session_a, "alp", FileExpectation::NonEmpty).await?;
    assert_eq!(session_a_update.files.len(), 1);
    assert_eq!(session_a_update.files[0].root, root_a_path);
    assert_eq!(session_a_update.files[0].path, "alpha.txt");

    mcp.update_fuzzy_file_search_session(session_b, "bet")
        .await?;
    let session_b_update =
        wait_for_session_updated(&mut mcp, session_b, "bet", FileExpectation::NonEmpty).await?;
    assert_eq!(session_b_update.files.len(), 1);
    assert_eq!(session_b_update.files[0].root, root_b_path);
    assert_eq!(session_b_update.files[0].path, "beta.txt");

    Ok(())
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_query_cleared_sends_blank_snapshot() -> Result<()> {
    let codex_home = TempDir::new()?;
    let root = TempDir::new()?;
    std::fs::write(root.path().join("alpha.txt"), "contents")?;
    let mut mcp = initialized_mcp(&codex_home).await?;

    let root_path = root.path().to_string_lossy().to_string();
    let session_id = "session-clear-query";
    mcp.start_fuzzy_file_search_session(session_id, vec![root_path])
        .await?;

    mcp.update_fuzzy_file_search_session(session_id, "alp")
        .await?;
    wait_for_session_updated(&mut mcp, session_id, "alp", FileExpectation::NonEmpty).await?;

    mcp.update_fuzzy_file_search_session(session_id, "").await?;
    let payload =
        wait_for_session_updated(&mut mcp, session_id, "", FileExpectation::Empty).await?;
    assert_eq!(payload.files.is_empty(), true);

    Ok(())
}

@@ -477,7 +477,6 @@ fn assert_permissions_message(item: &ResponseItem) {
        &SandboxPolicy::DangerFullAccess,
        AskForApproval::Never,
        &Policy::empty(),
        false,
        &PathBuf::from("/tmp"),
    )
    .into_text();
@@ -564,6 +563,7 @@ fn append_rollout_turn_context(path: &Path, timestamp: &str, model: &str) -> std
        cwd: PathBuf::from("/"),
        approval_policy: AskForApproval::Never,
        sandbox_policy: SandboxPolicy::DangerFullAccess,
        network: None,
        model: model.to_string(),
        personality: None,
        collaboration_mode: None,

@@ -1,5 +1,6 @@
use std::borrow::Cow;
use std::sync::Arc;
use std::sync::Mutex as StdMutex;
use std::time::Duration;

use anyhow::Result;
@@ -86,6 +87,7 @@ async fn list_apps_uses_thread_feature_flag_when_thread_id_is_provided() -> Resu
        distribution_channel: None,
        install_url: None,
        is_accessible: false,
        is_enabled: true,
    }];
    let tools = vec![connector_tool("beta", "Beta App")?];
    let (server_url, server_handle) =
@@ -173,6 +175,78 @@ connectors = false
    Ok(())
}

#[tokio::test]
async fn list_apps_reports_is_enabled_from_config() -> Result<()> {
    let connectors = vec![AppInfo {
        id: "beta".to_string(),
        name: "Beta".to_string(),
        description: Some("Beta connector".to_string()),
        logo_url: None,
        logo_url_dark: None,
        distribution_channel: None,
        install_url: None,
        is_accessible: false,
        is_enabled: true,
    }];
    let tools = vec![connector_tool("beta", "Beta App")?];
    let (server_url, server_handle) =
        start_apps_server_with_delays(connectors, tools, Duration::ZERO, Duration::ZERO).await?;

    let codex_home = TempDir::new()?;
    std::fs::write(
        codex_home.path().join("config.toml"),
        format!(
            r#"
chatgpt_base_url = "{server_url}"

[features]
connectors = true

[apps.beta]
enabled = false
"#
        ),
    )?;
    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("chatgpt-token")
            .account_id("account-123")
            .chatgpt_user_id("user-123")
            .chatgpt_account_id("account-123"),
        AuthCredentialsStoreMode::File,
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp
        .send_apps_list_request(AppsListParams {
            limit: None,
            cursor: None,
            thread_id: None,
            force_refetch: false,
        })
        .await?;

    let response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;
    let AppsListResponse {
        data: response_data,
        next_cursor,
    } = to_response(response)?;
    assert!(next_cursor.is_none());
    assert_eq!(response_data.len(), 1);
    assert_eq!(response_data[0].id, "beta");
    assert!(!response_data[0].is_enabled);

    server_handle.abort();
    let _ = server_handle.await;
    Ok(())
}

#[tokio::test]
async fn list_apps_emits_updates_and_returns_after_both_lists_load() -> Result<()> {
    let connectors = vec![
@@ -185,6 +259,7 @@ async fn list_apps_emits_updates_and_returns_after_both_lists_load() -> Result<(
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
            is_enabled: true,
        },
        AppInfo {
            id: "beta".to_string(),
@@ -195,6 +270,7 @@ async fn list_apps_emits_updates_and_returns_after_both_lists_load() -> Result<(
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
            is_enabled: true,
        },
    ];

@@ -239,6 +315,7 @@ async fn list_apps_emits_updates_and_returns_after_both_lists_load() -> Result<(
        distribution_channel: None,
        install_url: Some("https://chatgpt.com/apps/beta-app/beta".to_string()),
        is_accessible: true,
        is_enabled: true,
    }];

    let first_update = read_app_list_updated_notification(&mut mcp).await?;
@@ -254,6 +331,7 @@ async fn list_apps_emits_updates_and_returns_after_both_lists_load() -> Result<(
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
            is_accessible: true,
            is_enabled: true,
        },
        AppInfo {
            id: "alpha".to_string(),
@@ -264,6 +342,7 @@ async fn list_apps_emits_updates_and_returns_after_both_lists_load() -> Result<(
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
            is_accessible: false,
            is_enabled: true,
        },
    ];

@@ -300,6 +379,7 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
            is_enabled: true,
        },
        AppInfo {
            id: "beta".to_string(),
@@ -310,6 +390,7 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
            is_enabled: true,
        },
    ];

@@ -358,6 +439,7 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
            is_accessible: false,
            is_enabled: true,
        },
        AppInfo {
            id: "beta".to_string(),
@@ -368,6 +450,7 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
            is_accessible: false,
            is_enabled: true,
        },
    ]
);
@@ -382,6 +465,7 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
            is_accessible: true,
            is_enabled: true,
        },
        AppInfo {
            id: "alpha".to_string(),
@@ -392,6 +476,7 @@ async fn list_apps_returns_connectors_with_accessible_flags() -> Result<()> {
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
            is_accessible: false,
            is_enabled: true,
        },
    ];

@@ -423,6 +508,7 @@ async fn list_apps_paginates_results() -> Result<()> {
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
            is_enabled: true,
        },
        AppInfo {
            id: "beta".to_string(),
@@ -433,6 +519,7 @@ async fn list_apps_paginates_results() -> Result<()> {
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
            is_enabled: true,
        },
    ];

@@ -486,6 +573,7 @@ async fn list_apps_paginates_results() -> Result<()> {
        distribution_channel: None,
        install_url: Some("https://chatgpt.com/apps/beta/beta".to_string()),
        is_accessible: true,
        is_enabled: true,
    }];

    assert_eq!(first_page, expected_first);
@@ -525,6 +613,7 @@ async fn list_apps_paginates_results() -> Result<()> {
        distribution_channel: None,
        install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
        is_accessible: false,
        is_enabled: true,
    }];

    assert_eq!(second_page, expected_second);
@@ -545,6 +634,7 @@ async fn list_apps_force_refetch_preserves_previous_cache_on_failure() -> Result
        distribution_channel: None,
        install_url: None,
        is_accessible: false,
        is_enabled: true,
    }];
    let tools = vec![connector_tool("beta", "Beta App")?];
    let (server_url, server_handle) =
@@ -633,6 +723,201 @@ async fn list_apps_force_refetch_preserves_previous_cache_on_failure() -> Result
    Ok(())
}

#[tokio::test]
async fn list_apps_force_refetch_patches_updates_from_cached_snapshots() -> Result<()> {
    let initial_connectors = vec![
        AppInfo {
            id: "alpha".to_string(),
            name: "Alpha".to_string(),
            description: Some("Alpha v1".to_string()),
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
            is_enabled: true,
        },
        AppInfo {
            id: "beta".to_string(),
            name: "Beta App".to_string(),
            description: Some("Beta v1".to_string()),
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
            is_enabled: true,
        },
    ];
    let initial_tools = vec![connector_tool("beta", "Beta App")?];
    let (server_url, server_handle, server_control) = start_apps_server_with_delays_and_control(
        initial_connectors,
        initial_tools,
        Duration::from_millis(300),
        Duration::ZERO,
    )
    .await?;

    let codex_home = TempDir::new()?;
    write_connectors_config(codex_home.path(), &server_url)?;
    write_chatgpt_auth(
        codex_home.path(),
        ChatGptAuthFixture::new("chatgpt-token")
            .account_id("account-123")
            .chatgpt_user_id("user-123")
            .chatgpt_account_id("account-123"),
        AuthCredentialsStoreMode::File,
    )?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

    let warm_request = mcp
        .send_apps_list_request(AppsListParams {
            limit: None,
            cursor: None,
            thread_id: None,
            force_refetch: false,
        })
        .await?;
    let warm_first_update = read_app_list_updated_notification(&mut mcp).await?;
    assert_eq!(
        warm_first_update.data,
        vec![AppInfo {
            id: "beta".to_string(),
            name: "Beta App".to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some("https://chatgpt.com/apps/beta-app/beta".to_string()),
            is_accessible: true,
            is_enabled: true,
        }]
    );

    let warm_second_update = read_app_list_updated_notification(&mut mcp).await?;
    assert_eq!(
        warm_second_update.data,
        vec![
            AppInfo {
                id: "beta".to_string(),
                name: "Beta App".to_string(),
                description: Some("Beta v1".to_string()),
                logo_url: None,
                logo_url_dark: None,
                distribution_channel: None,
                install_url: Some("https://chatgpt.com/apps/beta-app/beta".to_string()),
                is_accessible: true,
                is_enabled: true,
            },
            AppInfo {
                id: "alpha".to_string(),
                name: "Alpha".to_string(),
                description: Some("Alpha v1".to_string()),
                logo_url: None,
                logo_url_dark: None,
                distribution_channel: None,
                install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
                is_accessible: false,
                is_enabled: true,
            },
        ]
    );

    let warm_response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(warm_request)),
    )
    .await??;
    let AppsListResponse {
        data: warm_data,
        next_cursor: warm_next_cursor,
    } = to_response(warm_response)?;
    assert_eq!(warm_data, warm_second_update.data);
    assert!(warm_next_cursor.is_none());

    server_control.set_connectors(vec![AppInfo {
        id: "alpha".to_string(),
        name: "Alpha".to_string(),
        description: Some("Alpha v2".to_string()),
        logo_url: None,
        logo_url_dark: None,
        distribution_channel: None,
        install_url: None,
        is_accessible: false,
        is_enabled: true,
    }]);
    server_control.set_tools(Vec::new());

    let refetch_request = mcp
        .send_apps_list_request(AppsListParams {
            limit: None,
            cursor: None,
            thread_id: None,
            force_refetch: true,
        })
        .await?;

    let first_update = read_app_list_updated_notification(&mut mcp).await?;
    assert_eq!(
        first_update.data,
        vec![
            AppInfo {
                id: "alpha".to_string(),
                name: "Alpha".to_string(),
                description: Some("Alpha v1".to_string()),
                logo_url: None,
                logo_url_dark: None,
                distribution_channel: None,
                install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
                is_accessible: false,
                is_enabled: true,
            },
            AppInfo {
                id: "beta".to_string(),
                name: "Beta App".to_string(),
                description: Some("Beta v1".to_string()),
                logo_url: None,
                logo_url_dark: None,
                distribution_channel: None,
                install_url: Some("https://chatgpt.com/apps/beta-app/beta".to_string()),
                is_accessible: false,
                is_enabled: true,
            },
        ]
    );

    let expected_final = vec![AppInfo {
        id: "alpha".to_string(),
        name: "Alpha".to_string(),
        description: Some("Alpha v2".to_string()),
        logo_url: None,
        logo_url_dark: None,
        distribution_channel: None,
        install_url: Some("https://chatgpt.com/apps/alpha/alpha".to_string()),
        is_accessible: false,
        is_enabled: true,
    }];
    let second_update = read_app_list_updated_notification(&mut mcp).await?;
    assert_eq!(second_update.data, expected_final);

    let refetch_response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(refetch_request)),
    )
    .await??;
    let AppsListResponse {
        data: refetch_data,
        next_cursor: refetch_next_cursor,
    } = to_response(refetch_response)?;
    assert_eq!(refetch_data, expected_final);
    assert!(refetch_next_cursor.is_none());

    server_handle.abort();
    Ok(())
}

async fn read_app_list_updated_notification(
    mcp: &mut McpProcess,
) -> Result<AppListUpdatedNotification> {
@@ -652,22 +937,46 @@ async fn read_app_list_updated_notification(
struct AppsServerState {
    expected_bearer: String,
    expected_account_id: String,
    response: serde_json::Value,
    response: Arc<StdMutex<serde_json::Value>>,
    directory_delay: Duration,
}

#[derive(Clone)]
struct AppListMcpServer {
    tools: Arc<Vec<Tool>>,
    tools: Arc<StdMutex<Vec<Tool>>>,
    tools_delay: Duration,
}

impl AppListMcpServer {
    fn new(tools: Arc<Vec<Tool>>, tools_delay: Duration) -> Self {
    fn new(tools: Arc<StdMutex<Vec<Tool>>>, tools_delay: Duration) -> Self {
        Self { tools, tools_delay }
    }
}

#[derive(Clone)]
struct AppsServerControl {
    response: Arc<StdMutex<serde_json::Value>>,
    tools: Arc<StdMutex<Vec<Tool>>>,
}

impl AppsServerControl {
    fn set_connectors(&self, connectors: Vec<AppInfo>) {
        let mut response_guard = self
            .response
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner);
        *response_guard = json!({ "apps": connectors, "next_token": null });
    }

    fn set_tools(&self, tools: Vec<Tool>) {
        let mut tools_guard = self
            .tools
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner);
        *tools_guard = tools;
    }
}

impl ServerHandler for AppListMcpServer {
    fn get_info(&self) -> ServerInfo {
        ServerInfo {
@@ -688,8 +997,12 @@ impl ServerHandler for AppListMcpServer {
        if tools_delay > Duration::ZERO {
            tokio::time::sleep(tools_delay).await;
        }
        let tools = tools
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .clone();
        Ok(ListToolsResult {
            tools: (*tools).clone(),
            tools,
            next_cursor: None,
            meta: None,
        })
@@ -703,14 +1016,33 @@ async fn start_apps_server_with_delays(
    directory_delay: Duration,
    tools_delay: Duration,
) -> Result<(String, JoinHandle<()>)> {
    let (server_url, server_handle, _server_control) =
        start_apps_server_with_delays_and_control(connectors, tools, directory_delay, tools_delay)
            .await?;
    Ok((server_url, server_handle))
}

async fn start_apps_server_with_delays_and_control(
    connectors: Vec<AppInfo>,
    tools: Vec<Tool>,
    directory_delay: Duration,
    tools_delay: Duration,
) -> Result<(String, JoinHandle<()>, AppsServerControl)> {
    let response = Arc::new(StdMutex::new(
        json!({ "apps": connectors, "next_token": null }),
    ));
    let tools = Arc::new(StdMutex::new(tools));
    let state = AppsServerState {
        expected_bearer: "Bearer chatgpt-token".to_string(),
        expected_account_id: "account-123".to_string(),
        response: json!({ "apps": connectors, "next_token": null }),
        response: response.clone(),
        directory_delay,
    };
    let state = Arc::new(state);
    let tools = Arc::new(tools);
    let server_control = AppsServerControl {
        response,
        tools: tools.clone(),
    };

    let listener = TcpListener::bind("127.0.0.1:0").await?;
    let addr = listener.local_addr()?;
@@ -737,7 +1069,7 @@ async fn start_apps_server_with_delays(
        let _ = axum::serve(listener, router).await;
    });

    Ok((format!("http://{addr}"), handle))
    Ok((format!("http://{addr}"), handle, server_control))
}

async fn list_directory_connectors(
@@ -758,7 +1090,12 @@ async fn list_directory_connectors(
        .is_some_and(|value| value == state.expected_account_id);

    if bearer_ok && account_ok {
        Ok(Json(state.response.clone()))
        let response = state
            .response
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .clone();
        Ok(Json(response))
    } else {
        Err(StatusCode::UNAUTHORIZED)
    }

@@ -15,6 +15,7 @@ mod plan_item;
mod rate_limits;
mod request_user_input;
mod review;
mod safety_check_downgrade;
mod skills_list;
mod thread_archive;
mod thread_fork;

@@ -33,6 +33,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
        .send_list_models_request(ModelListParams {
            limit: Some(100),
            cursor: None,
            include_hidden: None,
        })
        .await?;

@@ -54,6 +55,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
            upgrade: None,
            display_name: "gpt-5.2-codex".to_string(),
            description: "Latest frontier agentic coding model.".to_string(),
            hidden: false,
            supported_reasoning_efforts: vec![
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::Low,
@@ -84,6 +86,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
            upgrade: Some("gpt-5.2-codex".to_string()),
            display_name: "gpt-5.1-codex-max".to_string(),
            description: "Codex-optimized flagship for deep and fast reasoning.".to_string(),
            hidden: false,
            supported_reasoning_efforts: vec![
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::Low,
@@ -114,6 +117,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
            upgrade: Some("gpt-5.2-codex".to_string()),
            display_name: "gpt-5.1-codex-mini".to_string(),
            description: "Optimized for codex. Cheaper, faster, but less capable.".to_string(),
            hidden: false,
            supported_reasoning_efforts: vec![
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::Medium,
@@ -138,6 +142,7 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
            description:
                "Latest frontier model with improvements across knowledge, reasoning and coding"
                    .to_string(),
            hidden: false,
            supported_reasoning_efforts: vec![
                ReasoningEffortOption {
                    reasoning_effort: ReasoningEffort::Low,
@@ -173,6 +178,38 @@ async fn list_models_returns_all_models_with_large_limit() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn list_models_includes_hidden_models() -> Result<()> {
    let codex_home = TempDir::new()?;
    write_models_cache(codex_home.path())?;
    let mut mcp = McpProcess::new(codex_home.path()).await?;

    timeout(DEFAULT_TIMEOUT, mcp.initialize()).await??;

    let request_id = mcp
        .send_list_models_request(ModelListParams {
            limit: Some(100),
            cursor: None,
            include_hidden: Some(true),
        })
        .await?;

    let response: JSONRPCResponse = timeout(
        DEFAULT_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;

    let ModelListResponse {
        data: items,
        next_cursor,
    } = to_response::<ModelListResponse>(response)?;

    assert!(items.iter().any(|item| item.hidden));
    assert!(next_cursor.is_none());
    Ok(())
}

#[tokio::test]
async fn list_models_pagination_works() -> Result<()> {
    let codex_home = TempDir::new()?;
@@ -185,6 +222,7 @@ async fn list_models_pagination_works() -> Result<()> {
        .send_list_models_request(ModelListParams {
            limit: Some(1),
            cursor: None,
            include_hidden: None,
        })
        .await?;

@@ -207,6 +245,7 @@ async fn list_models_pagination_works() -> Result<()> {
        .send_list_models_request(ModelListParams {
            limit: Some(1),
            cursor: Some(next_cursor.clone()),
            include_hidden: None,
        })
        .await?;

@@ -229,6 +268,7 @@ async fn list_models_pagination_works() -> Result<()> {
        .send_list_models_request(ModelListParams {
            limit: Some(1),
            cursor: Some(third_cursor.clone()),
            include_hidden: None,
        })
        .await?;

@@ -251,6 +291,7 @@ async fn list_models_pagination_works() -> Result<()> {
        .send_list_models_request(ModelListParams {
            limit: Some(1),
            cursor: Some(fourth_cursor.clone()),
            include_hidden: None,
        })
        .await?;

@@ -283,6 +324,7 @@ async fn list_models_rejects_invalid_cursor() -> Result<()> {
        .send_list_models_request(ModelListParams {
            limit: None,
            cursor: Some("invalid".to_string()),
            include_hidden: None,
        })
        .await?;

@@ -255,6 +255,7 @@ async fn review_start_rejects_empty_base_branch() -> Result<()> {
    Ok(())
}

#[cfg_attr(target_os = "windows", ignore = "flaky on windows CI")]
#[tokio::test]
async fn review_start_with_detached_delivery_returns_new_thread_id() -> Result<()> {
    let review_payload = json!({
@@ -437,6 +438,7 @@ model_provider = "mock_provider"

[features]
remote_models = false
shell_snapshot = false

[model_providers.mock_provider]
name = "Mock provider"

codex-rs/app-server/tests/suite/v2/safety_check_downgrade.rs (new file, 266 lines)
@@ -0,0 +1,266 @@
use anyhow::Result;
use app_test_support::McpProcess;
use app_test_support::to_response;
use codex_app_server_protocol::ItemCompletedNotification;
use codex_app_server_protocol::ItemStartedNotification;
use codex_app_server_protocol::JSONRPCNotification;
use codex_app_server_protocol::JSONRPCResponse;
use codex_app_server_protocol::RequestId;
use codex_app_server_protocol::ThreadItem;
use codex_app_server_protocol::ThreadStartParams;
use codex_app_server_protocol::ThreadStartResponse;
use codex_app_server_protocol::TurnStartParams;
use codex_app_server_protocol::TurnStartResponse;
use codex_app_server_protocol::UserInput;
use core_test_support::responses;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
use tempfile::TempDir;
use tokio::time::timeout;

const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const REQUESTED_MODEL: &str = "gpt-5.1-codex-max";
const SERVER_MODEL: &str = "gpt-5.2-codex";

#[tokio::test]
async fn openai_model_header_mismatch_emits_warning_item_v2() -> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
    let body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "Done"),
        responses::ev_completed("resp-1"),
    ]);
    let response = responses::sse_response(body).insert_header("OpenAI-Model", SERVER_MODEL);
    let _response_mock = responses::mount_response_once(&server, response).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some(REQUESTED_MODEL.to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "trigger safeguard".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    let _turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let _turn_start: TurnStartResponse = to_response(_turn_resp)?;

    let warning_started = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let notification: JSONRPCNotification = mcp
                .read_stream_until_notification_message("item/started")
                .await?;
            let params = notification.params.expect("item/started params");
            let started: ItemStartedNotification =
                serde_json::from_value(params).expect("deserialize item/started");
            if warning_text_from_item(&started.item).is_some_and(is_cyber_model_warning_text) {
                return Ok::<ItemStartedNotification, anyhow::Error>(started);
            }
        }
    })
    .await??;

    let warning_text =
        warning_text_from_item(&warning_started.item).expect("expected warning user message item");
    assert!(warning_text.contains("Warning:"));
    assert!(warning_text.contains("gpt-5.2 as a fallback"));
    assert!(warning_text.contains("regain access to gpt-5.3-codex"));

    let warning_completed = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let notification: JSONRPCNotification = mcp
                .read_stream_until_notification_message("item/completed")
                .await?;
            let params = notification.params.expect("item/completed params");
            let completed: ItemCompletedNotification =
                serde_json::from_value(params).expect("deserialize item/completed");
            if warning_text_from_item(&completed.item).is_some_and(is_cyber_model_warning_text) {
                return Ok::<ItemCompletedNotification, anyhow::Error>(completed);
            }
        }
    })
    .await??;
    assert_eq!(
        warning_text_from_item(&warning_completed.item),
        warning_text_from_item(&warning_started.item)
    );

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    Ok(())
}

#[tokio::test]
async fn response_model_field_mismatch_emits_warning_item_v2_when_header_matches_requested()
-> Result<()> {
    skip_if_no_network!(Ok(()));

    let server = responses::start_mock_server().await;
    let body = responses::sse(vec![
        serde_json::json!({
            "type": "response.created",
            "response": {
                "id": "resp-1",
                "model": SERVER_MODEL,
            }
        }),
        responses::ev_assistant_message("msg-1", "Done"),
        responses::ev_completed("resp-1"),
    ]);
    let response = responses::sse_response(body).insert_header("OpenAI-Model", REQUESTED_MODEL);
    let _response_mock = responses::mount_response_once(&server, response).await;

    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some(REQUESTED_MODEL.to_string()),
            ..Default::default()
        })
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "trigger response model check".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    let turn_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    let _turn_start: TurnStartResponse = to_response(turn_resp)?;

    let warning_started = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let notification: JSONRPCNotification = mcp
                .read_stream_until_notification_message("item/started")
                .await?;
            let params = notification.params.expect("item/started params");
            let started: ItemStartedNotification =
                serde_json::from_value(params).expect("deserialize item/started");
            if warning_text_from_item(&started.item).is_some_and(is_cyber_model_warning_text) {
                return Ok::<ItemStartedNotification, anyhow::Error>(started);
            }
        }
    })
    .await??;
    let warning_text =
        warning_text_from_item(&warning_started.item).expect("expected warning user message item");
    assert!(warning_text.contains("gpt-5.2 as a fallback"));

    let warning_completed = timeout(DEFAULT_READ_TIMEOUT, async {
        loop {
            let notification: JSONRPCNotification = mcp
                .read_stream_until_notification_message("item/completed")
                .await?;
            let params = notification.params.expect("item/completed params");
            let completed: ItemCompletedNotification =
                serde_json::from_value(params).expect("deserialize item/completed");
            if warning_text_from_item(&completed.item).is_some_and(is_cyber_model_warning_text) {
                return Ok::<ItemCompletedNotification, anyhow::Error>(completed);
            }
        }
    })
    .await??;
    assert_eq!(
        warning_text_from_item(&warning_completed.item),
        warning_text_from_item(&warning_started.item)
    );

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    Ok(())
}

fn warning_text_from_item(item: &ThreadItem) -> Option<&str> {
    let ThreadItem::UserMessage { content, .. } = item else {
        return None;
    };

    content.iter().find_map(|input| match input {
        UserInput::Text { text, .. } if text.starts_with("Warning: ") => Some(text.as_str()),
        _ => None,
    })
}

fn is_cyber_model_warning_text(text: &str) -> bool {
    text.contains("flagged for potentially high-risk cyber activity")
        && text.contains("apply for trusted access: https://chatgpt.com/cyber")
}

fn create_config_toml(codex_home: &std::path::Path, server_uri: &str) -> std::io::Result<()> {
    let config_toml = codex_home.join("config.toml");
    std::fs::write(
        config_toml,
        format!(
            r#"
model = "{REQUESTED_MODEL}"
approval_policy = "never"
sandbox_mode = "read-only"

model_provider = "mock_provider"

[features]
remote_models = false
personality = true

[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
        ),
    )
}

@@ -17,6 +17,8 @@ use codex_app_server_protocol::ThreadSourceKind;
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
use codex_protocol::ThreadId;
use codex_protocol::protocol::GitInfo as CoreGitInfo;
use codex_protocol::protocol::RolloutItem;
use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionSource as CoreSessionSource;
use codex_protocol::protocol::SubAgentSource;
use pretty_assertions::assert_eq;
@@ -66,6 +68,7 @@ async fn list_threads_with_sort(
            model_providers: providers,
            source_kinds,
            archived,
            cwd: None,
        })
        .await?;
    let resp: JSONRPCResponse = timeout(
@@ -127,6 +130,26 @@ fn set_rollout_mtime(path: &Path, updated_at_rfc3339: &str) -> Result<()> {
    Ok(())
}

fn set_rollout_cwd(path: &Path, cwd: &Path) -> Result<()> {
    let content = fs::read_to_string(path)?;
    let mut lines: Vec<String> = content.lines().map(str::to_string).collect();
    let first_line = lines
        .first_mut()
        .ok_or_else(|| anyhow::anyhow!("rollout at {} is empty", path.display()))?;
    let mut rollout_line: RolloutLine = serde_json::from_str(first_line)?;
    let RolloutItem::SessionMeta(mut session_meta_line) = rollout_line.item else {
        return Err(anyhow::anyhow!(
            "rollout at {} does not start with session metadata",
            path.display()
        ));
    };
    session_meta_line.meta.cwd = cwd.to_path_buf();
    rollout_line.item = RolloutItem::SessionMeta(session_meta_line);
    *first_line = serde_json::to_string(&rollout_line)?;
    fs::write(path, lines.join("\n") + "\n")?;
    Ok(())
}

#[tokio::test]
async fn thread_list_basic_empty() -> Result<()> {
    let codex_home = TempDir::new()?;
@@ -300,6 +323,63 @@ async fn thread_list_respects_provider_filter() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn thread_list_respects_cwd_filter() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_minimal_config(codex_home.path())?;

    let filtered_id = create_fake_rollout(
        codex_home.path(),
        "2025-01-02T10-00-00",
        "2025-01-02T10:00:00Z",
        "filtered",
        Some("mock_provider"),
        None,
    )?;
    let unfiltered_id = create_fake_rollout(
        codex_home.path(),
        "2025-01-02T11-00-00",
        "2025-01-02T11:00:00Z",
        "unfiltered",
        Some("mock_provider"),
        None,
    )?;

    let target_cwd = codex_home.path().join("target-cwd");
    fs::create_dir_all(&target_cwd)?;
    set_rollout_cwd(
        rollout_path(codex_home.path(), "2025-01-02T10-00-00", &filtered_id).as_path(),
        &target_cwd,
    )?;

    let mut mcp = init_mcp(codex_home.path()).await?;
    let request_id = mcp
        .send_thread_list_request(codex_app_server_protocol::ThreadListParams {
            cursor: None,
            limit: Some(10),
            sort_key: None,
            model_providers: Some(vec!["mock_provider".to_string()]),
            source_kinds: None,
            archived: None,
            cwd: Some(target_cwd.to_string_lossy().into_owned()),
        })
        .await?;
    let resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
    )
    .await??;
    let ThreadListResponse { data, next_cursor } = to_response::<ThreadListResponse>(resp)?;

    assert_eq!(next_cursor, None);
    assert_eq!(data.len(), 1);
    assert_eq!(data[0].id, filtered_id);
    assert_ne!(data[0].id, unfiltered_id);
    assert_eq!(data[0].cwd, target_cwd);

    Ok(())
}

#[tokio::test]
async fn thread_list_empty_source_kinds_defaults_to_interactive_only() -> Result<()> {
    let codex_home = TempDir::new()?;
@@ -1107,6 +1187,7 @@ async fn thread_list_invalid_cursor_returns_error() -> Result<()> {
            model_providers: Some(vec!["mock_provider".to_string()]),
            source_kinds: None,
            archived: None,
            cwd: None,
        })
        .await?;
    let error: JSONRPCError = timeout(

@@ -209,18 +209,412 @@ async fn thread_resume_without_overrides_does_not_change_updated_at_or_mtime() -
    Ok(())
}

#[tokio::test]
async fn thread_resume_keeps_in_flight_turn_streaming() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut primary = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, primary.initialize()).await??;

    let start_id = primary
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let seed_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "seed history".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(seed_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    primary.clear_message_buffer();

    let mut secondary = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, secondary.initialize()).await??;

    let turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "respond with docs".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/started"),
    )
    .await??;

    let resume_id = secondary
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id,
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        secondary.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;

    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    Ok(())
}

#[tokio::test]
async fn thread_resume_rejects_history_when_thread_is_running() -> Result<()> {
    let server = responses::start_mock_server().await;
    let first_body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "Done"),
        responses::ev_completed("resp-1"),
    ]);
    let second_body = responses::sse(vec![responses::ev_response_created("resp-2")]);
    let _first_response_mock = responses::mount_sse_once(&server, first_body).await;
    let _second_response_mock = responses::mount_sse_once(&server, second_body).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut primary = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, primary.initialize()).await??;

    let start_id = primary
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let seed_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "seed history".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(seed_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    primary.clear_message_buffer();

    let running_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "keep running".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(running_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/started"),
    )
    .await??;

    let resume_id = primary
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id,
            history: Some(vec![ResponseItem::Message {
                id: None,
                role: "user".to_string(),
                content: vec![ContentItem::InputText {
                    text: "history override".to_string(),
                }],
                end_turn: None,
                phase: None,
            }]),
            ..Default::default()
        })
        .await?;
    let resume_err: JSONRPCError = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_error_message(RequestId::Integer(resume_id)),
    )
    .await??;
    assert!(
        resume_err.error.message.contains("cannot resume thread")
            && resume_err.error.message.contains("with history")
            && resume_err.error.message.contains("running"),
        "unexpected resume error: {}",
        resume_err.error.message
    );

    Ok(())
}

#[tokio::test]
async fn thread_resume_rejects_mismatched_path_when_thread_is_running() -> Result<()> {
    let server = responses::start_mock_server().await;
    let first_body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "Done"),
        responses::ev_completed("resp-1"),
    ]);
    let second_body = responses::sse(vec![responses::ev_response_created("resp-2")]);
    let _first_response_mock = responses::mount_sse_once(&server, first_body).await;
    let _second_response_mock = responses::mount_sse_once(&server, second_body).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut primary = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, primary.initialize()).await??;

    let start_id = primary
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let seed_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "seed history".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(seed_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    primary.clear_message_buffer();

    let running_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "keep running".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(running_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/started"),
    )
    .await??;

    let resume_id = primary
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id,
            path: Some(PathBuf::from("/tmp/does-not-match-running-rollout.jsonl")),
            ..Default::default()
        })
        .await?;
    let resume_err: JSONRPCError = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_error_message(RequestId::Integer(resume_id)),
    )
    .await??;
    assert!(
        resume_err.error.message.contains("mismatched path"),
        "unexpected resume error: {}",
        resume_err.error.message
    );

    Ok(())
}

#[tokio::test]
async fn thread_resume_rejoins_running_thread_even_with_override_mismatch() -> Result<()> {
    let server = responses::start_mock_server().await;
    let first_body = responses::sse(vec![
        responses::ev_response_created("resp-1"),
        responses::ev_assistant_message("msg-1", "Done"),
        responses::ev_completed("resp-1"),
    ]);
    let second_body = responses::sse(vec![responses::ev_response_created("resp-2")]);
    let _response_mock =
        responses::mount_sse_sequence(&server, vec![first_body, second_body]).await;
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut primary = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, primary.initialize()).await??;

    let start_id = primary
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let seed_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "seed history".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(seed_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;
    primary.clear_message_buffer();

    let running_turn_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: "keep running".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(running_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/started"),
    )
    .await??;

    let resume_id = primary
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id.clone(),
            model: Some("not-the-running-model".to_string()),
            cwd: Some("/tmp".to_string()),
            ..Default::default()
        })
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse { model, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;
    assert_eq!(model, "gpt-5.1-codex-max");

    timeout(
        DEFAULT_READ_TIMEOUT,
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    Ok(())
}

#[tokio::test]
async fn thread_resume_with_overrides_defers_updated_at_until_turn_start() -> Result<()> {
    let server = create_mock_responses_server_repeating_assistant("Done").await;
    let codex_home = TempDir::new()?;
    let rollout = setup_rollout_fixture(codex_home.path(), &server.uri())?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
    let RestartedThreadFixture {
        mut mcp,
        thread_id,
        rollout_file_path,
    } = start_materialized_thread_and_restart(codex_home.path(), "materialize").await?;
    let expected_updated_at_rfc3339 = "2025-01-07T00:00:00Z";
    set_rollout_mtime(rollout_file_path.as_path(), expected_updated_at_rfc3339)?;
    let before_modified = std::fs::metadata(&rollout_file_path)?.modified()?;
    let expected_updated_at = chrono::DateTime::parse_from_rfc3339(expected_updated_at_rfc3339)?
        .with_timezone(&Utc)
        .timestamp();

    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: rollout.conversation_id.clone(),
            thread_id,
            model: Some("mock-model".to_string()),
            ..Default::default()
        })
@@ -230,16 +624,19 @@ async fn thread_resume_with_overrides_defers_updated_at_until_turn_start() -> Re
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let ThreadResumeResponse { thread, .. } = to_response::<ThreadResumeResponse>(resume_resp)?;
    let ThreadResumeResponse {
        thread: resumed_thread,
        ..
    } = to_response::<ThreadResumeResponse>(resume_resp)?;

    assert_eq!(thread.updated_at, rollout.expected_updated_at);
    assert_eq!(resumed_thread.updated_at, expected_updated_at);

    let after_resume_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert_eq!(after_resume_modified, rollout.before_modified);
    let after_resume_modified = std::fs::metadata(&rollout_file_path)?.modified()?;
    assert_eq!(after_resume_modified, before_modified);

    let turn_id = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: rollout.conversation_id,
            thread_id: resumed_thread.id,
            input: vec![UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
@@ -258,8 +655,8 @@ async fn thread_resume_with_overrides_defers_updated_at_until_turn_start() -> Re
    )
    .await??;

    let after_turn_modified = std::fs::metadata(&rollout.rollout_file_path)?.modified()?;
    assert!(after_turn_modified > rollout.before_modified);
    let after_turn_modified = std::fs::metadata(&rollout_file_path)?.modified()?;
    assert!(after_turn_modified > before_modified);

    Ok(())
}
@@ -374,22 +771,9 @@ async fn thread_resume_supports_history_and_overrides() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    // Start a thread.
    let start_id = mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;
    let RestartedThreadFixture {
        mut mcp, thread_id, ..
    } = start_materialized_thread_and_restart(codex_home.path(), "seed history").await?;

    let history_text = "Hello from history";
    let history = vec![ResponseItem::Message {
@@ -405,7 +789,7 @@ async fn thread_resume_supports_history_and_overrides() -> Result<()> {
    // Resume with explicit history and override the model.
    let resume_id = mcp
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id,
            thread_id,
            history: Some(history),
            model: Some("mock-model".to_string()),
            model_provider: Some("mock_provider".to_string()),
@@ -429,6 +813,70 @@ async fn thread_resume_supports_history_and_overrides() -> Result<()> {
    Ok(())
}

struct RestartedThreadFixture {
    mcp: McpProcess,
    thread_id: String,
    rollout_file_path: PathBuf,
}

async fn start_materialized_thread_and_restart(
    codex_home: &Path,
    seed_text: &str,
) -> Result<RestartedThreadFixture> {
    let mut first_mcp = McpProcess::new(codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, first_mcp.initialize()).await??;

    let start_id = first_mcp
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.1-codex-max".to_string()),
            ..Default::default()
        })
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        first_mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let materialize_turn_id = first_mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
                text: seed_text.to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        first_mcp.read_stream_until_response_message(RequestId::Integer(materialize_turn_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        first_mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let thread_id = thread.id;
    let rollout_file_path = thread
        .path
        .ok_or_else(|| anyhow::anyhow!("thread path missing from thread/start response"))?;

    drop(first_mcp);

    let mut second_mcp = McpProcess::new(codex_home).await?;
    timeout(DEFAULT_READ_TIMEOUT, second_mcp.initialize()).await??;

    Ok(RestartedThreadFixture {
        mcp: second_mcp,
        thread_id,
        rollout_file_path,
    })
}

#[tokio::test]
async fn thread_resume_accepts_personality_override() -> Result<()> {
    skip_if_no_network!(Ok(()));
@@ -449,10 +897,10 @@ async fn thread_resume_accepts_personality_override() -> Result<()> {
    let codex_home = TempDir::new()?;
    create_config_toml(codex_home.path(), &server.uri())?;

    let mut mcp = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
    let mut primary = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, primary.initialize()).await??;

    let start_id = mcp
    let start_id = primary
        .send_thread_start_request(ThreadStartParams {
            model: Some("gpt-5.2-codex".to_string()),
            ..Default::default()
@@ -460,12 +908,12 @@ async fn thread_resume_accepts_personality_override() -> Result<()> {
        .await?;
    let start_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(start_id)),
        primary.read_stream_until_response_message(RequestId::Integer(start_id)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(start_resp)?;

    let materialize_id = mcp
    let materialize_id = primary
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id.clone(),
            input: vec![UserInput::Text {
@@ -477,16 +925,19 @@ async fn thread_resume_accepts_personality_override() -> Result<()> {
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(materialize_id)),
        primary.read_stream_until_response_message(RequestId::Integer(materialize_id)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
        primary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let resume_id = mcp
    let mut secondary = McpProcess::new(codex_home.path()).await?;
    timeout(DEFAULT_READ_TIMEOUT, secondary.initialize()).await??;

    let resume_id = secondary
        .send_thread_resume_request(ThreadResumeParams {
            thread_id: thread.id,
            model: Some("gpt-5.2-codex".to_string()),
@@ -496,12 +947,12 @@ async fn thread_resume_accepts_personality_override() -> Result<()> {
        .await?;
    let resume_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(resume_id)),
        secondary.read_stream_until_response_message(RequestId::Integer(resume_id)),
    )
    .await??;
    let resume: ThreadResumeResponse = to_response::<ThreadResumeResponse>(resume_resp)?;

    let turn_id = mcp
    let turn_id = secondary
        .send_turn_start_request(TurnStartParams {
            thread_id: resume.thread.id,
            input: vec![UserInput::Text {
@@ -513,13 +964,13 @@ async fn thread_resume_accepts_personality_override() -> Result<()> {
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_id)),
        secondary.read_stream_until_response_message(RequestId::Integer(turn_id)),
    )
    .await??;

    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
        secondary.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

@@ -9,7 +9,6 @@ use app_test_support::create_mock_responses_server_sequence_unchecked
use app_test_support::create_shell_command_sse_response;
use app_test_support::format_with_current_shell_display;
use app_test_support::to_response;
use app_test_support::write_models_cache_with_slug_for_originator;
use codex_app_server_protocol::ByteRange;
use codex_app_server_protocol::ClientInfo;
use codex_app_server_protocol::CommandExecutionApprovalDecision;
@@ -60,7 +59,6 @@ const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs
const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);
const TEST_ORIGINATOR: &str = "codex_vscode";
const LOCAL_PRAGMATIC_TEMPLATE: &str = "You are a deeply pragmatic, effective software engineer.";
const APP_SERVER_CACHE_ORIGINATOR: &str = "codex_app_server_cache_e2e";

#[tokio::test]
async fn turn_start_sends_originator_header() -> Result<()> {
@@ -137,89 +135,6 @@ async fn turn_start_sends_originator_header() -> Result<()> {
    Ok(())
}

#[tokio::test]
async fn turn_start_uses_originator_scoped_cache_slug() -> Result<()> {
    let responses = vec![create_final_assistant_message_sse_response("Done")?];
    let server = create_mock_responses_server_sequence_unchecked(responses).await;

    let codex_home = TempDir::new()?;
    create_config_toml(
        codex_home.path(),
        &server.uri(),
        "never",
        &BTreeMap::from([(Feature::Personality, true)]),
    )?;
    let cached_slug = "app-server-cache-slug-e2e";
    write_models_cache_with_slug_for_originator(
        codex_home.path(),
        APP_SERVER_CACHE_ORIGINATOR,
        cached_slug,
    )?;

    let mut mcp = McpProcess::new_with_env(
        codex_home.path(),
        &[(
            codex_core::default_client::CODEX_INTERNAL_ORIGINATOR_OVERRIDE_ENV_VAR,
            Some(APP_SERVER_CACHE_ORIGINATOR),
        )],
    )
    .await?;
    timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;

    let thread_req = mcp
        .send_thread_start_request(ThreadStartParams::default())
        .await?;
    let thread_resp: JSONRPCResponse = timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(thread_req)),
    )
    .await??;
    let ThreadStartResponse { thread, .. } = to_response::<ThreadStartResponse>(thread_resp)?;

    let turn_req = mcp
        .send_turn_start_request(TurnStartParams {
            thread_id: thread.id,
            input: vec![V2UserInput::Text {
                text: "Hello".to_string(),
                text_elements: Vec::new(),
            }],
            ..Default::default()
        })
        .await?;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_response_message(RequestId::Integer(turn_req)),
    )
    .await??;
    timeout(
        DEFAULT_READ_TIMEOUT,
        mcp.read_stream_until_notification_message("turn/completed"),
    )
    .await??;

    let requests = server
        .received_requests()
        .await
        .expect("failed to fetch received requests");
    let response_request = requests
        .into_iter()
        .find(|request| request.url.path().ends_with("/responses"))
        .expect("expected /responses request");
    let body: serde_json::Value = serde_json::from_slice(&response_request.body)
        .expect("responses request body should be json");
    assert_eq!(body["model"].as_str(), Some(cached_slug));
    assert!(
        codex_home
            .path()
            .join("models_cache")
            .join(APP_SERVER_CACHE_ORIGINATOR)
            .join("models_cache.json")
            .exists()
    );

    Ok(())
}

#[tokio::test]
async fn turn_start_emits_user_message_item_with_text_elements() -> Result<()> {
    let responses = vec![create_final_assistant_message_sse_response("Done")?];

@@ -12,6 +12,8 @@ const LINUX_SANDBOX_ARG0: &str = "codex-linux-sandbox";
const APPLY_PATCH_ARG0: &str = "apply_patch";
const MISSPELLED_APPLY_PATCH_ARG0: &str = "applypatch";
const LOCK_FILENAME: &str = ".lock";
#[cfg(target_os = "windows")]
const WINDOWS_TOKIO_WORKER_STACK_SIZE_BYTES: usize = 16 * 1024 * 1024;

/// Keeps the per-session PATH entry alive and locked for the process lifetime.
pub struct Arg0PathEntryGuard {
@@ -112,7 +114,7 @@ where

    // Regular invocation – create a Tokio runtime and execute the provided
    // async entry-point.
    let runtime = tokio::runtime::Runtime::new()?;
    let runtime = build_runtime()?;
    runtime.block_on(async move {
        let codex_linux_sandbox_exe: Option<PathBuf> = if cfg!(target_os = "linux") {
            std::env::current_exe().ok()
@@ -124,6 +126,18 @@ where
    })
}

fn build_runtime() -> anyhow::Result<tokio::runtime::Runtime> {
    let mut builder = tokio::runtime::Builder::new_multi_thread();
    builder.enable_all();
    #[cfg(target_os = "windows")]
    {
        // Defensive hardening: Windows worker threads have lower effective
        // stack headroom, so use a larger stack for runtime workers.
        builder.thread_stack_size(WINDOWS_TOKIO_WORKER_STACK_SIZE_BYTES);
    }
    Ok(builder.build()?)
}

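// Editor's sketch, not part of the diff: the same builder pattern in
// isolation. The only assumption beyond the code above is Tokio's documented
// 2 MiB default worker-thread stack, which is what the 16 MiB override
// guards against on Windows.
//
// let runtime = tokio::runtime::Builder::new_multi_thread()
//     .enable_all()
//     .thread_stack_size(16 * 1024 * 1024) // WINDOWS_TOKIO_WORKER_STACK_SIZE_BYTES
//     .build()?;
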
const ILLEGAL_ENV_VAR_PREFIX: &str = "CODEX_";

/// Load env vars from ~/.codex/.env.

@@ -1,4 +1,5 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::sync::LazyLock;
use std::sync::Mutex as StdMutex;

@@ -19,7 +20,9 @@ pub use codex_core::connectors::connector_display_label;
use codex_core::connectors::connector_install_url;
pub use codex_core::connectors::list_accessible_connectors_from_mcp_tools;
pub use codex_core::connectors::list_accessible_connectors_from_mcp_tools_with_options;
pub use codex_core::connectors::list_cached_accessible_connectors_from_mcp_tools;
use codex_core::connectors::merge_connectors;
pub use codex_core::connectors::with_app_enabled_state;

#[derive(Debug, Deserialize)]
struct DirectoryListResponse {
@@ -72,13 +75,32 @@ pub async fn list_connectors(config: &Config) -> anyhow::Result<Vec<AppInfo>> {
    );
    let connectors = connectors_result?;
    let accessible = accessible_result?;
    Ok(merge_connectors_with_accessible(connectors, accessible))
    Ok(with_app_enabled_state(
        merge_connectors_with_accessible(connectors, accessible, true),
        config,
    ))
}

pub async fn list_all_connectors(config: &Config) -> anyhow::Result<Vec<AppInfo>> {
    list_all_connectors_with_options(config, false).await
}

pub async fn list_cached_all_connectors(config: &Config) -> Option<Vec<AppInfo>> {
    if !config.features.enabled(Feature::Apps) {
        return Some(Vec::new());
    }

    if init_chatgpt_token_from_auth(&config.codex_home, config.cli_auth_credentials_store_mode)
        .await
        .is_err()
    {
        return None;
    }
    let token_data = get_chatgpt_token_data()?;
    let cache_key = all_connectors_cache_key(config, &token_data);
    read_cached_all_connectors(&cache_key)
}

pub async fn list_all_connectors_with_options(
    config: &Config,
    force_refetch: bool,
@@ -164,7 +186,20 @@ fn write_cached_all_connectors(cache_key: AllConnectorsCacheKey, connectors: &[A
pub fn merge_connectors_with_accessible(
    connectors: Vec<AppInfo>,
    accessible_connectors: Vec<AppInfo>,
    all_connectors_loaded: bool,
) -> Vec<AppInfo> {
    let accessible_connectors = if all_connectors_loaded {
        let connector_ids: HashSet<&str> = connectors
            .iter()
            .map(|connector| connector.id.as_str())
            .collect();
        accessible_connectors
            .into_iter()
            .filter(|connector| connector_ids.contains(connector.id.as_str()))
            .collect()
    } else {
        accessible_connectors
    };
    let merged = merge_connectors(connectors, accessible_connectors);
    filter_disallowed_connectors(merged)
}
@@ -283,6 +318,7 @@ fn directory_app_to_app_info(app: DirectoryApp) -> AppInfo {
        distribution_channel: app.distribution_channel,
        install_url: None,
        is_accessible: false,
        is_enabled: true,
    }
}

@@ -341,6 +377,7 @@ mod tests {
            distribution_channel: None,
            install_url: None,
            is_accessible: false,
            is_enabled: true,
        }
    }

@@ -383,4 +420,41 @@ mod tests {
        ]);
        assert_eq!(filtered, vec![app("delta")]);
    }

    fn merged_app(id: &str, is_accessible: bool) -> AppInfo {
        AppInfo {
            id: id.to_string(),
            name: id.to_string(),
            description: None,
            logo_url: None,
            logo_url_dark: None,
            distribution_channel: None,
            install_url: Some(connector_install_url(id, id)),
            is_accessible,
            is_enabled: true,
        }
    }

    #[test]
    fn excludes_accessible_connectors_not_in_all_when_all_loaded() {
        let merged = merge_connectors_with_accessible(
            vec![app("alpha")],
            vec![app("alpha"), app("beta")],
            true,
        );
        assert_eq!(merged, vec![merged_app("alpha", true)]);
    }

    #[test]
    fn keeps_accessible_connectors_not_in_all_while_all_loading() {
        let merged = merge_connectors_with_accessible(
            vec![app("alpha")],
            vec![app("alpha"), app("beta")],
            false,
        );
        assert_eq!(
            merged,
            vec![merged_app("alpha", true), merged_app("beta", true)]
        );
    }
}

@@ -130,7 +130,7 @@ async fn run_command_under_sandbox(
    let sandbox_policy_cwd = cwd.clone();

    let stdio_policy = StdioPolicy::Inherit;
    let env = create_env(&config.shell_environment_policy, None);
    let env = create_env(&config.permissions.shell_environment_policy, None);

    // Special-case Windows sandbox: execute and exit the process to emulate inherited stdio.
    if let SandboxType::Windows = sandbox_type {
@@ -141,7 +141,7 @@ async fn run_command_under_sandbox(
        use codex_windows_sandbox::run_windows_sandbox_capture;
        use codex_windows_sandbox::run_windows_sandbox_capture_elevated;

        let policy_str = serde_json::to_string(config.sandbox_policy.get())?;
        let policy_str = serde_json::to_string(config.permissions.sandbox_policy.get())?;

        let sandbox_cwd = sandbox_policy_cwd.clone();
        let cwd_clone = cwd.clone();
@@ -213,12 +213,19 @@ async fn run_command_under_sandbox(
    #[cfg(not(target_os = "macos"))]
    let _ = log_denials;

    let managed_network_requirements_enabled = config.managed_network_requirements_enabled();

    // This proxy should only live for the lifetime of the child process.
    let network_proxy = match config.network.as_ref() {
    let network_proxy = match config.permissions.network.as_ref() {
        Some(spec) => Some(
            spec.start_proxy()
                .await
                .map_err(|err| anyhow::anyhow!("failed to start managed network proxy: {err}"))?,
            spec.start_proxy(
                config.permissions.sandbox_policy.get(),
                None,
                None,
                managed_network_requirements_enabled,
            )
            .await
            .map_err(|err| anyhow::anyhow!("failed to start managed network proxy: {err}"))?,
        ),
        None => None,
    };
@@ -232,7 +239,7 @@ async fn run_command_under_sandbox(
            spawn_command_under_seatbelt(
                command,
                cwd,
                config.sandbox_policy.get(),
                config.permissions.sandbox_policy.get(),
                sandbox_policy_cwd.as_path(),
                stdio_policy,
                network.as_ref(),
@@ -251,7 +258,7 @@ async fn run_command_under_sandbox(
                codex_linux_sandbox_exe,
                command,
                cwd,
                config.sandbox_policy.get(),
                config.permissions.sandbox_policy.get(),
                sandbox_policy_cwd.as_path(),
                use_bwrap_sandbox,
                stdio_policy,

@@ -93,10 +93,10 @@ enum Subcommand {
    /// Remove stored authentication credentials.
    Logout(LogoutCommand),

    /// [experimental] Run Codex as an MCP server and manage MCP servers.
    /// Manage external MCP servers for Codex.
    Mcp(McpCli),

    /// [experimental] Run the Codex MCP server (stdio transport).
    /// Start Codex as an MCP server (stdio).
    McpServer,

    /// [experimental] Run the app server or related tooling.

@@ -56,6 +56,9 @@ pub enum ResponseEvent {
    Created,
    OutputItemDone(ResponseItem),
    OutputItemAdded(ResponseItem),
    /// Emitted when the server includes `OpenAI-Model` on the stream response.
    /// This can differ from the requested model when backend safety routing applies.
    ServerModel(String),
    /// Emitted when `X-Reasoning-Included: true` is present on the response,
    /// meaning the server already accounted for past reasoning tokens and the
    /// client should not re-estimate them.

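// Editor's sketch, not part of the diff: how a stream consumer might handle
// the new variant. `ResponseEvent` is the enum above; `handle_other` is a
// hypothetical stand-in for the consumer's existing event handling.
//
// match event {
//     ResponseEvent::ServerModel(model) => info!("server routed request to {model}"),
//     other => handle_other(other),
// }
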
@@ -63,6 +63,9 @@ impl Stream for AggregatedStream {
            Poll::Ready(Some(Ok(ResponseEvent::ModelsEtag(etag)))) => {
                return Poll::Ready(Some(Ok(ResponseEvent::ModelsEtag(etag))));
            }
            Poll::Ready(Some(Ok(ResponseEvent::ServerModel(model)))) => {
                return Poll::Ready(Some(Ok(ResponseEvent::ServerModel(model))));
            }
            Poll::Ready(Some(Ok(ResponseEvent::Completed {
                response_id,
                token_usage,

@@ -163,6 +163,7 @@ impl Drop for WsStream {
const X_CODEX_TURN_STATE_HEADER: &str = "x-codex-turn-state";
const X_MODELS_ETAG_HEADER: &str = "x-models-etag";
const X_REASONING_INCLUDED_HEADER: &str = "x-reasoning-included";
const OPENAI_MODEL_HEADER: &str = "openai-model";

pub struct ResponsesWebsocketConnection {
    stream: Arc<Mutex<Option<WsStream>>>,
@@ -170,6 +171,7 @@ pub struct ResponsesWebsocketConnection {
    idle_timeout: Duration,
    server_reasoning_included: bool,
    models_etag: Option<String>,
    server_model: Option<String>,
    telemetry: Option<Arc<dyn WebsocketTelemetry>>,
}

@@ -179,6 +181,7 @@ impl ResponsesWebsocketConnection {
        idle_timeout: Duration,
        server_reasoning_included: bool,
        models_etag: Option<String>,
        server_model: Option<String>,
        telemetry: Option<Arc<dyn WebsocketTelemetry>>,
    ) -> Self {
        Self {
@@ -186,6 +189,7 @@ impl ResponsesWebsocketConnection {
            idle_timeout,
            server_reasoning_included,
            models_etag,
            server_model,
            telemetry,
        }
    }
@@ -204,12 +208,16 @@ impl ResponsesWebsocketConnection {
        let idle_timeout = self.idle_timeout;
        let server_reasoning_included = self.server_reasoning_included;
        let models_etag = self.models_etag.clone();
        let server_model = self.server_model.clone();
        let telemetry = self.telemetry.clone();
        let request_body = serde_json::to_value(&request).map_err(|err| {
            ApiError::Stream(format!("failed to encode websocket request: {err}"))
        })?;

        tokio::spawn(async move {
            if let Some(model) = server_model {
                let _ = tx_event.send(Ok(ResponseEvent::ServerModel(model))).await;
            }
            if let Some(etag) = models_etag {
                let _ = tx_event.send(Ok(ResponseEvent::ModelsEtag(etag))).await;
            }
@@ -273,13 +281,14 @@ impl<A: AuthProvider> ResponsesWebsocketClient<A> {
            merge_request_headers(&self.provider.headers, extra_headers, default_headers);
        add_auth_headers_to_header_map(&self.auth, &mut headers);

        let (stream, server_reasoning_included, models_etag) =
        let (stream, server_reasoning_included, models_etag, server_model) =
            connect_websocket(ws_url, headers, turn_state.clone()).await?;
        Ok(ResponsesWebsocketConnection::new(
            stream,
            self.provider.stream_idle_timeout,
            server_reasoning_included,
            models_etag,
            server_model,
            telemetry,
        ))
    }
@@ -304,7 +313,7 @@ async fn connect_websocket(
    url: Url,
    headers: HeaderMap,
    turn_state: Option<Arc<OnceLock<String>>>,
) -> Result<(WsStream, bool, Option<String>), ApiError> {
) -> Result<(WsStream, bool, Option<String>, Option<String>), ApiError> {
    ensure_rustls_crypto_provider();
    info!("connecting to websocket: {url}");

@@ -341,6 +350,11 @@ async fn connect_websocket(
        .get(X_MODELS_ETAG_HEADER)
        .and_then(|value| value.to_str().ok())
        .map(ToString::to_string);
    let server_model = response
        .headers()
        .get(OPENAI_MODEL_HEADER)
        .and_then(|value| value.to_str().ok())
        .map(ToString::to_string);
    if let Some(turn_state) = turn_state
        && let Some(header_value) = response
            .headers()
@@ -349,7 +363,12 @@ async fn connect_websocket(
    {
        let _ = turn_state.set(header_value.to_string());
    }
    Ok((WsStream::new(stream), reasoning_included, models_etag))
    Ok((
        WsStream::new(stream),
        reasoning_included,
        models_etag,
        server_model,
    ))
}

fn websocket_config() -> WebSocketConfig {
@@ -469,6 +488,7 @@ async fn run_websocket_response_stream(
    idle_timeout: Duration,
    telemetry: Option<Arc<dyn WebsocketTelemetry>>,
) -> Result<(), ApiError> {
    let mut last_server_model: Option<String> = None;
    let request_text = match serde_json::to_string(&request_body) {
        Ok(text) => text,
        Err(err) => {
@@ -536,6 +556,14 @@ async fn run_websocket_response_stream(
                }
                continue;
            }
            if let Some(model) = event.response_model()
                && last_server_model.as_deref() != Some(model.as_str())
            {
                let _ = tx_event
                    .send(Ok(ResponseEvent::ServerModel(model.clone())))
                    .await;
                last_server_model = Some(model);
            }
            match process_responses_event(event) {
                Ok(Some(event)) => {
                    let is_completed = matches!(event, ResponseEvent::Completed { .. });

@@ -17,6 +17,9 @@ pub(crate) fn subagent_header(source: &Option<SessionSource>) -> Option<String>
        match sub {
            codex_protocol::protocol::SubAgentSource::Review => Some("review".to_string()),
            codex_protocol::protocol::SubAgentSource::Compact => Some("compact".to_string()),
            codex_protocol::protocol::SubAgentSource::MemoryConsolidation => {
                Some("memory_consolidation".to_string())
            }
            codex_protocol::protocol::SubAgentSource::ThreadSpawn { .. } => {
                Some("collab_spawn".to_string())
            }

@@ -26,6 +26,7 @@ use tracing::debug;
use tracing::trace;

const X_REASONING_INCLUDED_HEADER: &str = "x-reasoning-included";
const OPENAI_MODEL_HEADER: &str = "openai-model";

/// Streams SSE events from an on-disk fixture for tests.
pub fn stream_from_fixture(
@@ -60,6 +61,11 @@ pub fn spawn_response_stream(
        .get("X-Models-Etag")
        .and_then(|v| v.to_str().ok())
        .map(ToString::to_string);
    let server_model = stream_response
        .headers
        .get(OPENAI_MODEL_HEADER)
        .and_then(|v| v.to_str().ok())
        .map(ToString::to_string);
    let reasoning_included = stream_response
        .headers
        .get(X_REASONING_INCLUDED_HEADER)
@@ -74,6 +80,9 @@ pub fn spawn_response_stream(
    }
    let (tx_event, rx_event) = mpsc::channel::<Result<ResponseEvent, ApiError>>(1600);
    tokio::spawn(async move {
        if let Some(model) = server_model {
            let _ = tx_event.send(Ok(ResponseEvent::ServerModel(model))).await;
        }
        for snapshot in rate_limit_snapshots {
            let _ = tx_event.send(Ok(ResponseEvent::RateLimits(snapshot))).await;
        }
@@ -169,6 +178,41 @@ impl ResponsesStreamEvent {
    pub fn kind(&self) -> &str {
        &self.kind
    }

    pub fn response_model(&self) -> Option<String> {
        self.response.as_ref().and_then(extract_server_model)
    }
}

fn extract_server_model(value: &Value) -> Option<String> {
    value
        .get("model")
        .and_then(json_value_as_string)
        .or_else(|| {
            value
                .get("headers")
                .and_then(header_openai_model_value_from_json)
        })
}

fn header_openai_model_value_from_json(value: &Value) -> Option<String> {
    let headers = value.as_object()?;
    headers.iter().find_map(|(name, value)| {
        if name.eq_ignore_ascii_case("openai-model") || name.eq_ignore_ascii_case("x-openai-model")
        {
            json_value_as_string(value)
        } else {
            None
        }
    })
}

fn json_value_as_string(value: &Value) -> Option<String> {
    match value {
        Value::String(value) => Some(value.clone()),
        Value::Array(items) => items.first().and_then(json_value_as_string),
        _ => None,
    }
}

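// Editor's note, not part of the diff: the extraction above prefers the
// payload's top-level "model" field, then falls back to a case-insensitive
// "openai-model" / "x-openai-model" entry in a "headers" map, taking the
// first element when the header value is an array. Illustrative payloads
// (values are examples only):
//
//   {"id": "resp-1", "model": "gpt-5.3-codex"}
//   {"id": "resp-1", "headers": {"OpenAI-Model": ["gpt-5.3-codex"]}}
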
#[derive(Debug)]
@@ -339,6 +383,7 @@ pub async fn process_sse(
) {
    let mut stream = stream.eventsource();
    let mut response_error: Option<ApiError> = None;
    let mut last_server_model: Option<String> = None;

    loop {
        let start = Instant::now();
@@ -378,6 +423,19 @@ pub async fn process_sse(
            }
        };

        if let Some(model) = event.response_model()
            && last_server_model.as_deref() != Some(model.as_str())
        {
            if tx_event
                .send(Ok(ResponseEvent::ServerModel(model.clone())))
                .await
                .is_err()
            {
                return;
            }
            last_server_model = Some(model);
        }

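        // Editor's note, not part of the diff: tracking last_server_model means a
        // ServerModel event is emitted only when the advertised model changes, so
        // a mid-stream reroute (e.g. safety routing to a different model) yields a
        // second event; the last test added below exercises exactly that.
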
        match process_responses_event(event) {
            Ok(Some(event)) => {
                let is_completed = matches!(event, ResponseEvent::Completed { .. });
@@ -456,9 +514,13 @@ mod tests {
    use super::*;
    use assert_matches::assert_matches;
    use bytes::Bytes;
    use codex_client::StreamResponse;
    use codex_protocol::models::MessagePhase;
    use codex_protocol::models::ResponseItem;
    use futures::stream;
    use http::HeaderMap;
    use http::HeaderValue;
    use http::StatusCode;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use tokio::sync::mpsc;
@@ -870,6 +932,149 @@ mod tests {
        }
    }

    #[tokio::test]
    async fn spawn_response_stream_emits_server_model_header() {
        let mut headers = HeaderMap::new();
        headers.insert(
            OPENAI_MODEL_HEADER,
            HeaderValue::from_static(CYBER_RESTRICTED_MODEL_FOR_TESTS),
        );
        let bytes = stream::iter(Vec::<Result<Bytes, TransportError>>::new());
        let stream_response = StreamResponse {
            status: StatusCode::OK,
            headers,
            bytes: Box::pin(bytes),
        };

        let mut stream = spawn_response_stream(stream_response, idle_timeout(), None, None);
        let event = stream
            .rx_event
            .recv()
            .await
            .expect("expected server model event")
            .expect("expected ok event");

        match event {
            ResponseEvent::ServerModel(model) => {
                assert_eq!(model, CYBER_RESTRICTED_MODEL_FOR_TESTS);
            }
            other => panic!("expected server model event, got {other:?}"),
        }
    }

    #[tokio::test]
    async fn process_sse_emits_server_model_from_response_payload() {
        let events = run_sse(vec![
            json!({
                "type": "response.created",
                "response": {
                    "id": "resp-1",
                    "model": CYBER_RESTRICTED_MODEL_FOR_TESTS
                }
            }),
            json!({
                "type": "response.completed",
                "response": {
                    "id": "resp-1",
                    "model": CYBER_RESTRICTED_MODEL_FOR_TESTS
                }
            }),
        ])
        .await;

        assert_eq!(events.len(), 3);
        assert_matches!(
            &events[0],
            ResponseEvent::ServerModel(model) if model == CYBER_RESTRICTED_MODEL_FOR_TESTS
        );
        assert_matches!(&events[1], ResponseEvent::Created);
        assert_matches!(
            &events[2],
            ResponseEvent::Completed {
                response_id,
                token_usage: None,
                can_append: false
            } if response_id == "resp-1"
        );
    }

    #[tokio::test]
    async fn process_sse_emits_server_model_from_response_headers_payload() {
        let events = run_sse(vec![
            json!({
                "type": "response.created",
                "response": {
                    "id": "resp-1",
                    "headers": {
                        "OpenAI-Model": CYBER_RESTRICTED_MODEL_FOR_TESTS
                    }
                }
            }),
            json!({
                "type": "response.completed",
                "response": {
                    "id": "resp-1"
                }
            }),
        ])
        .await;

        assert_eq!(events.len(), 3);
        assert_matches!(
            &events[0],
            ResponseEvent::ServerModel(model) if model == CYBER_RESTRICTED_MODEL_FOR_TESTS
        );
        assert_matches!(&events[1], ResponseEvent::Created);
        assert_matches!(
            &events[2],
            ResponseEvent::Completed {
                response_id,
                token_usage: None,
                can_append: false
            } if response_id == "resp-1"
        );
    }

    #[tokio::test]
    async fn process_sse_emits_server_model_again_when_response_model_changes() {
        let events = run_sse(vec![
            json!({
                "type": "response.created",
                "response": {
                    "id": "resp-1",
                    "model": "gpt-5.2-codex"
                }
            }),
            json!({
                "type": "response.completed",
                "response": {
                    "id": "resp-1",
                    "model": "gpt-5.3-codex"
                }
            }),
        ])
        .await;

        assert_eq!(events.len(), 4);
        assert_matches!(
            &events[0],
            ResponseEvent::ServerModel(model) if model == "gpt-5.2-codex"
        );
        assert_matches!(&events[1], ResponseEvent::Created);
        assert_matches!(
            &events[2],
            ResponseEvent::ServerModel(model) if model == "gpt-5.3-codex"
        );
        assert_matches!(
            &events[3],
            ResponseEvent::Completed {
                response_id,
                token_usage: None,
                can_append: false
            } if response_id == "resp-1"
        );
    }

    #[test]
    fn test_try_parse_retry_after() {
        let err = Error {
@@ -909,4 +1114,6 @@ mod tests {
        let delay = try_parse_retry_after(&err);
        assert_eq!(delay, Some(Duration::from_secs(35)));
    }

    const CYBER_RESTRICTED_MODEL_FOR_TESTS: &str = "gpt-5.3-codex";
}

Some files were not shown because too many files have changed in this diff.