mirror of
https://github.com/openai/codex.git
synced 2026-04-17 03:04:47 +00:00
Compare commits
19 Commits
dev/ningyi
...
etraut/mes
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7232276c36 | ||
|
|
bd66ec7c15 | ||
|
|
a015bfed5b | ||
|
|
b7f72a0dc4 | ||
|
|
b1236df88e | ||
|
|
e720beeef4 | ||
|
|
8440e89c46 | ||
|
|
ee9cfc1981 | ||
|
|
544e8385ea | ||
|
|
461b7017e6 | ||
|
|
645f61e06a | ||
|
|
480212edad | ||
|
|
b66021cf3e | ||
|
|
5159895879 | ||
|
|
2a40e791e8 | ||
|
|
70b7a3700c | ||
|
|
75b389665a | ||
|
|
09b222272b | ||
|
|
5d482b54e9 |
4
MODULE.bazel.lock
generated
4
MODULE.bazel.lock
generated
@@ -700,6 +700,7 @@
|
||||
"chacha20_0.9.1": "{\"dependencies\":[{\"name\":\"cfg-if\",\"req\":\"^1\"},{\"name\":\"cipher\",\"req\":\"^0.4.4\"},{\"features\":[\"dev\"],\"kind\":\"dev\",\"name\":\"cipher\",\"req\":\"^0.4.4\"},{\"name\":\"cpufeatures\",\"req\":\"^0.2\",\"target\":\"cfg(any(target_arch = \\\"x86_64\\\", target_arch = \\\"x86\\\"))\"},{\"kind\":\"dev\",\"name\":\"hex-literal\",\"req\":\"^0.3.3\"}],\"features\":{\"std\":[\"cipher/std\"],\"zeroize\":[\"cipher/zeroize\"]}}",
|
||||
"chacha20poly1305_0.10.1": "{\"dependencies\":[{\"default_features\":false,\"name\":\"aead\",\"req\":\"^0.5\"},{\"default_features\":false,\"features\":[\"dev\"],\"kind\":\"dev\",\"name\":\"aead\",\"req\":\"^0.5\"},{\"features\":[\"zeroize\"],\"name\":\"chacha20\",\"req\":\"^0.9\"},{\"name\":\"cipher\",\"req\":\"^0.4\"},{\"name\":\"poly1305\",\"req\":\"^0.8\"},{\"default_features\":false,\"name\":\"zeroize\",\"req\":\"^1.5\"}],\"features\":{\"alloc\":[\"aead/alloc\"],\"default\":[\"alloc\",\"getrandom\"],\"getrandom\":[\"aead/getrandom\",\"rand_core\"],\"heapless\":[\"aead/heapless\"],\"rand_core\":[\"aead/rand_core\"],\"reduced-round\":[],\"std\":[\"aead/std\",\"alloc\"],\"stream\":[\"aead/stream\"]}}",
|
||||
"chardetng_0.1.17": "{\"dependencies\":[{\"name\":\"arrayvec\",\"optional\":true,\"req\":\"^0.5.1\"},{\"name\":\"cfg-if\",\"req\":\"^1.0\"},{\"kind\":\"dev\",\"name\":\"detone\",\"req\":\"^1.0.0\"},{\"default_features\":false,\"name\":\"encoding_rs\",\"req\":\"^0.8.29\"},{\"default_features\":false,\"name\":\"memchr\",\"req\":\"^2.2.0\"},{\"name\":\"rayon\",\"optional\":true,\"req\":\"^1.3.0\"}],\"features\":{\"multithreading\":[\"rayon\",\"arrayvec\"],\"testing-only-no-semver-guarantees-do-not-use\":[]}}",
|
||||
"chrono-tz_0.10.4": "{\"dependencies\":[{\"features\":[\"derive\"],\"name\":\"arbitrary\",\"optional\":true,\"req\":\"^1.2\"},{\"default_features\":false,\"name\":\"chrono\",\"req\":\"^0.4.25\"},{\"default_features\":false,\"features\":[\"alloc\"],\"kind\":\"dev\",\"name\":\"chrono\",\"req\":\"^0.4\"},{\"kind\":\"build\",\"name\":\"chrono-tz-build\",\"optional\":true,\"req\":\"^0.5\"},{\"kind\":\"dev\",\"name\":\"chrono-tz-build\",\"req\":\"^0.5\"},{\"default_features\":false,\"name\":\"phf\",\"req\":\"^0.12\"},{\"default_features\":false,\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0.99\"},{\"kind\":\"dev\",\"name\":\"serde_test\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"tzfile\",\"req\":\"^0.1\"},{\"default_features\":false,\"name\":\"uncased\",\"optional\":true,\"req\":\"^0.9\"}],\"features\":{\"case-insensitive\":[\"dep:uncased\",\"chrono-tz-build\",\"chrono-tz-build/case-insensitive\",\"phf/uncased\"],\"default\":[\"std\"],\"filter-by-regex\":[\"chrono-tz-build\",\"chrono-tz-build/filter-by-regex\"],\"serde\":[\"dep:serde\"],\"std\":[]}}",
|
||||
"chrono_0.4.43": "{\"dependencies\":[{\"features\":[\"derive\"],\"name\":\"arbitrary\",\"optional\":true,\"req\":\"^1.0.0\"},{\"kind\":\"dev\",\"name\":\"bincode\",\"req\":\"^1.3.0\"},{\"name\":\"defmt\",\"optional\":true,\"req\":\"^1.0.1\"},{\"features\":[\"fallback\"],\"name\":\"iana-time-zone\",\"optional\":true,\"req\":\"^0.1.45\",\"target\":\"cfg(unix)\"},{\"name\":\"js-sys\",\"optional\":true,\"req\":\"^0.3\",\"target\":\"cfg(all(target_arch = \\\"wasm32\\\", not(any(target_os = \\\"emscripten\\\", target_os = \\\"wasi\\\"))))\"},{\"default_features\":false,\"name\":\"num-traits\",\"req\":\"^0.2\"},{\"name\":\"pure-rust-locales\",\"optional\":true,\"req\":\"^0.8.2\"},{\"default_features\":false,\"name\":\"rkyv\",\"optional\":true,\"req\":\"^0.7.43\"},{\"default_features\":false,\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0.99\"},{\"default_features\":false,\"kind\":\"dev\",\"name\":\"serde_derive\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"similar-asserts\",\"req\":\"^1.6.1\"},{\"name\":\"wasm-bindgen\",\"optional\":true,\"req\":\"^0.2\",\"target\":\"cfg(all(target_arch = \\\"wasm32\\\", not(any(target_os = \\\"emscripten\\\", target_os = \\\"wasi\\\"))))\"},{\"kind\":\"dev\",\"name\":\"wasm-bindgen-test\",\"req\":\"^0.3\",\"target\":\"cfg(all(target_arch = \\\"wasm32\\\", not(any(target_os = \\\"emscripten\\\", target_os = 
\\\"wasi\\\"))))\"},{\"kind\":\"dev\",\"name\":\"windows-bindgen\",\"req\":\"^0.66\"},{\"name\":\"windows-link\",\"optional\":true,\"req\":\"^0.2\",\"target\":\"cfg(windows)\"}],\"features\":{\"__internal_bench\":[],\"alloc\":[],\"clock\":[\"winapi\",\"iana-time-zone\",\"now\"],\"core-error\":[],\"default\":[\"clock\",\"std\",\"oldtime\",\"wasmbind\"],\"defmt\":[\"dep:defmt\",\"pure-rust-locales?/defmt\"],\"libc\":[],\"now\":[\"std\"],\"oldtime\":[],\"rkyv\":[\"dep:rkyv\",\"rkyv/size_32\"],\"rkyv-16\":[\"dep:rkyv\",\"rkyv?/size_16\"],\"rkyv-32\":[\"dep:rkyv\",\"rkyv?/size_32\"],\"rkyv-64\":[\"dep:rkyv\",\"rkyv?/size_64\"],\"rkyv-validation\":[\"rkyv?/validation\"],\"std\":[\"alloc\"],\"unstable-locales\":[\"pure-rust-locales\"],\"wasmbind\":[\"wasm-bindgen\",\"js-sys\"],\"winapi\":[\"windows-link\"]}}",
|
||||
"chunked_transfer_1.5.0": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"criterion\",\"req\":\"^0.3\"}],\"features\":{}}",
|
||||
"cipher_0.4.4": "{\"dependencies\":[{\"name\":\"blobby\",\"optional\":true,\"req\":\"^0.3\"},{\"name\":\"crypto-common\",\"req\":\"^0.1.6\"},{\"name\":\"inout\",\"req\":\"^0.1\"},{\"default_features\":false,\"name\":\"zeroize\",\"optional\":true,\"req\":\"^1.5\"}],\"features\":{\"alloc\":[],\"block-padding\":[\"inout/block-padding\"],\"dev\":[\"blobby\"],\"rand_core\":[\"crypto-common/rand_core\"],\"std\":[\"alloc\",\"crypto-common/std\",\"inout/std\"]}}",
|
||||
@@ -1166,7 +1167,9 @@
|
||||
"percent-encoding_2.3.2": "{\"dependencies\":[],\"features\":{\"alloc\":[],\"default\":[\"std\"],\"std\":[\"alloc\"]}}",
|
||||
"petgraph_0.6.5": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"ahash\",\"req\":\"^0.7.2\"},{\"kind\":\"dev\",\"name\":\"bincode\",\"req\":\"^1.3.3\"},{\"kind\":\"dev\",\"name\":\"defmac\",\"req\":\"^0.2.1\"},{\"default_features\":false,\"name\":\"fixedbitset\",\"req\":\"^0.4.0\"},{\"kind\":\"dev\",\"name\":\"fxhash\",\"req\":\"^0.2.1\"},{\"name\":\"indexmap\",\"req\":\"^2.0\"},{\"default_features\":false,\"kind\":\"dev\",\"name\":\"itertools\",\"req\":\"^0.12.1\"},{\"kind\":\"dev\",\"name\":\"odds\",\"req\":\"^0.4.0\"},{\"default_features\":false,\"name\":\"quickcheck\",\"optional\":true,\"req\":\"^0.8\"},{\"kind\":\"dev\",\"name\":\"rand\",\"req\":\"^0.5.5\"},{\"name\":\"rayon\",\"optional\":true,\"req\":\"^1.5.3\"},{\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0\"},{\"name\":\"serde_derive\",\"optional\":true,\"req\":\"^1.0\"}],\"features\":{\"all\":[\"unstable\",\"quickcheck\",\"matrix_graph\",\"stable_graph\",\"graphmap\",\"rayon\"],\"default\":[\"graphmap\",\"stable_graph\",\"matrix_graph\"],\"generate\":[],\"graphmap\":[],\"matrix_graph\":[],\"rayon\":[\"dep:rayon\",\"indexmap/rayon\"],\"serde-1\":[\"serde\",\"serde_derive\"],\"stable_graph\":[],\"unstable\":[\"generate\"]}}",
|
||||
"petgraph_0.8.3": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"ahash\",\"req\":\"^0.7.2\"},{\"kind\":\"dev\",\"name\":\"bincode\",\"req\":\"^1.3.3\"},{\"kind\":\"dev\",\"name\":\"defmac\",\"req\":\"^0.2.1\"},{\"name\":\"dot-parser\",\"optional\":true,\"req\":\"^0.5.1\"},{\"name\":\"dot-parser-macros\",\"optional\":true,\"req\":\"^0.5.1\"},{\"default_features\":false,\"name\":\"fixedbitset\",\"req\":\"^0.5.7\"},{\"kind\":\"dev\",\"name\":\"fxhash\",\"req\":\"^0.2.1\"},{\"default_features\":false,\"features\":[\"default-hasher\",\"inline-more\"],\"name\":\"hashbrown\",\"req\":\"^0.15.0\"},{\"default_features\":false,\"name\":\"indexmap\",\"req\":\"^2.5.0\"},{\"default_features\":false,\"kind\":\"dev\",\"name\":\"itertools\",\"req\":\"^0.12.1\"},{\"kind\":\"dev\",\"name\":\"odds\",\"req\":\"^0.4.0\"},{\"default_features\":false,\"name\":\"quickcheck\",\"optional\":true,\"req\":\"^0.8\"},{\"kind\":\"dev\",\"name\":\"rand\",\"req\":\"^0.5.5\"},{\"name\":\"rayon\",\"optional\":true,\"req\":\"^1.5.3\"},{\"default_features\":false,\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0\"},{\"default_features\":false,\"name\":\"serde_derive\",\"optional\":true,\"req\":\"^1.0\"}],\"features\":{\"all\":[\"unstable\",\"quickcheck\",\"matrix_graph\",\"stable_graph\",\"graphmap\",\"rayon\",\"dot_parser\"],\"default\":[\"std\",\"graphmap\",\"stable_graph\",\"matrix_graph\"],\"dot_parser\":[\"std\",\"dep:dot-parser\",\"dep:dot-parser-macros\"],\"generate\":[],\"graphmap\":[],\"matrix_graph\":[],\"quickcheck\":[\"std\",\"dep:quickcheck\",\"graphmap\",\"stable_graph\"],\"rayon\":[\"std\",\"dep:rayon\",\"indexmap/rayon\",\"hashbrown/rayon\"],\"serde-1\":[\"serde\",\"serde_derive\"],\"stable_graph\":[\"serde?/alloc\"],\"std\":[\"indexmap/std\"],\"unstable\":[\"generate\"]}}",
|
||||
"phf_0.12.1": "{\"dependencies\":[{\"name\":\"phf_macros\",\"optional\":true,\"req\":\"^0.12.0\"},{\"default_features\":false,\"name\":\"phf_shared\",\"req\":\"^0.12.0\"},{\"default_features\":false,\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0\"}],\"features\":{\"default\":[\"std\"],\"macros\":[\"phf_macros\"],\"std\":[\"phf_shared/std\",\"serde?/std\"],\"uncased\":[\"phf_macros?/uncased\",\"phf_shared/uncased\"],\"unicase\":[\"phf_macros?/unicase\",\"phf_shared/unicase\"]}}",
|
||||
"phf_shared_0.11.3": "{\"dependencies\":[{\"name\":\"siphasher\",\"req\":\"^1.0\"},{\"default_features\":false,\"name\":\"uncased\",\"optional\":true,\"req\":\"^0.9.9\"},{\"name\":\"unicase\",\"optional\":true,\"req\":\"^2.4.0\"}],\"features\":{\"default\":[\"std\"],\"std\":[]}}",
|
||||
"phf_shared_0.12.1": "{\"dependencies\":[{\"name\":\"siphasher\",\"req\":\"^1.0\"},{\"default_features\":false,\"name\":\"uncased\",\"optional\":true,\"req\":\"^0.9.9\"},{\"name\":\"unicase\",\"optional\":true,\"req\":\"^2.4.0\"}],\"features\":{\"default\":[\"std\"],\"std\":[]}}",
|
||||
"pin-project-internal_1.1.10": "{\"dependencies\":[{\"name\":\"proc-macro2\",\"req\":\"^1.0.60\"},{\"name\":\"quote\",\"req\":\"^1.0.25\"},{\"default_features\":false,\"features\":[\"parsing\",\"printing\",\"clone-impls\",\"proc-macro\",\"full\",\"visit-mut\"],\"name\":\"syn\",\"req\":\"^2.0.1\"}],\"features\":{}}",
|
||||
"pin-project-lite_0.2.16": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"rustversion\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"static_assertions\",\"req\":\"^1\"}],\"features\":{}}",
|
||||
"pin-project-lite_0.2.17": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"rustversion\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"static_assertions\",\"req\":\"^1\"}],\"features\":{}}",
|
||||
@@ -1268,6 +1271,7 @@
|
||||
"ring_0.17.14": "{\"dependencies\":[{\"default_features\":false,\"kind\":\"build\",\"name\":\"cc\",\"req\":\"^1.2.8\"},{\"default_features\":false,\"name\":\"cfg-if\",\"req\":\"^1.0.0\"},{\"name\":\"getrandom\",\"req\":\"^0.2.10\"},{\"default_features\":false,\"name\":\"libc\",\"req\":\"^0.2.148\",\"target\":\"cfg(all(any(all(target_arch = \\\"aarch64\\\", target_endian = \\\"little\\\"), all(target_arch = \\\"arm\\\", target_endian = \\\"little\\\")), any(target_os = \\\"android\\\", target_os = \\\"linux\\\")))\"},{\"default_features\":false,\"name\":\"libc\",\"req\":\"^0.2.155\",\"target\":\"cfg(all(all(target_arch = \\\"aarch64\\\", target_endian = \\\"little\\\"), target_vendor = \\\"apple\\\", any(target_os = \\\"ios\\\", target_os = \\\"macos\\\", target_os = \\\"tvos\\\", target_os = \\\"visionos\\\", target_os = \\\"watchos\\\")))\"},{\"default_features\":false,\"kind\":\"dev\",\"name\":\"libc\",\"req\":\"^0.2.148\",\"target\":\"cfg(any(unix, windows, target_os = \\\"wasi\\\"))\"},{\"name\":\"untrusted\",\"req\":\"^0.9\"},{\"default_features\":false,\"features\":[\"std\"],\"kind\":\"dev\",\"name\":\"wasm-bindgen-test\",\"req\":\"^0.3.37\",\"target\":\"cfg(all(target_arch = \\\"wasm32\\\", target_os = \\\"unknown\\\"))\"},{\"features\":[\"Win32_Foundation\",\"Win32_System_Threading\"],\"name\":\"windows-sys\",\"req\":\"^0.52\",\"target\":\"cfg(all(all(target_arch = \\\"aarch64\\\", target_endian = \\\"little\\\"), target_os = \\\"windows\\\"))\"}],\"features\":{\"alloc\":[],\"default\":[\"alloc\",\"dev_urandom_fallback\"],\"dev_urandom_fallback\":[],\"less-safe-getrandom-custom-or-rdrand\":[],\"less-safe-getrandom-espidf\":[],\"slow_tests\":[],\"std\":[\"alloc\"],\"test_logging\":[],\"unstable-testing-arm-no-hw\":[],\"unstable-testing-arm-no-neon\":[],\"wasm32_unknown_unknown_js\":[\"getrandom/js\"]}}",
|
||||
"rmcp-macros_0.15.0": "{\"dependencies\":[{\"name\":\"darling\",\"req\":\"^0.23\"},{\"name\":\"proc-macro2\",\"req\":\"^1\"},{\"name\":\"quote\",\"req\":\"^1\"},{\"name\":\"serde_json\",\"req\":\"^1.0\"},{\"features\":[\"full\"],\"name\":\"syn\",\"req\":\"^2\"}],\"features\":{}}",
|
||||
"rmcp_0.15.0": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"anyhow\",\"req\":\"^1.0\"},{\"name\":\"async-trait\",\"req\":\"^0.1.89\"},{\"kind\":\"dev\",\"name\":\"async-trait\",\"req\":\"^0.1\"},{\"name\":\"axum\",\"optional\":true,\"req\":\"^0.8\"},{\"name\":\"base64\",\"optional\":true,\"req\":\"^0.22\"},{\"name\":\"bytes\",\"optional\":true,\"req\":\"^1\"},{\"default_features\":false,\"features\":[\"serde\",\"clock\",\"std\",\"oldtime\"],\"name\":\"chrono\",\"req\":\"^0.4.38\",\"target\":\"cfg(all(target_family = \\\"wasm\\\", target_os = \\\"unknown\\\"))\"},{\"features\":[\"serde\"],\"name\":\"chrono\",\"req\":\"^0.4.38\",\"target\":\"cfg(not(all(target_family = \\\"wasm\\\", target_os = \\\"unknown\\\")))\"},{\"name\":\"futures\",\"req\":\"^0.3\"},{\"name\":\"http\",\"optional\":true,\"req\":\"^1\"},{\"name\":\"http-body\",\"optional\":true,\"req\":\"^1\"},{\"name\":\"http-body-util\",\"optional\":true,\"req\":\"^0.1\"},{\"default_features\":false,\"features\":[\"reqwest\"],\"name\":\"oauth2\",\"optional\":true,\"req\":\"^5.0\"},{\"name\":\"pastey\",\"optional\":true,\"req\":\"^0.2.0\"},{\"name\":\"pin-project-lite\",\"req\":\"^0.2\"},{\"features\":[\"tokio1\"],\"name\":\"process-wrap\",\"optional\":true,\"req\":\"^9.0\"},{\"name\":\"rand\",\"optional\":true,\"req\":\"^0.9\"},{\"default_features\":false,\"features\":[\"json\",\"stream\"],\"name\":\"reqwest\",\"optional\":true,\"req\":\"^0.12\"},{\"name\":\"rmcp-macros\",\"optional\":true,\"req\":\"^0.15.0\"},{\"features\":[\"chrono04\"],\"name\":\"schemars\",\"optional\":true,\"req\":\"^1.0\"},{\"features\":[\"chrono04\"],\"kind\":\"dev\",\"name\":\"schemars\",\"req\":\"^1.1.0\"},{\"features\":[\"derive\",\"rc\"],\"name\":\"serde\",\"req\":\"^1.0\"},{\"name\":\"serde_json\",\"req\":\"^1.0\"},{\"name\":\"sse-stream\",\"optional\":true,\"req\":\"^0.2\"},{\"name\":\"thiserror\",\"req\":\"^2\"},{\"features\":[\"sync\",\"macros\",\"rt\",\"time\"],\"name\":\"tokio\",\"req\":\"^1\"},{\"features\":[\"full\"],\"kin
d\":\"dev\",\"name\":\"tokio\",\"req\":\"^1\"},{\"name\":\"tokio-stream\",\"optional\":true,\"req\":\"^0.1\"},{\"name\":\"tokio-util\",\"req\":\"^0.7\"},{\"name\":\"tower-service\",\"optional\":true,\"req\":\"^0.3\"},{\"name\":\"tracing\",\"req\":\"^0.1\"},{\"features\":[\"env-filter\",\"std\",\"fmt\"],\"kind\":\"dev\",\"name\":\"tracing-subscriber\",\"req\":\"^0.3\"},{\"name\":\"url\",\"optional\":true,\"req\":\"^2.4\"},{\"features\":[\"v4\"],\"name\":\"uuid\",\"optional\":true,\"req\":\"^1\"}],\"features\":{\"__reqwest\":[\"dep:reqwest\"],\"auth\":[\"dep:oauth2\",\"__reqwest\",\"dep:url\"],\"client\":[\"dep:tokio-stream\"],\"client-side-sse\":[\"dep:sse-stream\",\"dep:http\"],\"default\":[\"base64\",\"macros\",\"server\"],\"elicitation\":[\"dep:url\"],\"macros\":[\"dep:rmcp-macros\",\"dep:pastey\"],\"reqwest\":[\"__reqwest\",\"reqwest?/rustls-tls\"],\"reqwest-native-tls\":[\"__reqwest\",\"reqwest?/native-tls\"],\"reqwest-tls-no-provider\":[\"__reqwest\",\"reqwest?/rustls-tls-no-provider\"],\"schemars\":[\"dep:schemars\"],\"server\":[\"transport-async-rw\",\"dep:schemars\",\"dep:pastey\"],\"server-side-http\":[\"uuid\",\"dep:rand\",\"dep:tokio-stream\",\"dep:http\",\"dep:http-body\",\"dep:http-body-util\",\"dep:bytes\",\"dep:sse-stream\",\"dep:axum\",\"tower\"],\"tower\":[\"dep:tower-service\"],\"transport-async-rw\":[\"tokio/io-util\",\"tokio-util/codec\"],\"transport-child-process\":[\"transport-async-rw\",\"tokio/process\",\"dep:process-wrap\"],\"transport-io\":[\"transport-async-rw\",\"tokio/io-std\"],\"transport-streamable-http-client\":[\"client-side-sse\",\"transport-worker\"],\"transport-streamable-http-client-reqwest\":[\"transport-streamable-http-client\",\"__reqwest\"],\"transport-streamable-http-server\":[\"transport-streamable-http-server-session\",\"server-side-http\",\"transport-worker\"],\"transport-streamable-http-server-session\":[\"transport-async-rw\",\"dep:tokio-stream\"],\"transport-worker\":[\"dep:tokio-stream\"]}}",
|
||||
"rrule_0.14.0": "{\"dependencies\":[{\"name\":\"chrono\",\"req\":\"^0.4.39\"},{\"name\":\"chrono-tz\",\"req\":\"^0.10.1\"},{\"features\":[\"derive\"],\"name\":\"clap\",\"optional\":true,\"req\":\"^4.5.26\"},{\"name\":\"log\",\"req\":\"^0.4.25\"},{\"default_features\":false,\"features\":[\"derive\"],\"kind\":\"dev\",\"name\":\"orig_serde\",\"package\":\"serde\",\"req\":\"^1.0.217\"},{\"default_features\":false,\"features\":[\"perf\",\"std\"],\"name\":\"regex\",\"req\":\"^1.11.1\"},{\"kind\":\"dev\",\"name\":\"serde_json\",\"req\":\"^1.0.135\"},{\"name\":\"serde_with\",\"optional\":true,\"req\":\"^3.12.0\"},{\"name\":\"thiserror\",\"req\":\"^2.0.11\"}],\"features\":{\"by-easter\":[],\"cli-tool\":[\"clap\"],\"default\":[],\"exrule\":[],\"serde\":[\"serde_with\",\"chrono/serde\",\"chrono-tz/serde\"]}}",
|
||||
"rsa_0.9.10": "{\"dependencies\":[{\"features\":[\"alloc\"],\"kind\":\"dev\",\"name\":\"base64ct\",\"req\":\"^1\"},{\"default_features\":false,\"name\":\"const-oid\",\"req\":\"^0.9\"},{\"default_features\":false,\"features\":[\"alloc\",\"oid\"],\"name\":\"digest\",\"req\":\"^0.10.5\"},{\"kind\":\"dev\",\"name\":\"hex-literal\",\"req\":\"^0.4.1\"},{\"default_features\":false,\"features\":[\"i128\",\"prime\",\"zeroize\"],\"name\":\"num-bigint\",\"package\":\"num-bigint-dig\",\"req\":\"^0.8.6\"},{\"default_features\":false,\"name\":\"num-integer\",\"req\":\"^0.1.39\"},{\"default_features\":false,\"features\":[\"libm\"],\"name\":\"num-traits\",\"req\":\"^0.2.9\"},{\"default_features\":false,\"features\":[\"alloc\",\"pkcs8\"],\"name\":\"pkcs1\",\"req\":\"^0.7.5\"},{\"default_features\":false,\"features\":[\"alloc\"],\"name\":\"pkcs8\",\"req\":\"^0.10.2\"},{\"kind\":\"dev\",\"name\":\"proptest\",\"req\":\"^1\"},{\"kind\":\"dev\",\"name\":\"rand\",\"req\":\"^0.8\"},{\"kind\":\"dev\",\"name\":\"rand_chacha\",\"req\":\"^0.3\"},{\"default_features\":false,\"name\":\"rand_core\",\"req\":\"^0.6.4\"},{\"default_features\":false,\"kind\":\"dev\",\"name\":\"rand_core\",\"req\":\"^0.6\"},{\"kind\":\"dev\",\"name\":\"rand_xorshift\",\"req\":\"^0.3\"},{\"default_features\":false,\"features\":[\"derive\"],\"name\":\"serde\",\"optional\":true,\"req\":\"^1.0.184\"},{\"kind\":\"dev\",\"name\":\"serde_test\",\"req\":\"^1.0.89\"},{\"default_features\":false,\"features\":[\"oid\"],\"name\":\"sha1\",\"optional\":true,\"req\":\"^0.10.5\"},{\"default_features\":false,\"features\":[\"oid\"],\"kind\":\"dev\",\"name\":\"sha1\",\"req\":\"^0.10.5\"},{\"default_features\":false,\"features\":[\"oid\"],\"name\":\"sha2\",\"optional\":true,\"req\":\"^0.10.6\"},{\"default_features\":false,\"features\":[\"oid\"],\"kind\":\"dev\",\"name\":\"sha2\",\"req\":\"^0.10.6\"},{\"default_features\":false,\"features\":[\"oid\"],\"kind\":\"dev\",\"name\":\"sha3\",\"req\":\"^0.10.7\"},{\"default_features\":false,\"fea
tures\":[\"alloc\",\"digest\",\"rand_core\"],\"name\":\"signature\",\"req\":\">2.0, <2.3\"},{\"default_features\":false,\"features\":[\"alloc\"],\"name\":\"spki\",\"req\":\"^0.7.3\"},{\"default_features\":false,\"name\":\"subtle\",\"req\":\"^2.1.1\"},{\"features\":[\"alloc\"],\"name\":\"zeroize\",\"req\":\"^1.5\"}],\"features\":{\"default\":[\"std\",\"pem\",\"u64_digit\"],\"getrandom\":[\"rand_core/getrandom\"],\"hazmat\":[],\"nightly\":[\"num-bigint/nightly\"],\"pem\":[\"pkcs1/pem\",\"pkcs8/pem\"],\"pkcs5\":[\"pkcs8/encryption\"],\"serde\":[\"dep:serde\",\"num-bigint/serde\"],\"std\":[\"digest/std\",\"pkcs1/std\",\"pkcs8/std\",\"rand_core/std\",\"signature/std\"],\"u64_digit\":[\"num-bigint/u64_digit\"]}}",
|
||||
"rtrb_0.3.3": "{\"dependencies\":[{\"kind\":\"dev\",\"name\":\"criterion\",\"req\":\"^0.8\"},{\"default_features\":false,\"kind\":\"dev\",\"name\":\"crossbeam-utils\",\"req\":\"^0.8\"},{\"kind\":\"dev\",\"name\":\"rand\",\"req\":\"^0.10\"}],\"features\":{\"default\":[\"std\"],\"std\":[]}}",
|
||||
"rust-embed-impl_8.11.0": "{\"dependencies\":[{\"name\":\"proc-macro2\",\"req\":\"^1\"},{\"name\":\"quote\",\"req\":\"^1\"},{\"name\":\"rust-embed-utils\",\"req\":\"^8.11.0\"},{\"name\":\"shellexpand\",\"optional\":true,\"req\":\"^3\"},{\"default_features\":false,\"features\":[\"derive\",\"parsing\",\"proc-macro\",\"printing\"],\"name\":\"syn\",\"req\":\"^2\"},{\"name\":\"walkdir\",\"req\":\"^2.3.1\"}],\"features\":{\"compression\":[],\"debug-embed\":[],\"deterministic-timestamps\":[],\"include-exclude\":[\"rust-embed-utils/include-exclude\"],\"interpolate-folder-path\":[\"shellexpand\"],\"mime-guess\":[\"rust-embed-utils/mime-guess\"]}}",
|
||||
|
||||
45
codex-rs/Cargo.lock
generated
45
codex-rs/Cargo.lock
generated
@@ -1253,6 +1253,16 @@ dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "chrono-tz"
|
||||
version = "0.10.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"phf",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "chunked_transfer"
|
||||
version = "1.5.0"
|
||||
@@ -1897,6 +1907,7 @@ dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bm25",
|
||||
"chrono",
|
||||
"chrono-tz",
|
||||
"clap",
|
||||
"codex-analytics",
|
||||
"codex-api",
|
||||
@@ -1974,6 +1985,7 @@ dependencies = [
|
||||
"regex-lite",
|
||||
"reqwest",
|
||||
"rmcp",
|
||||
"rrule",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serial_test",
|
||||
@@ -7527,6 +7539,15 @@ dependencies = [
|
||||
"indexmap 2.13.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "phf"
|
||||
version = "0.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"
|
||||
dependencies = [
|
||||
"phf_shared 0.12.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "phf_shared"
|
||||
version = "0.11.3"
|
||||
@@ -7536,6 +7557,15 @@ dependencies = [
|
||||
"siphasher",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "phf_shared"
|
||||
version = "0.12.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981"
|
||||
dependencies = [
|
||||
"siphasher",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pin-project"
|
||||
version = "1.1.10"
|
||||
@@ -8739,6 +8769,19 @@ dependencies = [
|
||||
"syn 2.0.114",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rrule"
|
||||
version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "720acfb4980b9d8a6a430f6d7a11933e701ebbeba5eee39cc9d8c5f932aaff74"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"chrono-tz",
|
||||
"log",
|
||||
"regex",
|
||||
"thiserror 2.0.18",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rsa"
|
||||
version = "0.9.10"
|
||||
@@ -10097,7 +10140,7 @@ checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f"
|
||||
dependencies = [
|
||||
"new_debug_unreachable",
|
||||
"parking_lot",
|
||||
"phf_shared",
|
||||
"phf_shared 0.11.3",
|
||||
"precomputed-hash",
|
||||
]
|
||||
|
||||
|
||||
@@ -210,6 +210,7 @@ bm25 = "2.3.2"
|
||||
bytes = "1.10.1"
|
||||
chardetng = "0.1.17"
|
||||
chrono = "0.4.43"
|
||||
chrono-tz = "0.10.4"
|
||||
clap = "4"
|
||||
clap_complete = "4"
|
||||
color-eyre = "0.6.3"
|
||||
@@ -277,6 +278,7 @@ ratatui-macros = "0.6.0"
|
||||
regex = "1.12.3"
|
||||
regex-lite = "0.1.8"
|
||||
reqwest = "0.12"
|
||||
rrule = "0.14.0"
|
||||
rmcp = { version = "0.15.0", default-features = false }
|
||||
runfiles = { git = "https://github.com/dzbarsky/rules_rust", rev = "b56cbaa8465e74127f1ea216f813cd377295ad81" }
|
||||
rustls = { version = "0.23", default-features = false, features = [
|
||||
|
||||
@@ -37,6 +37,7 @@ use codex_protocol::protocol::GuardianAssessmentEvent;
|
||||
use codex_protocol::protocol::GuardianAssessmentStatus;
|
||||
use codex_protocol::protocol::ImageGenerationBeginEvent;
|
||||
use codex_protocol::protocol::ImageGenerationEndEvent;
|
||||
use codex_protocol::protocol::InjectedMessageEvent;
|
||||
use codex_protocol::protocol::ItemCompletedEvent;
|
||||
use codex_protocol::protocol::ItemStartedEvent;
|
||||
use codex_protocol::protocol::McpToolCallBeginEvent;
|
||||
@@ -207,6 +208,7 @@ impl ThreadHistoryBuilder {
|
||||
EventMsg::TurnAborted(payload) => self.handle_turn_aborted(payload),
|
||||
EventMsg::TurnStarted(payload) => self.handle_turn_started(payload),
|
||||
EventMsg::TurnComplete(payload) => self.handle_turn_complete(payload),
|
||||
EventMsg::InjectedMessage(payload) => self.handle_injected_message(payload),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -268,6 +270,21 @@ impl ThreadHistoryBuilder {
|
||||
self.current_turn = Some(turn);
|
||||
}
|
||||
|
||||
fn handle_injected_message(&mut self, payload: &InjectedMessageEvent) {
|
||||
if payload.content.trim().is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let id = self.next_item_id();
|
||||
self.ensure_turn().items.push(ThreadItem::UserMessage {
|
||||
id,
|
||||
content: vec![UserInput::Text {
|
||||
text: payload.content.clone(),
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
});
|
||||
}
|
||||
|
||||
fn handle_agent_message(
|
||||
&mut self,
|
||||
text: String,
|
||||
|
||||
@@ -99,6 +99,7 @@ use codex_app_server_protocol::TurnPlanStep;
|
||||
use codex_app_server_protocol::TurnPlanUpdatedNotification;
|
||||
use codex_app_server_protocol::TurnStartedNotification;
|
||||
use codex_app_server_protocol::TurnStatus;
|
||||
use codex_app_server_protocol::UserInput as V2UserInput;
|
||||
use codex_app_server_protocol::build_command_execution_end_item;
|
||||
use codex_app_server_protocol::build_file_change_approval_request_item;
|
||||
use codex_app_server_protocol::build_file_change_begin_item;
|
||||
@@ -1470,6 +1471,24 @@ pub(crate) async fn apply_bespoke_event_handling(
|
||||
.send_server_notification(ServerNotification::ItemCompleted(completed))
|
||||
.await;
|
||||
}
|
||||
EventMsg::InjectedMessage(event) => {
|
||||
if !event.content.trim().is_empty() {
|
||||
let notification = ItemCompletedNotification {
|
||||
thread_id: conversation_id.to_string(),
|
||||
turn_id: event_turn_id.clone(),
|
||||
item: ThreadItem::UserMessage {
|
||||
id: uuid::Uuid::new_v4().to_string(),
|
||||
content: vec![V2UserInput::Text {
|
||||
text: event.content,
|
||||
text_elements: Vec::new(),
|
||||
}],
|
||||
},
|
||||
};
|
||||
outgoing
|
||||
.send_server_notification(ServerNotification::ItemCompleted(notification))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
EventMsg::ItemStarted(item_started_event) => {
|
||||
let item: ThreadItem = item_started_event.item.clone().into();
|
||||
let notification = ItemStartedNotification {
|
||||
|
||||
@@ -5739,6 +5739,10 @@ impl CodexMessageProcessor {
|
||||
let _ = ctx
|
||||
.mark_archived(thread_id, archived_path.as_path(), Utc::now())
|
||||
.await;
|
||||
let thread_id_str = thread_id.to_string();
|
||||
if let Err(err) = ctx.delete_thread_delivery_state(&thread_id_str).await {
|
||||
warn!("failed to delete delivery state for archived thread {thread_id}: {err}");
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -20,6 +20,9 @@ use codex_app_server_protocol::TurnStartResponse;
|
||||
use codex_app_server_protocol::UserInput;
|
||||
use codex_core::ARCHIVED_SESSIONS_SUBDIR;
|
||||
use codex_core::find_thread_path_by_id_str;
|
||||
use codex_state::ExternalMessageCreateParams;
|
||||
use codex_state::StateRuntime;
|
||||
use codex_state::ThreadTimerCreateParams;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::path::Path;
|
||||
use tempfile::TempDir;
|
||||
@@ -118,6 +121,26 @@ async fn thread_archive_requires_materialized_rollout() -> Result<()> {
|
||||
.expect("expected rollout path for thread id to exist after materialization");
|
||||
assert_paths_match_on_disk(&discovered_path, &rollout_path)?;
|
||||
|
||||
let state_db = StateRuntime::init(codex_home.path().to_path_buf(), "mock_provider".to_string())
|
||||
.await
|
||||
.expect("initialize state db");
|
||||
state_db
|
||||
.create_external_message(&message_params("message-1", &thread.id))
|
||||
.await
|
||||
.expect("create archived thread message");
|
||||
state_db
|
||||
.create_external_message(&message_params("message-2", "other-thread"))
|
||||
.await
|
||||
.expect("create other thread message");
|
||||
state_db
|
||||
.create_thread_timer(&timer_params("timer-1", &thread.id))
|
||||
.await
|
||||
.expect("create archived thread timer");
|
||||
state_db
|
||||
.create_thread_timer(&timer_params("timer-2", "other-thread"))
|
||||
.await
|
||||
.expect("create other thread timer");
|
||||
|
||||
let archive_id = mcp
|
||||
.send_thread_archive_request(ThreadArchiveParams {
|
||||
thread_id: thread.id.clone(),
|
||||
@@ -156,6 +179,40 @@ async fn thread_archive_requires_materialized_rollout() -> Result<()> {
|
||||
"expected archived rollout path {} to exist",
|
||||
archived_rollout_path.display()
|
||||
);
|
||||
assert_eq!(
|
||||
state_db
|
||||
.list_external_messages(&thread.id)
|
||||
.await
|
||||
.expect("list archived thread messages"),
|
||||
Vec::new()
|
||||
);
|
||||
assert_eq!(
|
||||
state_db
|
||||
.list_thread_timers(&thread.id)
|
||||
.await
|
||||
.expect("list archived thread timers"),
|
||||
Vec::new()
|
||||
);
|
||||
assert_eq!(
|
||||
state_db
|
||||
.list_external_messages("other-thread")
|
||||
.await
|
||||
.expect("list other thread messages")
|
||||
.into_iter()
|
||||
.map(|message| message.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["message-2".to_string()]
|
||||
);
|
||||
assert_eq!(
|
||||
state_db
|
||||
.list_thread_timers("other-thread")
|
||||
.await
|
||||
.expect("list other thread timers")
|
||||
.into_iter()
|
||||
.map(|timer| timer.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["timer-2".to_string()]
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -323,3 +380,34 @@ fn assert_paths_match_on_disk(actual: &Path, expected: &Path) -> std::io::Result
|
||||
assert_eq!(actual, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn message_params(id: &str, thread_id: &str) -> ExternalMessageCreateParams {
|
||||
ExternalMessageCreateParams {
|
||||
id: id.to_string(),
|
||||
thread_id: thread_id.to_string(),
|
||||
source: "external".to_string(),
|
||||
content: "do something".to_string(),
|
||||
instructions: None,
|
||||
meta_json: "{}".to_string(),
|
||||
delivery: "after-turn".to_string(),
|
||||
queued_at: 100,
|
||||
}
|
||||
}
|
||||
|
||||
fn timer_params(id: &str, thread_id: &str) -> ThreadTimerCreateParams {
|
||||
ThreadTimerCreateParams {
|
||||
id: id.to_string(),
|
||||
thread_id: thread_id.to_string(),
|
||||
source: "agent".to_string(),
|
||||
client_id: "codex-tui".to_string(),
|
||||
trigger_json: r#"{"kind":"delay","seconds":10,"repeat":false}"#.to_string(),
|
||||
content: "run tests".to_string(),
|
||||
instructions: None,
|
||||
meta_json: "{}".to_string(),
|
||||
delivery: "after-turn".to_string(),
|
||||
created_at: 100,
|
||||
next_run_at: Some(110),
|
||||
last_run_at: None,
|
||||
pending_run: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,11 +40,14 @@ mod app_cmd;
|
||||
mod desktop_app;
|
||||
mod marketplace_cmd;
|
||||
mod mcp_cmd;
|
||||
mod queue_cmd;
|
||||
#[cfg(not(windows))]
|
||||
mod wsl_paths;
|
||||
|
||||
use crate::marketplace_cmd::MarketplaceCli;
|
||||
use crate::mcp_cmd::McpCli;
|
||||
use crate::queue_cmd::QueueCommand;
|
||||
use crate::queue_cmd::run_queue_command;
|
||||
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
@@ -66,6 +69,8 @@ use codex_terminal_detection::TerminalName;
|
||||
version,
|
||||
// If a sub‑command is given, ignore requirements of the default args.
|
||||
subcommand_negates_reqs = true,
|
||||
// Prefer a recognized subcommand over the default interactive prompt positional.
|
||||
subcommand_precedence_over_arg = true,
|
||||
// The executable is sometimes invoked via a platform‑specific name like
|
||||
// `codex-x86_64-unknown-linux-musl`, but the help output should always use
|
||||
// the generic `codex` command name that users run.
|
||||
@@ -140,6 +145,9 @@ enum Subcommand {
|
||||
/// Resume a previous interactive session (picker by default; use --last to continue the most recent).
|
||||
Resume(ResumeCommand),
|
||||
|
||||
/// Queue a message to an existing thread.
|
||||
Queue(QueueCommand),
|
||||
|
||||
/// Fork a previous interactive session (picker by default; use --last to fork the most recent).
|
||||
Fork(ForkCommand),
|
||||
|
||||
@@ -807,6 +815,14 @@ async fn cli_main(arg0_paths: Arg0DispatchPaths) -> anyhow::Result<()> {
|
||||
.await?;
|
||||
handle_app_exit(exit_info)?;
|
||||
}
|
||||
Some(Subcommand::Queue(queue_cli)) => {
|
||||
reject_remote_mode_for_subcommand(
|
||||
root_remote.as_deref(),
|
||||
root_remote_auth_token_env.as_deref(),
|
||||
"queue",
|
||||
)?;
|
||||
run_queue_command(queue_cli, &root_config_overrides, &interactive).await?;
|
||||
}
|
||||
Some(Subcommand::Fork(ForkCommand {
|
||||
session_id,
|
||||
last,
|
||||
|
||||
301
codex-rs/cli/src/queue_cmd.rs
Normal file
301
codex-rs/cli/src/queue_cmd.rs
Normal file
@@ -0,0 +1,301 @@
|
||||
//! Implementation for the `codex queue` command.
|
||||
//!
|
||||
//! The top-level CLI module owns command routing; this module owns the
|
||||
//! queue-specific policy for resolving target threads and writing immediate
|
||||
//! messages into the SQLite state database.
|
||||
|
||||
use clap::Parser;
|
||||
use codex_core::config::Config;
|
||||
use codex_core::config::ConfigOverrides;
|
||||
use codex_core::timers::TimerDelivery;
|
||||
use codex_features::Feature;
|
||||
use codex_features::Features;
|
||||
use codex_protocol::ThreadId;
|
||||
use codex_state::StateRuntime;
|
||||
use codex_tui::Cli as TuiCli;
|
||||
use codex_utils_cli::CliConfigOverrides;
|
||||
use std::path::Path;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
pub(crate) struct QueueCommand {
|
||||
/// Target thread id.
|
||||
#[arg(long = "thread", value_name = "THREAD_ID")]
|
||||
thread: String,
|
||||
|
||||
/// Message text.
|
||||
#[arg(long = "message", value_name = "TEXT")]
|
||||
message: String,
|
||||
}
|
||||
|
||||
pub(crate) async fn run_queue_command(
|
||||
cmd: QueueCommand,
|
||||
root_config_overrides: &CliConfigOverrides,
|
||||
interactive: &TuiCli,
|
||||
) -> anyhow::Result<()> {
|
||||
let cli_kv_overrides = root_config_overrides
|
||||
.parse_overrides()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let overrides = ConfigOverrides {
|
||||
config_profile: interactive.config_profile.clone(),
|
||||
..Default::default()
|
||||
};
|
||||
let config =
|
||||
Config::load_with_cli_overrides_and_harness_overrides(cli_kv_overrides, overrides).await?;
|
||||
validate_queue_feature_flags(&config.features)?;
|
||||
let thread_id = resolve_queue_thread_id(config.codex_home.as_path(), &cmd.thread).await?;
|
||||
let state_db =
|
||||
StateRuntime::init(config.sqlite_home.clone(), config.model_provider_id.clone()).await?;
|
||||
let delivery = TimerDelivery::AfterTurn;
|
||||
|
||||
let message_params = codex_state::ExternalMessageCreateParams::new(
|
||||
thread_id,
|
||||
"external".to_string(),
|
||||
cmd.message,
|
||||
/*instructions*/ None,
|
||||
"{}".to_string(),
|
||||
delivery.as_str().to_string(),
|
||||
unix_timestamp_now()?,
|
||||
);
|
||||
state_db.create_external_message(&message_params).await?;
|
||||
remove_queued_message_if_thread_missing(
|
||||
config.codex_home.as_path(),
|
||||
&state_db,
|
||||
&message_params.thread_id,
|
||||
&message_params.id,
|
||||
)
|
||||
.await?;
|
||||
println!(
|
||||
"Queued message {} for thread {}.",
|
||||
message_params.id, message_params.thread_id
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn remove_queued_message_if_thread_missing(
|
||||
codex_home: &Path,
|
||||
state_db: &StateRuntime,
|
||||
thread_id: &str,
|
||||
message_id: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
if codex_core::find_thread_path_by_id_str(codex_home, thread_id)
|
||||
.await?
|
||||
.is_some()
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
state_db
|
||||
.delete_external_message(thread_id, message_id)
|
||||
.await?;
|
||||
anyhow::bail!("thread `{thread_id}` was archived before queued work could be created");
|
||||
}
|
||||
|
||||
fn validate_queue_feature_flags(features: &Features) -> anyhow::Result<()> {
|
||||
if !features.enabled(Feature::QueuedMessages) {
|
||||
anyhow::bail!("codex queue requires the queued_messages feature");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn resolve_queue_thread_id(codex_home: &Path, target: &str) -> anyhow::Result<String> {
|
||||
if let Ok(thread_id) = ThreadId::from_string(target) {
|
||||
if codex_core::find_thread_path_by_id_str(codex_home, &thread_id.to_string())
|
||||
.await?
|
||||
.is_none()
|
||||
{
|
||||
anyhow::bail!("no thread with id `{thread_id}`");
|
||||
}
|
||||
return Ok(thread_id.to_string());
|
||||
}
|
||||
|
||||
let mut active_thread_ids = Vec::new();
|
||||
for thread_id in codex_core::find_thread_ids_by_name(codex_home, target).await? {
|
||||
if codex_core::find_thread_path_by_id_str(codex_home, &thread_id.to_string())
|
||||
.await?
|
||||
.is_some()
|
||||
{
|
||||
active_thread_ids.push(thread_id);
|
||||
}
|
||||
}
|
||||
|
||||
match active_thread_ids.as_slice() {
|
||||
[] => anyhow::bail!("no thread named `{target}`"),
|
||||
[thread_id] => Ok(thread_id.to_string()),
|
||||
_ => anyhow::bail!("more than one thread is named `{target}`; use a thread id instead"),
|
||||
}
|
||||
}
|
||||
|
||||
fn unix_timestamp_now() -> anyhow::Result<i64> {
|
||||
let duration = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.map_err(|err| anyhow::anyhow!("system clock is before unix epoch: {err}"))?;
|
||||
i64::try_from(duration.as_secs()).map_err(|_| anyhow::anyhow!("current time is out of range"))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::MultitoolCli;
|
||||
use crate::Subcommand;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn write_test_rollout(codex_home: &Path, thread_id: ThreadId) {
|
||||
let sessions_dir = codex_home
|
||||
.join("sessions")
|
||||
.join("2026")
|
||||
.join("04")
|
||||
.join("10");
|
||||
std::fs::create_dir_all(&sessions_dir).expect("create sessions dir");
|
||||
std::fs::write(
|
||||
sessions_dir.join(format!("rollout-2026-04-10T12-00-00-{thread_id}.jsonl")),
|
||||
"",
|
||||
)
|
||||
.expect("write rollout");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn queue_command_parses_immediate_message() {
|
||||
let cli = MultitoolCli::try_parse_from([
|
||||
"codex",
|
||||
"queue",
|
||||
"--thread",
|
||||
"thread-1",
|
||||
"--message",
|
||||
"do work",
|
||||
])
|
||||
.expect("parse");
|
||||
let Some(Subcommand::Queue(cmd)) = cli.subcommand else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
assert_eq!(cmd.thread, "thread-1");
|
||||
assert_eq!(cmd.message, "do work");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn queue_without_required_args_is_subcommand_error() {
|
||||
let err = MultitoolCli::try_parse_from(["codex", "queue"])
|
||||
.expect_err("queue should be parsed as a subcommand, not as an interactive prompt");
|
||||
assert_eq!(err.kind(), clap::error::ErrorKind::MissingRequiredArgument);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn queue_requires_queued_messages_feature() {
|
||||
let mut features = Features::with_defaults();
|
||||
|
||||
let err = validate_queue_feature_flags(&features)
|
||||
.expect_err("queue should require queued_messages");
|
||||
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"codex queue requires the queued_messages feature"
|
||||
);
|
||||
|
||||
features.enable(Feature::QueuedMessages);
|
||||
validate_queue_feature_flags(&features)
|
||||
.expect("queued messages feature should permit immediate queue command");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn queue_thread_resolves_thread_name() {
|
||||
let temp = TempDir::new().expect("tempdir");
|
||||
let thread_id = ThreadId::new();
|
||||
write_test_rollout(temp.path(), thread_id);
|
||||
codex_core::append_thread_name(temp.path(), thread_id, "named-thread")
|
||||
.await
|
||||
.expect("append thread name");
|
||||
|
||||
assert_eq!(
|
||||
resolve_queue_thread_id(temp.path(), "named-thread")
|
||||
.await
|
||||
.expect("resolve"),
|
||||
thread_id.to_string()
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn queue_thread_id_requires_existing_thread() {
|
||||
let temp = TempDir::new().expect("tempdir");
|
||||
let thread_id = ThreadId::new();
|
||||
write_test_rollout(temp.path(), thread_id);
|
||||
|
||||
assert_eq!(
|
||||
resolve_queue_thread_id(temp.path(), &thread_id.to_string())
|
||||
.await
|
||||
.expect("resolve"),
|
||||
thread_id.to_string()
|
||||
);
|
||||
|
||||
let missing = ThreadId::new();
|
||||
assert_eq!(
|
||||
resolve_queue_thread_id(temp.path(), &missing.to_string())
|
||||
.await
|
||||
.expect_err("missing id should fail")
|
||||
.to_string(),
|
||||
format!("no thread with id `{missing}`")
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn queue_thread_name_rejects_missing_and_ambiguous_names() {
|
||||
let temp = TempDir::new().expect("tempdir");
|
||||
let first = ThreadId::new();
|
||||
let second = ThreadId::new();
|
||||
write_test_rollout(temp.path(), first);
|
||||
write_test_rollout(temp.path(), second);
|
||||
codex_core::append_thread_name(temp.path(), first, "same")
|
||||
.await
|
||||
.expect("append first name");
|
||||
codex_core::append_thread_name(temp.path(), second, "same")
|
||||
.await
|
||||
.expect("append second name");
|
||||
|
||||
assert_eq!(
|
||||
resolve_queue_thread_id(temp.path(), "missing")
|
||||
.await
|
||||
.expect_err("missing name should fail")
|
||||
.to_string(),
|
||||
"no thread named `missing`"
|
||||
);
|
||||
assert_eq!(
|
||||
resolve_queue_thread_id(temp.path(), "same")
|
||||
.await
|
||||
.expect_err("ambiguous name should fail")
|
||||
.to_string(),
|
||||
"more than one thread is named `same`; use a thread id instead"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn queue_thread_name_ignores_names_without_rollouts() {
|
||||
let temp = TempDir::new().expect("tempdir");
|
||||
let stale = ThreadId::new();
|
||||
let active = ThreadId::new();
|
||||
write_test_rollout(temp.path(), active);
|
||||
codex_core::append_thread_name(temp.path(), stale, "same")
|
||||
.await
|
||||
.expect("append stale name");
|
||||
codex_core::append_thread_name(temp.path(), stale, "stale")
|
||||
.await
|
||||
.expect("append stale-only name");
|
||||
codex_core::append_thread_name(temp.path(), active, "same")
|
||||
.await
|
||||
.expect("append active name");
|
||||
|
||||
assert_eq!(
|
||||
resolve_queue_thread_id(temp.path(), "same")
|
||||
.await
|
||||
.expect("resolve"),
|
||||
active.to_string()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
resolve_queue_thread_id(temp.path(), "stale")
|
||||
.await
|
||||
.expect_err("stale name should fail")
|
||||
.to_string(),
|
||||
"no thread named `stale`"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -24,6 +24,7 @@ async-trait = { workspace = true }
|
||||
base64 = { workspace = true }
|
||||
bm25 = { workspace = true }
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
chrono-tz = { workspace = true }
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
codex-analytics = { workspace = true }
|
||||
codex-api = { workspace = true }
|
||||
@@ -87,6 +88,7 @@ once_cell = { workspace = true }
|
||||
rand = { workspace = true }
|
||||
regex-lite = { workspace = true }
|
||||
reqwest = { workspace = true, features = ["json", "stream"] }
|
||||
rrule = { workspace = true }
|
||||
rmcp = { workspace = true, default-features = false, features = [
|
||||
"base64",
|
||||
"macros",
|
||||
|
||||
@@ -434,6 +434,9 @@
|
||||
"prevent_idle_sleep": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"queued_messages": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"realtime_conversation": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -485,6 +488,9 @@
|
||||
"steer": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"timers": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"tool_call_mcp_elicitation": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -2283,6 +2289,9 @@
|
||||
"prevent_idle_sleep": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"queued_messages": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"realtime_conversation": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -2334,6 +2343,9 @@
|
||||
"steer": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"timers": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"tool_call_mcp_elicitation": {
|
||||
"type": "boolean"
|
||||
},
|
||||
@@ -2804,4 +2816,4 @@
|
||||
},
|
||||
"title": "ConfigToml",
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ use std::fmt::Debug;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::atomic::AtomicU64;
|
||||
use std::time::SystemTime;
|
||||
use std::time::UNIX_EPOCH;
|
||||
@@ -113,6 +114,7 @@ use codex_protocol::permissions::FileSystemSandboxPolicy;
|
||||
use codex_protocol::permissions::NetworkSandboxPolicy;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
use codex_protocol::protocol::HasLegacyEvent;
|
||||
use codex_protocol::protocol::InjectedMessageEvent;
|
||||
use codex_protocol::protocol::InterAgentCommunication;
|
||||
use codex_protocol::protocol::ItemCompletedEvent;
|
||||
use codex_protocol::protocol::ItemStartedEvent;
|
||||
@@ -201,6 +203,9 @@ use codex_protocol::exec_output::StreamOutput;
|
||||
mod rollout_reconstruction;
|
||||
#[cfg(test)]
|
||||
mod rollout_reconstruction_tests;
|
||||
mod timer_runtime;
|
||||
#[cfg(test)]
|
||||
mod timer_runtime_tests;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum SteerInputError {
|
||||
@@ -283,6 +288,7 @@ use crate::mentions::collect_explicit_app_ids;
|
||||
use crate::mentions::collect_explicit_plugin_mentions;
|
||||
use crate::mentions::collect_tool_mentions_from_messages;
|
||||
use crate::network_policy_decision::execpolicy_network_rule_amendment;
|
||||
use crate::pending_input::PendingInputItem;
|
||||
use crate::plugins::PluginsManager;
|
||||
use crate::plugins::build_plugin_injections;
|
||||
use crate::plugins::render_plugins_section;
|
||||
@@ -306,6 +312,7 @@ use crate::tasks::GhostSnapshotTask;
|
||||
use crate::tasks::ReviewTask;
|
||||
use crate::tasks::SessionTask;
|
||||
use crate::tasks::SessionTaskContext;
|
||||
use crate::timers::TimersState;
|
||||
use crate::tools::ToolRouter;
|
||||
use crate::tools::context::SharedTurnDiffTracker;
|
||||
use crate::tools::js_repl::JsReplHandle;
|
||||
@@ -837,15 +844,27 @@ pub(crate) struct Session {
|
||||
pending_mcp_server_refresh_config: Mutex<Option<McpServerRefreshConfig>>,
|
||||
pub(crate) conversation: Arc<RealtimeConversationManager>,
|
||||
pub(crate) active_turn: Mutex<Option<ActiveTurn>>,
|
||||
/// Prevents concurrent timers from claiming multiple timers before a
|
||||
/// newly started turn becomes the active turn.
|
||||
timer_start_in_progress: Mutex<bool>,
|
||||
timer_db_sync_started: AtomicBool,
|
||||
mailbox: Mailbox,
|
||||
mailbox_rx: Mutex<MailboxReceiver>,
|
||||
idle_pending_input: Mutex<Vec<ResponseInputItem>>, // TODO (jif) merge with mailbox!
|
||||
idle_pending_input: Mutex<Vec<PendingInputItem>>, // TODO (jif) merge with mailbox!
|
||||
timers: Mutex<TimersState>,
|
||||
timer_tasks_cancellation_token: CancellationToken,
|
||||
pub(crate) guardian_review_session: GuardianReviewSessionManager,
|
||||
pub(crate) services: SessionServices,
|
||||
js_repl: Arc<JsReplHandle>,
|
||||
next_internal_sub_id: AtomicU64,
|
||||
}
|
||||
|
||||
impl Drop for Session {
|
||||
fn drop(&mut self) {
|
||||
self.timer_tasks_cancellation_token.cancel();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct TurnSkillsContext {
|
||||
pub(crate) outcome: Arc<SkillLoadOutcome>,
|
||||
@@ -2074,9 +2093,13 @@ impl Session {
|
||||
pending_mcp_server_refresh_config: Mutex::new(None),
|
||||
conversation: Arc::new(RealtimeConversationManager::new()),
|
||||
active_turn: Mutex::new(None),
|
||||
timer_start_in_progress: Mutex::new(false),
|
||||
timer_db_sync_started: AtomicBool::new(false),
|
||||
mailbox,
|
||||
mailbox_rx: Mutex::new(mailbox_rx),
|
||||
idle_pending_input: Mutex::new(Vec::new()),
|
||||
timers: Mutex::new(TimersState::default()),
|
||||
timer_tasks_cancellation_token: CancellationToken::new(),
|
||||
guardian_review_session: GuardianReviewSessionManager::default(),
|
||||
services,
|
||||
js_repl,
|
||||
@@ -2209,6 +2232,7 @@ impl Session {
|
||||
let mut state = sess.state.lock().await;
|
||||
state.set_pending_session_start_source(Some(session_start_source));
|
||||
}
|
||||
sess.restore_timers_from_db().await;
|
||||
|
||||
memories::start_memories_startup_task(
|
||||
&sess,
|
||||
@@ -2227,6 +2251,39 @@ impl Session {
|
||||
self.services.state_db.clone()
|
||||
}
|
||||
|
||||
async fn timer_state_db(&self) -> Result<state_db::StateDbHandle, String> {
|
||||
if let Some(state_db) = self.state_db() {
|
||||
return Ok(state_db);
|
||||
}
|
||||
|
||||
let config = {
|
||||
let state = self.state.lock().await;
|
||||
state
|
||||
.session_configuration
|
||||
.original_config_do_not_use
|
||||
.clone()
|
||||
};
|
||||
if config.ephemeral {
|
||||
return Err("timer storage is unavailable for ephemeral sessions".to_string());
|
||||
}
|
||||
match codex_state::StateRuntime::init(
|
||||
config.sqlite_home.clone(),
|
||||
config.model_provider_id.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(runtime) => Ok(runtime),
|
||||
Err(err) => {
|
||||
let message = format!(
|
||||
"failed to initialize SQLite state db for timers at {}: {err}",
|
||||
config.sqlite_home.display()
|
||||
);
|
||||
warn!("{message}");
|
||||
Err(message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Flush rollout writes and return the final durability-barrier result.
|
||||
pub(crate) async fn flush_rollout(&self) -> std::io::Result<()> {
|
||||
let recorder = {
|
||||
@@ -4086,6 +4143,19 @@ impl Session {
|
||||
self.ensure_rollout_materialized().await;
|
||||
}
|
||||
|
||||
pub(crate) async fn record_generated_message_and_emit_display(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
response_item: ResponseItem,
|
||||
injected_event: InjectedMessageEvent,
|
||||
) {
|
||||
self.record_conversation_items(turn_context, std::slice::from_ref(&response_item))
|
||||
.await;
|
||||
self.send_event(turn_context, EventMsg::InjectedMessage(injected_event))
|
||||
.await;
|
||||
self.ensure_rollout_materialized().await;
|
||||
}
|
||||
|
||||
pub(crate) async fn notify_background_event(
|
||||
&self,
|
||||
turn_context: &TurnContext,
|
||||
@@ -4199,7 +4269,7 @@ impl Session {
|
||||
}
|
||||
|
||||
let mut turn_state = active_turn.turn_state.lock().await;
|
||||
turn_state.push_pending_input(input.into());
|
||||
turn_state.push_pending_input(ResponseInputItem::from(input).into());
|
||||
turn_state.accept_mailbox_delivery_for_current_turn();
|
||||
Ok(active_turn_id.clone())
|
||||
}
|
||||
@@ -4214,7 +4284,7 @@ impl Session {
|
||||
Some(at) => {
|
||||
let mut ts = at.turn_state.lock().await;
|
||||
for item in input {
|
||||
ts.push_pending_input(item);
|
||||
ts.push_pending_input(item.into());
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -4270,7 +4340,16 @@ impl Session {
|
||||
self.mailbox_rx.lock().await.has_pending_trigger_turn()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub async fn prepend_pending_input(&self, input: Vec<ResponseInputItem>) -> Result<(), ()> {
|
||||
self.prepend_pending_input_items(input.into_iter().map(Into::into).collect())
|
||||
.await
|
||||
}
|
||||
|
||||
pub(crate) async fn prepend_pending_input_items(
|
||||
&self,
|
||||
input: Vec<PendingInputItem>,
|
||||
) -> Result<(), ()> {
|
||||
let mut active = self.active_turn.lock().await;
|
||||
match active.as_mut() {
|
||||
Some(at) => {
|
||||
@@ -4282,7 +4361,16 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub async fn get_pending_input(&self) -> Vec<ResponseInputItem> {
|
||||
self.take_pending_input_items()
|
||||
.await
|
||||
.into_iter()
|
||||
.map(PendingInputItem::into_model_input)
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub(crate) async fn take_pending_input_items(&self) -> Vec<PendingInputItem> {
|
||||
let (pending_input, accepts_mailbox_delivery) = {
|
||||
let mut active = self.active_turn.lock().await;
|
||||
match active.as_mut() {
|
||||
@@ -4305,6 +4393,7 @@ impl Session {
|
||||
.drain()
|
||||
.into_iter()
|
||||
.map(|mail| mail.to_response_input_item())
|
||||
.map(Into::into)
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
if pending_input.is_empty() {
|
||||
@@ -4326,10 +4415,29 @@ impl Session {
|
||||
}
|
||||
|
||||
let mut idle_pending_input = self.idle_pending_input.lock().await;
|
||||
idle_pending_input.extend(items);
|
||||
idle_pending_input.extend(items.into_iter().map(PendingInputItem::from));
|
||||
}
|
||||
|
||||
pub(crate) async fn take_queued_response_items_for_next_turn(&self) -> Vec<ResponseInputItem> {
|
||||
#[allow(dead_code)]
|
||||
pub(crate) async fn queue_pending_input_for_next_turn(&self, items: Vec<PendingInputItem>) {
|
||||
if items.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut idle_pending_input = self.idle_pending_input.lock().await;
|
||||
for item in items {
|
||||
if let Some(timer_source) = item.timer_source()
|
||||
&& idle_pending_input
|
||||
.iter()
|
||||
.any(|queued| queued.timer_source() == Some(timer_source))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
idle_pending_input.push(item);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn take_queued_pending_input_for_next_turn(&self) -> Vec<PendingInputItem> {
|
||||
std::mem::take(&mut *self.idle_pending_input.lock().await)
|
||||
}
|
||||
|
||||
@@ -6248,7 +6356,7 @@ pub(crate) async fn run_turn(
|
||||
// submitted through the UI while the model was running. Though the UI
|
||||
// may support this, the model might not.
|
||||
let pending_input = if can_drain_pending_input {
|
||||
sess.get_pending_input().await
|
||||
sess.take_pending_input_items().await
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
@@ -6269,7 +6377,9 @@ pub(crate) async fn run_turn(
|
||||
} => {
|
||||
let remaining_pending_input = pending_input_iter.collect::<Vec<_>>();
|
||||
if !remaining_pending_input.is_empty() {
|
||||
let _ = sess.prepend_pending_input(remaining_pending_input).await;
|
||||
let _ = sess
|
||||
.prepend_pending_input_items(remaining_pending_input)
|
||||
.await;
|
||||
requeued_pending_input = true;
|
||||
}
|
||||
blocked_pending_input_contexts = additional_contexts;
|
||||
@@ -7234,6 +7344,7 @@ fn realtime_text_for_event(msg: &EventMsg) -> Option<String> {
|
||||
| EventMsg::TurnComplete(_)
|
||||
| EventMsg::TokenCount(_)
|
||||
| EventMsg::UserMessage(_)
|
||||
| EventMsg::InjectedMessage(_)
|
||||
| EventMsg::AgentMessageDelta(_)
|
||||
| EventMsg::AgentReasoning(_)
|
||||
| EventMsg::AgentReasoningDelta(_)
|
||||
|
||||
825
codex-rs/core/src/codex/timer_runtime.rs
Normal file
825
codex-rs/core/src/codex/timer_runtime.rs
Normal file
@@ -0,0 +1,825 @@
|
||||
//! SQLite-backed runtime bridge for thread timers and queued thread messages.
|
||||
//!
|
||||
//! This module connects [`Session`] to the persistent state database, keeps the
|
||||
//! in-memory timer scheduler reconciled with cross-instance changes, and
|
||||
//! converts claimed timers/messages into generated model input plus
|
||||
//! transcript-safe delivery events.
|
||||
//!
|
||||
//! Timer and queued-message delivery must be single-consumer across all harness
|
||||
//! instances for a thread, even though those instances share the same SQLite
|
||||
//! state database. In other words, if two app or CLI processes are attached to
|
||||
//! the same thread, a due timer or queued message should be injected by at most
|
||||
//! one of them.
|
||||
//!
|
||||
//! The database is the authority for that guarantee. Before this module
|
||||
//! delivers a queued message, it calls into the state layer to atomically claim
|
||||
//! and remove the next eligible row. Timers are first selected from local
|
||||
//! memory, but delivery proceeds only if the matching SQLite claim also wins:
|
||||
//! one-shot timers are deleted as part of the claim, and recurring timers are
|
||||
//! updated with the expected previous run timestamp so competing instances
|
||||
//! cannot both observe and persist the same run. If another instance wins the
|
||||
//! database race, this runtime refreshes its local timer view from SQLite and
|
||||
//! skips delivery.
|
||||
//!
|
||||
//! The local `timer_start_in_progress` flag is still useful, but only as an
|
||||
//! in-process guard. It prevents this [`Session`] from starting multiple pending
|
||||
//! timer/message deliveries concurrently; cross-process exclusivity comes from
|
||||
//! the SQLite claim operations above.
|
||||
|
||||
use super::BackgroundEventEvent;
|
||||
use super::Event;
|
||||
use super::EventMsg;
|
||||
use super::INITIAL_SUBMIT_ID;
|
||||
use super::Session;
|
||||
use crate::injected_message::InjectedMessage;
|
||||
use crate::injected_message::MessagePayload;
|
||||
use crate::pending_input::PendingInputItem;
|
||||
use crate::timers::ClaimedTimer;
|
||||
use crate::timers::CreateTimer;
|
||||
use crate::timers::MAX_ACTIVE_TIMERS_PER_THREAD;
|
||||
use crate::timers::PersistedTimer;
|
||||
use crate::timers::RecurringTimerPolicy;
|
||||
use crate::timers::RestoredTimerTask;
|
||||
use crate::timers::TIMER_FIRED_BACKGROUND_EVENT_PREFIX;
|
||||
use crate::timers::TIMER_UPDATED_BACKGROUND_EVENT_PREFIX;
|
||||
use crate::timers::ThreadTimer;
|
||||
use crate::timers::ThreadTimerTrigger;
|
||||
use crate::timers::TimerDelivery;
|
||||
use crate::timers::TimerTaskSpec;
|
||||
use crate::timers::TimersState;
|
||||
use crate::timers::timer_injected_message;
|
||||
use chrono::Utc;
|
||||
use codex_features::Feature;
|
||||
use codex_rollout::state_db;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::time::Duration;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
use tracing::warn;
|
||||
|
||||
const TIMER_SOURCE_AGENT: &str = "agent";
|
||||
const TIMER_CLIENT_ID_FALLBACK: &str = "codex-cli";
|
||||
const TIMER_DB_SYNC_INTERVAL: Duration = Duration::from_secs(15);
|
||||
const TIMER_DB_MAX_REFRESH_INTERVAL: Duration = Duration::from_secs(60);
|
||||
const TIMER_DUE_PERSIST_RETRY_DELAY: Duration = Duration::from_secs(1);
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||
enum TimerDbSyncStatus {
|
||||
Failed,
|
||||
Unchanged,
|
||||
Changed,
|
||||
}
|
||||
|
||||
enum PendingMessageStart {
|
||||
Started,
|
||||
NotReady,
|
||||
None,
|
||||
}
|
||||
|
||||
enum PendingMessageClaim {
|
||||
Claimed(Box<PendingInputItem>, TimerDelivery),
|
||||
NotReady,
|
||||
}
|
||||
|
||||
fn db_timer_to_persisted_timer(row: codex_state::ThreadTimer) -> Option<PersistedTimer> {
|
||||
let trigger = match serde_json::from_str(&row.trigger_json) {
|
||||
Ok(trigger) => trigger,
|
||||
Err(err) => {
|
||||
warn!("skipping invalid persisted timer {} trigger: {err}", row.id);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
let delivery =
|
||||
match serde_json::from_value::<TimerDelivery>(serde_json::Value::String(row.delivery)) {
|
||||
Ok(delivery) => delivery,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"skipping invalid persisted timer {} delivery: {err}",
|
||||
row.id
|
||||
);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
let meta = match serde_json::from_str(&row.meta_json) {
|
||||
Ok(meta) => meta,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"skipping invalid persisted timer {} metadata: {err}",
|
||||
row.id
|
||||
);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
Some(PersistedTimer {
|
||||
timer: ThreadTimer {
|
||||
id: row.id,
|
||||
trigger,
|
||||
content: row.content,
|
||||
instructions: row.instructions,
|
||||
meta,
|
||||
delivery,
|
||||
created_at: row.created_at,
|
||||
next_run_at: row.next_run_at,
|
||||
last_run_at: row.last_run_at,
|
||||
},
|
||||
pending_run: row.pending_run,
|
||||
})
|
||||
}
|
||||
|
||||
impl Session {
|
||||
pub(crate) async fn list_timers(self: &Arc<Self>) -> Vec<ThreadTimer> {
|
||||
if !self.timers_feature_enabled() {
|
||||
return Vec::new();
|
||||
}
|
||||
self.sync_timers_from_db(/*emit_update*/ false).await;
|
||||
self.list_timers_from_memory().await
|
||||
}
|
||||
|
||||
async fn list_timers_from_memory(&self) -> Vec<ThreadTimer> {
|
||||
self.timers.lock().await.list_timers()
|
||||
}
|
||||
|
||||
pub(crate) async fn create_timer(
|
||||
self: &Arc<Self>,
|
||||
trigger: ThreadTimerTrigger,
|
||||
payload: MessagePayload,
|
||||
delivery: TimerDelivery,
|
||||
) -> Result<ThreadTimer, String> {
|
||||
if !self.timers_feature_enabled() {
|
||||
return Err("timers feature is disabled".to_string());
|
||||
}
|
||||
self.ensure_rollout_materialized().await;
|
||||
let state_db = self.timer_state_db().await?;
|
||||
self.start_timer_db_sync_task(state_db.clone());
|
||||
|
||||
let timer_cancel = CancellationToken::new();
|
||||
let id = uuid::Uuid::new_v4().to_string();
|
||||
let (timer, persisted_timer, timer_spec) = {
|
||||
let mut timers = self.timers.lock().await;
|
||||
let (timer, timer_spec) = timers.create_timer(
|
||||
CreateTimer {
|
||||
id: id.clone(),
|
||||
trigger,
|
||||
payload,
|
||||
delivery,
|
||||
now: Utc::now(),
|
||||
},
|
||||
Some(timer_cancel.clone()),
|
||||
)?;
|
||||
let persisted_timer = timers
|
||||
.persisted_timer(&id)
|
||||
.ok_or_else(|| format!("created timer {id} was not stored in memory"))?;
|
||||
(timer, persisted_timer, timer_spec)
|
||||
};
|
||||
let params = self
|
||||
.thread_timer_create_params(&persisted_timer, TIMER_SOURCE_AGENT)
|
||||
.await?;
|
||||
match state_db
|
||||
.create_thread_timer_if_below_limit(¶ms, MAX_ACTIVE_TIMERS_PER_THREAD)
|
||||
.await
|
||||
{
|
||||
Ok(true) => {}
|
||||
Ok(false) => {
|
||||
if let Some(runtime) = self.timers.lock().await.remove_timer(&id) {
|
||||
TimersState::cancel_runtime(&runtime);
|
||||
}
|
||||
return Err(format!(
|
||||
"too many active timers; each thread supports at most {MAX_ACTIVE_TIMERS_PER_THREAD} timers"
|
||||
));
|
||||
}
|
||||
Err(err) => {
|
||||
if let Some(runtime) = self.timers.lock().await.remove_timer(&id) {
|
||||
TimersState::cancel_runtime(&runtime);
|
||||
}
|
||||
return Err(format!("failed to persist timer to sqlite: {err}"));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(timer_spec) = timer_spec {
|
||||
self.spawn_timer_task(id, timer_spec, timer_cancel);
|
||||
}
|
||||
self.emit_timer_updated_notification().await;
|
||||
self.maybe_start_pending_timer().await;
|
||||
Ok(timer)
|
||||
}
|
||||
|
||||
pub(crate) async fn delete_timer(self: &Arc<Self>, id: &str) -> Result<bool, String> {
|
||||
if !self.timers_feature_enabled() {
|
||||
return Err("timers feature is disabled".to_string());
|
||||
}
|
||||
self.sync_timers_from_db(/*emit_update*/ false).await;
|
||||
let state_db = self.timer_state_db().await?;
|
||||
self.start_timer_db_sync_task(state_db.clone());
|
||||
|
||||
let deleted = match state_db
|
||||
.delete_thread_timer(&self.thread_id_string(), id)
|
||||
.await
|
||||
{
|
||||
Ok(deleted) => deleted,
|
||||
Err(err) => return Err(format!("failed to delete timer from sqlite: {err}")),
|
||||
};
|
||||
let runtime = self.timers.lock().await.remove_timer(id);
|
||||
let Some(runtime) = runtime else {
|
||||
return Ok(deleted);
|
||||
};
|
||||
TimersState::cancel_runtime(&runtime);
|
||||
self.emit_timer_updated_notification().await;
|
||||
Ok(deleted)
|
||||
}
|
||||
|
||||
pub(crate) async fn maybe_start_pending_timer(self: &Arc<Self>) {
|
||||
if self
|
||||
.try_start_pending_timer(RecurringTimerPolicy::IncludeOnlyNeverRun)
|
||||
.await
|
||||
{
|
||||
return;
|
||||
}
|
||||
match self.maybe_start_pending_message().await {
|
||||
PendingMessageStart::Started => return,
|
||||
PendingMessageStart::NotReady | PendingMessageStart::None => {}
|
||||
}
|
||||
self.try_start_pending_timer(RecurringTimerPolicy::IncludeAll)
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn try_start_pending_timer(
|
||||
self: &Arc<Self>,
|
||||
recurring_timer_policy: RecurringTimerPolicy,
|
||||
) -> bool {
|
||||
let Some(ClaimedTimer {
|
||||
timer,
|
||||
context,
|
||||
deleted_one_shot_timer,
|
||||
..
|
||||
}) = self
|
||||
.claim_next_timer_for_delivery(recurring_timer_policy)
|
||||
.await
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
||||
self.emit_timer_fired_notification(&timer).await;
|
||||
if deleted_one_shot_timer {
|
||||
self.emit_timer_updated_notification().await;
|
||||
}
|
||||
let message = timer_injected_message(&context);
|
||||
let input_item = PendingInputItem::injected(message.prompt_input_item(), message.event());
|
||||
match context.delivery {
|
||||
TimerDelivery::SteerCurrentTurn => {
|
||||
if !self.inject_timer_into_active_turn(input_item.clone()).await {
|
||||
self.queue_pending_input_for_next_turn(vec![input_item])
|
||||
.await;
|
||||
self.maybe_start_turn_for_pending_work().await;
|
||||
}
|
||||
}
|
||||
TimerDelivery::AfterTurn => {
|
||||
self.queue_pending_input_for_next_turn(vec![input_item])
|
||||
.await;
|
||||
self.maybe_start_turn_for_pending_work().await;
|
||||
}
|
||||
}
|
||||
*self.timer_start_in_progress.lock().await = false;
|
||||
true
|
||||
}
|
||||
|
||||
async fn maybe_start_pending_message(self: &Arc<Self>) -> PendingMessageStart {
|
||||
let Some(claim) = self.claim_next_message_for_delivery().await else {
|
||||
return PendingMessageStart::None;
|
||||
};
|
||||
let PendingMessageClaim::Claimed(input_item, delivery) = claim else {
|
||||
return PendingMessageStart::NotReady;
|
||||
};
|
||||
let input_item = *input_item;
|
||||
|
||||
match delivery {
|
||||
TimerDelivery::SteerCurrentTurn => {
|
||||
if !self
|
||||
.inject_message_into_active_turn(input_item.clone())
|
||||
.await
|
||||
{
|
||||
self.queue_pending_input_for_next_turn(vec![input_item])
|
||||
.await;
|
||||
self.maybe_start_turn_for_pending_work().await;
|
||||
}
|
||||
}
|
||||
TimerDelivery::AfterTurn => {
|
||||
self.queue_pending_input_for_next_turn(vec![input_item])
|
||||
.await;
|
||||
self.maybe_start_turn_for_pending_work().await;
|
||||
}
|
||||
}
|
||||
*self.timer_start_in_progress.lock().await = false;
|
||||
PendingMessageStart::Started
|
||||
}
|
||||
|
||||
async fn claim_next_message_for_delivery(self: &Arc<Self>) -> Option<PendingMessageClaim> {
|
||||
if !self.queued_messages_feature_enabled() {
|
||||
return None;
|
||||
}
|
||||
let mut timer_start_in_progress = self.timer_start_in_progress.lock().await;
|
||||
if *timer_start_in_progress {
|
||||
return None;
|
||||
}
|
||||
*timer_start_in_progress = true;
|
||||
drop(timer_start_in_progress);
|
||||
|
||||
let has_pending_turn_inputs = self.has_queued_response_items_for_next_turn().await
|
||||
|| self.has_trigger_turn_mailbox_items().await;
|
||||
let (has_active_turn, active_turn_is_regular) = {
|
||||
let active_turn = self.active_turn.lock().await;
|
||||
let has_active_turn = active_turn.is_some();
|
||||
let active_turn_is_regular = active_turn
|
||||
.as_ref()
|
||||
.and_then(|turn| turn.tasks.first())
|
||||
.is_some_and(|(_, task)| matches!(task.kind, crate::state::TaskKind::Regular));
|
||||
(has_active_turn, active_turn_is_regular)
|
||||
};
|
||||
let can_after_turn = !has_active_turn && !has_pending_turn_inputs;
|
||||
let can_steer_current_turn = active_turn_is_regular;
|
||||
let state_db = match self.timer_state_db().await {
|
||||
Ok(state_db) => state_db,
|
||||
Err(err) => {
|
||||
warn!("failed to claim queued message from sqlite: {err}");
|
||||
*self.timer_start_in_progress.lock().await = false;
|
||||
return None;
|
||||
}
|
||||
};
|
||||
self.start_timer_db_sync_task(state_db.clone());
|
||||
|
||||
loop {
|
||||
let claim = match state_db
|
||||
.claim_next_external_message(
|
||||
&self.thread_id_string(),
|
||||
can_after_turn,
|
||||
can_steer_current_turn,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(claim) => claim,
|
||||
Err(err) => {
|
||||
warn!("failed to claim queued message from sqlite: {err}");
|
||||
*self.timer_start_in_progress.lock().await = false;
|
||||
return None;
|
||||
}
|
||||
};
|
||||
match claim {
|
||||
Some(codex_state::ExternalMessageClaim::Claimed(row)) => {
|
||||
let (message, delivery) = match InjectedMessage::from_external_row(row) {
|
||||
Ok(parsed) => parsed,
|
||||
Err(err) => {
|
||||
warn!("{err}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let input_item =
|
||||
PendingInputItem::injected(message.prompt_input_item(), message.event());
|
||||
return Some(PendingMessageClaim::Claimed(Box::new(input_item), delivery));
|
||||
}
|
||||
Some(codex_state::ExternalMessageClaim::Invalid { id, reason }) => {
|
||||
warn!("dropped invalid queued message {id}: {reason}");
|
||||
continue;
|
||||
}
|
||||
Some(codex_state::ExternalMessageClaim::NotReady) | None => {
|
||||
*self.timer_start_in_progress.lock().await = false;
|
||||
return claim.map(|_| PendingMessageClaim::NotReady);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn claim_next_timer_for_delivery(
|
||||
self: &Arc<Self>,
|
||||
recurring_timer_policy: RecurringTimerPolicy,
|
||||
) -> Option<ClaimedTimer> {
|
||||
if !self.timers_feature_enabled() {
|
||||
return None;
|
||||
}
|
||||
let mut timer_start_in_progress = self.timer_start_in_progress.lock().await;
|
||||
if *timer_start_in_progress {
|
||||
return None;
|
||||
}
|
||||
*timer_start_in_progress = true;
|
||||
drop(timer_start_in_progress);
|
||||
|
||||
let has_pending_turn_inputs = self.has_queued_response_items_for_next_turn().await
|
||||
|| self.has_trigger_turn_mailbox_items().await;
|
||||
|
||||
let (has_active_turn, active_turn_is_regular) = {
|
||||
let active_turn = self.active_turn.lock().await;
|
||||
let has_active_turn = active_turn.is_some();
|
||||
let active_turn_is_regular = active_turn
|
||||
.as_ref()
|
||||
.and_then(|turn| turn.tasks.first())
|
||||
.is_some_and(|(_, task)| matches!(task.kind, crate::state::TaskKind::Regular));
|
||||
(has_active_turn, active_turn_is_regular)
|
||||
};
|
||||
let can_after_turn = !has_active_turn && !has_pending_turn_inputs;
|
||||
let claimed = self.timers.lock().await.claim_next_timer(
|
||||
Utc::now(),
|
||||
can_after_turn,
|
||||
active_turn_is_regular,
|
||||
recurring_timer_policy,
|
||||
);
|
||||
let Some(claimed) = claimed else {
|
||||
*self.timer_start_in_progress.lock().await = false;
|
||||
return None;
|
||||
};
|
||||
|
||||
if !self.try_claim_timer_in_db(&claimed).await {
|
||||
self.sync_timers_from_db(/*emit_update*/ true).await;
|
||||
*self.timer_start_in_progress.lock().await = false;
|
||||
return None;
|
||||
}
|
||||
Some(claimed)
|
||||
}
|
||||
|
||||
async fn inject_timer_into_active_turn(&self, item: PendingInputItem) -> bool {
|
||||
self.inject_message_into_active_turn(item).await
|
||||
}
|
||||
|
||||
async fn inject_message_into_active_turn(&self, item: PendingInputItem) -> bool {
|
||||
let active = self.active_turn.lock().await;
|
||||
let Some(active_turn) = active.as_ref() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
match active_turn.tasks.first().map(|(_, task)| task.kind) {
|
||||
Some(crate::state::TaskKind::Regular) => {
|
||||
let mut turn_state = active_turn.turn_state.lock().await;
|
||||
turn_state.push_pending_input(item);
|
||||
true
|
||||
}
|
||||
Some(crate::state::TaskKind::Review | crate::state::TaskKind::Compact) | None => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn spawn_timer_task(
|
||||
self: &Arc<Self>,
|
||||
id: String,
|
||||
timer_spec: TimerTaskSpec,
|
||||
cancellation_token: CancellationToken,
|
||||
) {
|
||||
let weak = Arc::downgrade(self);
|
||||
let session_cancel = self.timer_tasks_cancellation_token.clone();
|
||||
tokio::spawn(async move {
|
||||
let mut delay = timer_spec.delay;
|
||||
let mut due_persist_retry_at = None;
|
||||
loop {
|
||||
tokio::select! {
|
||||
_ = session_cancel.cancelled() => break,
|
||||
_ = cancellation_token.cancelled() => break,
|
||||
_ = tokio::time::sleep(delay) => {}
|
||||
}
|
||||
let Some(session) = weak.upgrade() else {
|
||||
break;
|
||||
};
|
||||
let due_at = Utc::now();
|
||||
let (changed, due_at_timestamp) = match due_persist_retry_at.take() {
|
||||
Some(due_at_timestamp) => (true, due_at_timestamp),
|
||||
None => (
|
||||
session.timers.lock().await.mark_timer_due(&id, due_at),
|
||||
due_at.timestamp(),
|
||||
),
|
||||
};
|
||||
if changed
|
||||
&& !session
|
||||
.persist_timer_due_best_effort(&id, due_at_timestamp)
|
||||
.await
|
||||
{
|
||||
let sync_status = session.sync_timers_from_db(/*emit_update*/ true).await;
|
||||
if matches!(sync_status, TimerDbSyncStatus::Failed)
|
||||
&& session.timers.lock().await.persisted_timer(&id).is_some()
|
||||
{
|
||||
due_persist_retry_at = Some(due_at_timestamp);
|
||||
delay = TIMER_DUE_PERSIST_RETRY_DELAY;
|
||||
continue;
|
||||
}
|
||||
if !matches!(sync_status, TimerDbSyncStatus::Failed) {
|
||||
session.maybe_start_pending_timer().await;
|
||||
}
|
||||
let next_timer_spec = session
|
||||
.timers
|
||||
.lock()
|
||||
.await
|
||||
.timer_spec_for_timer(&id, Utc::now());
|
||||
let Some(next_timer_spec) = next_timer_spec else {
|
||||
break;
|
||||
};
|
||||
delay = next_timer_spec.delay;
|
||||
continue;
|
||||
}
|
||||
session.maybe_start_pending_timer().await;
|
||||
let next_timer_spec = session
|
||||
.timers
|
||||
.lock()
|
||||
.await
|
||||
.timer_spec_for_timer(&id, Utc::now());
|
||||
let Some(next_timer_spec) = next_timer_spec else {
|
||||
break;
|
||||
};
|
||||
delay = next_timer_spec.delay;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async fn thread_timer_create_params(
|
||||
&self,
|
||||
persisted_timer: &PersistedTimer,
|
||||
source: &str,
|
||||
) -> Result<codex_state::ThreadTimerCreateParams, String> {
|
||||
let timer = &persisted_timer.timer;
|
||||
Ok(codex_state::ThreadTimerCreateParams {
|
||||
id: timer.id.clone(),
|
||||
thread_id: self.thread_id_string(),
|
||||
source: source.to_string(),
|
||||
client_id: self.timer_client_id().await,
|
||||
trigger_json: serde_json::to_string(&timer.trigger)
|
||||
.map_err(|err| format!("failed to serialize timer trigger: {err}"))?,
|
||||
content: timer.content.clone(),
|
||||
instructions: timer.instructions.clone(),
|
||||
meta_json: serde_json::to_string(&timer.meta)
|
||||
.map_err(|err| format!("failed to serialize timer metadata: {err}"))?,
|
||||
delivery: timer.delivery.as_str().to_string(),
|
||||
created_at: timer.created_at,
|
||||
next_run_at: timer.next_run_at,
|
||||
last_run_at: timer.last_run_at,
|
||||
pending_run: persisted_timer.pending_run,
|
||||
})
|
||||
}
|
||||
|
||||
fn thread_timer_update_params(
|
||||
&self,
|
||||
persisted_timer: &PersistedTimer,
|
||||
) -> Result<codex_state::ThreadTimerUpdateParams, String> {
|
||||
let timer = &persisted_timer.timer;
|
||||
Ok(codex_state::ThreadTimerUpdateParams {
|
||||
trigger_json: serde_json::to_string(&timer.trigger)
|
||||
.map_err(|err| format!("failed to serialize timer trigger: {err}"))?,
|
||||
content: timer.content.clone(),
|
||||
instructions: timer.instructions.clone(),
|
||||
meta_json: serde_json::to_string(&timer.meta)
|
||||
.map_err(|err| format!("failed to serialize timer metadata: {err}"))?,
|
||||
delivery: timer.delivery.as_str().to_string(),
|
||||
next_run_at: timer.next_run_at,
|
||||
last_run_at: timer.last_run_at,
|
||||
pending_run: persisted_timer.pending_run,
|
||||
})
|
||||
}
|
||||
|
||||
async fn persist_timer_due_best_effort(&self, id: &str, due_at: i64) -> bool {
|
||||
let state_db = match self.timer_state_db().await {
|
||||
Ok(state_db) => state_db,
|
||||
Err(err) => {
|
||||
warn!("failed to persist due timer {id}: {err}");
|
||||
return false;
|
||||
}
|
||||
};
|
||||
let Some(persisted_timer) = self.timers.lock().await.persisted_timer(id) else {
|
||||
return false;
|
||||
};
|
||||
match state_db
|
||||
.update_thread_timer_due(
|
||||
&self.thread_id_string(),
|
||||
id,
|
||||
due_at,
|
||||
persisted_timer.timer.next_run_at,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(updated) => updated,
|
||||
Err(err) => {
|
||||
warn!("failed to persist due timer {id}: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn try_claim_timer_in_db(&self, claimed: &ClaimedTimer) -> bool {
|
||||
let state_db = match self.timer_state_db().await {
|
||||
Ok(state_db) => state_db,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"failed to claim timer {} in sqlite: {err}",
|
||||
claimed.timer.id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
let thread_id = self.thread_id_string();
|
||||
let result = if claimed.deleted_one_shot_timer {
|
||||
state_db
|
||||
.claim_one_shot_thread_timer(
|
||||
&thread_id,
|
||||
&claimed.timer.id,
|
||||
claimed.context.queued_at,
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
let persisted_timer = PersistedTimer {
|
||||
timer: claimed.timer.clone(),
|
||||
pending_run: claimed.timer.trigger.is_idle_recurring(),
|
||||
};
|
||||
let Ok(params) = self.thread_timer_update_params(&persisted_timer) else {
|
||||
return false;
|
||||
};
|
||||
state_db
|
||||
.claim_recurring_thread_timer(
|
||||
&thread_id,
|
||||
&claimed.timer.id,
|
||||
claimed.context.queued_at,
|
||||
claimed.previous_last_run_at,
|
||||
¶ms,
|
||||
)
|
||||
.await
|
||||
};
|
||||
match result {
|
||||
Ok(claimed) => claimed,
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"failed to claim timer {} in sqlite: {err}",
|
||||
claimed.timer.id
|
||||
);
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn restore_timers_from_db(self: &Arc<Self>) {
|
||||
if !self.timer_db_sync_feature_enabled() {
|
||||
return;
|
||||
}
|
||||
let Ok(state_db) = self.timer_state_db().await else {
|
||||
return;
|
||||
};
|
||||
self.start_timer_db_sync_task(state_db);
|
||||
if self.timers_feature_enabled() {
|
||||
self.sync_timers_from_db(/*emit_update*/ true).await;
|
||||
}
|
||||
self.maybe_start_pending_timer().await;
|
||||
}
|
||||
|
||||
fn start_timer_db_sync_task(self: &Arc<Self>, state_db: state_db::StateDbHandle) {
|
||||
if !self.timer_db_sync_feature_enabled() {
|
||||
return;
|
||||
}
|
||||
if self
|
||||
.timer_db_sync_started
|
||||
.swap(/*val*/ true, Ordering::SeqCst)
|
||||
{
|
||||
return;
|
||||
}
|
||||
let weak = Arc::downgrade(self);
|
||||
let session_cancel = self.timer_tasks_cancellation_token.clone();
|
||||
tokio::spawn(async move {
|
||||
let checker = match state_db.timer_data_version_checker().await {
|
||||
Ok(checker) => checker,
|
||||
Err(err) => {
|
||||
warn!("failed to start timer db sync: {err}");
|
||||
if let Some(session) = weak.upgrade() {
|
||||
session.timer_db_sync_started.store(false, Ordering::SeqCst);
|
||||
}
|
||||
return;
|
||||
}
|
||||
};
|
||||
let mut last_data_version = checker.data_version().await.ok();
|
||||
let mut last_full_refresh = tokio::time::Instant::now();
|
||||
let mut interval = tokio::time::interval(TIMER_DB_SYNC_INTERVAL);
|
||||
interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
if let Some(session) = weak.upgrade() {
|
||||
session.sync_timers_from_db(/*emit_update*/ true).await;
|
||||
session.maybe_start_pending_timer().await;
|
||||
last_full_refresh = tokio::time::Instant::now();
|
||||
}
|
||||
|
||||
loop {
|
||||
tokio::select! {
|
||||
_ = session_cancel.cancelled() => break,
|
||||
_ = interval.tick() => {}
|
||||
}
|
||||
|
||||
let current_data_version = match checker.data_version().await {
|
||||
Ok(version) => Some(version),
|
||||
Err(err) => {
|
||||
warn!("failed to poll timer db data_version: {err}");
|
||||
None
|
||||
}
|
||||
};
|
||||
let version_changed =
|
||||
current_data_version.is_some() && current_data_version != last_data_version;
|
||||
let max_refresh_elapsed =
|
||||
last_full_refresh.elapsed() >= TIMER_DB_MAX_REFRESH_INTERVAL;
|
||||
if !version_changed && !max_refresh_elapsed {
|
||||
continue;
|
||||
}
|
||||
last_data_version = current_data_version.or(last_data_version);
|
||||
let Some(session) = weak.upgrade() else {
|
||||
break;
|
||||
};
|
||||
session.sync_timers_from_db(/*emit_update*/ true).await;
|
||||
session.maybe_start_pending_timer().await;
|
||||
last_full_refresh = tokio::time::Instant::now();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async fn sync_timers_from_db(self: &Arc<Self>, emit_update: bool) -> TimerDbSyncStatus {
|
||||
if !self.timers_feature_enabled() {
|
||||
return TimerDbSyncStatus::Failed;
|
||||
}
|
||||
let Ok(state_db) = self.timer_state_db().await else {
|
||||
return TimerDbSyncStatus::Failed;
|
||||
};
|
||||
self.start_timer_db_sync_task(state_db.clone());
|
||||
let thread_id = self.thread_id_string();
|
||||
let db_timers = match state_db.list_thread_timers(&thread_id).await {
|
||||
Ok(timers) => timers,
|
||||
Err(err) => {
|
||||
warn!("failed to load timers from sqlite for thread {thread_id}: {err}");
|
||||
return TimerDbSyncStatus::Failed;
|
||||
}
|
||||
};
|
||||
let persisted = db_timers
|
||||
.into_iter()
|
||||
.filter_map(db_timer_to_persisted_timer)
|
||||
.collect::<Vec<_>>();
|
||||
let (changed, restored_tasks) = self
|
||||
.timers
|
||||
.lock()
|
||||
.await
|
||||
.replace_timers_if_changed(persisted, Utc::now());
|
||||
self.spawn_restored_timer_tasks(restored_tasks);
|
||||
if changed && emit_update {
|
||||
self.emit_timer_updated_notification().await;
|
||||
}
|
||||
if changed {
|
||||
TimerDbSyncStatus::Changed
|
||||
} else {
|
||||
TimerDbSyncStatus::Unchanged
|
||||
}
|
||||
}
|
||||
|
||||
fn timers_feature_enabled(&self) -> bool {
|
||||
self.features.enabled(Feature::Timers)
|
||||
}
|
||||
|
||||
fn queued_messages_feature_enabled(&self) -> bool {
|
||||
self.features.enabled(Feature::QueuedMessages)
|
||||
}
|
||||
|
||||
fn timer_db_sync_feature_enabled(&self) -> bool {
|
||||
self.timers_feature_enabled() || self.queued_messages_feature_enabled()
|
||||
}
|
||||
|
||||
fn spawn_restored_timer_tasks(self: &Arc<Self>, restored_tasks: Vec<RestoredTimerTask>) {
|
||||
for RestoredTimerTask {
|
||||
id,
|
||||
timer_spec,
|
||||
timer_cancel,
|
||||
} in restored_tasks
|
||||
{
|
||||
self.spawn_timer_task(id, timer_spec, timer_cancel);
|
||||
}
|
||||
}
|
||||
|
||||
fn thread_id_string(&self) -> String {
|
||||
self.conversation_id.to_string()
|
||||
}
|
||||
|
||||
async fn timer_client_id(&self) -> String {
|
||||
let state = self.state.lock().await;
|
||||
state
|
||||
.session_configuration
|
||||
.app_server_client_name
|
||||
.clone()
|
||||
.unwrap_or_else(|| TIMER_CLIENT_ID_FALLBACK.to_string())
|
||||
}
|
||||
|
||||
async fn emit_timer_updated_notification(&self) {
|
||||
let timers = self.list_timers_from_memory().await;
|
||||
let Ok(payload) = serde_json::to_string(&timers) else {
|
||||
warn!("failed to serialize timer update payload");
|
||||
return;
|
||||
};
|
||||
self.send_event_raw(Event {
|
||||
id: INITIAL_SUBMIT_ID.to_owned(),
|
||||
msg: EventMsg::BackgroundEvent(BackgroundEventEvent {
|
||||
message: format!("{TIMER_UPDATED_BACKGROUND_EVENT_PREFIX}{payload}"),
|
||||
}),
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn emit_timer_fired_notification(&self, timer: &ThreadTimer) {
|
||||
let Ok(payload) = serde_json::to_string(timer) else {
|
||||
warn!("failed to serialize timer fired payload");
|
||||
return;
|
||||
};
|
||||
self.send_event_raw(Event {
|
||||
id: INITIAL_SUBMIT_ID.to_owned(),
|
||||
msg: EventMsg::BackgroundEvent(BackgroundEventEvent {
|
||||
message: format!("{TIMER_FIRED_BACKGROUND_EVENT_PREFIX}{payload}"),
|
||||
}),
|
||||
})
|
||||
.await;
|
||||
}
|
||||
}
|
||||
190
codex-rs/core/src/codex/timer_runtime_tests.rs
Normal file
190
codex-rs/core/src/codex/timer_runtime_tests.rs
Normal file
@@ -0,0 +1,190 @@
|
||||
use super::make_session_and_context_with_rx;
|
||||
use crate::injected_message::MessagePayload;
|
||||
use crate::timers::MAX_ACTIVE_TIMERS_PER_THREAD;
|
||||
use crate::timers::ThreadTimerTrigger;
|
||||
use crate::timers::TimerDelivery;
|
||||
use codex_features::Feature;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[tokio::test]
|
||||
async fn dropping_session_cancels_timer_tasks() {
|
||||
let (session, _, _) = make_session_and_context_with_rx().await;
|
||||
let cancel_token = session.timer_tasks_cancellation_token.clone();
|
||||
|
||||
drop(session);
|
||||
|
||||
assert!(cancel_token.is_cancelled());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn maybe_start_pending_timer_claims_only_one_timer_while_start_is_in_progress() {
|
||||
let (mut session, _, _) = make_session_and_context_with_rx().await;
|
||||
Arc::get_mut(&mut session)
|
||||
.expect("session should have no other references")
|
||||
.features
|
||||
.enable(Feature::Timers)
|
||||
.expect("test config should allow feature update");
|
||||
let config = {
|
||||
let state = session.state.lock().await;
|
||||
state
|
||||
.session_configuration
|
||||
.original_config_do_not_use
|
||||
.clone()
|
||||
};
|
||||
let state_db = codex_state::StateRuntime::init(
|
||||
config.sqlite_home.clone(),
|
||||
config.model_provider_id.clone(),
|
||||
)
|
||||
.await
|
||||
.expect("state db should open");
|
||||
let now = chrono::Utc::now();
|
||||
let trigger = ThreadTimerTrigger::Delay {
|
||||
seconds: 10,
|
||||
repeat: Some(true),
|
||||
};
|
||||
{
|
||||
let mut timers = session.timers.lock().await;
|
||||
for (id, content) in [("timer-1", "first"), ("timer-2", "second")] {
|
||||
timers
|
||||
.create_timer(
|
||||
crate::timers::CreateTimer {
|
||||
id: id.to_string(),
|
||||
trigger: trigger.clone(),
|
||||
payload: MessagePayload {
|
||||
content: content.to_string(),
|
||||
instructions: None,
|
||||
meta: Default::default(),
|
||||
},
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
now,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
)
|
||||
.expect("timer should be created");
|
||||
timers.mark_timer_due(id, now);
|
||||
}
|
||||
}
|
||||
for timer_id in ["timer-1", "timer-2"] {
|
||||
let persisted_timer = session
|
||||
.timers
|
||||
.lock()
|
||||
.await
|
||||
.persisted_timer(timer_id)
|
||||
.expect("timer should be in memory");
|
||||
let timer = persisted_timer.timer;
|
||||
state_db
|
||||
.create_thread_timer(&codex_state::ThreadTimerCreateParams {
|
||||
id: timer.id,
|
||||
thread_id: session.conversation_id.to_string(),
|
||||
source: "agent".to_string(),
|
||||
client_id: "codex-cli".to_string(),
|
||||
trigger_json: serde_json::to_string(&timer.trigger)
|
||||
.expect("trigger should serialize"),
|
||||
content: timer.content,
|
||||
instructions: timer.instructions,
|
||||
meta_json: serde_json::to_string(&timer.meta).expect("metadata should serialize"),
|
||||
delivery: timer.delivery.as_str().to_string(),
|
||||
created_at: timer.created_at,
|
||||
next_run_at: timer.next_run_at,
|
||||
last_run_at: timer.last_run_at,
|
||||
pending_run: persisted_timer.pending_run,
|
||||
})
|
||||
.await
|
||||
.expect("timer should be persisted");
|
||||
}
|
||||
|
||||
let first = Arc::clone(&session);
|
||||
let second = Arc::clone(&session);
|
||||
tokio::join!(
|
||||
first.maybe_start_pending_timer(),
|
||||
second.maybe_start_pending_timer()
|
||||
);
|
||||
|
||||
let timers = session.timers.lock().await.list_timers();
|
||||
assert_eq!(
|
||||
timers
|
||||
.iter()
|
||||
.filter(|timer| timer.last_run_at.is_some())
|
||||
.count(),
|
||||
1
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn create_timer_rejects_when_sqlite_thread_timer_limit_is_reached() {
|
||||
let (mut session, _, _) = make_session_and_context_with_rx().await;
|
||||
Arc::get_mut(&mut session)
|
||||
.expect("session should have no other references")
|
||||
.features
|
||||
.enable(Feature::Timers)
|
||||
.expect("test config should allow feature update");
|
||||
let config = {
|
||||
let state = session.state.lock().await;
|
||||
state
|
||||
.session_configuration
|
||||
.original_config_do_not_use
|
||||
.clone()
|
||||
};
|
||||
let state_db = codex_state::StateRuntime::init(
|
||||
config.sqlite_home.clone(),
|
||||
config.model_provider_id.clone(),
|
||||
)
|
||||
.await
|
||||
.expect("state db should open");
|
||||
let thread_id = session.conversation_id.to_string();
|
||||
for index in 0..MAX_ACTIVE_TIMERS_PER_THREAD {
|
||||
state_db
|
||||
.create_thread_timer(&test_timer_params(&thread_id, &format!("timer-{index}")))
|
||||
.await
|
||||
.expect("seed timer");
|
||||
}
|
||||
|
||||
let err = session
|
||||
.create_timer(
|
||||
ThreadTimerTrigger::Delay {
|
||||
seconds: 10,
|
||||
repeat: None,
|
||||
},
|
||||
MessagePayload {
|
||||
content: "overflow".to_string(),
|
||||
instructions: None,
|
||||
meta: Default::default(),
|
||||
},
|
||||
TimerDelivery::AfterTurn,
|
||||
)
|
||||
.await
|
||||
.expect_err("timer creation should reject full sqlite timer set");
|
||||
|
||||
assert_eq!(
|
||||
err,
|
||||
format!(
|
||||
"too many active timers; each thread supports at most {MAX_ACTIVE_TIMERS_PER_THREAD} timers"
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
state_db
|
||||
.list_thread_timers(&thread_id)
|
||||
.await
|
||||
.expect("list timers")
|
||||
.len(),
|
||||
MAX_ACTIVE_TIMERS_PER_THREAD
|
||||
);
|
||||
}
|
||||
|
||||
fn test_timer_params(thread_id: &str, id: &str) -> codex_state::ThreadTimerCreateParams {
|
||||
codex_state::ThreadTimerCreateParams {
|
||||
id: id.to_string(),
|
||||
thread_id: thread_id.to_string(),
|
||||
source: "agent".to_string(),
|
||||
client_id: "codex-cli".to_string(),
|
||||
trigger_json: r#"{"kind":"delay","seconds":10}"#.to_string(),
|
||||
content: "existing timer".to_string(),
|
||||
instructions: None,
|
||||
meta_json: "{}".to_string(),
|
||||
delivery: TimerDelivery::AfterTurn.as_str().to_string(),
|
||||
created_at: 100,
|
||||
next_run_at: Some(200),
|
||||
last_run_at: None,
|
||||
pending_run: false,
|
||||
}
|
||||
}
|
||||
@@ -13,6 +13,7 @@ use crate::function_tool::FunctionCallError;
|
||||
use crate::mcp_tool_exposure::DIRECT_MCP_TOOL_EXPOSURE_THRESHOLD;
|
||||
use crate::mcp_tool_exposure::build_mcp_tool_exposure;
|
||||
use crate::shell::default_user_shell;
|
||||
use crate::timers::TimersState;
|
||||
use crate::tools::format_exec_output_str;
|
||||
|
||||
use codex_features::Features;
|
||||
@@ -2941,9 +2942,13 @@ pub(crate) async fn make_session_and_context() -> (Session, TurnContext) {
|
||||
pending_mcp_server_refresh_config: Mutex::new(None),
|
||||
conversation: Arc::new(RealtimeConversationManager::new()),
|
||||
active_turn: Mutex::new(None),
|
||||
timer_start_in_progress: Mutex::new(false),
|
||||
timer_db_sync_started: AtomicBool::new(false),
|
||||
mailbox,
|
||||
mailbox_rx: Mutex::new(mailbox_rx),
|
||||
idle_pending_input: Mutex::new(Vec::new()),
|
||||
timers: Mutex::new(TimersState::default()),
|
||||
timer_tasks_cancellation_token: CancellationToken::new(),
|
||||
guardian_review_session: crate::guardian::GuardianReviewSessionManager::default(),
|
||||
services,
|
||||
js_repl,
|
||||
@@ -3786,9 +3791,13 @@ pub(crate) async fn make_session_and_context_with_dynamic_tools_and_rx(
|
||||
pending_mcp_server_refresh_config: Mutex::new(None),
|
||||
conversation: Arc::new(RealtimeConversationManager::new()),
|
||||
active_turn: Mutex::new(None),
|
||||
timer_start_in_progress: Mutex::new(false),
|
||||
timer_db_sync_started: AtomicBool::new(false),
|
||||
mailbox,
|
||||
mailbox_rx: Mutex::new(mailbox_rx),
|
||||
idle_pending_input: Mutex::new(Vec::new()),
|
||||
timers: Mutex::new(TimersState::default()),
|
||||
timer_tasks_cancellation_token: CancellationToken::new(),
|
||||
guardian_review_session: crate::guardian::GuardianReviewSessionManager::default(),
|
||||
services,
|
||||
js_repl,
|
||||
|
||||
@@ -29,6 +29,10 @@ use std::path::PathBuf;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio::sync::watch;
|
||||
|
||||
use crate::injected_message::MessagePayload;
|
||||
use crate::timers::ThreadTimer;
|
||||
use crate::timers::ThreadTimerTrigger;
|
||||
use crate::timers::TimerDelivery;
|
||||
use codex_rollout::state_db::StateDbHandle;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
@@ -87,6 +91,11 @@ impl CodexThread {
|
||||
self.codex.session.flush_rollout().await
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub async fn has_pending_input(&self) -> bool {
|
||||
self.codex.session.has_pending_input().await
|
||||
}
|
||||
|
||||
pub async fn submit_with_trace(
|
||||
&self,
|
||||
op: Op,
|
||||
@@ -276,6 +285,26 @@ impl CodexThread {
|
||||
|
||||
Ok(*guard)
|
||||
}
|
||||
|
||||
pub async fn create_timer(
|
||||
&self,
|
||||
trigger: ThreadTimerTrigger,
|
||||
payload: MessagePayload,
|
||||
delivery: TimerDelivery,
|
||||
) -> Result<ThreadTimer, String> {
|
||||
self.codex
|
||||
.session
|
||||
.create_timer(trigger, payload, delivery)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_timer(&self, id: &str) -> Result<bool, String> {
|
||||
self.codex.session.delete_timer(id).await
|
||||
}
|
||||
|
||||
pub async fn list_timers(&self) -> Vec<ThreadTimer> {
|
||||
self.codex.session.list_timers().await
|
||||
}
|
||||
}
|
||||
|
||||
fn pending_message_input_item(message: &ResponseItem) -> CodexResult<ResponseInputItem> {
|
||||
|
||||
@@ -10,19 +10,20 @@ use codex_hooks::UserPromptSubmitOutcome;
|
||||
use codex_hooks::UserPromptSubmitRequest;
|
||||
use codex_protocol::items::TurnItem;
|
||||
use codex_protocol::models::DeveloperInstructions;
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::HookCompletedEvent;
|
||||
use codex_protocol::protocol::HookRunSummary;
|
||||
use codex_protocol::protocol::HookStartedEvent;
|
||||
use codex_protocol::protocol::InjectedMessageEvent;
|
||||
use codex_protocol::user_input::UserInput;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::codex::Session;
|
||||
use crate::codex::TurnContext;
|
||||
use crate::event_mapping::parse_turn_item;
|
||||
use crate::pending_input::PendingInputItem;
|
||||
|
||||
pub(crate) struct HookRuntimeOutcome {
|
||||
pub should_stop: bool,
|
||||
@@ -38,6 +39,7 @@ pub(crate) enum PendingInputRecord {
|
||||
UserMessage {
|
||||
content: Vec<UserInput>,
|
||||
response_item: ResponseItem,
|
||||
injected_event: Option<InjectedMessageEvent>,
|
||||
additional_contexts: Vec<String>,
|
||||
},
|
||||
ConversationItem {
|
||||
@@ -199,9 +201,10 @@ pub(crate) async fn run_user_prompt_submit_hooks(
|
||||
pub(crate) async fn inspect_pending_input(
|
||||
sess: &Arc<Session>,
|
||||
turn_context: &Arc<TurnContext>,
|
||||
pending_input_item: ResponseInputItem,
|
||||
pending_input_item: PendingInputItem,
|
||||
) -> PendingInputHookDisposition {
|
||||
let response_item = ResponseItem::from(pending_input_item);
|
||||
let (input_item, injected_event) = pending_input_item.into_parts();
|
||||
let response_item = ResponseItem::from(input_item);
|
||||
if let Some(TurnItem::UserMessage(user_message)) = parse_turn_item(&response_item) {
|
||||
let user_prompt_submit_outcome =
|
||||
run_user_prompt_submit_hooks(sess, turn_context, user_message.message()).await;
|
||||
@@ -213,6 +216,7 @@ pub(crate) async fn inspect_pending_input(
|
||||
PendingInputHookDisposition::Accepted(Box::new(PendingInputRecord::UserMessage {
|
||||
content: user_message.content,
|
||||
response_item,
|
||||
injected_event,
|
||||
additional_contexts: user_prompt_submit_outcome.additional_contexts,
|
||||
}))
|
||||
}
|
||||
@@ -232,14 +236,24 @@ pub(crate) async fn record_pending_input(
|
||||
PendingInputRecord::UserMessage {
|
||||
content,
|
||||
response_item,
|
||||
injected_event,
|
||||
additional_contexts,
|
||||
} => {
|
||||
sess.record_user_prompt_and_emit_turn_item(
|
||||
turn_context.as_ref(),
|
||||
content.as_slice(),
|
||||
response_item,
|
||||
)
|
||||
.await;
|
||||
if let Some(injected_event) = injected_event {
|
||||
sess.record_generated_message_and_emit_display(
|
||||
turn_context.as_ref(),
|
||||
response_item,
|
||||
injected_event,
|
||||
)
|
||||
.await;
|
||||
} else {
|
||||
sess.record_user_prompt_and_emit_turn_item(
|
||||
turn_context.as_ref(),
|
||||
content.as_slice(),
|
||||
response_item,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
record_additional_contexts(sess, turn_context, additional_contexts).await;
|
||||
}
|
||||
PendingInputRecord::ConversationItem { response_item } => {
|
||||
|
||||
175
codex-rs/core/src/injected_message.rs
Normal file
175
codex-rs/core/src/injected_message.rs
Normal file
@@ -0,0 +1,175 @@
|
||||
//! Generated user-message payloads and model-visible XML envelopes.
|
||||
//!
|
||||
//! This module owns the representation for messages that are delivered into a
|
||||
//! thread by the harness rather than typed directly by the user. The model sees
|
||||
//! the XML envelope recorded in history; transcript clients receive a separate
|
||||
//! structured event with the human-facing text so they do not need to parse or
|
||||
//! hide that XML.
|
||||
|
||||
#![allow(dead_code)]
|
||||
|
||||
use crate::timers::TimerDelivery;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
use codex_protocol::protocol::InjectedMessageEvent;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
const EXTERNAL_MESSAGE_OPEN_TAG: &str = "<external_message>";
|
||||
const EXTERNAL_MESSAGE_CLOSE_TAG: &str = "</external_message>";
|
||||
const TIMER_MESSAGE_OPEN_TAG: &str = "<timer_message>";
|
||||
const TIMER_MESSAGE_CLOSE_TAG: &str = "</timer_message>";
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct MessagePayload {
|
||||
pub content: String,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub instructions: Option<String>,
|
||||
#[serde(default)]
|
||||
pub meta: BTreeMap<String, String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) enum InjectedMessage {
|
||||
External {
|
||||
source: String,
|
||||
content: String,
|
||||
},
|
||||
Timer {
|
||||
timer_id: String,
|
||||
content: String,
|
||||
instructions: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
impl InjectedMessage {
|
||||
pub(crate) fn from_external_row(
|
||||
row: codex_state::ExternalMessage,
|
||||
) -> Result<(Self, TimerDelivery), String> {
|
||||
let delivery = serde_json::from_value::<TimerDelivery>(serde_json::Value::String(
|
||||
row.delivery.clone(),
|
||||
))
|
||||
.map_err(|err| format!("invalid message delivery `{}`: {err}", row.delivery))?;
|
||||
Ok((
|
||||
Self::External {
|
||||
source: row.source,
|
||||
content: row.content,
|
||||
},
|
||||
delivery,
|
||||
))
|
||||
}
|
||||
|
||||
pub(crate) fn prompt_input_item(&self) -> ResponseInputItem {
|
||||
ResponseInputItem::Message {
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: self.render_prompt(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn event(&self) -> InjectedMessageEvent {
|
||||
match self {
|
||||
Self::External { source, content } => InjectedMessageEvent {
|
||||
content: content.clone(),
|
||||
source: source.clone(),
|
||||
},
|
||||
Self::Timer {
|
||||
timer_id, content, ..
|
||||
} => InjectedMessageEvent {
|
||||
content: content.clone(),
|
||||
source: format!("timer {timer_id}"),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn render_prompt(&self) -> String {
|
||||
let mut rendered = String::new();
|
||||
match self {
|
||||
Self::External { content, .. } => {
|
||||
rendered.push_str(EXTERNAL_MESSAGE_OPEN_TAG);
|
||||
rendered.push('\n');
|
||||
push_block_tag(&mut rendered, "content", content);
|
||||
rendered.push_str(EXTERNAL_MESSAGE_CLOSE_TAG);
|
||||
}
|
||||
Self::Timer {
|
||||
timer_id,
|
||||
content,
|
||||
instructions,
|
||||
} => {
|
||||
rendered.push_str(TIMER_MESSAGE_OPEN_TAG);
|
||||
rendered.push('\n');
|
||||
push_tag(&mut rendered, "timer_id", timer_id);
|
||||
push_block_tag(&mut rendered, "content", content);
|
||||
if let Some(instructions) = instructions.as_deref() {
|
||||
push_block_tag(&mut rendered, "instructions", instructions);
|
||||
}
|
||||
rendered.push_str(TIMER_MESSAGE_CLOSE_TAG);
|
||||
}
|
||||
}
|
||||
rendered
|
||||
}
|
||||
}
|
||||
|
||||
fn push_tag(rendered: &mut String, tag: &str, value: &str) {
|
||||
rendered.push('<');
|
||||
rendered.push_str(tag);
|
||||
rendered.push('>');
|
||||
rendered.push_str(&xml_escape(value));
|
||||
rendered.push_str("</");
|
||||
rendered.push_str(tag);
|
||||
rendered.push_str(">\n");
|
||||
}
|
||||
|
||||
fn push_block_tag(rendered: &mut String, tag: &str, value: &str) {
|
||||
rendered.push('<');
|
||||
rendered.push_str(tag);
|
||||
rendered.push_str(">\n");
|
||||
rendered.push_str(&xml_escape(value));
|
||||
rendered.push('\n');
|
||||
rendered.push_str("</");
|
||||
rendered.push_str(tag);
|
||||
rendered.push_str(">\n");
|
||||
}
|
||||
|
||||
fn xml_escape(value: &str) -> String {
|
||||
value
|
||||
.replace('&', "&")
|
||||
.replace('<', "<")
|
||||
.replace('>', ">")
|
||||
.replace('"', """)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn renders_external_message_prompt_with_only_content() {
|
||||
let message = InjectedMessage::External {
|
||||
source: "external".to_string(),
|
||||
content: "run <tests>".to_string(),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
message.render_prompt(),
|
||||
"<external_message>\n<content>\nrun <tests>\n</content>\n</external_message>"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn renders_timer_message_prompt_with_timer_id() {
|
||||
let message = InjectedMessage::Timer {
|
||||
timer_id: "timer-1".to_string(),
|
||||
content: "run <tests>".to_string(),
|
||||
instructions: Some("stay \"brief\"".to_string()),
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
message.render_prompt(),
|
||||
"<timer_message>\n<timer_id>timer-1</timer_id>\n<content>\nrun <tests>\n</content>\n<instructions>\nstay "brief"\n</instructions>\n</timer_message>"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -39,9 +39,12 @@ mod flags;
|
||||
mod git_info_tests;
|
||||
mod guardian;
|
||||
mod hook_runtime;
|
||||
pub mod injected_message;
|
||||
mod installation_id;
|
||||
pub(crate) mod instructions;
|
||||
pub(crate) mod landlock;
|
||||
mod timer_trigger;
|
||||
pub mod timers;
|
||||
pub use landlock::spawn_command_under_linux_sandbox;
|
||||
pub(crate) mod mcp;
|
||||
mod mcp_skill_dependencies;
|
||||
@@ -62,6 +65,7 @@ mod mcp_tool_call;
|
||||
mod memories;
|
||||
pub(crate) mod mention_syntax;
|
||||
pub(crate) mod message_history;
|
||||
pub(crate) mod pending_input;
|
||||
pub(crate) mod utils;
|
||||
pub use mention_syntax::PLUGIN_TEXT_MENTION_SIGIL;
|
||||
pub use mention_syntax::TOOL_MENTION_SIGIL;
|
||||
@@ -167,6 +171,7 @@ pub use rollout::append_thread_name;
|
||||
pub use rollout::find_archived_thread_path_by_id_str;
|
||||
#[deprecated(note = "use find_thread_path_by_id_str")]
|
||||
pub use rollout::find_conversation_path_by_id_str;
|
||||
pub use rollout::find_thread_ids_by_name;
|
||||
pub use rollout::find_thread_meta_by_name_str;
|
||||
pub use rollout::find_thread_name_by_id;
|
||||
pub use rollout::find_thread_names_by_ids;
|
||||
|
||||
49
codex-rs/core/src/pending_input.rs
Normal file
49
codex-rs/core/src/pending_input.rs
Normal file
@@ -0,0 +1,49 @@
|
||||
//! Pending input queued for delivery into a future or active regular turn.
|
||||
//!
|
||||
//! Most pending input is an ordinary model input item. Injected messages carry
|
||||
//! the model-visible item plus a separate display event so clients can render
|
||||
//! the human-facing content without parsing the XML envelope recorded in model
|
||||
//! history.
|
||||
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
use codex_protocol::protocol::InjectedMessageEvent;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) struct PendingInputItem {
|
||||
item: ResponseInputItem,
|
||||
injected_event: Option<InjectedMessageEvent>,
|
||||
}
|
||||
|
||||
impl PendingInputItem {
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn injected(item: ResponseInputItem, event: InjectedMessageEvent) -> Self {
|
||||
Self {
|
||||
item,
|
||||
injected_event: Some(event),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn timer_source(&self) -> Option<&str> {
|
||||
let source = self.injected_event.as_ref()?.source.as_str();
|
||||
source.starts_with("timer ").then_some(source)
|
||||
}
|
||||
|
||||
pub(crate) fn into_parts(self) -> (ResponseInputItem, Option<InjectedMessageEvent>) {
|
||||
(self.item, self.injected_event)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn into_model_input(self) -> ResponseInputItem {
|
||||
self.item
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ResponseInputItem> for PendingInputItem {
|
||||
fn from(item: ResponseInputItem) -> Self {
|
||||
Self {
|
||||
item,
|
||||
injected_event: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -14,6 +14,7 @@ pub use codex_rollout::append_thread_name;
|
||||
pub use codex_rollout::find_archived_thread_path_by_id_str;
|
||||
#[deprecated(note = "use find_thread_path_by_id_str")]
|
||||
pub use codex_rollout::find_conversation_path_by_id_str;
|
||||
pub use codex_rollout::find_thread_ids_by_name;
|
||||
pub use codex_rollout::find_thread_meta_by_name_str;
|
||||
pub use codex_rollout::find_thread_name_by_id;
|
||||
pub use codex_rollout::find_thread_names_by_ids;
|
||||
|
||||
@@ -10,7 +10,6 @@ use tokio_util::sync::CancellationToken;
|
||||
use tokio_util::task::AbortOnDropHandle;
|
||||
|
||||
use codex_protocol::dynamic_tools::DynamicToolResponse;
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
use codex_protocol::request_permissions::RequestPermissionsResponse;
|
||||
use codex_protocol::request_user_input::RequestUserInputResponse;
|
||||
use codex_rmcp_client::ElicitationResponse;
|
||||
@@ -18,6 +17,7 @@ use rmcp::model::RequestId;
|
||||
use tokio::sync::oneshot;
|
||||
|
||||
use crate::codex::TurnContext;
|
||||
use crate::pending_input::PendingInputItem;
|
||||
use crate::tasks::AnySessionTask;
|
||||
use codex_protocol::models::PermissionProfile;
|
||||
use codex_protocol::protocol::ReviewDecision;
|
||||
@@ -101,7 +101,7 @@ pub(crate) struct TurnState {
|
||||
pending_user_input: HashMap<String, oneshot::Sender<RequestUserInputResponse>>,
|
||||
pending_elicitations: HashMap<(String, RequestId), oneshot::Sender<ElicitationResponse>>,
|
||||
pending_dynamic_tools: HashMap<String, oneshot::Sender<DynamicToolResponse>>,
|
||||
pending_input: Vec<ResponseInputItem>,
|
||||
pending_input: Vec<PendingInputItem>,
|
||||
mailbox_delivery_phase: MailboxDeliveryPhase,
|
||||
granted_permissions: Option<PermissionProfile>,
|
||||
pub(crate) tool_calls: u64,
|
||||
@@ -197,11 +197,11 @@ impl TurnState {
|
||||
self.pending_dynamic_tools.remove(key)
|
||||
}
|
||||
|
||||
pub(crate) fn push_pending_input(&mut self, input: ResponseInputItem) {
|
||||
pub(crate) fn push_pending_input(&mut self, input: PendingInputItem) {
|
||||
self.pending_input.push(input);
|
||||
}
|
||||
|
||||
pub(crate) fn prepend_pending_input(&mut self, mut input: Vec<ResponseInputItem>) {
|
||||
pub(crate) fn prepend_pending_input(&mut self, mut input: Vec<PendingInputItem>) {
|
||||
if input.is_empty() {
|
||||
return;
|
||||
}
|
||||
@@ -210,7 +210,7 @@ impl TurnState {
|
||||
self.pending_input = input;
|
||||
}
|
||||
|
||||
pub(crate) fn take_pending_input(&mut self) -> Vec<ResponseInputItem> {
|
||||
pub(crate) fn take_pending_input(&mut self) -> Vec<PendingInputItem> {
|
||||
if self.pending_input.is_empty() {
|
||||
Vec::with_capacity(0)
|
||||
} else {
|
||||
|
||||
@@ -27,6 +27,7 @@ use crate::hook_runtime::PendingInputHookDisposition;
|
||||
use crate::hook_runtime::inspect_pending_input;
|
||||
use crate::hook_runtime::record_additional_contexts;
|
||||
use crate::hook_runtime::record_pending_input;
|
||||
use crate::pending_input::PendingInputItem;
|
||||
use crate::state::ActiveTurn;
|
||||
use crate::state::RunningTask;
|
||||
use crate::state::TaskKind;
|
||||
@@ -38,7 +39,6 @@ use codex_otel::TURN_NETWORK_PROXY_METRIC;
|
||||
use codex_otel::TURN_TOKEN_USAGE_METRIC;
|
||||
use codex_otel::TURN_TOOL_CALL_METRIC;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::RolloutItem;
|
||||
@@ -255,8 +255,8 @@ impl Session {
|
||||
let cancellation_token = CancellationToken::new();
|
||||
let done = Arc::new(Notify::new());
|
||||
|
||||
let queued_response_items = self.take_queued_response_items_for_next_turn().await;
|
||||
let mailbox_items = self.get_pending_input().await;
|
||||
let queued_response_items = self.take_queued_pending_input_for_next_turn().await;
|
||||
let mailbox_items = self.take_pending_input_items().await;
|
||||
let turn_state = {
|
||||
let mut active = self.active_turn.lock().await;
|
||||
let turn = active.get_or_insert_with(ActiveTurn::default);
|
||||
@@ -405,7 +405,7 @@ impl Session {
|
||||
.turn_metadata_state
|
||||
.cancel_git_enrichment_task();
|
||||
|
||||
let mut pending_input = Vec::<ResponseInputItem>::new();
|
||||
let mut pending_input = Vec::<PendingInputItem>::new();
|
||||
let mut should_clear_active_turn = false;
|
||||
let mut token_usage_at_turn_start = None;
|
||||
let mut turn_tool_calls = 0_u64;
|
||||
@@ -535,14 +535,15 @@ impl Session {
|
||||
});
|
||||
self.send_event(turn_context.as_ref(), event).await;
|
||||
|
||||
if should_clear_active_turn {
|
||||
let session = Arc::clone(self);
|
||||
let _scheduler = tokio::task::spawn_blocking(move || {
|
||||
tokio::runtime::Handle::current().block_on(async move {
|
||||
let session = Arc::clone(self);
|
||||
drop(tokio::task::spawn_blocking(move || {
|
||||
tokio::runtime::Handle::current().block_on(async move {
|
||||
session.maybe_start_pending_timer().await;
|
||||
if should_clear_active_turn {
|
||||
session.maybe_start_turn_for_pending_work().await;
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
async fn take_active_turn(&self) -> Option<ActiveTurn> {
|
||||
|
||||
883
codex-rs/core/src/timer_trigger.rs
Normal file
883
codex-rs/core/src/timer_trigger.rs
Normal file
@@ -0,0 +1,883 @@
|
||||
//! Trigger validation and next-fire calculation for persistent thread timers.
|
||||
//!
|
||||
//! This module keeps calendar and delay scheduling details out of `timers.rs`.
|
||||
//! It owns the persisted trigger shape, local wall-clock schedule normalization,
|
||||
//! and RRULE-backed recurrence evaluation.
|
||||
|
||||
use chrono::DateTime;
|
||||
use chrono::Duration as ChronoDuration;
|
||||
use chrono::LocalResult;
|
||||
use chrono::NaiveDateTime;
|
||||
use chrono::NaiveTime;
|
||||
use chrono::TimeZone;
|
||||
use chrono::Utc;
|
||||
use rrule::RRuleSet;
|
||||
use rrule::Tz;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::time::Duration;
|
||||
|
||||
const LOCAL_DATE_TIME_FORMAT: &str = "%Y-%m-%dT%H:%M:%S";
|
||||
const RRULE_DATE_TIME_FORMAT: &str = "%Y%m%dT%H%M%S";
|
||||
const TIME_ONLY_FORMATS: &[&str] = &["%H:%M:%S", "%H:%M", "%H"];
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(tag = "kind", rename_all = "kebab-case")]
|
||||
pub enum TimerTrigger {
|
||||
Delay {
|
||||
seconds: u64,
|
||||
repeat: Option<bool>,
|
||||
},
|
||||
Schedule {
|
||||
dtstart: Option<String>,
|
||||
rrule: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct TriggerTiming {
|
||||
pub(crate) trigger: TimerTrigger,
|
||||
pub(crate) pending_run: bool,
|
||||
pub(crate) next_run_at: Option<i64>,
|
||||
pub(crate) timer_delay: Option<Duration>,
|
||||
}
|
||||
|
||||
impl TimerTrigger {
|
||||
pub(crate) fn is_recurring(&self) -> bool {
|
||||
match self {
|
||||
Self::Delay { repeat, .. } => repeat.unwrap_or(false),
|
||||
Self::Schedule { rrule, .. } => rrule.as_ref().is_some_and(|rrule| !rrule.is_empty()),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_idle_recurring(&self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
Self::Delay {
|
||||
seconds: 0,
|
||||
repeat: Some(true),
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn timing_for_new_trigger(
|
||||
trigger: TimerTrigger,
|
||||
created_at: DateTime<Utc>,
|
||||
now: DateTime<Utc>,
|
||||
) -> Result<TriggerTiming, String> {
|
||||
let timezone = local_timezone();
|
||||
timing_for_new_trigger_with_timezone(trigger, created_at, now, timezone)
|
||||
}
|
||||
|
||||
pub(crate) fn timing_for_restored_trigger(
|
||||
trigger: TimerTrigger,
|
||||
created_at: i64,
|
||||
persisted_pending_run: bool,
|
||||
persisted_next_run_at: Option<i64>,
|
||||
now: DateTime<Utc>,
|
||||
) -> Result<TriggerTiming, String> {
|
||||
let timezone = local_timezone();
|
||||
timing_for_restored_trigger_with_timezone(
|
||||
trigger,
|
||||
created_at,
|
||||
persisted_pending_run,
|
||||
persisted_next_run_at,
|
||||
now,
|
||||
timezone,
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn next_run_after_due(
|
||||
trigger: &TimerTrigger,
|
||||
created_at: i64,
|
||||
now: DateTime<Utc>,
|
||||
) -> Result<Option<i64>, String> {
|
||||
let timezone = local_timezone();
|
||||
next_run_after_due_with_timezone(trigger, created_at, now, timezone)
|
||||
}
|
||||
|
||||
pub(crate) fn normalize_schedule_dtstart_input(input: &str) -> Result<String, String> {
|
||||
let timezone = local_timezone();
|
||||
normalize_schedule_dtstart_input_with_timezone(input, Utc::now(), timezone)
|
||||
}
|
||||
|
||||
fn normalize_schedule_dtstart_input_with_timezone(
|
||||
input: &str,
|
||||
now: DateTime<Utc>,
|
||||
timezone: Tz,
|
||||
) -> Result<String, String> {
|
||||
let trimmed = input.trim();
|
||||
if trimmed.is_empty() {
|
||||
return Err("schedule dtstart cannot be empty".to_string());
|
||||
}
|
||||
if parse_dtstart(trimmed).is_ok() {
|
||||
validate_dtstart(trimmed, timezone)?;
|
||||
return Ok(trimmed.to_string());
|
||||
}
|
||||
|
||||
let Some(time) = parse_time_only(trimmed) else {
|
||||
return Err(format!(
|
||||
"schedule dtstart `{trimmed}` must use format YYYY-MM-DDTHH:MM:SS or a time like HH:MM"
|
||||
));
|
||||
};
|
||||
let local_now = now.with_timezone(&timezone).naive_local();
|
||||
let mut local_dtstart = local_now.date().and_time(time);
|
||||
if local_dtstart <= local_now {
|
||||
local_dtstart = local_dtstart
|
||||
.checked_add_signed(ChronoDuration::days(1))
|
||||
.ok_or_else(|| "schedule dtstart is out of range".to_string())?;
|
||||
}
|
||||
let dtstart = local_dtstart.format(LOCAL_DATE_TIME_FORMAT).to_string();
|
||||
validate_dtstart(&dtstart, timezone)?;
|
||||
Ok(dtstart)
|
||||
}
|
||||
|
||||
fn timing_for_new_trigger_with_timezone(
|
||||
trigger: TimerTrigger,
|
||||
created_at: DateTime<Utc>,
|
||||
now: DateTime<Utc>,
|
||||
timezone: Tz,
|
||||
) -> Result<TriggerTiming, String> {
|
||||
let normalized = normalize_trigger(trigger, now, timezone)?;
|
||||
match &normalized {
|
||||
TimerTrigger::Delay { seconds, repeat } => {
|
||||
let repeat = repeat.unwrap_or(false);
|
||||
if repeat && *seconds == 0 {
|
||||
return Ok(timing(
|
||||
normalized, /*pending_run*/ true, /*next_run_at*/ None, now,
|
||||
));
|
||||
}
|
||||
let next_run_at = checked_add_seconds(created_at, *seconds)?;
|
||||
let pending_run = next_run_at <= now;
|
||||
let next_run_at = if repeat {
|
||||
if pending_run {
|
||||
next_delay_recurring_run_at(created_at, *seconds, now)?
|
||||
} else {
|
||||
Some(next_run_at.timestamp())
|
||||
}
|
||||
} else {
|
||||
Some(next_run_at.timestamp())
|
||||
};
|
||||
Ok(timing(normalized, pending_run, next_run_at, now))
|
||||
}
|
||||
TimerTrigger::Schedule { rrule: None, .. } => {
|
||||
let due_at = schedule_dtstart_utc(&normalized, timezone)?;
|
||||
Ok(timing(
|
||||
normalized,
|
||||
due_at <= now,
|
||||
Some(due_at.timestamp()),
|
||||
now,
|
||||
))
|
||||
}
|
||||
TimerTrigger::Schedule { rrule: Some(_), .. } => {
|
||||
let due_or_next = next_schedule_occurrence_at_or_after(&normalized, now, timezone)?;
|
||||
let Some(due_or_next) = due_or_next else {
|
||||
return Ok(timing(
|
||||
normalized, /*pending_run*/ false, /*next_run_at*/ None, now,
|
||||
));
|
||||
};
|
||||
let pending_run = due_or_next <= now.timestamp();
|
||||
let next_run_at = if pending_run {
|
||||
next_schedule_occurrence_after(&normalized, now, timezone)?
|
||||
} else {
|
||||
Some(due_or_next)
|
||||
};
|
||||
Ok(timing(normalized, pending_run, next_run_at, now))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn timing_for_restored_trigger_with_timezone(
|
||||
trigger: TimerTrigger,
|
||||
created_at: i64,
|
||||
persisted_pending_run: bool,
|
||||
persisted_next_run_at: Option<i64>,
|
||||
now: DateTime<Utc>,
|
||||
timezone: Tz,
|
||||
) -> Result<TriggerTiming, String> {
|
||||
let normalized = normalize_trigger(trigger, now, timezone)?;
|
||||
match &normalized {
|
||||
TimerTrigger::Delay { seconds, repeat } => {
|
||||
let repeat = repeat.unwrap_or(false);
|
||||
if repeat && *seconds == 0 {
|
||||
return Ok(timing(
|
||||
normalized, /*pending_run*/ true, /*next_run_at*/ None, now,
|
||||
));
|
||||
}
|
||||
let next_run_at = persisted_next_run_at
|
||||
.or_else(|| next_delay_run_at(created_at, *seconds))
|
||||
.ok_or_else(|| "delay next run time is out of range".to_string())?;
|
||||
let due = next_run_at <= now.timestamp();
|
||||
let pending_run = persisted_pending_run || due;
|
||||
let next_run_at = if repeat && due {
|
||||
next_delay_recurring_run_at_from_timestamp(created_at, *seconds, now)?
|
||||
} else {
|
||||
Some(next_run_at)
|
||||
};
|
||||
Ok(timing(normalized, pending_run, next_run_at, now))
|
||||
}
|
||||
TimerTrigger::Schedule { rrule: None, .. } => {
|
||||
let next_run_at = persisted_next_run_at
|
||||
.or_else(|| {
|
||||
schedule_dtstart_utc(&normalized, timezone)
|
||||
.ok()
|
||||
.map(|dt| dt.timestamp())
|
||||
})
|
||||
.ok_or_else(|| "schedule next run time is out of range".to_string())?;
|
||||
Ok(timing(
|
||||
normalized,
|
||||
persisted_pending_run || next_run_at <= now.timestamp(),
|
||||
Some(next_run_at),
|
||||
now,
|
||||
))
|
||||
}
|
||||
TimerTrigger::Schedule { rrule: Some(_), .. } => {
|
||||
let Some(persisted_next_run_at) = persisted_next_run_at else {
|
||||
return Ok(timing(
|
||||
normalized,
|
||||
persisted_pending_run,
|
||||
/*next_run_at*/ None,
|
||||
now,
|
||||
));
|
||||
};
|
||||
let due = persisted_next_run_at <= now.timestamp();
|
||||
let next_run_at = if due {
|
||||
next_schedule_occurrence_after(&normalized, now, timezone)?
|
||||
} else {
|
||||
Some(persisted_next_run_at)
|
||||
};
|
||||
Ok(timing(
|
||||
normalized,
|
||||
persisted_pending_run || due,
|
||||
next_run_at,
|
||||
now,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn next_run_after_due_with_timezone(
|
||||
trigger: &TimerTrigger,
|
||||
created_at: i64,
|
||||
now: DateTime<Utc>,
|
||||
timezone: Tz,
|
||||
) -> Result<Option<i64>, String> {
|
||||
match trigger {
|
||||
TimerTrigger::Delay { seconds, repeat } => {
|
||||
if repeat.unwrap_or(false) {
|
||||
if *seconds == 0 {
|
||||
return Ok(None);
|
||||
}
|
||||
next_delay_recurring_run_at_from_timestamp(created_at, *seconds, now)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
TimerTrigger::Schedule { rrule: None, .. } => Ok(None),
|
||||
TimerTrigger::Schedule { rrule: Some(_), .. } => {
|
||||
next_schedule_occurrence_after(trigger, now, timezone)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn normalize_trigger(
|
||||
trigger: TimerTrigger,
|
||||
now: DateTime<Utc>,
|
||||
timezone: Tz,
|
||||
) -> Result<TimerTrigger, String> {
|
||||
match trigger {
|
||||
TimerTrigger::Delay { seconds, repeat } => Ok(TimerTrigger::Delay { seconds, repeat }),
|
||||
TimerTrigger::Schedule { dtstart, rrule } => {
|
||||
let dtstart = normalize_optional_string(dtstart);
|
||||
let rrule = normalize_optional_string(rrule);
|
||||
if dtstart.is_none() && rrule.is_none() {
|
||||
return Err("schedule trigger requires dtstart, rrule, or both".to_string());
|
||||
}
|
||||
let dtstart = match (dtstart, rrule.as_ref()) {
|
||||
(Some(dtstart), _) => {
|
||||
validate_dtstart(&dtstart, timezone)?;
|
||||
Some(dtstart)
|
||||
}
|
||||
(None, Some(_)) => Some(format_local_dtstart(now, timezone)),
|
||||
(None, None) => None,
|
||||
};
|
||||
let normalized = TimerTrigger::Schedule { dtstart, rrule };
|
||||
if matches!(normalized, TimerTrigger::Schedule { rrule: Some(_), .. }) {
|
||||
parse_rrule_set(&normalized, timezone)?;
|
||||
}
|
||||
Ok(normalized)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn timing(
|
||||
trigger: TimerTrigger,
|
||||
pending_run: bool,
|
||||
next_run_at: Option<i64>,
|
||||
now: DateTime<Utc>,
|
||||
) -> TriggerTiming {
|
||||
let keep_timer_for_pending = trigger.is_recurring();
|
||||
let timer_delay = next_run_at
|
||||
.filter(|_| !pending_run || keep_timer_for_pending)
|
||||
.and_then(|next| timer_delay(next, now));
|
||||
TriggerTiming {
|
||||
trigger,
|
||||
pending_run,
|
||||
next_run_at,
|
||||
timer_delay,
|
||||
}
|
||||
}
|
||||
|
||||
fn normalize_optional_string(value: Option<String>) -> Option<String> {
|
||||
value
|
||||
.map(|value| value.trim().to_string())
|
||||
.filter(|value| !value.is_empty())
|
||||
}
|
||||
|
||||
fn checked_add_seconds(start: DateTime<Utc>, seconds: u64) -> Result<DateTime<Utc>, String> {
|
||||
let seconds = i64::try_from(seconds)
|
||||
.map_err(|_| "delay seconds value is too large to schedule".to_string())?;
|
||||
start
|
||||
.checked_add_signed(ChronoDuration::seconds(seconds))
|
||||
.ok_or_else(|| "delay next run time is out of range".to_string())
|
||||
}
|
||||
|
||||
fn next_delay_run_at(created_at: i64, seconds: u64) -> Option<i64> {
|
||||
let seconds = i64::try_from(seconds).ok()?;
|
||||
created_at.checked_add(seconds)
|
||||
}
|
||||
|
||||
fn next_delay_recurring_run_at(
|
||||
created_at: DateTime<Utc>,
|
||||
seconds: u64,
|
||||
now: DateTime<Utc>,
|
||||
) -> Result<Option<i64>, String> {
|
||||
next_delay_recurring_run_at_from_timestamp(created_at.timestamp(), seconds, now)
|
||||
}
|
||||
|
||||
fn next_delay_recurring_run_at_from_timestamp(
|
||||
created_at: i64,
|
||||
seconds: u64,
|
||||
now: DateTime<Utc>,
|
||||
) -> Result<Option<i64>, String> {
|
||||
let seconds = i64::try_from(seconds)
|
||||
.map_err(|_| "delay seconds value is too large to schedule".to_string())?;
|
||||
if seconds <= 0 {
|
||||
return Err("delay.repeat requires seconds to be greater than 0".to_string());
|
||||
}
|
||||
let elapsed = now.timestamp().saturating_sub(created_at);
|
||||
let completed_intervals = elapsed.div_euclid(seconds) + 1;
|
||||
created_at
|
||||
.checked_add(
|
||||
completed_intervals
|
||||
.checked_mul(seconds)
|
||||
.ok_or_else(|| "delay next run time is out of range".to_string())?,
|
||||
)
|
||||
.map(Some)
|
||||
.ok_or_else(|| "delay next run time is out of range".to_string())
|
||||
}
|
||||
|
||||
fn timer_delay(next_run_at: i64, now: DateTime<Utc>) -> Option<Duration> {
|
||||
if next_run_at <= now.timestamp() {
|
||||
return Some(Duration::ZERO);
|
||||
}
|
||||
u64::try_from(next_run_at - now.timestamp())
|
||||
.ok()
|
||||
.map(Duration::from_secs)
|
||||
}
|
||||
|
||||
fn schedule_dtstart_utc(trigger: &TimerTrigger, timezone: Tz) -> Result<DateTime<Utc>, String> {
|
||||
let TimerTrigger::Schedule {
|
||||
dtstart: Some(dtstart),
|
||||
..
|
||||
} = trigger
|
||||
else {
|
||||
return Err("schedule trigger requires dtstart".to_string());
|
||||
};
|
||||
local_dtstart_to_utc(dtstart, timezone)
|
||||
}
|
||||
|
||||
fn next_schedule_occurrence_after(
|
||||
trigger: &TimerTrigger,
|
||||
after: DateTime<Utc>,
|
||||
timezone: Tz,
|
||||
) -> Result<Option<i64>, String> {
|
||||
let set = parse_rrule_set(trigger, timezone)?;
|
||||
let after = after
|
||||
.checked_add_signed(ChronoDuration::seconds(1))
|
||||
.unwrap_or(after);
|
||||
let result = set.after(after.with_timezone(&timezone)).all(1);
|
||||
Ok(result
|
||||
.dates
|
||||
.into_iter()
|
||||
.next()
|
||||
.map(|next| next.with_timezone(&Utc).timestamp()))
|
||||
}
|
||||
|
||||
fn next_schedule_occurrence_at_or_after(
|
||||
trigger: &TimerTrigger,
|
||||
at: DateTime<Utc>,
|
||||
timezone: Tz,
|
||||
) -> Result<Option<i64>, String> {
|
||||
let after = at
|
||||
.checked_sub_signed(ChronoDuration::seconds(1))
|
||||
.unwrap_or(at);
|
||||
next_schedule_occurrence_after(trigger, after, timezone)
|
||||
}
|
||||
|
||||
fn parse_rrule_set(trigger: &TimerTrigger, timezone: Tz) -> Result<RRuleSet, String> {
|
||||
let TimerTrigger::Schedule {
|
||||
dtstart: Some(dtstart),
|
||||
rrule: Some(rrule),
|
||||
} = trigger
|
||||
else {
|
||||
return Err("schedule trigger requires dtstart and rrule".to_string());
|
||||
};
|
||||
let naive = parse_dtstart(dtstart)?;
|
||||
let raw_rrule = rrule
|
||||
.strip_prefix("RRULE:")
|
||||
.or_else(|| rrule.strip_prefix("rrule:"))
|
||||
.unwrap_or(rrule);
|
||||
let formatted_dtstart = naive.format(RRULE_DATE_TIME_FORMAT);
|
||||
let dtstart = if timezone.is_local() {
|
||||
format!("DTSTART:{formatted_dtstart}")
|
||||
} else {
|
||||
let timezone_name = timezone.name();
|
||||
format!("DTSTART;TZID={timezone_name}:{formatted_dtstart}")
|
||||
};
|
||||
let rrule_set = format!("{dtstart}\nRRULE:{raw_rrule}");
|
||||
rrule_set
|
||||
.parse::<RRuleSet>()
|
||||
.map_err(|err| format!("invalid schedule rrule `{rrule}`: {err}"))
|
||||
}
|
||||
|
||||
fn validate_dtstart(dtstart: &str, timezone: Tz) -> Result<(), String> {
|
||||
local_dtstart_to_utc(dtstart, timezone).map(|_| ())
|
||||
}
|
||||
|
||||
fn local_dtstart_to_utc(dtstart: &str, timezone: Tz) -> Result<DateTime<Utc>, String> {
|
||||
let naive = parse_dtstart(dtstart)?;
|
||||
match timezone.from_local_datetime(&naive) {
|
||||
LocalResult::Single(dt) => Ok(dt.with_timezone(&Utc)),
|
||||
LocalResult::Ambiguous(earliest, _) => Ok(earliest.with_timezone(&Utc)),
|
||||
LocalResult::None => Err(format!(
|
||||
"schedule dtstart `{dtstart}` does not exist in local timezone {timezone}"
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_dtstart(dtstart: &str) -> Result<NaiveDateTime, String> {
|
||||
NaiveDateTime::parse_from_str(dtstart, LOCAL_DATE_TIME_FORMAT)
|
||||
.map_err(|_| format!("schedule dtstart `{dtstart}` must use format YYYY-MM-DDTHH:MM:SS"))
|
||||
}
|
||||
|
||||
fn parse_time_only(input: &str) -> Option<NaiveTime> {
|
||||
for format in TIME_ONLY_FORMATS {
|
||||
if let Ok(time) = NaiveTime::parse_from_str(input, format) {
|
||||
return Some(time);
|
||||
}
|
||||
}
|
||||
|
||||
let compact = input.to_ascii_lowercase().replace(' ', "");
|
||||
let (time, is_pm) = if let Some(time) = compact.strip_suffix("am") {
|
||||
(time, false)
|
||||
} else if let Some(time) = compact.strip_suffix("pm") {
|
||||
(time, true)
|
||||
} else {
|
||||
return None;
|
||||
};
|
||||
let parts = time.split(':').collect::<Vec<_>>();
|
||||
if parts.is_empty() || parts.len() > 3 {
|
||||
return None;
|
||||
}
|
||||
let hour = parts[0].parse::<u32>().ok()?;
|
||||
if !(1..=12).contains(&hour) {
|
||||
return None;
|
||||
}
|
||||
let minute = parts
|
||||
.get(1)
|
||||
.map_or(Some(0), |value| value.parse::<u32>().ok())?;
|
||||
let second = parts
|
||||
.get(2)
|
||||
.map_or(Some(0), |value| value.parse::<u32>().ok())?;
|
||||
let hour = if is_pm { (hour % 12) + 12 } else { hour % 12 };
|
||||
NaiveTime::from_hms_opt(hour, minute, second)
|
||||
}
|
||||
|
||||
fn format_local_dtstart(now: DateTime<Utc>, timezone: Tz) -> String {
|
||||
now.with_timezone(&timezone)
|
||||
.naive_local()
|
||||
.format(LOCAL_DATE_TIME_FORMAT)
|
||||
.to_string()
|
||||
}
|
||||
|
||||
fn local_timezone() -> Tz {
|
||||
timezone_from_name(iana_time_zone::get_timezone().ok())
|
||||
}
|
||||
|
||||
fn timezone_from_name(timezone: Option<String>) -> Tz {
|
||||
timezone
|
||||
.as_deref()
|
||||
.and_then(|timezone| timezone.parse::<chrono_tz::Tz>().ok())
|
||||
.map(Tz::Tz)
|
||||
.unwrap_or(Tz::LOCAL)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for trigger timing: delay triggers, rrule schedules,
    //! dtstart normalization, and timezone fallback behavior.
    use super::*;
    use chrono::TimeZone;
    use pretty_assertions::assert_eq;

    // Unix-second fixtures used across the tests below.
    const TS_1: i64 = 1;
    const TS_100: i64 = 100;
    const TS_135: i64 = 135;
    const TS_1_700_000_000: i64 = 1_700_000_000;

    // Build a UTC instant from a unix timestamp.
    fn utc(timestamp: i64) -> DateTime<Utc> {
        Utc.timestamp_opt(timestamp, 0)
            .single()
            .expect("valid timestamp")
    }

    // Build a UTC instant from a `YYYY-MM-DDTHH:MM:SS` string.
    fn utc_datetime(datetime: &str) -> DateTime<Utc> {
        Utc.from_utc_datetime(
            &NaiveDateTime::parse_from_str(datetime, LOCAL_DATE_TIME_FORMAT)
                .expect("valid datetime"),
        )
    }

    // A zero-second one-shot delay that is already due fires immediately
    // and schedules no follow-up.
    #[test]
    fn delay_one_shot_becomes_pending_when_due() {
        let timing = timing_for_new_trigger_with_timezone(
            TimerTrigger::Delay {
                seconds: 0,
                repeat: None,
            },
            utc(TS_100),
            utc(TS_100),
            Tz::UTC,
        )
        .expect("trigger should be valid");
        assert_eq!(
            timing,
            TriggerTiming {
                trigger: TimerTrigger::Delay {
                    seconds: 0,
                    repeat: None,
                },
                pending_run: true,
                next_run_at: Some(100),
                timer_delay: None,
            }
        );
    }

    // A repeating zero-second delay is the "idle recurring" shape: pending
    // immediately, but with no concrete next_run_at or timer task.
    #[test]
    fn delay_repeat_zero_is_idle_recurring() {
        let timing = timing_for_new_trigger_with_timezone(
            TimerTrigger::Delay {
                seconds: 0,
                repeat: Some(true),
            },
            utc(TS_100),
            utc(TS_100),
            Tz::UTC,
        )
        .expect("zero repeat should be a valid idle-recurring trigger");
        assert_eq!(
            timing,
            TriggerTiming {
                trigger: TimerTrigger::Delay {
                    seconds: 0,
                    repeat: Some(true),
                },
                pending_run: true,
                next_run_at: None,
                timer_delay: None,
            }
        );
        assert_eq!(
            next_run_after_due_with_timezone(
                &TimerTrigger::Delay {
                    seconds: 0,
                    repeat: Some(true),
                },
                /*created_at*/ 100,
                utc(TS_100),
                Tz::UTC,
            )
            .expect("zero repeat should remain timer-free"),
            None
        );
    }

    // Missed repetitions collapse into one pending run; the next run is
    // re-anchored to the current time plus the period.
    #[test]
    fn delay_repeat_coalesces_overdue_runs() {
        let timing = timing_for_restored_trigger_with_timezone(
            TimerTrigger::Delay {
                seconds: 10,
                repeat: Some(true),
            },
            /*created_at*/ 100,
            /*persisted_pending_run*/ false,
            Some(110),
            utc(TS_135),
            Tz::UTC,
        )
        .expect("trigger should be valid");
        assert_eq!(timing.pending_run, true);
        assert_eq!(timing.next_run_at, Some(135 + 5));
    }

    // An rrule without a dtstart gets dtstart filled in from "now".
    #[test]
    fn schedule_rrule_only_resolves_dtstart_to_now() {
        let timing = timing_for_new_trigger_with_timezone(
            TimerTrigger::Schedule {
                dtstart: None,
                rrule: Some("FREQ=HOURLY;BYMINUTE=0;BYSECOND=0".to_string()),
            },
            utc(TS_1_700_000_000),
            utc(TS_1_700_000_000),
            Tz::UTC,
        )
        .expect("trigger should be valid");
        assert_eq!(
            timing.trigger,
            TimerTrigger::Schedule {
                dtstart: Some("2023-11-14T22:13:20".to_string()),
                rrule: Some("FREQ=HOURLY;BYMINUTE=0;BYSECOND=0".to_string()),
            }
        );
        assert_eq!(timing.pending_run, false);
        assert_eq!(timing.next_run_at, Some(1_700_002_800));
    }

    // A dtstart with no rrule behaves as a one-shot at that instant.
    #[test]
    fn schedule_dtstart_only_is_one_shot() {
        let timing = timing_for_new_trigger_with_timezone(
            TimerTrigger::Schedule {
                dtstart: Some("2024-01-01T09:00:00".to_string()),
                rrule: None,
            },
            utc(TS_1),
            utc(TS_1),
            Tz::UTC,
        )
        .expect("trigger should be valid");
        assert_eq!(timing.pending_run, false);
        assert_eq!(timing.next_run_at, Some(1_704_099_600));
    }

    // A bare time still in the future today resolves to today's date.
    #[test]
    fn normalize_schedule_dtstart_accepts_time_later_today() {
        assert_eq!(
            normalize_schedule_dtstart_input_with_timezone(
                "15:30",
                utc_datetime("2026-04-10T14:00:00"),
                Tz::UTC,
            )
            .expect("time should normalize"),
            "2026-04-10T15:30:00"
        );
    }

    // "Later today" is judged in the schedule's timezone, not UTC.
    #[test]
    fn normalize_schedule_dtstart_accepts_local_time_later_today() {
        assert_eq!(
            normalize_schedule_dtstart_input_with_timezone(
                "8:59",
                utc_datetime("2026-04-10T15:57:00"),
                Tz::America__Los_Angeles,
            )
            .expect("time should normalize"),
            "2026-04-10T08:59:00"
        );
    }

    // A bare time already past today rolls over to tomorrow.
    #[test]
    fn normalize_schedule_dtstart_rolls_past_time_to_tomorrow() {
        assert_eq!(
            normalize_schedule_dtstart_input_with_timezone(
                "15:30",
                utc_datetime("2026-04-10T16:00:00"),
                Tz::UTC,
            )
            .expect("time should normalize"),
            "2026-04-11T15:30:00"
        );
    }

    // 12-hour am/pm input is accepted and converted to 24-hour form.
    #[test]
    fn normalize_schedule_dtstart_accepts_ampm_time() {
        assert_eq!(
            normalize_schedule_dtstart_input_with_timezone(
                "3:05 pm",
                utc_datetime("2026-04-10T14:00:00"),
                Tz::UTC,
            )
            .expect("time should normalize"),
            "2026-04-10T15:05:00"
        );
    }

    // An already-complete datetime passes through unchanged.
    #[test]
    fn normalize_schedule_dtstart_preserves_full_datetime() {
        assert_eq!(
            normalize_schedule_dtstart_input_with_timezone(
                "2026-04-10T15:30:45",
                utc_datetime("2026-04-10T14:00:00"),
                Tz::UTC,
            )
            .expect("datetime should normalize"),
            "2026-04-10T15:30:45"
        );
    }

    // Unknown IANA names fall back to the system-local zone.
    #[test]
    fn local_timezone_fallback_uses_system_local_timezone() {
        assert!(timezone_from_name(Some("Not/A_Real_Zone".to_string())).is_local());
    }

    // Recurring schedules are valid even when the timezone is Tz::LOCAL
    // (no IANA name available for the DTSTART TZID tag).
    #[test]
    fn recurring_schedule_accepts_system_local_timezone() {
        let timing = timing_for_new_trigger_with_timezone(
            TimerTrigger::Schedule {
                dtstart: Some("2026-04-10T09:00:00".to_string()),
                rrule: Some("FREQ=DAILY;BYHOUR=9;BYMINUTE=0;BYSECOND=0".to_string()),
            },
            utc_datetime("2026-04-10T08:00:00"),
            utc_datetime("2026-04-10T08:00:00"),
            Tz::LOCAL,
        )
        .expect("local timezone should be valid for recurring schedules");
        assert!(!timing.pending_run);
        assert!(timing.next_run_at.is_some());
    }

    // A recurring schedule created with a past dtstart does not fire
    // retroactively; it waits for the next future occurrence.
    #[test]
    fn schedule_recurring_historical_dtstart_waits_for_next_future_occurrence() {
        let timing = timing_for_new_trigger_with_timezone(
            TimerTrigger::Schedule {
                dtstart: Some("2024-01-01T09:00:00".to_string()),
                rrule: Some("FREQ=DAILY;BYHOUR=9;BYMINUTE=0;BYSECOND=0".to_string()),
            },
            utc_datetime("2024-01-02T08:00:00"),
            utc_datetime("2024-01-02T08:00:00"),
            Tz::UTC,
        )
        .expect("trigger should be valid");
        assert_eq!(timing.pending_run, false);
        assert_eq!(
            timing.next_run_at,
            Some(utc_datetime("2024-01-02T09:00:00").timestamp())
        );
    }

    // A recurring occurrence landing exactly on "now" is pending, and the
    // following occurrence becomes the next run.
    #[test]
    fn schedule_recurring_due_now_becomes_pending() {
        let timing = timing_for_new_trigger_with_timezone(
            TimerTrigger::Schedule {
                dtstart: Some("2024-01-01T09:00:00".to_string()),
                rrule: Some("FREQ=DAILY;BYHOUR=9;BYMINUTE=0;BYSECOND=0".to_string()),
            },
            utc_datetime("2024-01-02T09:00:00"),
            utc_datetime("2024-01-02T09:00:00"),
            Tz::UTC,
        )
        .expect("trigger should be valid");
        assert_eq!(timing.pending_run, true);
        assert_eq!(
            timing.next_run_at,
            Some(utc_datetime("2024-01-03T09:00:00").timestamp())
        );
    }

    // An exhausted (COUNT=1, already consumed) restored schedule stays
    // inactive rather than resurrecting.
    #[test]
    fn restored_recurring_schedule_without_next_run_remains_inactive() {
        let timing = timing_for_restored_trigger_with_timezone(
            TimerTrigger::Schedule {
                dtstart: Some("2024-01-01T09:00:00".to_string()),
                rrule: Some("FREQ=DAILY;COUNT=1".to_string()),
            },
            utc_datetime("2024-01-01T08:00:00").timestamp(),
            /*persisted_pending_run*/ false,
            /*persisted_next_run_at*/ None,
            utc_datetime("2024-01-02T09:00:00"),
            Tz::UTC,
        )
        .expect("trigger should be valid");
        assert_eq!(timing.pending_run, false);
        assert_eq!(timing.next_run_at, None);
    }

    // A persisted pending flag survives restore even with no next run,
    // so an undelivered final occurrence is not lost.
    #[test]
    fn restored_pending_recurring_schedule_without_next_run_stays_pending() {
        let timing = timing_for_restored_trigger_with_timezone(
            TimerTrigger::Schedule {
                dtstart: Some("2024-01-01T09:00:00".to_string()),
                rrule: Some("FREQ=DAILY;COUNT=1".to_string()),
            },
            utc_datetime("2024-01-01T08:00:00").timestamp(),
            /*persisted_pending_run*/ true,
            /*persisted_next_run_at*/ None,
            utc_datetime("2024-01-02T09:00:00"),
            Tz::UTC,
        )
        .expect("trigger should be valid");
        assert_eq!(timing.pending_run, true);
        assert_eq!(timing.next_run_at, None);
    }

    // A schedule with neither dtstart nor rrule is rejected.
    #[test]
    fn schedule_rejects_neither_dtstart_nor_rrule() {
        assert_eq!(
            timing_for_new_trigger_with_timezone(
                TimerTrigger::Schedule {
                    dtstart: None,
                    rrule: None,
                },
                utc(TS_1),
                utc(TS_1),
                Tz::UTC,
            )
            .expect_err("empty schedule should be invalid"),
            "schedule trigger requires dtstart, rrule, or both"
        );
    }

    // Space-separated datetimes are rejected with a format hint.
    #[test]
    fn schedule_rejects_invalid_dtstart() {
        assert_eq!(
            timing_for_new_trigger_with_timezone(
                TimerTrigger::Schedule {
                    dtstart: Some("2024-01-01 09:00:00".to_string()),
                    rrule: None,
                },
                utc(TS_1),
                utc(TS_1),
                Tz::UTC,
            )
            .expect_err("bad dtstart should be invalid"),
            "schedule dtstart `2024-01-01 09:00:00` must use format YYYY-MM-DDTHH:MM:SS"
        );
    }

    // Unparseable rrules surface the parser's error message.
    #[test]
    fn schedule_rejects_invalid_rrule() {
        assert!(
            timing_for_new_trigger_with_timezone(
                TimerTrigger::Schedule {
                    dtstart: Some("2024-01-01T09:00:00".to_string()),
                    rrule: Some("FREQ=NEVER".to_string()),
                },
                utc(TS_1),
                utc(TS_1),
                Tz::UTC,
            )
            .expect_err("bad rrule should be invalid")
            .contains("invalid schedule rrule")
        );
    }
}
|
||||
971
codex-rs/core/src/timers.rs
Normal file
971
codex-rs/core/src/timers.rs
Normal file
@@ -0,0 +1,971 @@
|
||||
//! Persistent thread-local timer scheduling for follow-on turns and same-turn steer delivery.
|
||||
//!
|
||||
//! This module owns the in-memory timer registry, trigger evaluation, the user
|
||||
//! message injected when a timer fires, and the persistent state shape used to
|
||||
//! restore timers after a harness restart.
|
||||
|
||||
use crate::injected_message::InjectedMessage;
|
||||
use crate::injected_message::MessagePayload;
|
||||
use crate::timer_trigger::TimerTrigger;
|
||||
use crate::timer_trigger::TriggerTiming;
|
||||
use crate::timer_trigger::next_run_after_due;
|
||||
use crate::timer_trigger::normalize_schedule_dtstart_input;
|
||||
use crate::timer_trigger::timing_for_new_trigger;
|
||||
use crate::timer_trigger::timing_for_restored_trigger;
|
||||
use chrono::Utc;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
/// Background-event prefix emitted when a timer's definition or state changes.
pub const TIMER_UPDATED_BACKGROUND_EVENT_PREFIX: &str = "timer_updated:";
/// Background-event prefix emitted when a timer fires.
pub const TIMER_FIRED_BACKGROUND_EVENT_PREFIX: &str = "timer_fired:";
/// Hard cap on concurrently registered timers for a single thread.
pub const MAX_ACTIVE_TIMERS_PER_THREAD: usize = 256;
// Guidance appended to the injected message for recurring timers;
// {{CURRENT_TIMER_ID}} is substituted with the firing timer's id.
const RECURRING_TIMER_INSTRUCTIONS: &str = "This timer should keep running on its schedule after this invocation.\nDo not call delete_timer just because you completed this invocation.\nCall delete_timer with {\"id\":\"{{CURRENT_TIMER_ID}}\"} only if the user's timer message included an explicit stop condition, such as \"until\", \"stop when\", or \"while\", and that condition is now satisfied.\nDo not expose scheduler internals unless they matter to the user.";
// Guidance appended for one-shot timers, which are already removed from the
// schedule by the time the message is delivered.
const ONE_SHOT_TIMER_INSTRUCTIONS: &str = "This one-shot timer has already been removed from the schedule, so you do not need to call delete_timer.\nDo not expose scheduler internals unless they matter to the user.";
// Maximum characters of timer content echoed in the "Timer fired:" preview.
const TIMER_CONTENT_PREVIEW_MAX_CHARS: usize = 160;
|
||||
|
||||
pub use crate::timer_trigger::TimerTrigger as ThreadTimerTrigger;
|
||||
|
||||
/// Normalize a user-supplied dtstart string (bare time or full datetime)
/// into the canonical `YYYY-MM-DDTHH:MM:SS` form.
///
/// Thin public wrapper over the trigger module's implementation.
pub fn normalize_thread_timer_dtstart_input(input: &str) -> Result<String, String> {
    normalize_schedule_dtstart_input(input)
}
|
||||
|
||||
/// When a fired timer's message is delivered to the model.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum TimerDelivery {
    /// Wait for the current turn to finish, then start a follow-on turn.
    AfterTurn,
    /// Inject into the in-flight turn as a steer message.
    SteerCurrentTurn,
}

impl TimerDelivery {
    /// Kebab-case string form, matching the serde representation above.
    pub fn as_str(self) -> &'static str {
        match self {
            Self::AfterTurn => "after-turn",
            Self::SteerCurrentTurn => "steer-current-turn",
        }
    }
}
|
||||
|
||||
/// A persistent timer attached to a thread.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ThreadTimer {
    // Unique timer id within the thread.
    pub id: String,
    // When the timer fires (delay or rrule schedule).
    pub trigger: TimerTrigger,
    // User-authored message content delivered when the timer fires.
    pub content: String,
    // Optional extra instructions appended to the injected message.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub instructions: Option<String>,
    // Free-form key/value metadata carried alongside the timer.
    #[serde(default)]
    pub meta: BTreeMap<String, String>,
    // How the fired message is delivered (after turn vs. steer).
    pub delivery: TimerDelivery,
    // Creation time, unix seconds.
    pub created_at: i64,
    // Next scheduled fire time (unix seconds), when one exists.
    pub next_run_at: Option<i64>,
    // Most recent fire time (unix seconds), if the timer has ever fired.
    pub last_run_at: Option<i64>,
}
|
||||
|
||||
/// Snapshot of a fired timer used to build its injected message.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct TimerInvocationContext {
    // Id of the timer that fired.
    pub(crate) current_timer_id: String,
    // User-authored timer content.
    pub(crate) content: String,
    // Optional user-supplied instructions.
    pub(crate) instructions: Option<String>,
    // Metadata carried with the timer.
    pub(crate) meta: BTreeMap<String, String>,
    // True when the timer stays registered after this invocation.
    pub(crate) recurring: bool,
    // Delivery mode actually used for this invocation.
    pub(crate) delivery: TimerDelivery,
    // When the invocation was queued, unix seconds.
    pub(crate) queued_at: i64,
}
|
||||
|
||||
/// Result of claiming a due timer for delivery.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ClaimedTimer {
    // The timer as it stood at claim time (with last_run_at advanced
    // for recurring timers).
    pub(crate) timer: ThreadTimer,
    // Context used to build the injected message.
    pub(crate) context: TimerInvocationContext,
    // True when the claim removed the timer from the registry.
    pub(crate) deleted_one_shot_timer: bool,
    // last_run_at before this claim, for rollback/auditing by the caller.
    pub(crate) previous_last_run_at: Option<i64>,
}
|
||||
|
||||
/// Controls which recurring timers are eligible when claiming the next
/// pending timer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum RecurringTimerPolicy {
    /// Only recurring timers that have never fired before.
    IncludeOnlyNeverRun,
    /// Any pending recurring timer.
    IncludeAll,
}
|
||||
|
||||
/// Arguments for registering a brand-new timer.
#[derive(Debug)]
pub(crate) struct CreateTimer {
    // Caller-chosen unique timer id.
    pub(crate) id: String,
    // Trigger definition (delay or schedule).
    pub(crate) trigger: TimerTrigger,
    // Message payload delivered when the timer fires.
    pub(crate) payload: MessagePayload,
    // Delivery mode for the fired message.
    pub(crate) delivery: TimerDelivery,
    // Creation instant, used to evaluate the trigger.
    pub(crate) now: chrono::DateTime<Utc>,
}
|
||||
|
||||
/// Inputs for persisting a new thread timer to storage.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ThreadTimerStorageCreateParams {
    // Owning thread id.
    pub thread_id: String,
    // Origin of the timer (who/what created it).
    pub source: String,
    // Client identifier associated with the request.
    pub client_id: String,
    // Trigger definition (delay or schedule).
    pub trigger: TimerTrigger,
    // Message payload delivered when the timer fires.
    pub payload: MessagePayload,
    // Delivery mode for the fired message.
    pub delivery: TimerDelivery,
}
|
||||
|
||||
/// In-memory registry of a thread's timers, keyed by timer id.
#[derive(Debug, Default)]
pub(crate) struct TimersState {
    timers: HashMap<String, TimerRuntime>,
}
|
||||
|
||||
/// A registered timer plus its runtime-only scheduling state.
#[derive(Debug)]
pub(crate) struct TimerRuntime {
    pub(crate) timer: ThreadTimer,
    // True when the timer has fired and its message awaits delivery.
    pending_run: bool,
    // Cancels the background sleep task for the next occurrence, if any.
    pub(crate) timer_cancel: Option<CancellationToken>,
}
|
||||
|
||||
/// Serializable timer snapshot: the timer plus its pending-delivery flag.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct PersistedTimer {
    pub(crate) timer: ThreadTimer,
    // Whether a fired-but-undelivered run was outstanding at save time.
    pub(crate) pending_run: bool,
}
|
||||
|
||||
/// How long the background task should sleep before firing a timer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct TimerTaskSpec {
    pub(crate) delay: std::time::Duration,
}
|
||||
|
||||
/// A timer task that must be (re)spawned after restoring persisted timers.
#[derive(Debug)]
pub(crate) struct RestoredTimerTask {
    // Id of the restored timer.
    pub(crate) id: String,
    // Sleep spec for the timer's next occurrence.
    pub(crate) timer_spec: TimerTaskSpec,
    // Token the spawned task should watch for cancellation.
    pub(crate) timer_cancel: CancellationToken,
}
|
||||
|
||||
impl TimersState {
|
||||
/// All registered timers, ordered by creation time then id so the output
/// is deterministic regardless of hash-map iteration order.
pub(crate) fn list_timers(&self) -> Vec<ThreadTimer> {
    let mut timers: Vec<ThreadTimer> = self
        .timers
        .values()
        .map(|runtime| runtime.timer.clone())
        .collect();
    timers.sort_by(|a, b| (a.created_at, &a.id).cmp(&(b.created_at, &b.id)));
    timers
}
|
||||
|
||||
pub(crate) fn persisted_timers(&self) -> Vec<PersistedTimer> {
|
||||
let mut timers = self
|
||||
.timers
|
||||
.values()
|
||||
.map(|runtime| PersistedTimer {
|
||||
timer: runtime.timer.clone(),
|
||||
pending_run: runtime.pending_run,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
timers.sort_by(|left, right| {
|
||||
left.timer
|
||||
.created_at
|
||||
.cmp(&right.timer.created_at)
|
||||
.then_with(|| left.timer.id.cmp(&right.timer.id))
|
||||
});
|
||||
timers
|
||||
}
|
||||
|
||||
pub(crate) fn persisted_timer(&self, id: &str) -> Option<PersistedTimer> {
|
||||
self.timers.get(id).map(|runtime| PersistedTimer {
|
||||
timer: runtime.timer.clone(),
|
||||
pending_run: runtime.pending_run,
|
||||
})
|
||||
}
|
||||
|
||||
/// Replace the registry with a persisted snapshot if it differs.
///
/// Returns `(changed, tasks)`: `changed` is false (with no tasks) when the
/// snapshot already matches current state; otherwise all existing timer
/// tasks are cancelled, the registry is rebuilt from the snapshot, and the
/// timer tasks to spawn are returned. Invalid persisted timers are skipped
/// with a warning rather than failing the whole restore.
pub(crate) fn replace_timers_if_changed(
    &mut self,
    persisted: Vec<PersistedTimer>,
    now: chrono::DateTime<Utc>,
) -> (bool, Vec<RestoredTimerTask>) {
    // Cheap no-op path: the deterministic snapshot comparison avoids
    // needlessly cancelling and respawning timer tasks.
    if self.persisted_timers() == persisted {
        return (false, Vec::new());
    }

    // Cancel every outstanding task before dropping the old registry so no
    // orphaned sleep task fires against a removed timer.
    for runtime in self.timers.values() {
        Self::cancel_runtime(runtime);
    }
    self.timers.clear();

    let mut restored_tasks = Vec::new();
    for persisted_timer in persisted {
        let timer_cancel = CancellationToken::new();
        let timer_id = persisted_timer.timer.id.clone();
        match self.restore_timer(persisted_timer, now, Some(timer_cancel.clone())) {
            Ok(Some(timer_spec)) => {
                restored_tasks.push(RestoredTimerTask {
                    id: timer_id,
                    timer_spec,
                    timer_cancel,
                });
            }
            // Restored but needs no background task (e.g. nothing scheduled).
            Ok(None) => {}
            Err(err) => {
                tracing::warn!("skipping invalid persisted timer {timer_id}: {err}");
            }
        }
    }
    (true, restored_tasks)
}
|
||||
|
||||
/// Register a brand-new timer.
///
/// Evaluates the trigger at `now` to decide whether it is immediately
/// pending and when it next runs, then inserts the runtime entry. Returns
/// the stored timer plus an optional sleep spec for the background task.
///
/// # Errors
/// Fails when the per-thread timer cap is reached or the trigger is invalid.
pub(crate) fn create_timer(
    &mut self,
    create_timer: CreateTimer,
    timer_cancel: Option<CancellationToken>,
) -> Result<(ThreadTimer, Option<TimerTaskSpec>), String> {
    if self.timers.len() >= MAX_ACTIVE_TIMERS_PER_THREAD {
        return Err(format!(
            "too many active timers; each thread supports at most {MAX_ACTIVE_TIMERS_PER_THREAD} timers"
        ));
    }
    let CreateTimer {
        id,
        trigger,
        payload,
        delivery,
        now,
    } = create_timer;
    // The trigger may be normalized here (e.g. a missing dtstart filled in),
    // so the returned trigger — not the input — is what gets stored.
    let TriggerTiming {
        trigger,
        pending_run,
        next_run_at,
        timer_delay,
    } = timing_for_new_trigger(trigger, now, now)?;
    let timer = ThreadTimer {
        id: id.clone(),
        trigger,
        content: payload.content,
        instructions: payload.instructions,
        meta: payload.meta,
        delivery,
        created_at: now.timestamp(),
        next_run_at,
        last_run_at: None,
    };
    self.timers.insert(
        id,
        TimerRuntime {
            timer: timer.clone(),
            pending_run,
            timer_cancel,
        },
    );
    Ok((timer, timer_delay.map(|delay| TimerTaskSpec { delay })))
}
|
||||
|
||||
/// Re-register a timer from persisted state.
///
/// Re-evaluates the trigger at `now` (coalescing runs missed while the
/// harness was down and preserving the persisted pending flag), updates the
/// timer's trigger/next-run fields, and inserts the runtime entry. Returns
/// an optional sleep spec for the timer's background task.
///
/// # Errors
/// Fails when the per-thread timer cap is reached or the persisted trigger
/// no longer evaluates cleanly.
pub(crate) fn restore_timer(
    &mut self,
    persisted: PersistedTimer,
    now: chrono::DateTime<Utc>,
    timer_cancel: Option<CancellationToken>,
) -> Result<Option<TimerTaskSpec>, String> {
    if self.timers.len() >= MAX_ACTIVE_TIMERS_PER_THREAD {
        return Err(format!(
            "too many persisted timers; each thread supports at most {MAX_ACTIVE_TIMERS_PER_THREAD} timers"
        ));
    }
    let PersistedTimer {
        timer,
        pending_run: persisted_pending_run,
    } = persisted;
    let TriggerTiming {
        trigger,
        pending_run,
        next_run_at,
        timer_delay,
    } = timing_for_restored_trigger(
        timer.trigger,
        timer.created_at,
        persisted_pending_run,
        timer.next_run_at,
        now,
    )?;
    // Keep all persisted fields except the re-evaluated trigger and
    // next_run_at (functional update over the partially-moved `timer`).
    let timer = ThreadTimer {
        trigger,
        next_run_at,
        ..timer
    };
    let id = timer.id.clone();
    self.timers.insert(
        id,
        TimerRuntime {
            timer,
            pending_run,
            timer_cancel,
        },
    );
    Ok(timer_delay.map(|delay| TimerTaskSpec { delay }))
}
|
||||
|
||||
/// Remove a timer from the registry, returning its runtime. The caller is
/// responsible for cancelling any outstanding timer task on the runtime.
pub(crate) fn remove_timer(&mut self, id: &str) -> Option<TimerRuntime> {
    self.timers.remove(id)
}
|
||||
|
||||
pub(crate) fn cancel_runtime(runtime: &TimerRuntime) {
|
||||
if let Some(cancel) = runtime.timer_cancel.as_ref() {
|
||||
cancel.cancel();
|
||||
}
|
||||
}
|
||||
|
||||
/// Mark timer `id` as due: set its pending flag and advance `next_run_at`
/// past `now` for recurring triggers.
///
/// Returns true when either the pending flag or the next-run time actually
/// changed (callers use this to decide whether to persist/notify). Unknown
/// ids return false. A failure to advance the trigger is logged but does
/// not undo the pending flag — the due run must still be delivered.
pub(crate) fn mark_timer_due(&mut self, id: &str, now: chrono::DateTime<Utc>) -> bool {
    let Some(runtime) = self.timers.get_mut(id) else {
        return false;
    };
    let mut changed = !runtime.pending_run;
    runtime.pending_run = true;
    match next_run_after_due(&runtime.timer.trigger, runtime.timer.created_at, now) {
        Ok(next_run_at) if runtime.timer.next_run_at != next_run_at => {
            runtime.timer.next_run_at = next_run_at;
            changed = true;
        }
        // Next run unchanged — nothing further to record.
        Ok(_) => {}
        Err(err) => {
            tracing::warn!(
                "failed to advance timer {} trigger: {err}",
                runtime.timer.id
            );
        }
    }
    changed
}
|
||||
|
||||
pub(crate) fn timer_spec_for_timer(
|
||||
&self,
|
||||
id: &str,
|
||||
now: chrono::DateTime<Utc>,
|
||||
) -> Option<TimerTaskSpec> {
|
||||
let runtime = self.timers.get(id)?;
|
||||
let next_run_at = runtime.timer.next_run_at?;
|
||||
if runtime.pending_run && !runtime.timer.trigger.is_recurring() {
|
||||
return None;
|
||||
}
|
||||
Some(TimerTaskSpec {
|
||||
delay: if next_run_at <= now.timestamp() {
|
||||
std::time::Duration::ZERO
|
||||
} else {
|
||||
let delay = u64::try_from(next_run_at - now.timestamp()).ok()?;
|
||||
std::time::Duration::from_secs(delay)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/// Claim the highest-priority pending timer for delivery, if any is
/// eligible under the current delivery capabilities and recurring policy.
///
/// Eligibility: the timer must be pending; recurring timers may be filtered
/// to never-run ones by `recurring_timer_policy`; idle-recurring timers
/// deliver only after-turn; otherwise the timer's preferred delivery is
/// honored, with steer falling back to after-turn when steering is not
/// currently possible. Among eligible timers the one least-recently run
/// wins (never-run timers rank by creation time), tie-broken by creation
/// time then id for determinism.
///
/// One-shot timers (and recurring timers with no further occurrence) are
/// removed and their task cancelled; live recurring timers are re-inserted
/// with `last_run_at` advanced.
pub(crate) fn claim_next_timer(
    &mut self,
    now: chrono::DateTime<Utc>,
    can_after_turn: bool,
    can_steer_current_turn: bool,
    recurring_timer_policy: RecurringTimerPolicy,
) -> Option<ClaimedTimer> {
    let (next_timer_id, actual_delivery) = self
        .timers
        .values()
        .filter(|runtime| runtime.pending_run)
        .filter_map(|runtime| {
            // Policy filter: optionally skip recurring timers that have
            // already fired at least once.
            if runtime.timer.trigger.is_recurring()
                && recurring_timer_policy == RecurringTimerPolicy::IncludeOnlyNeverRun
                && runtime.timer.last_run_at.is_some()
            {
                return None;
            }
            // Idle-recurring timers only ever deliver between turns.
            if runtime.timer.trigger.is_idle_recurring() {
                if can_after_turn {
                    return Some((runtime, TimerDelivery::AfterTurn));
                }
                return None;
            }
            // Map the timer's preferred delivery onto what is currently
            // possible; steer degrades to after-turn when it can.
            let actual_delivery = match runtime.timer.delivery {
                TimerDelivery::AfterTurn if can_after_turn => TimerDelivery::AfterTurn,
                TimerDelivery::AfterTurn => return None,
                TimerDelivery::SteerCurrentTurn if can_steer_current_turn => {
                    TimerDelivery::SteerCurrentTurn
                }
                TimerDelivery::SteerCurrentTurn if can_after_turn => TimerDelivery::AfterTurn,
                TimerDelivery::SteerCurrentTurn => return None,
            };
            Some((runtime, actual_delivery))
        })
        .min_by(|(left, _), (right, _)| {
            // Least-recently-run first; never-run timers use created_at.
            left.timer
                .last_run_at
                .unwrap_or(left.timer.created_at)
                .cmp(&right.timer.last_run_at.unwrap_or(right.timer.created_at))
                .then_with(|| left.timer.created_at.cmp(&right.timer.created_at))
                .then_with(|| left.timer.id.cmp(&right.timer.id))
        })
        .map(|(runtime, actual_delivery)| (runtime.timer.id.clone(), actual_delivery))?;

    let runtime = self.timers.remove(&next_timer_id)?;
    let TimerRuntime {
        mut timer,
        pending_run: _,
        timer_cancel,
    } = runtime;
    let is_recurring = timer.trigger.is_recurring();
    // Delete when one-shot, or when a (non-idle) recurring schedule has no
    // further occurrence left.
    let delete_after_claim =
        !is_recurring || (!timer.trigger.is_idle_recurring() && timer.next_run_at.is_none());
    let previous_last_run_at = timer.last_run_at;
    if delete_after_claim {
        if let Some(cancel) = timer_cancel.as_ref() {
            cancel.cancel();
        }
    } else {
        // Advance last_run_at monotonically: strictly greater than the
        // previous value even if the clock did not move.
        timer.last_run_at = Some(
            previous_last_run_at
                .map(|previous| now.timestamp().max(previous.saturating_add(1)))
                .unwrap_or_else(|| now.timestamp()),
        );
        // Idle-recurring timers become pending again immediately.
        let pending_run = timer.trigger.is_idle_recurring();
        self.timers.insert(
            timer.id.clone(),
            TimerRuntime {
                timer: timer.clone(),
                pending_run,
                timer_cancel,
            },
        );
    }
    Some(ClaimedTimer {
        timer: timer.clone(),
        context: TimerInvocationContext {
            current_timer_id: timer.id,
            content: timer.content,
            instructions: timer.instructions,
            meta: timer.meta,
            recurring: !delete_after_claim,
            delivery: actual_delivery,
            queued_at: now.timestamp(),
        },
        deleted_one_shot_timer: delete_after_claim,
        previous_last_run_at,
    })
}
|
||||
}
|
||||
|
||||
/// Translate storage-create inputs into the `codex_state` row shape.
///
/// Generates a fresh UUID for the timer, evaluates the trigger at the
/// current time, and serializes the (possibly normalized) trigger and
/// metadata to JSON for persistence.
///
/// # Errors
/// Fails when the trigger is invalid or JSON serialization fails.
pub fn build_thread_timer_create_params(
    params: ThreadTimerStorageCreateParams,
) -> Result<codex_state::ThreadTimerCreateParams, String> {
    let id = uuid::Uuid::new_v4().to_string();
    let now = Utc::now();
    let TriggerTiming {
        trigger,
        pending_run,
        next_run_at,
        timer_delay: _,
    } = timing_for_new_trigger(params.trigger, now, now)?;
    Ok(codex_state::ThreadTimerCreateParams {
        id,
        thread_id: params.thread_id,
        source: params.source,
        client_id: params.client_id,
        // Persist the normalized trigger, not the caller-supplied one.
        trigger_json: serde_json::to_string(&trigger)
            .map_err(|err| format!("failed to serialize timer trigger: {err}"))?,
        content: params.payload.content,
        instructions: params.payload.instructions,
        meta_json: serde_json::to_string(&params.payload.meta)
            .map_err(|err| format!("failed to serialize timer metadata: {err}"))?,
        delivery: params.delivery.as_str().to_string(),
        created_at: now.timestamp(),
        next_run_at,
        last_run_at: None,
        pending_run,
    })
}
|
||||
|
||||
/// Test-only convenience: render a fired timer straight to a prompt input
/// item via its injected message.
#[cfg(test)]
pub(crate) fn timer_prompt_input_item(
    timer: &TimerInvocationContext,
) -> codex_protocol::models::ResponseInputItem {
    timer_injected_message(timer).prompt_input_item()
}
|
||||
|
||||
pub(crate) fn timer_injected_message(timer: &TimerInvocationContext) -> InjectedMessage {
|
||||
InjectedMessage::Timer {
|
||||
timer_id: timer.current_timer_id.clone(),
|
||||
content: timer_message_content(timer),
|
||||
instructions: timer_message_instructions(timer),
|
||||
}
|
||||
}
|
||||
|
||||
fn timer_message_content(timer: &TimerInvocationContext) -> String {
|
||||
let preview = timer
|
||||
.content
|
||||
.split_whitespace()
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
if preview.is_empty() {
|
||||
return "Timer fired.".to_string();
|
||||
}
|
||||
|
||||
let preview = if preview.chars().count() > TIMER_CONTENT_PREVIEW_MAX_CHARS {
|
||||
let mut truncated = preview
|
||||
.chars()
|
||||
.take(TIMER_CONTENT_PREVIEW_MAX_CHARS.saturating_sub(3))
|
||||
.collect::<String>();
|
||||
truncated.push_str("...");
|
||||
truncated
|
||||
} else {
|
||||
preview
|
||||
};
|
||||
format!("Timer fired: {preview}")
|
||||
}
|
||||
|
||||
fn timer_message_instructions(timer: &TimerInvocationContext) -> Option<String> {
|
||||
let timer_instructions = if timer.recurring {
|
||||
RECURRING_TIMER_INSTRUCTIONS.replace("{{CURRENT_TIMER_ID}}", &timer.current_timer_id)
|
||||
} else {
|
||||
ONE_SHOT_TIMER_INSTRUCTIONS.to_string()
|
||||
};
|
||||
let content = timer.content.trim();
|
||||
let mut parts = Vec::new();
|
||||
if !content.is_empty() {
|
||||
parts.push(content.to_string());
|
||||
}
|
||||
if let Some(instructions) = timer.instructions.as_deref()
|
||||
&& !instructions.trim().is_empty()
|
||||
{
|
||||
parts.push(instructions.trim().to_string());
|
||||
}
|
||||
parts.push(timer_instructions);
|
||||
Some(parts.join("\n\n"))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::CreateTimer;
|
||||
use super::MAX_ACTIVE_TIMERS_PER_THREAD;
|
||||
use super::PersistedTimer;
|
||||
use super::RecurringTimerPolicy;
|
||||
use super::ThreadTimer;
|
||||
use super::TimerDelivery;
|
||||
use super::TimerInvocationContext;
|
||||
use super::TimersState;
|
||||
use super::timer_prompt_input_item;
|
||||
use crate::injected_message::MessagePayload;
|
||||
use crate::timer_trigger::TimerTrigger;
|
||||
use chrono::TimeZone;
|
||||
use chrono::Utc;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseInputItem;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
// Delay values (seconds) used by the timer tests.
const ZERO_SECONDS: u64 = 0;
const TEN_SECONDS: u64 = 10;
const SIXTY_SECONDS: u64 = 60;
|
||||
|
||||
// Shorthand for constructing a delay trigger in tests.
fn delay(seconds: u64, repeat: Option<bool>) -> TimerTrigger {
    TimerTrigger::Delay { seconds, repeat }
}
|
||||
|
||||
// A due one-shot delay timer is claimable immediately (no background task),
// and claiming it removes it from the registry.
#[test]
fn claim_one_shot_timer_removes_it() {
    let now = Utc.timestamp_opt(100, 0).single().expect("valid timestamp");
    let mut timers = TimersState::default();
    let (timer, timer_spec) = timers
        .create_timer(
            CreateTimer {
                id: "timer-1".to_string(),
                trigger: delay(ZERO_SECONDS, /*repeat*/ None),
                payload: MessagePayload {
                    content: "run tests".to_string(),
                    instructions: None,
                    meta: BTreeMap::new(),
                },
                delivery: TimerDelivery::AfterTurn,
                now,
            },
            /*timer_cancel*/ None,
        )
        .expect("timer should be created");
    // Already due: no sleep task is needed, but the timer is registered.
    assert_eq!(timer_spec, None);
    assert_eq!(timers.list_timers(), vec![timer]);

    let claimed = timers
        .claim_next_timer(
            now,
            /*can_after_turn*/ true,
            /*can_steer_current_turn*/ true,
            RecurringTimerPolicy::IncludeAll,
        )
        .expect("timer should be claimed");
    assert_eq!(claimed.context.current_timer_id, "timer-1");
    assert!(claimed.deleted_one_shot_timer);
    assert!(timers.list_timers().is_empty());
}
|
||||
|
||||
#[test]
|
||||
fn exhausted_recurring_schedule_is_removed_after_final_claim() {
|
||||
let now = Utc.timestamp_opt(100, 0).single().expect("valid timestamp");
|
||||
let mut timers = TimersState::default();
|
||||
let (timer, timer_spec) = timers
|
||||
.create_timer(
|
||||
CreateTimer {
|
||||
id: "timer-1".to_string(),
|
||||
trigger: TimerTrigger::Schedule {
|
||||
dtstart: None,
|
||||
rrule: Some("FREQ=MINUTELY;COUNT=1".to_string()),
|
||||
},
|
||||
payload: MessagePayload {
|
||||
content: "final scheduled run".to_string(),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
},
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
now,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
)
|
||||
.expect("timer should be created");
|
||||
assert_eq!(timer_spec, None);
|
||||
assert_eq!(
|
||||
timers.persisted_timers(),
|
||||
vec![PersistedTimer {
|
||||
timer: ThreadTimer {
|
||||
next_run_at: None,
|
||||
..timer
|
||||
},
|
||||
pending_run: true,
|
||||
}]
|
||||
);
|
||||
|
||||
let claimed = timers
|
||||
.claim_next_timer(
|
||||
now,
|
||||
/*can_after_turn*/ true,
|
||||
/*can_steer_current_turn*/ true,
|
||||
RecurringTimerPolicy::IncludeAll,
|
||||
)
|
||||
.expect("timer should be claimed");
|
||||
assert!(claimed.deleted_one_shot_timer);
|
||||
assert!(!claimed.context.recurring);
|
||||
assert!(timers.list_timers().is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn claim_next_timer_prefers_pending_timer_that_ran_least_recently() {
|
||||
let create_first = Utc.timestamp_opt(100, 0).single().expect("valid timestamp");
|
||||
let create_second = Utc.timestamp_opt(101, 0).single().expect("valid timestamp");
|
||||
let first_claimed_at = Utc.timestamp_opt(110, 0).single().expect("valid timestamp");
|
||||
let second_claimed_at = Utc.timestamp_opt(111, 0).single().expect("valid timestamp");
|
||||
let mut timers = TimersState::default();
|
||||
timers
|
||||
.create_timer(
|
||||
CreateTimer {
|
||||
id: "timer-1".to_string(),
|
||||
trigger: delay(TEN_SECONDS, Some(true)),
|
||||
payload: MessagePayload {
|
||||
content: "older recurring timer".to_string(),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
},
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
now: create_first,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
)
|
||||
.expect("timer should be created");
|
||||
timers
|
||||
.create_timer(
|
||||
CreateTimer {
|
||||
id: "timer-2".to_string(),
|
||||
trigger: delay(TEN_SECONDS, Some(true)),
|
||||
payload: MessagePayload {
|
||||
content: "newer recurring timer".to_string(),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
},
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
now: create_second,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
)
|
||||
.expect("timer should be created");
|
||||
timers.mark_timer_due("timer-1", first_claimed_at);
|
||||
timers.mark_timer_due("timer-2", first_claimed_at);
|
||||
|
||||
let first = timers
|
||||
.claim_next_timer(
|
||||
first_claimed_at,
|
||||
/*can_after_turn*/ true,
|
||||
/*can_steer_current_turn*/ true,
|
||||
RecurringTimerPolicy::IncludeAll,
|
||||
)
|
||||
.expect("first timer should be claimed");
|
||||
assert_eq!(first.context.current_timer_id, "timer-1");
|
||||
|
||||
let second = timers
|
||||
.claim_next_timer(
|
||||
second_claimed_at,
|
||||
/*can_after_turn*/ true,
|
||||
/*can_steer_current_turn*/ true,
|
||||
RecurringTimerPolicy::IncludeAll,
|
||||
)
|
||||
.expect("second timer should be claimed");
|
||||
assert_eq!(second.context.current_timer_id, "timer-2");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idle_recurring_timer_remains_pending_after_claim() {
|
||||
let now = Utc.timestamp_opt(100, 0).single().expect("valid timestamp");
|
||||
let mut timers = TimersState::default();
|
||||
let (timer, timer_spec) = timers
|
||||
.create_timer(
|
||||
CreateTimer {
|
||||
id: "timer-1".to_string(),
|
||||
trigger: delay(ZERO_SECONDS, Some(true)),
|
||||
payload: MessagePayload {
|
||||
content: "keep going".to_string(),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
},
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
now,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
)
|
||||
.expect("timer should be created");
|
||||
assert_eq!(timer_spec, None);
|
||||
|
||||
let claimed = timers
|
||||
.claim_next_timer(
|
||||
now,
|
||||
/*can_after_turn*/ true,
|
||||
/*can_steer_current_turn*/ true,
|
||||
RecurringTimerPolicy::IncludeAll,
|
||||
)
|
||||
.expect("timer should be claimed");
|
||||
assert!(!claimed.deleted_one_shot_timer);
|
||||
assert_eq!(
|
||||
timers.persisted_timers(),
|
||||
vec![PersistedTimer {
|
||||
timer: ThreadTimer {
|
||||
last_run_at: Some(100),
|
||||
..timer
|
||||
},
|
||||
pending_run: true,
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idle_recurring_timer_waits_for_idle_even_if_delivery_requests_steer() {
|
||||
let now = Utc.timestamp_opt(100, 0).single().expect("valid timestamp");
|
||||
let mut timers = TimersState::default();
|
||||
timers
|
||||
.create_timer(
|
||||
CreateTimer {
|
||||
id: "timer-1".to_string(),
|
||||
trigger: delay(ZERO_SECONDS, Some(true)),
|
||||
payload: MessagePayload {
|
||||
content: "keep going".to_string(),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
},
|
||||
delivery: TimerDelivery::SteerCurrentTurn,
|
||||
now,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
)
|
||||
.expect("timer should be created");
|
||||
|
||||
assert_eq!(
|
||||
timers.claim_next_timer(
|
||||
now,
|
||||
/*can_after_turn*/ false,
|
||||
/*can_steer_current_turn*/ true,
|
||||
RecurringTimerPolicy::IncludeAll,
|
||||
),
|
||||
None
|
||||
);
|
||||
let claimed = timers
|
||||
.claim_next_timer(
|
||||
now,
|
||||
/*can_after_turn*/ true,
|
||||
/*can_steer_current_turn*/ false,
|
||||
RecurringTimerPolicy::IncludeAll,
|
||||
)
|
||||
.expect("timer should be claimed when idle");
|
||||
assert_eq!(claimed.context.delivery, TimerDelivery::AfterTurn);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn idle_recurring_policy_can_exclude_timer_that_already_ran() {
|
||||
let now = Utc.timestamp_opt(100, 0).single().expect("valid timestamp");
|
||||
let mut timers = TimersState::default();
|
||||
timers
|
||||
.create_timer(
|
||||
CreateTimer {
|
||||
id: "timer-1".to_string(),
|
||||
trigger: delay(ZERO_SECONDS, Some(true)),
|
||||
payload: MessagePayload {
|
||||
content: "keep going".to_string(),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
},
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
now,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
)
|
||||
.expect("timer should be created");
|
||||
|
||||
let claimed = timers
|
||||
.claim_next_timer(
|
||||
now,
|
||||
/*can_after_turn*/ true,
|
||||
/*can_steer_current_turn*/ true,
|
||||
RecurringTimerPolicy::IncludeOnlyNeverRun,
|
||||
)
|
||||
.expect("never-run idle timer should be claimed");
|
||||
assert_eq!(claimed.context.current_timer_id, "timer-1");
|
||||
|
||||
assert_eq!(
|
||||
timers.claim_next_timer(
|
||||
now,
|
||||
/*can_after_turn*/ true,
|
||||
/*can_steer_current_turn*/ true,
|
||||
RecurringTimerPolicy::IncludeOnlyNeverRun,
|
||||
),
|
||||
None
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn recurring_policy_can_exclude_timer_that_already_ran() {
|
||||
let now = Utc.timestamp_opt(100, 0).single().expect("valid timestamp");
|
||||
let next_due = Utc.timestamp_opt(110, 0).single().expect("valid timestamp");
|
||||
let mut timers = TimersState::default();
|
||||
timers
|
||||
.create_timer(
|
||||
CreateTimer {
|
||||
id: "timer-1".to_string(),
|
||||
trigger: delay(TEN_SECONDS, Some(true)),
|
||||
payload: MessagePayload {
|
||||
content: "run again".to_string(),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
},
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
now,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
)
|
||||
.expect("timer should be created");
|
||||
timers.mark_timer_due("timer-1", next_due);
|
||||
|
||||
let claimed = timers
|
||||
.claim_next_timer(
|
||||
next_due,
|
||||
/*can_after_turn*/ true,
|
||||
/*can_steer_current_turn*/ true,
|
||||
RecurringTimerPolicy::IncludeOnlyNeverRun,
|
||||
)
|
||||
.expect("never-run recurring timer should be claimed");
|
||||
assert_eq!(claimed.context.current_timer_id, "timer-1");
|
||||
timers.mark_timer_due("timer-1", next_due);
|
||||
|
||||
assert_eq!(
|
||||
timers.claim_next_timer(
|
||||
next_due,
|
||||
/*can_after_turn*/ true,
|
||||
/*can_steer_current_turn*/ true,
|
||||
RecurringTimerPolicy::IncludeOnlyNeverRun,
|
||||
),
|
||||
None
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_timer_rejects_more_than_maximum_active_timers() {
|
||||
let now = Utc.timestamp_opt(100, 0).single().expect("valid timestamp");
|
||||
let mut timers = TimersState::default();
|
||||
for index in 0..MAX_ACTIVE_TIMERS_PER_THREAD {
|
||||
timers
|
||||
.create_timer(
|
||||
CreateTimer {
|
||||
id: format!("timer-{index}"),
|
||||
trigger: delay(SIXTY_SECONDS, Some(true)),
|
||||
payload: MessagePayload {
|
||||
content: format!("content-{index}"),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
},
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
now,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
)
|
||||
.expect("timer should be created");
|
||||
}
|
||||
|
||||
let result = timers.create_timer(
|
||||
CreateTimer {
|
||||
id: "timer-overflow".to_string(),
|
||||
trigger: delay(SIXTY_SECONDS, Some(true)),
|
||||
payload: MessagePayload {
|
||||
content: "overflow".to_string(),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
},
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
now,
|
||||
},
|
||||
/*timer_cancel*/ None,
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
result,
|
||||
Err(format!(
|
||||
"too many active timers; each thread supports at most {MAX_ACTIVE_TIMERS_PER_THREAD} timers"
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn timer_prompt_input_is_visible_user_input() {
|
||||
let item = timer_prompt_input_item(&TimerInvocationContext {
|
||||
current_timer_id: "timer-1".to_string(),
|
||||
content: "run tests".to_string(),
|
||||
instructions: None,
|
||||
meta: BTreeMap::new(),
|
||||
recurring: true,
|
||||
delivery: TimerDelivery::SteerCurrentTurn,
|
||||
queued_at: 100,
|
||||
});
|
||||
assert_eq!(
|
||||
item,
|
||||
ResponseInputItem::Message {
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: "<timer_message>\n<timer_id>timer-1</timer_id>\n<content>\nTimer fired: run tests\n</content>\n<instructions>\nrun tests\n\nThis timer should keep running on its schedule after this invocation.\nDo not call delete_timer just because you completed this invocation.\nCall delete_timer with {"id":"timer-1"} only if the user's timer message included an explicit stop condition, such as "until", "stop when", or "while", and that condition is now satisfied.\nDo not expose scheduler internals unless they matter to the user.\n</instructions>\n</timer_message>".to_string(),
|
||||
}],
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn one_shot_timer_prompt_input_omits_delete_instruction() {
|
||||
let item = timer_prompt_input_item(&TimerInvocationContext {
|
||||
current_timer_id: "timer-1".to_string(),
|
||||
content: "run tests once".to_string(),
|
||||
instructions: Some("user-specific instruction".to_string()),
|
||||
meta: BTreeMap::from([("ticket".to_string(), "ABC_123".to_string())]),
|
||||
recurring: false,
|
||||
delivery: TimerDelivery::AfterTurn,
|
||||
queued_at: 101,
|
||||
});
|
||||
assert_eq!(
|
||||
item,
|
||||
ResponseInputItem::Message {
|
||||
role: "user".to_string(),
|
||||
content: vec![ContentItem::InputText {
|
||||
text: "<timer_message>\n<timer_id>timer-1</timer_id>\n<content>\nTimer fired: run tests once\n</content>\n<instructions>\nrun tests once\n\nuser-specific instruction\n\nThis one-shot timer has already been removed from the schedule, so you do not need to call delete_timer.\nDo not expose scheduler internals unless they matter to the user.\n</instructions>\n</timer_message>".to_string(),
|
||||
}],
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -13,6 +13,7 @@ mod request_permissions;
|
||||
mod request_user_input;
|
||||
mod shell;
|
||||
mod test_sync;
|
||||
mod timers;
|
||||
mod tool_search;
|
||||
mod tool_suggest;
|
||||
pub(crate) mod unified_exec;
|
||||
@@ -47,6 +48,9 @@ pub use request_user_input::RequestUserInputHandler;
|
||||
pub use shell::ShellCommandHandler;
|
||||
pub use shell::ShellHandler;
|
||||
pub use test_sync::TestSyncHandler;
|
||||
pub use timers::CreateTimerHandler;
|
||||
pub use timers::DeleteTimerHandler;
|
||||
pub use timers::ListTimersHandler;
|
||||
pub use tool_search::ToolSearchHandler;
|
||||
pub use tool_suggest::ToolSuggestHandler;
|
||||
pub use unified_exec::UnifiedExecHandler;
|
||||
|
||||
123
codex-rs/core/src/tools/handlers/timers.rs
Normal file
123
codex-rs/core/src/tools/handlers/timers.rs
Normal file
@@ -0,0 +1,123 @@
|
||||
//! Built-in tool handlers for thread-local persistent timer management.
|
||||
//!
|
||||
//! These handlers bridge timer tool calls onto the current thread session's
|
||||
//! timer registry.
|
||||
|
||||
use serde::Deserialize;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use crate::function_tool::FunctionCallError;
|
||||
use crate::injected_message::MessagePayload;
|
||||
use crate::timers::ThreadTimerTrigger;
|
||||
use crate::timers::TimerDelivery;
|
||||
use crate::tools::context::FunctionToolOutput;
|
||||
use crate::tools::context::ToolInvocation;
|
||||
use crate::tools::context::ToolPayload;
|
||||
use crate::tools::handlers::parse_arguments;
|
||||
use crate::tools::registry::ToolHandler;
|
||||
use crate::tools::registry::ToolKind;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct CreateTimerArgs {
|
||||
trigger: ThreadTimerTrigger,
|
||||
content: Option<String>,
|
||||
#[serde(default)]
|
||||
meta: BTreeMap<String, String>,
|
||||
delivery: TimerDelivery,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct DeleteTimerArgs {
|
||||
id: String,
|
||||
}
|
||||
|
||||
pub struct CreateTimerHandler;
|
||||
|
||||
impl ToolHandler for CreateTimerHandler {
|
||||
type Output = FunctionToolOutput;
|
||||
|
||||
fn kind(&self) -> ToolKind {
|
||||
ToolKind::Function
|
||||
}
|
||||
|
||||
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
|
||||
let ToolPayload::Function { arguments } = invocation.payload else {
|
||||
return Err(FunctionCallError::RespondToModel(
|
||||
"create_timer received unsupported payload".to_string(),
|
||||
));
|
||||
};
|
||||
let args: CreateTimerArgs = parse_arguments(&arguments)?;
|
||||
let content = args.content.ok_or_else(|| {
|
||||
FunctionCallError::RespondToModel("create_timer requires `content`".to_string())
|
||||
})?;
|
||||
let timer = invocation
|
||||
.session
|
||||
.create_timer(
|
||||
args.trigger,
|
||||
MessagePayload {
|
||||
content,
|
||||
instructions: None,
|
||||
meta: args.meta,
|
||||
},
|
||||
args.delivery,
|
||||
)
|
||||
.await
|
||||
.map_err(FunctionCallError::RespondToModel)?;
|
||||
let content = serde_json::to_string(&timer).map_err(|err| {
|
||||
FunctionCallError::Fatal(format!("failed to serialize create_timer response: {err}"))
|
||||
})?;
|
||||
Ok(FunctionToolOutput::from_text(content, Some(true)))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DeleteTimerHandler;
|
||||
|
||||
impl ToolHandler for DeleteTimerHandler {
|
||||
type Output = FunctionToolOutput;
|
||||
|
||||
fn kind(&self) -> ToolKind {
|
||||
ToolKind::Function
|
||||
}
|
||||
|
||||
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
|
||||
let ToolPayload::Function { arguments } = invocation.payload else {
|
||||
return Err(FunctionCallError::RespondToModel(
|
||||
"delete_timer received unsupported payload".to_string(),
|
||||
));
|
||||
};
|
||||
let args: DeleteTimerArgs = parse_arguments(&arguments)?;
|
||||
let deleted = invocation
|
||||
.session
|
||||
.delete_timer(&args.id)
|
||||
.await
|
||||
.map_err(FunctionCallError::RespondToModel)?;
|
||||
let content = serde_json::json!({ "deleted": deleted }).to_string();
|
||||
Ok(FunctionToolOutput::from_text(content, Some(deleted)))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ListTimersHandler;
|
||||
|
||||
impl ToolHandler for ListTimersHandler {
|
||||
type Output = FunctionToolOutput;
|
||||
|
||||
fn kind(&self) -> ToolKind {
|
||||
ToolKind::Function
|
||||
}
|
||||
|
||||
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
|
||||
match invocation.payload {
|
||||
ToolPayload::Function { .. } => {}
|
||||
_ => {
|
||||
return Err(FunctionCallError::RespondToModel(
|
||||
"list_timers received unsupported payload".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
let timers = invocation.session.list_timers().await;
|
||||
let content = serde_json::to_string(&timers).map_err(|err| {
|
||||
FunctionCallError::Fatal(format!("failed to serialize list_timers response: {err}"))
|
||||
})?;
|
||||
Ok(FunctionToolOutput::from_text(content, Some(true)))
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,8 @@
|
||||
use crate::shell::Shell;
|
||||
use crate::shell::ShellType;
|
||||
use crate::tools::handlers::CreateTimerHandler;
|
||||
use crate::tools::handlers::DeleteTimerHandler;
|
||||
use crate::tools::handlers::ListTimersHandler;
|
||||
use crate::tools::handlers::agent_jobs::BatchJobHandler;
|
||||
use crate::tools::handlers::multi_agents_common::DEFAULT_WAIT_TIMEOUT_MS;
|
||||
use crate::tools::handlers::multi_agents_common::MAX_WAIT_TIMEOUT_MS;
|
||||
@@ -161,6 +164,15 @@ pub(crate) fn build_specs_with_discoverable_tools(
|
||||
|
||||
for handler in plan.handlers {
|
||||
match handler.kind {
|
||||
ToolHandlerKind::CreateTimer => {
|
||||
builder.register_handler(handler.name, Arc::new(CreateTimerHandler));
|
||||
}
|
||||
ToolHandlerKind::DeleteTimer => {
|
||||
builder.register_handler(handler.name, Arc::new(DeleteTimerHandler));
|
||||
}
|
||||
ToolHandlerKind::ListTimers => {
|
||||
builder.register_handler(handler.name, Arc::new(ListTimersHandler));
|
||||
}
|
||||
ToolHandlerKind::AgentJobs => {
|
||||
builder.register_handler(handler.name, Arc::new(BatchJobHandler));
|
||||
}
|
||||
|
||||
13
codex-rs/core/templates/timers/one_shot_prompt.md
Normal file
13
codex-rs/core/templates/timers/one_shot_prompt.md
Normal file
@@ -0,0 +1,13 @@
|
||||
<timer_fired>
|
||||
<id>{{CURRENT_TIMER_ID}}</id>
|
||||
<trigger>{{TRIGGER}}</trigger>
|
||||
<delivery>{{DELIVERY}}</delivery>
|
||||
<recurring>false</recurring>
|
||||
<prompt>
|
||||
{{PROMPT}}
|
||||
</prompt>
|
||||
<instructions>
|
||||
This one-shot timer has already been removed from the schedule, so you do not need to call delete_timer.
|
||||
Do not expose scheduler internals unless they matter to the user.
|
||||
</instructions>
|
||||
</timer_fired>
|
||||
15
codex-rs/core/templates/timers/recurring_prompt.md
Normal file
15
codex-rs/core/templates/timers/recurring_prompt.md
Normal file
@@ -0,0 +1,15 @@
|
||||
<timer_fired>
|
||||
<id>{{CURRENT_TIMER_ID}}</id>
|
||||
<trigger>{{TRIGGER}}</trigger>
|
||||
<delivery>{{DELIVERY}}</delivery>
|
||||
<recurring>true</recurring>
|
||||
<prompt>
|
||||
{{PROMPT}}
|
||||
</prompt>
|
||||
<instructions>
|
||||
This timer should keep running on its schedule after this invocation.
|
||||
Do not call delete_timer just because you completed this invocation.
|
||||
Call delete_timer with {"id":"{{CURRENT_TIMER_ID}}"} only if the user's timer prompt included an explicit stop condition, such as "until", "stop when", or "while", and that condition is now satisfied.
|
||||
Do not expose scheduler internals unless they matter to the user.
|
||||
</instructions>
|
||||
</timer_fired>
|
||||
@@ -143,6 +143,7 @@ mod sqlite_state;
|
||||
mod stream_error_allows_next_turn;
|
||||
mod stream_no_completed;
|
||||
mod subagent_notifications;
|
||||
mod timers;
|
||||
mod tool_harness;
|
||||
mod tool_parallelism;
|
||||
mod tool_suggest;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use codex_core::CodexThread;
|
||||
use codex_protocol::AgentPath;
|
||||
@@ -31,6 +32,7 @@ use serde_json::Value;
|
||||
use serde_json::from_slice;
|
||||
use serde_json::json;
|
||||
use tokio::sync::oneshot;
|
||||
use tokio::time::timeout;
|
||||
|
||||
fn ev_message_item_done(id: &str, text: &str) -> Value {
|
||||
serde_json::json!({
|
||||
@@ -159,6 +161,22 @@ async fn submit_queue_only_agent_mail(codex: &CodexThread, text: &str) {
|
||||
.unwrap_or_else(|err| panic!("submit queue-only agent mail: {err}"));
|
||||
}
|
||||
|
||||
async fn wait_for_pending_input(codex: &CodexThread) {
|
||||
if timeout(Duration::from_secs(5), async {
|
||||
loop {
|
||||
if codex.has_pending_input().await {
|
||||
return;
|
||||
}
|
||||
tokio::time::sleep(Duration::from_millis(10)).await;
|
||||
}
|
||||
})
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
panic!("mailbox message should become pending input");
|
||||
}
|
||||
}
|
||||
|
||||
async fn wait_for_reasoning_item_started(codex: &CodexThread) {
|
||||
wait_for_event(codex, |event| {
|
||||
matches!(
|
||||
@@ -346,6 +364,7 @@ async fn queued_inter_agent_mail_triggers_follow_up_after_reasoning_item() {
|
||||
wait_for_reasoning_item_started(&codex).await;
|
||||
|
||||
submit_queue_only_agent_mail(&codex, "queued child update").await;
|
||||
wait_for_pending_input(&codex).await;
|
||||
|
||||
let _ = gate_reasoning_done_tx.send(());
|
||||
|
||||
@@ -408,6 +427,7 @@ async fn queued_inter_agent_mail_triggers_follow_up_after_commentary_message_ite
|
||||
.await;
|
||||
|
||||
submit_queue_only_agent_mail(&codex, "queued child update").await;
|
||||
wait_for_pending_input(&codex).await;
|
||||
|
||||
let _ = gate_message_done_tx.send(());
|
||||
|
||||
|
||||
629
codex-rs/core/tests/suite/timers.rs
Normal file
629
codex-rs/core/tests/suite/timers.rs
Normal file
@@ -0,0 +1,629 @@
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use chrono::Utc;
|
||||
use codex_core::injected_message::MessagePayload;
|
||||
use codex_core::timers::TIMER_FIRED_BACKGROUND_EVENT_PREFIX;
|
||||
use codex_core::timers::ThreadTimer;
|
||||
use codex_core::timers::ThreadTimerTrigger;
|
||||
use codex_core::timers::TimerDelivery;
|
||||
use codex_features::Feature;
|
||||
use codex_protocol::protocol::EventMsg;
|
||||
use codex_protocol::protocol::Op;
|
||||
use core_test_support::responses::ResponseMock;
|
||||
use core_test_support::responses::ResponsesRequest;
|
||||
use core_test_support::responses::ev_assistant_message;
|
||||
use core_test_support::responses::ev_completed;
|
||||
use core_test_support::responses::ev_response_created;
|
||||
use core_test_support::responses::mount_sse_once;
|
||||
use core_test_support::responses::mount_sse_sequence;
|
||||
use core_test_support::responses::sse;
|
||||
use core_test_support::responses::start_mock_server;
|
||||
use core_test_support::test_codex::test_codex;
|
||||
use core_test_support::wait_for_event;
|
||||
use core_test_support::wait_for_event_with_timeout;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::time::Duration;
|
||||
|
||||
const TIMER_INTEGRATION_TIMEOUT: Duration = Duration::from_secs(60);
|
||||
|
||||
#[cfg_attr(
|
||||
target_os = "windows",
|
||||
ignore = "timer/message integration tests currently exceed the Windows Bazel job timeout"
|
||||
)]
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn create_timer_emits_fired_background_event_when_timer_starts() -> Result<()> {
|
||||
assert_after_turn_timer_starts_and_emits_fired_event().await
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
target_os = "windows",
|
||||
ignore = "timer/message integration tests currently exceed the Windows Bazel job timeout"
|
||||
)]
|
||||
#[tokio::test(flavor = "current_thread")]
|
||||
async fn create_timer_starts_on_current_thread_runtime() -> Result<()> {
|
||||
assert_after_turn_timer_starts_and_emits_fired_event().await
|
||||
}
|
||||
|
||||
async fn assert_after_turn_timer_starts_and_emits_fired_event() -> Result<()> {
|
||||
let server = start_mock_server().await;
|
||||
let mock = mount_sse_once(
|
||||
&server,
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_assistant_message("msg-1", "timer ran"),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
)
|
||||
.await;
|
||||
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Timers)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Sqlite)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
let created = test
|
||||
.codex
|
||||
.create_timer(
|
||||
ThreadTimerTrigger::Delay {
|
||||
seconds: 0,
|
||||
repeat: None,
|
||||
},
|
||||
MessagePayload {
|
||||
content: "run timer".to_string(),
|
||||
instructions: None,
|
||||
meta: Default::default(),
|
||||
},
|
||||
TimerDelivery::AfterTurn,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
|
||||
let payload = wait_for_event_with_timeout(
|
||||
&test.codex,
|
||||
|event| match event {
|
||||
EventMsg::BackgroundEvent(event) => event
|
||||
.message
|
||||
.strip_prefix(TIMER_FIRED_BACKGROUND_EVENT_PREFIX)
|
||||
.is_some(),
|
||||
_ => false,
|
||||
},
|
||||
TIMER_INTEGRATION_TIMEOUT,
|
||||
)
|
||||
.await;
|
||||
let EventMsg::BackgroundEvent(event) = payload else {
|
||||
unreachable!("event predicate only matches timer fired background events");
|
||||
};
|
||||
let payload = event
|
||||
.message
|
||||
.strip_prefix(TIMER_FIRED_BACKGROUND_EVENT_PREFIX)
|
||||
.ok_or_else(|| anyhow!("timer fired event prefix missing"))?;
|
||||
let fired: ThreadTimer = serde_json::from_str(payload)?;
|
||||
assert_eq!(fired, created);
|
||||
|
||||
wait_for_event_with_timeout(
|
||||
&test.codex,
|
||||
|event| matches!(event, EventMsg::TurnComplete(_)),
|
||||
TIMER_INTEGRATION_TIMEOUT,
|
||||
)
|
||||
.await;
|
||||
|
||||
let user_messages = mock.single_request().message_input_texts("user");
|
||||
let timer_messages = user_messages
|
||||
.iter()
|
||||
.filter(|message| message.contains("<timer_message>"))
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(timer_messages.len(), 1);
|
||||
let timer_message = timer_messages[0];
|
||||
assert!(timer_message.contains(&format!("<timer_id>{}</timer_id>", created.id)));
|
||||
assert!(timer_message.contains("<content>\nTimer fired: run timer\n</content>"));
|
||||
assert!(timer_message.contains("<instructions>\nrun timer\n\nThis one-shot timer"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
target_os = "windows",
|
||||
ignore = "timer/message integration tests currently exceed the Windows Bazel job timeout"
|
||||
)]
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn create_timer_persists_source_and_client_metadata() -> Result<()> {
|
||||
let server = start_mock_server().await;
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Timers)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Sqlite)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
let created = test
|
||||
.codex
|
||||
.create_timer(
|
||||
ThreadTimerTrigger::Delay {
|
||||
seconds: 60,
|
||||
repeat: Some(true),
|
||||
},
|
||||
MessagePayload {
|
||||
content: "run timer".to_string(),
|
||||
instructions: None,
|
||||
meta: Default::default(),
|
||||
},
|
||||
TimerDelivery::AfterTurn,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
|
||||
let db = test.codex.state_db().expect("state db enabled");
|
||||
let timers = db
|
||||
.list_thread_timers(&test.session_configured.session_id.to_string())
|
||||
.await?;
|
||||
|
||||
assert_eq!(timers.len(), 1);
|
||||
assert_eq!(timers[0].id, created.id);
|
||||
assert_eq!(timers[0].source, "agent");
|
||||
assert_eq!(timers[0].client_id, "codex-cli");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
target_os = "windows",
|
||||
ignore = "timer/message integration tests currently exceed the Windows Bazel job timeout"
|
||||
)]
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn create_timer_rejects_ephemeral_thread() -> Result<()> {
|
||||
let server = start_mock_server().await;
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config.ephemeral = true;
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Timers)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
|
||||
let err = test
|
||||
.codex
|
||||
.create_timer(
|
||||
ThreadTimerTrigger::Delay {
|
||||
seconds: 60,
|
||||
repeat: Some(true),
|
||||
},
|
||||
MessagePayload {
|
||||
content: "run timer".to_string(),
|
||||
instructions: None,
|
||||
meta: Default::default(),
|
||||
},
|
||||
TimerDelivery::AfterTurn,
|
||||
)
|
||||
.await
|
||||
.expect_err("ephemeral sessions should not create durable timers");
|
||||
|
||||
assert!(err.contains("timer storage is unavailable for ephemeral sessions"));
|
||||
assert!(test.codex.state_db().is_none());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
target_os = "windows",
|
||||
ignore = "timer/message integration tests currently exceed the Windows Bazel job timeout"
|
||||
)]
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn resume_due_timer_runs_after_history_reconstruction() -> Result<()> {
|
||||
let server = start_mock_server().await;
|
||||
let mock = mount_sse_sequence(
|
||||
&server,
|
||||
vec![
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_assistant_message("msg-1", "recorded before resume"),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
sse(vec![
|
||||
ev_response_created("resp-2"),
|
||||
ev_assistant_message("msg-2", "timer after resume"),
|
||||
ev_completed("resp-2"),
|
||||
]),
|
||||
],
|
||||
)
|
||||
.await;
|
||||
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Timers)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Sqlite)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
});
|
||||
let initial = builder.build(&server).await?;
|
||||
initial.submit_turn("context before resume").await?;
|
||||
|
||||
let db = initial.codex.state_db().expect("state db enabled");
|
||||
let home = initial.home.clone();
|
||||
let rollout_path = initial
|
||||
.codex
|
||||
.rollout_path()
|
||||
.expect("rollout path should be materialized");
|
||||
let thread_id = initial.session_configured.session_id.to_string();
|
||||
|
||||
initial.codex.submit(Op::Shutdown).await?;
|
||||
wait_for_event(&initial.codex, |event| {
|
||||
matches!(event, EventMsg::ShutdownComplete)
|
||||
})
|
||||
.await;
|
||||
|
||||
let now = Utc::now().timestamp();
|
||||
db.create_thread_timer(&codex_state::ThreadTimerCreateParams {
|
||||
id: "resume-due-timer".to_string(),
|
||||
thread_id,
|
||||
source: "external".to_string(),
|
||||
client_id: "codex-test".to_string(),
|
||||
trigger_json: r#"{"kind":"delay","seconds":0,"repeat":false}"#.to_string(),
|
||||
content: "resume timer".to_string(),
|
||||
instructions: None,
|
||||
meta_json: "{}".to_string(),
|
||||
delivery: TimerDelivery::AfterTurn.as_str().to_string(),
|
||||
created_at: now - 10,
|
||||
next_run_at: Some(now - 1),
|
||||
last_run_at: None,
|
||||
pending_run: false,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let mut resume_builder = test_codex().with_config(|config| {
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Timers)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Sqlite)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
});
|
||||
let _resumed = resume_builder.resume(&server, home, rollout_path).await?;
|
||||
|
||||
let resumed_request = wait_for_request_containing(&mock, "Timer fired: resume timer").await;
|
||||
assert!(resumed_request.body_contains_text("context before resume"));
|
||||
assert!(resumed_request.body_contains_text("recorded before resume"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn wait_for_request_containing(mock: &ResponseMock, text: &str) -> ResponsesRequest {
|
||||
tokio::time::timeout(TIMER_INTEGRATION_TIMEOUT, async {
|
||||
loop {
|
||||
if let Some(request) = mock
|
||||
.requests()
|
||||
.into_iter()
|
||||
.find(|request| request.body_contains_text(text))
|
||||
{
|
||||
return request;
|
||||
}
|
||||
tokio::time::sleep(Duration::from_millis(50)).await;
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap_or_else(|_| panic!("timed out waiting for request containing {text:?}"))
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
target_os = "windows",
|
||||
ignore = "timer/message integration tests currently exceed the Windows Bazel job timeout"
|
||||
)]
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn list_timers_discovers_externally_inserted_timer() -> Result<()> {
|
||||
let server = start_mock_server().await;
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Timers)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Sqlite)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
let db = test.codex.state_db().expect("state db enabled");
|
||||
let created_at = Utc::now().timestamp();
|
||||
|
||||
db.create_thread_timer(&codex_state::ThreadTimerCreateParams {
|
||||
id: "external-timer".to_string(),
|
||||
thread_id: test.session_configured.session_id.to_string(),
|
||||
source: "client".to_string(),
|
||||
client_id: "external-client".to_string(),
|
||||
trigger_json: r#"{"kind":"delay","seconds":60,"repeat":true}"#.to_string(),
|
||||
content: "external timer".to_string(),
|
||||
instructions: None,
|
||||
meta_json: "{}".to_string(),
|
||||
delivery: "after-turn".to_string(),
|
||||
created_at,
|
||||
next_run_at: Some(created_at + 60),
|
||||
last_run_at: None,
|
||||
pending_run: false,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let timers = test.codex.list_timers().await;
|
||||
|
||||
assert_eq!(timers.len(), 1);
|
||||
assert_eq!(timers[0].id, "external-timer");
|
||||
assert_eq!(
|
||||
timers[0].trigger,
|
||||
ThreadTimerTrigger::Delay {
|
||||
seconds: 60,
|
||||
repeat: Some(true),
|
||||
}
|
||||
);
|
||||
assert_eq!(timers[0].content, "external timer");
|
||||
assert_eq!(timers[0].delivery, TimerDelivery::AfterTurn);
|
||||
assert_eq!(timers[0].created_at, created_at);
|
||||
assert_eq!(timers[0].last_run_at, None);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
target_os = "windows",
|
||||
ignore = "timer/message integration tests currently exceed the Windows Bazel job timeout"
|
||||
)]
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn queued_messages_feature_consumes_messages_without_timers() -> Result<()> {
|
||||
let server = start_mock_server().await;
|
||||
let mock = mount_sse_once(
|
||||
&server,
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_assistant_message("msg-1", "queued turn"),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
)
|
||||
.await;
|
||||
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config
|
||||
.features
|
||||
.enable(Feature::QueuedMessages)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Sqlite)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
let db = test.codex.state_db().expect("state db enabled");
|
||||
let thread_id = test.session_configured.session_id.to_string();
|
||||
db.create_external_message(&codex_state::ExternalMessageCreateParams::new(
|
||||
thread_id,
|
||||
"external".to_string(),
|
||||
"queued hello".to_string(),
|
||||
/*instructions*/ None,
|
||||
"{}".to_string(),
|
||||
TimerDelivery::AfterTurn.as_str().to_string(),
|
||||
Utc::now().timestamp(),
|
||||
))
|
||||
.await?;
|
||||
|
||||
wait_for_event_with_timeout(
|
||||
&test.codex,
|
||||
|event| match event {
|
||||
EventMsg::InjectedMessage(event) => {
|
||||
event.source == "external" && event.content == "queued hello"
|
||||
}
|
||||
_ => false,
|
||||
},
|
||||
TIMER_INTEGRATION_TIMEOUT,
|
||||
)
|
||||
.await;
|
||||
wait_for_event_with_timeout(
|
||||
&test.codex,
|
||||
|event| matches!(event, EventMsg::TurnComplete(_)),
|
||||
TIMER_INTEGRATION_TIMEOUT,
|
||||
)
|
||||
.await;
|
||||
|
||||
let requests = mock.requests();
|
||||
assert_eq!(requests.len(), 1);
|
||||
assert!(requests.iter().any(|request| {
|
||||
request
|
||||
.message_input_texts("user")
|
||||
.iter()
|
||||
.any(|message| message.contains("<content>\nqueued hello\n</content>"))
|
||||
}));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
target_os = "windows",
|
||||
ignore = "timer/message integration tests currently exceed the Windows Bazel job timeout"
|
||||
)]
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn queued_message_runs_after_idle_recurring_timer() -> Result<()> {
|
||||
let server = start_mock_server().await;
|
||||
let mock = mount_sse_sequence(
|
||||
&server,
|
||||
vec![
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_assistant_message("msg-1", "timer turn"),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
sse(vec![
|
||||
ev_response_created("resp-2"),
|
||||
ev_assistant_message("msg-2", "queued turn"),
|
||||
ev_completed("resp-2"),
|
||||
]),
|
||||
],
|
||||
)
|
||||
.await;
|
||||
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Timers)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
config
|
||||
.features
|
||||
.enable(Feature::QueuedMessages)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Sqlite)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
let db = test.codex.state_db().expect("state db enabled");
|
||||
let timer = test
|
||||
.codex
|
||||
.create_timer(
|
||||
ThreadTimerTrigger::Delay {
|
||||
seconds: 0,
|
||||
repeat: Some(true),
|
||||
},
|
||||
MessagePayload {
|
||||
content: "keep going".to_string(),
|
||||
instructions: None,
|
||||
meta: Default::default(),
|
||||
},
|
||||
TimerDelivery::AfterTurn,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
wait_for_event_with_timeout(
|
||||
&test.codex,
|
||||
|event| match event {
|
||||
EventMsg::InjectedMessage(event) => event.source == format!("timer {}", timer.id),
|
||||
_ => false,
|
||||
},
|
||||
TIMER_INTEGRATION_TIMEOUT,
|
||||
)
|
||||
.await;
|
||||
let thread_id = test.session_configured.session_id.to_string();
|
||||
db.create_external_message(&codex_state::ExternalMessageCreateParams::new(
|
||||
thread_id,
|
||||
"external".to_string(),
|
||||
"queued hello".to_string(),
|
||||
/*instructions*/ None,
|
||||
"{}".to_string(),
|
||||
TimerDelivery::AfterTurn.as_str().to_string(),
|
||||
Utc::now().timestamp(),
|
||||
))
|
||||
.await?;
|
||||
assert!(
|
||||
test.codex
|
||||
.delete_timer(&timer.id)
|
||||
.await
|
||||
.map_err(|err| anyhow!("{err}"))?,
|
||||
"test should delete the idle recurring timer before it can schedule another turn"
|
||||
);
|
||||
|
||||
wait_for_event_with_timeout(
|
||||
&test.codex,
|
||||
|event| matches!(event, EventMsg::TurnComplete(_)),
|
||||
TIMER_INTEGRATION_TIMEOUT,
|
||||
)
|
||||
.await;
|
||||
wait_for_event_with_timeout(
|
||||
&test.codex,
|
||||
|event| match event {
|
||||
EventMsg::InjectedMessage(event) => event.source == "external",
|
||||
_ => false,
|
||||
},
|
||||
TIMER_INTEGRATION_TIMEOUT,
|
||||
)
|
||||
.await;
|
||||
wait_for_event_with_timeout(
|
||||
&test.codex,
|
||||
|event| matches!(event, EventMsg::TurnComplete(_)),
|
||||
TIMER_INTEGRATION_TIMEOUT,
|
||||
)
|
||||
.await;
|
||||
|
||||
let requests = mock.requests();
|
||||
assert!(
|
||||
requests.len() >= 2,
|
||||
"expected timer and queued-message turns to run"
|
||||
);
|
||||
assert!(
|
||||
requests[0]
|
||||
.message_input_texts("user")
|
||||
.iter()
|
||||
.any(|message| message.contains("<content>\nTimer fired: keep going\n</content>"))
|
||||
);
|
||||
assert!(
|
||||
requests[1]
|
||||
.message_input_texts("user")
|
||||
.iter()
|
||||
.any(|message| message.contains("<content>\nqueued hello\n</content>"))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
target_os = "windows",
|
||||
ignore = "timer/message integration tests currently exceed the Windows Bazel job timeout"
|
||||
)]
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn queued_messages_feature_disabled_leaves_messages_queued() -> Result<()> {
|
||||
let server = start_mock_server().await;
|
||||
let mock = mount_sse_once(
|
||||
&server,
|
||||
sse(vec![
|
||||
ev_response_created("resp-1"),
|
||||
ev_assistant_message("msg-1", "first turn"),
|
||||
ev_completed("resp-1"),
|
||||
]),
|
||||
)
|
||||
.await;
|
||||
|
||||
let mut builder = test_codex().with_config(|config| {
|
||||
config
|
||||
.features
|
||||
.enable(Feature::Sqlite)
|
||||
.unwrap_or_else(|err| panic!("test config should allow feature update: {err}"));
|
||||
});
|
||||
let test = builder.build(&server).await?;
|
||||
let db = test.codex.state_db().expect("state db enabled");
|
||||
let thread_id = test.session_configured.session_id.to_string();
|
||||
db.create_external_message(&codex_state::ExternalMessageCreateParams::new(
|
||||
thread_id.clone(),
|
||||
"external".to_string(),
|
||||
"queued hello".to_string(),
|
||||
/*instructions*/ None,
|
||||
"{}".to_string(),
|
||||
TimerDelivery::AfterTurn.as_str().to_string(),
|
||||
Utc::now().timestamp(),
|
||||
))
|
||||
.await?;
|
||||
|
||||
test.submit_turn("start").await?;
|
||||
tokio::time::sleep(Duration::from_millis(100)).await;
|
||||
|
||||
assert_eq!(mock.requests().len(), 1);
|
||||
assert!(
|
||||
db.claim_next_external_message(
|
||||
&thread_id, /*can_after_turn*/ true, /*can_steer_current_turn*/ true,
|
||||
)
|
||||
.await?
|
||||
.is_some()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -172,6 +172,10 @@ pub enum Feature {
|
||||
Personality,
|
||||
/// Enable native artifact tools.
|
||||
Artifact,
|
||||
/// Enable per-thread persistent timer scheduling tools and APIs.
|
||||
Timers,
|
||||
/// Enable queued message creation and delivery.
|
||||
QueuedMessages,
|
||||
/// Enable Fast mode selection in the TUI and request layer.
|
||||
FastMode,
|
||||
/// Enable experimental realtime voice conversation mode in the TUI.
|
||||
@@ -857,6 +861,18 @@ pub const FEATURES: &[FeatureSpec] = &[
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::Timers,
|
||||
key: "timers",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::QueuedMessages,
|
||||
key: "queued_messages",
|
||||
stage: Stage::UnderDevelopment,
|
||||
default_enabled: false,
|
||||
},
|
||||
FeatureSpec {
|
||||
id: Feature::FastMode,
|
||||
key: "fast_mode",
|
||||
|
||||
@@ -151,6 +151,24 @@ fn image_generation_is_under_development() {
|
||||
assert_eq!(Feature::ImageGeneration.default_enabled(), false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn timers_are_under_development() {
|
||||
assert_eq!(feature_for_key("timers"), Some(Feature::Timers));
|
||||
assert_eq!(feature_for_key("timer_tool"), None);
|
||||
assert_eq!(Feature::Timers.stage(), Stage::UnderDevelopment);
|
||||
assert_eq!(Feature::Timers.default_enabled(), false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn queued_messages_are_under_development() {
|
||||
assert_eq!(
|
||||
feature_for_key("queued_messages"),
|
||||
Some(Feature::QueuedMessages)
|
||||
);
|
||||
assert_eq!(Feature::QueuedMessages.stage(), Stage::UnderDevelopment);
|
||||
assert_eq!(Feature::QueuedMessages.default_enabled(), false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tool_call_mcp_elicitation_is_stable_and_enabled_by_default() {
|
||||
assert_eq!(Feature::ToolCallMcpElicitation.stage(), Stage::Stable);
|
||||
|
||||
@@ -355,6 +355,7 @@ async fn run_codex_tool_session_inner(
|
||||
| EventMsg::PlanUpdate(_)
|
||||
| EventMsg::TurnAborted(_)
|
||||
| EventMsg::UserMessage(_)
|
||||
| EventMsg::InjectedMessage(_)
|
||||
| EventMsg::ShutdownComplete
|
||||
| EventMsg::ViewImageToolCall(_)
|
||||
| EventMsg::ImageGenerationBegin(_)
|
||||
|
||||
@@ -1412,6 +1412,9 @@ pub enum EventMsg {
|
||||
/// User/system input message (what was sent to the model)
|
||||
UserMessage(UserMessageEvent),
|
||||
|
||||
/// Transcript-safe display text for a timer-fired or external queued message.
|
||||
InjectedMessage(InjectedMessageEvent),
|
||||
|
||||
/// Agent text output delta message
|
||||
AgentMessageDelta(AgentMessageDeltaEvent),
|
||||
|
||||
@@ -2283,6 +2286,19 @@ pub struct UserMessageEvent {
|
||||
pub text_elements: Vec<crate::user_input::TextElement>,
|
||||
}
|
||||
|
||||
/// Display-only event for a timer-fired or external queued message.
|
||||
///
|
||||
/// The corresponding model-visible XML is recorded separately as a raw response
|
||||
/// item. Clients should render this event's content instead of parsing or
|
||||
/// hiding that XML themselves.
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS, PartialEq, Eq)]
|
||||
pub struct InjectedMessageEvent {
|
||||
/// Human-facing message text to show in transcript views.
|
||||
pub content: String,
|
||||
/// Origin label for the injected input, such as "external" or "timer <id>".
|
||||
pub source: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema, TS)]
|
||||
pub struct AgentMessageDeltaEvent {
|
||||
pub delta: String,
|
||||
|
||||
@@ -56,6 +56,7 @@ pub use recorder::RolloutRecorder;
|
||||
pub use recorder::RolloutRecorderParams;
|
||||
pub use recorder::append_rollout_item_to_path;
|
||||
pub use session_index::append_thread_name;
|
||||
pub use session_index::find_thread_ids_by_name;
|
||||
pub use session_index::find_thread_meta_by_name_str;
|
||||
pub use session_index::find_thread_name_by_id;
|
||||
pub use session_index::find_thread_names_by_ids;
|
||||
|
||||
@@ -93,6 +93,7 @@ fn should_persist_event_msg_extended(ev: &EventMsg) -> bool {
|
||||
fn event_msg_persistence_mode(ev: &EventMsg) -> Option<EventPersistenceMode> {
|
||||
match ev {
|
||||
EventMsg::UserMessage(_)
|
||||
| EventMsg::InjectedMessage(_)
|
||||
| EventMsg::AgentMessage(_)
|
||||
| EventMsg::AgentReasoning(_)
|
||||
| EventMsg::AgentReasoningRawContent(_)
|
||||
|
||||
@@ -112,6 +112,48 @@ pub async fn find_thread_names_by_ids(
|
||||
Ok(names)
|
||||
}
|
||||
|
||||
/// Find all thread ids whose latest indexed thread name exactly matches `name`.
|
||||
pub async fn find_thread_ids_by_name(
|
||||
codex_home: &Path,
|
||||
name: &str,
|
||||
) -> std::io::Result<Vec<ThreadId>> {
|
||||
let name = name.trim();
|
||||
if name.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
let path = session_index_path(codex_home);
|
||||
if !path.exists() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let file = tokio::fs::File::open(&path).await?;
|
||||
let reader = tokio::io::BufReader::new(file);
|
||||
let mut lines = reader.lines();
|
||||
let mut latest_names = HashMap::<ThreadId, String>::new();
|
||||
|
||||
while let Some(line) = lines.next_line().await? {
|
||||
let trimmed = line.trim();
|
||||
if trimmed.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let Ok(entry) = serde_json::from_str::<SessionIndexEntry>(trimmed) else {
|
||||
continue;
|
||||
};
|
||||
let thread_name = entry.thread_name.trim();
|
||||
if thread_name.is_empty() {
|
||||
continue;
|
||||
}
|
||||
latest_names.insert(entry.id, thread_name.to_string());
|
||||
}
|
||||
|
||||
let mut ids = latest_names
|
||||
.into_iter()
|
||||
.filter_map(|(thread_id, thread_name)| (thread_name == name).then_some(thread_id))
|
||||
.collect::<Vec<_>>();
|
||||
ids.sort_by_key(ToString::to_string);
|
||||
Ok(ids)
|
||||
}
|
||||
|
||||
/// Locate a recorded thread rollout and read its session metadata by thread name.
|
||||
/// Returns the newest indexed name that still has a readable rollout header.
|
||||
pub async fn find_thread_meta_by_name_str(
|
||||
|
||||
@@ -265,6 +265,47 @@ async fn find_thread_names_by_ids_prefers_latest_entry() -> std::io::Result<()>
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn find_thread_ids_by_name_uses_latest_name_per_thread() -> std::io::Result<()> {
|
||||
let temp = TempDir::new()?;
|
||||
let path = session_index_path(temp.path());
|
||||
let id1 = ThreadId::new();
|
||||
let id2 = ThreadId::new();
|
||||
let id3 = ThreadId::new();
|
||||
let lines = vec![
|
||||
SessionIndexEntry {
|
||||
id: id1,
|
||||
thread_name: "target".to_string(),
|
||||
updated_at: "2024-01-01T00:00:00Z".to_string(),
|
||||
},
|
||||
SessionIndexEntry {
|
||||
id: id2,
|
||||
thread_name: "target".to_string(),
|
||||
updated_at: "2024-01-01T00:00:00Z".to_string(),
|
||||
},
|
||||
SessionIndexEntry {
|
||||
id: id1,
|
||||
thread_name: "renamed".to_string(),
|
||||
updated_at: "2024-01-02T00:00:00Z".to_string(),
|
||||
},
|
||||
SessionIndexEntry {
|
||||
id: id3,
|
||||
thread_name: "target".to_string(),
|
||||
updated_at: "2024-01-03T00:00:00Z".to_string(),
|
||||
},
|
||||
];
|
||||
write_index(&path, &lines)?;
|
||||
|
||||
let found = find_thread_ids_by_name(temp.path(), "target").await?;
|
||||
let expected = {
|
||||
let mut ids = vec![id2, id3];
|
||||
ids.sort_by_key(ToString::to_string);
|
||||
ids
|
||||
};
|
||||
assert_eq!(found, expected);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn scan_index_finds_latest_match_among_mixed_entries() -> std::io::Result<()> {
|
||||
let temp = TempDir::new()?;
|
||||
|
||||
22
codex-rs/state/migrations/0025_thread_timers.sql
Normal file
22
codex-rs/state/migrations/0025_thread_timers.sql
Normal file
@@ -0,0 +1,22 @@
|
||||
CREATE TABLE thread_timers (
|
||||
id TEXT PRIMARY KEY,
|
||||
thread_id TEXT NOT NULL,
|
||||
source TEXT NOT NULL,
|
||||
client_id TEXT NOT NULL,
|
||||
trigger_json TEXT NOT NULL,
|
||||
prompt TEXT NOT NULL,
|
||||
delivery TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
next_run_at INTEGER,
|
||||
last_run_at INTEGER,
|
||||
pending_run INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_thread_timers_thread_created
|
||||
ON thread_timers(thread_id, created_at, id);
|
||||
|
||||
CREATE INDEX idx_thread_timers_thread_pending
|
||||
ON thread_timers(thread_id, pending_run, created_at, id);
|
||||
|
||||
CREATE INDEX idx_thread_timers_thread_next_run
|
||||
ON thread_timers(thread_id, next_run_at);
|
||||
77
codex-rs/state/migrations/0026_external_messages.sql
Normal file
77
codex-rs/state/migrations/0026_external_messages.sql
Normal file
@@ -0,0 +1,77 @@
|
||||
ALTER TABLE thread_timers RENAME TO thread_timers_old;
|
||||
|
||||
CREATE TABLE thread_timers (
|
||||
id TEXT PRIMARY KEY,
|
||||
thread_id TEXT NOT NULL,
|
||||
source TEXT NOT NULL,
|
||||
client_id TEXT NOT NULL,
|
||||
trigger_json TEXT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
instructions TEXT,
|
||||
meta_json TEXT NOT NULL,
|
||||
delivery TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
next_run_at INTEGER,
|
||||
last_run_at INTEGER,
|
||||
pending_run INTEGER NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO thread_timers (
|
||||
id,
|
||||
thread_id,
|
||||
source,
|
||||
client_id,
|
||||
trigger_json,
|
||||
content,
|
||||
instructions,
|
||||
meta_json,
|
||||
delivery,
|
||||
created_at,
|
||||
next_run_at,
|
||||
last_run_at,
|
||||
pending_run
|
||||
)
|
||||
SELECT
|
||||
id,
|
||||
thread_id,
|
||||
source,
|
||||
client_id,
|
||||
trigger_json,
|
||||
prompt,
|
||||
NULL,
|
||||
'{}',
|
||||
delivery,
|
||||
created_at,
|
||||
next_run_at,
|
||||
last_run_at,
|
||||
pending_run
|
||||
FROM thread_timers_old;
|
||||
|
||||
DROP TABLE thread_timers_old;
|
||||
|
||||
CREATE INDEX idx_thread_timers_thread_created
|
||||
ON thread_timers(thread_id, created_at, id);
|
||||
|
||||
CREATE INDEX idx_thread_timers_thread_pending
|
||||
ON thread_timers(thread_id, pending_run, created_at, id);
|
||||
|
||||
CREATE INDEX idx_thread_timers_thread_next_run
|
||||
ON thread_timers(thread_id, next_run_at);
|
||||
|
||||
CREATE TABLE external_messages (
|
||||
seq INTEGER PRIMARY KEY,
|
||||
id TEXT NOT NULL UNIQUE,
|
||||
thread_id TEXT NOT NULL,
|
||||
source TEXT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
instructions TEXT,
|
||||
meta_json TEXT NOT NULL,
|
||||
delivery TEXT NOT NULL,
|
||||
queued_at INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX external_messages_thread_order_idx
|
||||
ON external_messages(thread_id, queued_at, seq);
|
||||
|
||||
CREATE INDEX external_messages_thread_delivery_order_idx
|
||||
ON external_messages(thread_id, delivery, queued_at, seq);
|
||||
@@ -36,6 +36,9 @@ pub use model::BackfillState;
|
||||
pub use model::BackfillStats;
|
||||
pub use model::BackfillStatus;
|
||||
pub use model::DirectionalThreadSpawnEdgeStatus;
|
||||
pub use model::ExternalMessage;
|
||||
pub use model::ExternalMessageClaim;
|
||||
pub use model::ExternalMessageCreateParams;
|
||||
pub use model::ExtractionOutcome;
|
||||
pub use model::SortKey;
|
||||
pub use model::Stage1JobClaim;
|
||||
@@ -45,8 +48,12 @@ pub use model::Stage1OutputRef;
|
||||
pub use model::Stage1StartupClaimParams;
|
||||
pub use model::ThreadMetadata;
|
||||
pub use model::ThreadMetadataBuilder;
|
||||
pub use model::ThreadTimer;
|
||||
pub use model::ThreadTimerCreateParams;
|
||||
pub use model::ThreadTimerUpdateParams;
|
||||
pub use model::ThreadsPage;
|
||||
pub use runtime::RemoteControlEnrollmentRecord;
|
||||
pub use runtime::TimerDataVersionChecker;
|
||||
pub use runtime::logs_db_filename;
|
||||
pub use runtime::logs_db_path;
|
||||
pub use runtime::state_db_filename;
|
||||
|
||||
85
codex-rs/state/src/model/external_message.rs
Normal file
85
codex-rs/state/src/model/external_message.rs
Normal file
@@ -0,0 +1,85 @@
|
||||
use sqlx::FromRow;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ExternalMessageCreateParams {
|
||||
pub id: String,
|
||||
pub thread_id: String,
|
||||
pub source: String,
|
||||
pub content: String,
|
||||
pub instructions: Option<String>,
|
||||
pub meta_json: String,
|
||||
pub delivery: String,
|
||||
pub queued_at: i64,
|
||||
}
|
||||
|
||||
impl ExternalMessageCreateParams {
|
||||
pub fn new(
|
||||
thread_id: String,
|
||||
source: String,
|
||||
content: String,
|
||||
instructions: Option<String>,
|
||||
meta_json: String,
|
||||
delivery: String,
|
||||
queued_at: i64,
|
||||
) -> Self {
|
||||
Self {
|
||||
id: uuid::Uuid::new_v4().to_string(),
|
||||
thread_id,
|
||||
source,
|
||||
content,
|
||||
instructions,
|
||||
meta_json,
|
||||
delivery,
|
||||
queued_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ExternalMessage {
|
||||
pub seq: i64,
|
||||
pub id: String,
|
||||
pub thread_id: String,
|
||||
pub source: String,
|
||||
pub content: String,
|
||||
pub instructions: Option<String>,
|
||||
pub meta_json: String,
|
||||
pub delivery: String,
|
||||
pub queued_at: i64,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ExternalMessageClaim {
|
||||
Claimed(ExternalMessage),
|
||||
Invalid { id: String, reason: String },
|
||||
NotReady,
|
||||
}
|
||||
|
||||
#[derive(Debug, FromRow)]
|
||||
pub(crate) struct ExternalMessageRow {
|
||||
pub seq: i64,
|
||||
pub id: String,
|
||||
pub thread_id: String,
|
||||
pub source: String,
|
||||
pub content: String,
|
||||
pub instructions: Option<String>,
|
||||
pub meta_json: String,
|
||||
pub delivery: String,
|
||||
pub queued_at: i64,
|
||||
}
|
||||
|
||||
impl From<ExternalMessageRow> for ExternalMessage {
|
||||
fn from(row: ExternalMessageRow) -> Self {
|
||||
Self {
|
||||
seq: row.seq,
|
||||
id: row.id,
|
||||
thread_id: row.thread_id,
|
||||
source: row.source,
|
||||
content: row.content,
|
||||
instructions: row.instructions,
|
||||
meta_json: row.meta_json,
|
||||
delivery: row.delivery,
|
||||
queued_at: row.queued_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,11 @@
|
||||
mod agent_job;
|
||||
mod backfill_state;
|
||||
mod external_message;
|
||||
mod graph;
|
||||
mod log;
|
||||
mod memories;
|
||||
mod thread_metadata;
|
||||
mod thread_timer;
|
||||
|
||||
pub use agent_job::AgentJob;
|
||||
pub use agent_job::AgentJobCreateParams;
|
||||
@@ -14,6 +16,9 @@ pub use agent_job::AgentJobProgress;
|
||||
pub use agent_job::AgentJobStatus;
|
||||
pub use backfill_state::BackfillState;
|
||||
pub use backfill_state::BackfillStatus;
|
||||
pub use external_message::ExternalMessage;
|
||||
pub use external_message::ExternalMessageClaim;
|
||||
pub use external_message::ExternalMessageCreateParams;
|
||||
pub use graph::DirectionalThreadSpawnEdgeStatus;
|
||||
pub use log::LogEntry;
|
||||
pub use log::LogQuery;
|
||||
@@ -32,11 +37,16 @@ pub use thread_metadata::SortKey;
|
||||
pub use thread_metadata::ThreadMetadata;
|
||||
pub use thread_metadata::ThreadMetadataBuilder;
|
||||
pub use thread_metadata::ThreadsPage;
|
||||
pub use thread_timer::ThreadTimer;
|
||||
pub use thread_timer::ThreadTimerCreateParams;
|
||||
pub use thread_timer::ThreadTimerUpdateParams;
|
||||
|
||||
pub(crate) use agent_job::AgentJobItemRow;
|
||||
pub(crate) use agent_job::AgentJobRow;
|
||||
pub(crate) use external_message::ExternalMessageRow;
|
||||
pub(crate) use memories::Stage1OutputRow;
|
||||
pub(crate) use memories::stage1_output_ref_from_parts;
|
||||
pub(crate) use thread_metadata::ThreadRow;
|
||||
pub(crate) use thread_metadata::anchor_from_item;
|
||||
pub(crate) use thread_metadata::datetime_to_epoch_seconds;
|
||||
pub(crate) use thread_timer::ThreadTimerRow;
|
||||
|
||||
84
codex-rs/state/src/model/thread_timer.rs
Normal file
84
codex-rs/state/src/model/thread_timer.rs
Normal file
@@ -0,0 +1,84 @@
|
||||
use sqlx::FromRow;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ThreadTimerCreateParams {
|
||||
pub id: String,
|
||||
pub thread_id: String,
|
||||
pub source: String,
|
||||
pub client_id: String,
|
||||
pub trigger_json: String,
|
||||
pub content: String,
|
||||
pub instructions: Option<String>,
|
||||
pub meta_json: String,
|
||||
pub delivery: String,
|
||||
pub created_at: i64,
|
||||
pub next_run_at: Option<i64>,
|
||||
pub last_run_at: Option<i64>,
|
||||
pub pending_run: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ThreadTimerUpdateParams {
|
||||
pub trigger_json: String,
|
||||
pub content: String,
|
||||
pub instructions: Option<String>,
|
||||
pub meta_json: String,
|
||||
pub delivery: String,
|
||||
pub next_run_at: Option<i64>,
|
||||
pub last_run_at: Option<i64>,
|
||||
pub pending_run: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ThreadTimer {
|
||||
pub id: String,
|
||||
pub thread_id: String,
|
||||
pub source: String,
|
||||
pub client_id: String,
|
||||
pub trigger_json: String,
|
||||
pub content: String,
|
||||
pub instructions: Option<String>,
|
||||
pub meta_json: String,
|
||||
pub delivery: String,
|
||||
pub created_at: i64,
|
||||
pub next_run_at: Option<i64>,
|
||||
pub last_run_at: Option<i64>,
|
||||
pub pending_run: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, FromRow)]
|
||||
pub(crate) struct ThreadTimerRow {
|
||||
pub id: String,
|
||||
pub thread_id: String,
|
||||
pub source: String,
|
||||
pub client_id: String,
|
||||
pub trigger_json: String,
|
||||
pub content: String,
|
||||
pub instructions: Option<String>,
|
||||
pub meta_json: String,
|
||||
pub delivery: String,
|
||||
pub created_at: i64,
|
||||
pub next_run_at: Option<i64>,
|
||||
pub last_run_at: Option<i64>,
|
||||
pub pending_run: i64,
|
||||
}
|
||||
|
||||
impl From<ThreadTimerRow> for ThreadTimer {
|
||||
fn from(row: ThreadTimerRow) -> Self {
|
||||
Self {
|
||||
id: row.id,
|
||||
thread_id: row.thread_id,
|
||||
source: row.source,
|
||||
client_id: row.client_id,
|
||||
trigger_json: row.trigger_json,
|
||||
content: row.content,
|
||||
instructions: row.instructions,
|
||||
meta_json: row.meta_json,
|
||||
delivery: row.delivery,
|
||||
created_at: row.created_at,
|
||||
next_run_at: row.next_run_at,
|
||||
last_run_at: row.last_run_at,
|
||||
pending_run: row.pending_run != 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5,6 +5,9 @@ use crate::AgentJobItemCreateParams;
|
||||
use crate::AgentJobItemStatus;
|
||||
use crate::AgentJobProgress;
|
||||
use crate::AgentJobStatus;
|
||||
use crate::ExternalMessage;
|
||||
use crate::ExternalMessageClaim;
|
||||
use crate::ExternalMessageCreateParams;
|
||||
use crate::LOGS_DB_FILENAME;
|
||||
use crate::LOGS_DB_VERSION;
|
||||
use crate::LogEntry;
|
||||
@@ -15,6 +18,9 @@ use crate::STATE_DB_VERSION;
|
||||
use crate::SortKey;
|
||||
use crate::ThreadMetadata;
|
||||
use crate::ThreadMetadataBuilder;
|
||||
use crate::ThreadTimer;
|
||||
use crate::ThreadTimerCreateParams;
|
||||
use crate::ThreadTimerUpdateParams;
|
||||
use crate::ThreadsPage;
|
||||
use crate::apply_rollout_item;
|
||||
use crate::migrations::runtime_logs_migrator;
|
||||
@@ -52,14 +58,18 @@ use tracing::warn;
|
||||
|
||||
mod agent_jobs;
|
||||
mod backfill;
|
||||
mod delivery_state;
|
||||
mod external_messages;
|
||||
mod logs;
|
||||
mod memories;
|
||||
mod remote_control;
|
||||
#[cfg(test)]
|
||||
mod test_support;
|
||||
mod threads;
|
||||
mod timers;
|
||||
|
||||
pub use remote_control::RemoteControlEnrollmentRecord;
|
||||
pub use timers::TimerDataVersionChecker;
|
||||
|
||||
// "Partition" is the retained-log-content bucket we cap at 10 MiB:
|
||||
// - one bucket per non-null thread_id
|
||||
|
||||
131
codex-rs/state/src/runtime/delivery_state.rs
Normal file
131
codex-rs/state/src/runtime/delivery_state.rs
Normal file
@@ -0,0 +1,131 @@
|
||||
//! Cleanup operations for per-thread delivery state.
|
||||
//!
|
||||
//! Timers and queued external messages are stored independently because they have
|
||||
//! different runtime behavior, but thread lifecycle operations need to treat
|
||||
//! them as one unit. This module owns that cross-table cleanup.
|
||||
|
||||
use super::*;
|
||||
|
||||
impl StateRuntime {
|
||||
/// Delete all queued external messages and timers associated with `thread_id`.
|
||||
pub async fn delete_thread_delivery_state(&self, thread_id: &str) -> anyhow::Result<()> {
|
||||
let mut tx = self.pool.begin().await?;
|
||||
sqlx::query("DELETE FROM external_messages WHERE thread_id = ?")
|
||||
.bind(thread_id)
|
||||
.execute(&mut *tx)
|
||||
.await?;
|
||||
sqlx::query("DELETE FROM thread_timers WHERE thread_id = ?")
|
||||
.bind(thread_id)
|
||||
.execute(&mut *tx)
|
||||
.await?;
|
||||
tx.commit().await?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::StateRuntime;
|
||||
use super::test_support::unique_temp_dir;
|
||||
use crate::ExternalMessageCreateParams;
|
||||
use crate::ThreadTimerCreateParams;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
fn message_params(id: &str, thread_id: &str) -> ExternalMessageCreateParams {
|
||||
ExternalMessageCreateParams {
|
||||
id: id.to_string(),
|
||||
thread_id: thread_id.to_string(),
|
||||
source: "external".to_string(),
|
||||
content: "do something".to_string(),
|
||||
instructions: None,
|
||||
meta_json: "{}".to_string(),
|
||||
delivery: "after-turn".to_string(),
|
||||
queued_at: 100,
|
||||
}
|
||||
}
|
||||
|
||||
fn timer_params(id: &str, thread_id: &str) -> ThreadTimerCreateParams {
|
||||
ThreadTimerCreateParams {
|
||||
id: id.to_string(),
|
||||
thread_id: thread_id.to_string(),
|
||||
source: "agent".to_string(),
|
||||
client_id: "codex-tui".to_string(),
|
||||
trigger_json: r#"{"kind":"delay","seconds":10,"repeat":false}"#.to_string(),
|
||||
content: "run tests".to_string(),
|
||||
instructions: None,
|
||||
meta_json: "{}".to_string(),
|
||||
delivery: "after-turn".to_string(),
|
||||
created_at: 100,
|
||||
next_run_at: Some(110),
|
||||
last_run_at: None,
|
||||
pending_run: false,
|
||||
}
|
||||
}
|
||||
|
||||
async fn test_runtime() -> std::sync::Arc<StateRuntime> {
|
||||
StateRuntime::init(unique_temp_dir(), "test-provider".to_string())
|
||||
.await
|
||||
.expect("initialize runtime")
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn delete_thread_delivery_state_removes_messages_and_timers_for_thread() {
|
||||
let runtime = test_runtime().await;
|
||||
runtime
|
||||
.create_external_message(&message_params("message-1", "thread-1"))
|
||||
.await
|
||||
.expect("create thread-1 message");
|
||||
runtime
|
||||
.create_external_message(&message_params("message-2", "thread-2"))
|
||||
.await
|
||||
.expect("create thread-2 message");
|
||||
runtime
|
||||
.create_thread_timer(&timer_params("timer-1", "thread-1"))
|
||||
.await
|
||||
.expect("create thread-1 timer");
|
||||
runtime
|
||||
.create_thread_timer(&timer_params("timer-2", "thread-2"))
|
||||
.await
|
||||
.expect("create thread-2 timer");
|
||||
|
||||
runtime
|
||||
.delete_thread_delivery_state("thread-1")
|
||||
.await
|
||||
.expect("delete delivery state");
|
||||
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_external_messages("thread-1")
|
||||
.await
|
||||
.expect("list thread-1 messages"),
|
||||
Vec::new()
|
||||
);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list thread-1 timers"),
|
||||
Vec::new()
|
||||
);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_external_messages("thread-2")
|
||||
.await
|
||||
.expect("list thread-2 messages")
|
||||
.into_iter()
|
||||
.map(|message| message.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["message-2".to_string()]
|
||||
);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_thread_timers("thread-2")
|
||||
.await
|
||||
.expect("list thread-2 timers")
|
||||
.into_iter()
|
||||
.map(|timer| timer.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["timer-2".to_string()]
|
||||
);
|
||||
}
|
||||
}
|
||||
443
codex-rs/state/src/runtime/external_messages.rs
Normal file
443
codex-rs/state/src/runtime/external_messages.rs
Normal file
@@ -0,0 +1,443 @@
|
||||
//! SQLite-backed state operations for queued external messages.
|
||||
//!
|
||||
//! This module extends [`StateRuntime`] with the storage APIs used by message
|
||||
//! producers and active threads. Claiming a message deletes the row inside the
|
||||
//! same transaction, so competing runtimes deliver each queued message at most
|
||||
//! once.
|
||||
|
||||
use super::*;
|
||||
use crate::model::ExternalMessageRow;
|
||||
|
||||
const DELIVERY_AFTER_TURN: &str = "after-turn";
|
||||
const DELIVERY_STEER_CURRENT_TURN: &str = "steer-current-turn";
|
||||
|
||||
impl StateRuntime {
|
||||
pub async fn create_external_message(
|
||||
&self,
|
||||
params: &ExternalMessageCreateParams,
|
||||
) -> anyhow::Result<()> {
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO external_messages (
|
||||
id,
|
||||
thread_id,
|
||||
source,
|
||||
content,
|
||||
instructions,
|
||||
meta_json,
|
||||
delivery,
|
||||
queued_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
"#,
|
||||
)
|
||||
.bind(params.id.as_str())
|
||||
.bind(params.thread_id.as_str())
|
||||
.bind(params.source.as_str())
|
||||
.bind(params.content.as_str())
|
||||
.bind(params.instructions.as_deref())
|
||||
.bind(params.meta_json.as_str())
|
||||
.bind(params.delivery.as_str())
|
||||
.bind(params.queued_at)
|
||||
.execute(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn list_external_messages(
|
||||
&self,
|
||||
thread_id: &str,
|
||||
) -> anyhow::Result<Vec<ExternalMessage>> {
|
||||
let rows = sqlx::query_as::<_, ExternalMessageRow>(
|
||||
r#"
|
||||
SELECT
|
||||
seq,
|
||||
id,
|
||||
thread_id,
|
||||
source,
|
||||
content,
|
||||
instructions,
|
||||
meta_json,
|
||||
delivery,
|
||||
queued_at
|
||||
FROM external_messages
|
||||
WHERE thread_id = ?
|
||||
ORDER BY queued_at ASC, seq ASC
|
||||
"#,
|
||||
)
|
||||
.bind(thread_id)
|
||||
.fetch_all(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(rows.into_iter().map(ExternalMessage::from).collect())
|
||||
}
|
||||
|
||||
pub async fn delete_external_message(&self, thread_id: &str, id: &str) -> anyhow::Result<bool> {
|
||||
let result = sqlx::query("DELETE FROM external_messages WHERE thread_id = ? AND id = ?")
|
||||
.bind(thread_id)
|
||||
.bind(id)
|
||||
.execute(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(result.rows_affected() > 0)
|
||||
}
|
||||
|
||||
pub async fn claim_next_external_message(
|
||||
&self,
|
||||
thread_id: &str,
|
||||
can_after_turn: bool,
|
||||
can_steer_current_turn: bool,
|
||||
) -> anyhow::Result<Option<ExternalMessageClaim>> {
|
||||
let row = sqlx::query_as::<_, ExternalMessageRow>(
|
||||
r#"
|
||||
DELETE FROM external_messages
|
||||
WHERE seq = (
|
||||
SELECT seq
|
||||
FROM external_messages
|
||||
WHERE thread_id = ?
|
||||
ORDER BY queued_at ASC, seq ASC
|
||||
LIMIT 1
|
||||
)
|
||||
AND (
|
||||
delivery NOT IN (?, ?)
|
||||
OR (delivery = ? AND ?)
|
||||
OR (delivery = ? AND ?)
|
||||
)
|
||||
RETURNING
|
||||
seq,
|
||||
id,
|
||||
thread_id,
|
||||
source,
|
||||
content,
|
||||
instructions,
|
||||
meta_json,
|
||||
delivery,
|
||||
queued_at
|
||||
"#,
|
||||
)
|
||||
.bind(thread_id)
|
||||
.bind(DELIVERY_AFTER_TURN)
|
||||
.bind(DELIVERY_STEER_CURRENT_TURN)
|
||||
.bind(DELIVERY_AFTER_TURN)
|
||||
.bind(can_after_turn)
|
||||
.bind(DELIVERY_STEER_CURRENT_TURN)
|
||||
.bind(can_steer_current_turn || can_after_turn)
|
||||
.fetch_optional(self.pool.as_ref())
|
||||
.await?;
|
||||
|
||||
if let Some(row) = row {
|
||||
return match row.delivery.as_str() {
|
||||
DELIVERY_AFTER_TURN | DELIVERY_STEER_CURRENT_TURN => Ok(Some(
|
||||
ExternalMessageClaim::Claimed(ExternalMessage::from(row)),
|
||||
)),
|
||||
delivery => Ok(Some(ExternalMessageClaim::Invalid {
|
||||
id: row.id,
|
||||
reason: format!("invalid delivery `{delivery}`"),
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
let oldest_delivery = sqlx::query_scalar::<_, String>(
|
||||
r#"
|
||||
SELECT delivery
|
||||
FROM external_messages
|
||||
WHERE thread_id = ?
|
||||
ORDER BY queued_at ASC, seq ASC
|
||||
LIMIT 1
|
||||
"#,
|
||||
)
|
||||
.bind(thread_id)
|
||||
.fetch_optional(self.pool.as_ref())
|
||||
.await?;
|
||||
|
||||
match oldest_delivery.as_deref() {
|
||||
Some(DELIVERY_AFTER_TURN) if !can_after_turn => {
|
||||
Ok(Some(ExternalMessageClaim::NotReady))
|
||||
}
|
||||
Some(DELIVERY_STEER_CURRENT_TURN) if !(can_steer_current_turn || can_after_turn) => {
|
||||
Ok(Some(ExternalMessageClaim::NotReady))
|
||||
}
|
||||
None | Some(_) => Ok(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::StateRuntime;
|
||||
use super::test_support::unique_temp_dir;
|
||||
use crate::ExternalMessageClaim;
|
||||
use crate::ExternalMessageCreateParams;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
fn message_params(id: &str, thread_id: &str, queued_at: i64) -> ExternalMessageCreateParams {
|
||||
ExternalMessageCreateParams {
|
||||
id: id.to_string(),
|
||||
thread_id: thread_id.to_string(),
|
||||
source: "external".to_string(),
|
||||
content: "do something".to_string(),
|
||||
instructions: Some("be concise".to_string()),
|
||||
meta_json: r#"{"ticket":"ABC_123"}"#.to_string(),
|
||||
delivery: "after-turn".to_string(),
|
||||
queued_at,
|
||||
}
|
||||
}
|
||||
|
||||
async fn test_runtime() -> std::sync::Arc<StateRuntime> {
|
||||
StateRuntime::init(unique_temp_dir(), "test-provider".to_string())
|
||||
.await
|
||||
.expect("initialize runtime")
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn external_messages_table_and_indexes_exist() {
|
||||
let runtime = test_runtime().await;
|
||||
let names = sqlx::query_scalar::<_, String>(
|
||||
r#"
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE tbl_name = 'external_messages'
|
||||
AND name NOT LIKE 'sqlite_autoindex_%'
|
||||
ORDER BY name
|
||||
"#,
|
||||
)
|
||||
.fetch_all(runtime.pool.as_ref())
|
||||
.await
|
||||
.expect("query schema objects");
|
||||
|
||||
assert_eq!(
|
||||
names,
|
||||
vec![
|
||||
"external_messages",
|
||||
"external_messages_thread_delivery_order_idx",
|
||||
"external_messages_thread_order_idx",
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn external_message_rows_round_trip() {
|
||||
let runtime = test_runtime().await;
|
||||
let params = message_params("message-1", "thread-1", /*queued_at*/ 100);
|
||||
|
||||
runtime
|
||||
.create_external_message(¶ms)
|
||||
.await
|
||||
.expect("create message");
|
||||
let messages = runtime
|
||||
.list_external_messages("thread-1")
|
||||
.await
|
||||
.expect("list messages");
|
||||
|
||||
assert_eq!(messages.len(), 1);
|
||||
let message = &messages[0];
|
||||
assert_eq!(message.id, params.id);
|
||||
assert_eq!(message.thread_id, params.thread_id);
|
||||
assert_eq!(message.source, params.source);
|
||||
assert_eq!(message.content, params.content);
|
||||
assert_eq!(message.instructions, params.instructions);
|
||||
assert_eq!(message.meta_json, params.meta_json);
|
||||
assert_eq!(message.delivery, params.delivery);
|
||||
assert_eq!(message.queued_at, params.queued_at);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn delete_external_message_is_scoped_to_thread_id() {
|
||||
let runtime = test_runtime().await;
|
||||
runtime
|
||||
.create_external_message(&message_params(
|
||||
"message-1",
|
||||
"thread-1",
|
||||
/*queued_at*/ 100,
|
||||
))
|
||||
.await
|
||||
.expect("create thread-1 message");
|
||||
runtime
|
||||
.create_external_message(&message_params(
|
||||
"message-2",
|
||||
"thread-2",
|
||||
/*queued_at*/ 100,
|
||||
))
|
||||
.await
|
||||
.expect("create thread-2 message");
|
||||
|
||||
let deleted_wrong_thread = runtime
|
||||
.delete_external_message("thread-2", "message-1")
|
||||
.await
|
||||
.expect("delete wrong-external message");
|
||||
assert!(!deleted_wrong_thread);
|
||||
let deleted = runtime
|
||||
.delete_external_message("thread-1", "message-1")
|
||||
.await
|
||||
.expect("delete thread-1 message");
|
||||
assert!(deleted);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_external_messages("thread-1")
|
||||
.await
|
||||
.expect("list thread-1 messages"),
|
||||
Vec::new()
|
||||
);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_external_messages("thread-2")
|
||||
.await
|
||||
.expect("list thread-2 messages")
|
||||
.into_iter()
|
||||
.map(|message| message.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["message-2".to_string()]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn claim_is_scoped_to_thread_id_and_ordered() {
|
||||
let runtime = test_runtime().await;
|
||||
runtime
|
||||
.create_external_message(&message_params("newer", "thread-1", /*queued_at*/ 200))
|
||||
.await
|
||||
.expect("create newer message");
|
||||
runtime
|
||||
.create_external_message(&message_params(
|
||||
"other-thread",
|
||||
"thread-2",
|
||||
/*queued_at*/ 50,
|
||||
))
|
||||
.await
|
||||
.expect("create other external message");
|
||||
runtime
|
||||
.create_external_message(&message_params("older", "thread-1", /*queued_at*/ 100))
|
||||
.await
|
||||
.expect("create older message");
|
||||
|
||||
let claim = runtime
|
||||
.claim_next_external_message(
|
||||
"thread-1", /*can_after_turn*/ true, /*can_steer_current_turn*/ true,
|
||||
)
|
||||
.await
|
||||
.expect("claim message");
|
||||
|
||||
let Some(ExternalMessageClaim::Claimed(claimed)) = claim else {
|
||||
panic!("expected claimed message");
|
||||
};
|
||||
assert_eq!(claimed.id, "older");
|
||||
assert_eq!(claimed.thread_id, "thread-1");
|
||||
assert_eq!(claimed.queued_at, 100);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_external_messages("thread-1")
|
||||
.await
|
||||
.expect("list remaining thread-1 messages")
|
||||
.into_iter()
|
||||
.map(|message| message.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["newer".to_string()]
|
||||
);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_external_messages("thread-2")
|
||||
.await
|
||||
.expect("list thread-2 messages")
|
||||
.into_iter()
|
||||
.map(|message| message.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["other-thread".to_string()]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn claim_consumes_message_once() {
|
||||
let runtime = test_runtime().await;
|
||||
runtime
|
||||
.create_external_message(&message_params(
|
||||
"message-1",
|
||||
"thread-1",
|
||||
/*queued_at*/ 100,
|
||||
))
|
||||
.await
|
||||
.expect("create message");
|
||||
|
||||
assert!(matches!(
|
||||
runtime
|
||||
.claim_next_external_message(
|
||||
"thread-1", /*can_after_turn*/ true, /*can_steer_current_turn*/ true,
|
||||
)
|
||||
.await
|
||||
.expect("claim message"),
|
||||
Some(ExternalMessageClaim::Claimed(_))
|
||||
));
|
||||
assert_eq!(
|
||||
runtime
|
||||
.claim_next_external_message(
|
||||
"thread-1", /*can_after_turn*/ true, /*can_steer_current_turn*/ true,
|
||||
)
|
||||
.await
|
||||
.expect("claim message again"),
|
||||
None
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn oldest_unclaimable_message_blocks_later_messages() {
|
||||
let runtime = test_runtime().await;
|
||||
let mut steer = message_params("steer", "thread-1", /*queued_at*/ 100);
|
||||
steer.delivery = "steer-current-turn".to_string();
|
||||
runtime
|
||||
.create_external_message(&steer)
|
||||
.await
|
||||
.expect("create steer message");
|
||||
runtime
|
||||
.create_external_message(&message_params("after", "thread-1", /*queued_at*/ 200))
|
||||
.await
|
||||
.expect("create after-turn message");
|
||||
|
||||
assert_eq!(
|
||||
runtime
|
||||
.claim_next_external_message(
|
||||
"thread-1", /*can_after_turn*/ false,
|
||||
/*can_steer_current_turn*/ false,
|
||||
)
|
||||
.await
|
||||
.expect("claim message"),
|
||||
Some(ExternalMessageClaim::NotReady)
|
||||
);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_external_messages("thread-1")
|
||||
.await
|
||||
.expect("list messages")
|
||||
.into_iter()
|
||||
.map(|message| message.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["steer".to_string(), "after".to_string()]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn invalid_delivery_is_deleted_without_claiming() {
|
||||
let runtime = test_runtime().await;
|
||||
let mut params = message_params("bad", "thread-1", /*queued_at*/ 100);
|
||||
params.delivery = "bad-delivery".to_string();
|
||||
runtime
|
||||
.create_external_message(¶ms)
|
||||
.await
|
||||
.expect("create message");
|
||||
|
||||
assert_eq!(
|
||||
runtime
|
||||
.claim_next_external_message(
|
||||
"thread-1", /*can_after_turn*/ true, /*can_steer_current_turn*/ true,
|
||||
)
|
||||
.await
|
||||
.expect("claim message"),
|
||||
Some(ExternalMessageClaim::Invalid {
|
||||
id: "bad".to_string(),
|
||||
reason: "invalid delivery `bad-delivery`".to_string(),
|
||||
})
|
||||
);
|
||||
assert!(
|
||||
runtime
|
||||
.list_external_messages("thread-1")
|
||||
.await
|
||||
.expect("list messages")
|
||||
.is_empty()
|
||||
);
|
||||
}
|
||||
}
|
||||
730
codex-rs/state/src/runtime/timers.rs
Normal file
730
codex-rs/state/src/runtime/timers.rs
Normal file
@@ -0,0 +1,730 @@
|
||||
//! SQLite-backed state operations for per-thread timers.
|
||||
//!
|
||||
//! This module extends [`StateRuntime`] with timer CRUD, due-state updates, and
|
||||
//! atomic pending-run claims. It also exposes a lightweight `PRAGMA
|
||||
//! data_version` checker so active threads can notice cross-process timer
|
||||
//! changes without constantly reconciling full timer rows.
|
||||
|
||||
use super::*;
|
||||
use crate::model::ThreadTimerRow;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
pub struct TimerDataVersionChecker {
|
||||
conn: Mutex<SqliteConnection>,
|
||||
}
|
||||
|
||||
impl TimerDataVersionChecker {
|
||||
pub async fn data_version(&self) -> anyhow::Result<i64> {
|
||||
let mut conn = self.conn.lock().await;
|
||||
let version = sqlx::query_scalar::<_, i64>("PRAGMA data_version")
|
||||
.fetch_one(&mut *conn)
|
||||
.await?;
|
||||
Ok(version)
|
||||
}
|
||||
}
|
||||
|
||||
impl StateRuntime {
|
||||
pub async fn timer_data_version_checker(&self) -> anyhow::Result<TimerDataVersionChecker> {
|
||||
let state_path = state_db_path(self.codex_home());
|
||||
let options = base_sqlite_options(state_path.as_path());
|
||||
let conn = options.connect().await?;
|
||||
Ok(TimerDataVersionChecker {
|
||||
conn: Mutex::new(conn),
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn create_thread_timer(
|
||||
&self,
|
||||
params: &ThreadTimerCreateParams,
|
||||
) -> anyhow::Result<()> {
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO thread_timers (
|
||||
id,
|
||||
thread_id,
|
||||
source,
|
||||
client_id,
|
||||
trigger_json,
|
||||
content,
|
||||
instructions,
|
||||
meta_json,
|
||||
delivery,
|
||||
created_at,
|
||||
next_run_at,
|
||||
last_run_at,
|
||||
pending_run
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
"#,
|
||||
)
|
||||
.bind(params.id.as_str())
|
||||
.bind(params.thread_id.as_str())
|
||||
.bind(params.source.as_str())
|
||||
.bind(params.client_id.as_str())
|
||||
.bind(params.trigger_json.as_str())
|
||||
.bind(params.content.as_str())
|
||||
.bind(params.instructions.as_deref())
|
||||
.bind(params.meta_json.as_str())
|
||||
.bind(params.delivery.as_str())
|
||||
.bind(params.created_at)
|
||||
.bind(params.next_run_at)
|
||||
.bind(params.last_run_at)
|
||||
.bind(i64::from(params.pending_run))
|
||||
.execute(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn create_thread_timer_if_below_limit(
|
||||
&self,
|
||||
params: &ThreadTimerCreateParams,
|
||||
max_thread_timers: usize,
|
||||
) -> anyhow::Result<bool> {
|
||||
let max_thread_timers = i64::try_from(max_thread_timers)?;
|
||||
let result = sqlx::query(
|
||||
r#"
|
||||
INSERT INTO thread_timers (
|
||||
id,
|
||||
thread_id,
|
||||
source,
|
||||
client_id,
|
||||
trigger_json,
|
||||
content,
|
||||
instructions,
|
||||
meta_json,
|
||||
delivery,
|
||||
created_at,
|
||||
next_run_at,
|
||||
last_run_at,
|
||||
pending_run
|
||||
)
|
||||
SELECT ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
||||
WHERE (
|
||||
SELECT COUNT(*)
|
||||
FROM thread_timers
|
||||
WHERE thread_id = ?
|
||||
) < ?
|
||||
"#,
|
||||
)
|
||||
.bind(params.id.as_str())
|
||||
.bind(params.thread_id.as_str())
|
||||
.bind(params.source.as_str())
|
||||
.bind(params.client_id.as_str())
|
||||
.bind(params.trigger_json.as_str())
|
||||
.bind(params.content.as_str())
|
||||
.bind(params.instructions.as_deref())
|
||||
.bind(params.meta_json.as_str())
|
||||
.bind(params.delivery.as_str())
|
||||
.bind(params.created_at)
|
||||
.bind(params.next_run_at)
|
||||
.bind(params.last_run_at)
|
||||
.bind(i64::from(params.pending_run))
|
||||
.bind(params.thread_id.as_str())
|
||||
.bind(max_thread_timers)
|
||||
.execute(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(result.rows_affected() > 0)
|
||||
}
|
||||
|
||||
pub async fn list_thread_timers(&self, thread_id: &str) -> anyhow::Result<Vec<ThreadTimer>> {
|
||||
let rows = sqlx::query_as::<_, ThreadTimerRow>(
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
thread_id,
|
||||
source,
|
||||
client_id,
|
||||
trigger_json,
|
||||
content,
|
||||
instructions,
|
||||
meta_json,
|
||||
delivery,
|
||||
created_at,
|
||||
next_run_at,
|
||||
last_run_at,
|
||||
pending_run
|
||||
FROM thread_timers
|
||||
WHERE thread_id = ?
|
||||
ORDER BY created_at ASC, id ASC
|
||||
"#,
|
||||
)
|
||||
.bind(thread_id)
|
||||
.fetch_all(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(rows.into_iter().map(ThreadTimer::from).collect())
|
||||
}
|
||||
|
||||
pub async fn delete_thread_timer(&self, thread_id: &str, id: &str) -> anyhow::Result<bool> {
|
||||
let result = sqlx::query("DELETE FROM thread_timers WHERE thread_id = ? AND id = ?")
|
||||
.bind(thread_id)
|
||||
.bind(id)
|
||||
.execute(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(result.rows_affected() > 0)
|
||||
}
|
||||
|
||||
pub async fn update_thread_timer_due(
|
||||
&self,
|
||||
thread_id: &str,
|
||||
id: &str,
|
||||
due_at: i64,
|
||||
next_run_at: Option<i64>,
|
||||
) -> anyhow::Result<bool> {
|
||||
let result = sqlx::query(
|
||||
r#"
|
||||
UPDATE thread_timers
|
||||
SET pending_run = 1,
|
||||
next_run_at = ?
|
||||
WHERE thread_id = ?
|
||||
AND id = ?
|
||||
AND pending_run = 0
|
||||
AND next_run_at IS NOT NULL
|
||||
AND next_run_at <= ?
|
||||
"#,
|
||||
)
|
||||
.bind(next_run_at)
|
||||
.bind(thread_id)
|
||||
.bind(id)
|
||||
.bind(due_at)
|
||||
.execute(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(result.rows_affected() > 0)
|
||||
}
|
||||
|
||||
pub async fn claim_one_shot_thread_timer(
|
||||
&self,
|
||||
thread_id: &str,
|
||||
id: &str,
|
||||
due_at: i64,
|
||||
) -> anyhow::Result<bool> {
|
||||
let result = sqlx::query(
|
||||
r#"
|
||||
DELETE FROM thread_timers
|
||||
WHERE thread_id = ?
|
||||
AND id = ?
|
||||
AND (
|
||||
pending_run = 1
|
||||
OR (
|
||||
pending_run = 0
|
||||
AND next_run_at IS NOT NULL
|
||||
AND next_run_at <= ?
|
||||
)
|
||||
)
|
||||
"#,
|
||||
)
|
||||
.bind(thread_id)
|
||||
.bind(id)
|
||||
.bind(due_at)
|
||||
.execute(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(result.rows_affected() > 0)
|
||||
}
|
||||
|
||||
pub async fn claim_recurring_thread_timer(
|
||||
&self,
|
||||
thread_id: &str,
|
||||
id: &str,
|
||||
due_at: i64,
|
||||
expected_last_run_at: Option<i64>,
|
||||
params: &ThreadTimerUpdateParams,
|
||||
) -> anyhow::Result<bool> {
|
||||
let result = sqlx::query(
|
||||
r#"
|
||||
UPDATE thread_timers
|
||||
SET trigger_json = ?,
|
||||
content = ?,
|
||||
instructions = ?,
|
||||
meta_json = ?,
|
||||
delivery = ?,
|
||||
next_run_at = ?,
|
||||
last_run_at = ?,
|
||||
pending_run = ?
|
||||
WHERE thread_id = ?
|
||||
AND id = ?
|
||||
AND (
|
||||
pending_run = 1
|
||||
OR (
|
||||
pending_run = 0
|
||||
AND next_run_at IS NOT NULL
|
||||
AND next_run_at <= ?
|
||||
)
|
||||
)
|
||||
AND (
|
||||
(last_run_at IS NULL AND ? IS NULL)
|
||||
OR last_run_at = ?
|
||||
)
|
||||
"#,
|
||||
)
|
||||
.bind(params.trigger_json.as_str())
|
||||
.bind(params.content.as_str())
|
||||
.bind(params.instructions.as_deref())
|
||||
.bind(params.meta_json.as_str())
|
||||
.bind(params.delivery.as_str())
|
||||
.bind(params.next_run_at)
|
||||
.bind(params.last_run_at)
|
||||
.bind(i64::from(params.pending_run))
|
||||
.bind(thread_id)
|
||||
.bind(id)
|
||||
.bind(due_at)
|
||||
.bind(expected_last_run_at)
|
||||
.bind(expected_last_run_at)
|
||||
.execute(self.pool.as_ref())
|
||||
.await?;
|
||||
Ok(result.rows_affected() > 0)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::StateRuntime;
|
||||
use super::test_support::unique_temp_dir;
|
||||
use crate::ThreadTimer;
|
||||
use crate::ThreadTimerCreateParams;
|
||||
use crate::ThreadTimerUpdateParams;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
fn timer_params(id: &str, thread_id: &str) -> ThreadTimerCreateParams {
|
||||
ThreadTimerCreateParams {
|
||||
id: id.to_string(),
|
||||
thread_id: thread_id.to_string(),
|
||||
source: "agent".to_string(),
|
||||
client_id: "codex-tui".to_string(),
|
||||
trigger_json: r#"{"kind":"delay","seconds":10,"repeat":true}"#.to_string(),
|
||||
content: "run tests".to_string(),
|
||||
instructions: Some("keep output brief".to_string()),
|
||||
meta_json: r#"{"ticket":"ABC_123"}"#.to_string(),
|
||||
delivery: "after-turn".to_string(),
|
||||
created_at: 100,
|
||||
next_run_at: Some(110),
|
||||
last_run_at: None,
|
||||
pending_run: false,
|
||||
}
|
||||
}
|
||||
|
||||
async fn test_runtime() -> std::sync::Arc<StateRuntime> {
|
||||
StateRuntime::init(unique_temp_dir(), "test-provider".to_string())
|
||||
.await
|
||||
.expect("initialize runtime")
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_timers_table_and_indexes_exist() {
|
||||
let runtime = test_runtime().await;
|
||||
let names = sqlx::query_scalar::<_, String>(
|
||||
r#"
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE tbl_name = 'thread_timers'
|
||||
AND name NOT LIKE 'sqlite_autoindex_%'
|
||||
ORDER BY name
|
||||
"#,
|
||||
)
|
||||
.fetch_all(runtime.pool.as_ref())
|
||||
.await
|
||||
.expect("query schema objects");
|
||||
|
||||
assert_eq!(
|
||||
names,
|
||||
vec![
|
||||
"idx_thread_timers_thread_created",
|
||||
"idx_thread_timers_thread_next_run",
|
||||
"idx_thread_timers_thread_pending",
|
||||
"thread_timers",
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_timer_rows_round_trip_source_and_client_metadata() {
|
||||
let runtime = test_runtime().await;
|
||||
let mut params = timer_params("timer-1", "thread-1");
|
||||
params.pending_run = true;
|
||||
params.last_run_at = Some(105);
|
||||
|
||||
runtime
|
||||
.create_thread_timer(¶ms)
|
||||
.await
|
||||
.expect("create timer");
|
||||
let timers = runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list timers");
|
||||
|
||||
assert_eq!(timers.len(), 1);
|
||||
let timer = &timers[0];
|
||||
assert_eq!(timer.id, params.id);
|
||||
assert_eq!(timer.thread_id, params.thread_id);
|
||||
assert_eq!(timer.source, params.source);
|
||||
assert_eq!(timer.client_id, params.client_id);
|
||||
assert_eq!(timer.trigger_json, params.trigger_json);
|
||||
assert_eq!(timer.content, params.content);
|
||||
assert_eq!(timer.instructions, params.instructions);
|
||||
assert_eq!(timer.meta_json, params.meta_json);
|
||||
assert_eq!(timer.delivery, params.delivery);
|
||||
assert_eq!(timer.created_at, params.created_at);
|
||||
assert_eq!(timer.next_run_at, params.next_run_at);
|
||||
assert_eq!(timer.last_run_at, params.last_run_at);
|
||||
assert_eq!(timer.pending_run, params.pending_run);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn thread_timer_crud_is_scoped_to_thread_id() {
|
||||
let runtime = test_runtime().await;
|
||||
runtime
|
||||
.create_thread_timer(&timer_params("timer-1", "thread-1"))
|
||||
.await
|
||||
.expect("create thread-1 timer");
|
||||
runtime
|
||||
.create_thread_timer(&timer_params("timer-2", "thread-2"))
|
||||
.await
|
||||
.expect("create thread-2 timer");
|
||||
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list thread-1 timers")
|
||||
.into_iter()
|
||||
.map(|timer| timer.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["timer-1".to_string()]
|
||||
);
|
||||
assert!(
|
||||
!runtime
|
||||
.delete_thread_timer("thread-1", "timer-2")
|
||||
.await
|
||||
.expect("delete wrong thread timer")
|
||||
);
|
||||
assert!(
|
||||
runtime
|
||||
.delete_thread_timer("thread-2", "timer-2")
|
||||
.await
|
||||
.expect("delete correct thread timer")
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn create_thread_timer_if_below_limit_rejects_full_thread() {
|
||||
let runtime = test_runtime().await;
|
||||
assert!(
|
||||
runtime
|
||||
.create_thread_timer_if_below_limit(
|
||||
&timer_params("timer-1", "thread-1"),
|
||||
/*max_thread_timers*/ 2,
|
||||
)
|
||||
.await
|
||||
.expect("create first timer")
|
||||
);
|
||||
assert!(
|
||||
runtime
|
||||
.create_thread_timer_if_below_limit(
|
||||
&timer_params("timer-2", "thread-1"),
|
||||
/*max_thread_timers*/ 2,
|
||||
)
|
||||
.await
|
||||
.expect("create second timer")
|
||||
);
|
||||
assert!(
|
||||
!runtime
|
||||
.create_thread_timer_if_below_limit(
|
||||
&timer_params("timer-3", "thread-1"),
|
||||
/*max_thread_timers*/ 2,
|
||||
)
|
||||
.await
|
||||
.expect("reject third timer")
|
||||
);
|
||||
assert!(
|
||||
runtime
|
||||
.create_thread_timer_if_below_limit(
|
||||
&timer_params("timer-4", "thread-2"),
|
||||
/*max_thread_timers*/ 2,
|
||||
)
|
||||
.await
|
||||
.expect("create timer for different thread")
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list thread-1 timers")
|
||||
.into_iter()
|
||||
.map(|timer| timer.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["timer-1".to_string(), "timer-2".to_string()]
|
||||
);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_thread_timers("thread-2")
|
||||
.await
|
||||
.expect("list thread-2 timers")
|
||||
.into_iter()
|
||||
.map(|timer| timer.id)
|
||||
.collect::<Vec<_>>(),
|
||||
vec!["timer-4".to_string()]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn one_shot_claim_consumes_pending_timer_once() {
|
||||
let runtime = test_runtime().await;
|
||||
let mut params = timer_params("timer-1", "thread-1");
|
||||
params.pending_run = true;
|
||||
params.next_run_at = None;
|
||||
runtime
|
||||
.create_thread_timer(¶ms)
|
||||
.await
|
||||
.expect("create pending timer");
|
||||
|
||||
assert!(
|
||||
runtime
|
||||
.claim_one_shot_thread_timer("thread-1", "timer-1", /*due_at*/ 110)
|
||||
.await
|
||||
.expect("claim timer")
|
||||
);
|
||||
assert!(
|
||||
!runtime
|
||||
.claim_one_shot_thread_timer("thread-1", "timer-1", /*due_at*/ 110)
|
||||
.await
|
||||
.expect("claim timer again")
|
||||
);
|
||||
assert!(
|
||||
runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list timers")
|
||||
.is_empty()
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn recurring_claim_updates_pending_timer_once() {
|
||||
let runtime = test_runtime().await;
|
||||
let mut params = timer_params("timer-1", "thread-1");
|
||||
params.pending_run = true;
|
||||
runtime
|
||||
.create_thread_timer(¶ms)
|
||||
.await
|
||||
.expect("create pending timer");
|
||||
let update = ThreadTimerUpdateParams {
|
||||
trigger_json: params.trigger_json.clone(),
|
||||
content: "updated content".to_string(),
|
||||
instructions: None,
|
||||
meta_json: "{}".to_string(),
|
||||
delivery: "steer-current-turn".to_string(),
|
||||
next_run_at: Some(120),
|
||||
last_run_at: Some(110),
|
||||
pending_run: false,
|
||||
};
|
||||
|
||||
assert!(
|
||||
runtime
|
||||
.claim_recurring_thread_timer(
|
||||
"thread-1", "timer-1", /*due_at*/ 110, /*expected_last_run_at*/ None,
|
||||
&update,
|
||||
)
|
||||
.await
|
||||
.expect("claim recurring timer")
|
||||
);
|
||||
assert!(
|
||||
!runtime
|
||||
.claim_recurring_thread_timer(
|
||||
"thread-1", "timer-1", /*due_at*/ 110, /*expected_last_run_at*/ None,
|
||||
&update,
|
||||
)
|
||||
.await
|
||||
.expect("claim recurring timer again")
|
||||
);
|
||||
let timers = runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list timers");
|
||||
assert_eq!(timers.len(), 1);
|
||||
assert_eq!(timers[0].delivery, "steer-current-turn");
|
||||
assert_eq!(timers[0].content, "updated content");
|
||||
assert_eq!(timers[0].instructions, None);
|
||||
assert_eq!(timers[0].meta_json, "{}");
|
||||
assert_eq!(timers[0].next_run_at, Some(120));
|
||||
assert_eq!(timers[0].last_run_at, Some(110));
|
||||
assert!(!timers[0].pending_run);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn one_shot_claim_consumes_overdue_timer_after_restart() {
|
||||
let runtime = test_runtime().await;
|
||||
let mut params = timer_params("timer-1", "thread-1");
|
||||
params.trigger_json = r#"{"kind":"delay","seconds":10,"repeat":false}"#.to_string();
|
||||
params.next_run_at = Some(110);
|
||||
params.pending_run = false;
|
||||
runtime
|
||||
.create_thread_timer(¶ms)
|
||||
.await
|
||||
.expect("create overdue one-shot timer");
|
||||
|
||||
assert!(
|
||||
runtime
|
||||
.claim_one_shot_thread_timer("thread-1", "timer-1", /*due_at*/ 110)
|
||||
.await
|
||||
.expect("claim overdue one-shot timer")
|
||||
);
|
||||
assert!(
|
||||
runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list timers")
|
||||
.is_empty()
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn recurring_claim_consumes_overdue_timer_after_restart() {
|
||||
let runtime = test_runtime().await;
|
||||
let mut params = timer_params("timer-1", "thread-1");
|
||||
params.next_run_at = Some(110);
|
||||
params.pending_run = false;
|
||||
runtime
|
||||
.create_thread_timer(¶ms)
|
||||
.await
|
||||
.expect("create overdue recurring timer");
|
||||
let update = ThreadTimerUpdateParams {
|
||||
trigger_json: params.trigger_json.clone(),
|
||||
content: params.content.clone(),
|
||||
instructions: params.instructions.clone(),
|
||||
meta_json: params.meta_json.clone(),
|
||||
delivery: params.delivery.clone(),
|
||||
next_run_at: Some(120),
|
||||
last_run_at: Some(110),
|
||||
pending_run: false,
|
||||
};
|
||||
|
||||
assert!(
|
||||
runtime
|
||||
.claim_recurring_thread_timer(
|
||||
"thread-1", "timer-1", /*due_at*/ 110, /*expected_last_run_at*/ None,
|
||||
&update,
|
||||
)
|
||||
.await
|
||||
.expect("claim overdue recurring timer")
|
||||
);
|
||||
let timers = runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list timers");
|
||||
assert_eq!(timers.len(), 1);
|
||||
assert_eq!(timers[0].next_run_at, Some(120));
|
||||
assert_eq!(timers[0].last_run_at, Some(110));
|
||||
assert!(!timers[0].pending_run);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn due_update_rejects_stale_timer_row_after_claim() {
|
||||
let runtime = test_runtime().await;
|
||||
let mut params = timer_params("timer-1", "thread-1");
|
||||
params.next_run_at = Some(110);
|
||||
params.pending_run = false;
|
||||
runtime
|
||||
.create_thread_timer(¶ms)
|
||||
.await
|
||||
.expect("create overdue recurring timer");
|
||||
let update = ThreadTimerUpdateParams {
|
||||
trigger_json: params.trigger_json.clone(),
|
||||
content: params.content.clone(),
|
||||
instructions: params.instructions.clone(),
|
||||
meta_json: params.meta_json.clone(),
|
||||
delivery: params.delivery.clone(),
|
||||
next_run_at: Some(120),
|
||||
last_run_at: Some(110),
|
||||
pending_run: false,
|
||||
};
|
||||
assert!(
|
||||
runtime
|
||||
.claim_recurring_thread_timer(
|
||||
"thread-1", "timer-1", /*due_at*/ 110, /*expected_last_run_at*/ None,
|
||||
&update,
|
||||
)
|
||||
.await
|
||||
.expect("claim overdue recurring timer")
|
||||
);
|
||||
|
||||
assert!(
|
||||
!runtime
|
||||
.update_thread_timer_due("thread-1", "timer-1", /*due_at*/ 110, Some(130))
|
||||
.await
|
||||
.expect("stale due update should be rejected")
|
||||
);
|
||||
assert_eq!(
|
||||
runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list timers"),
|
||||
vec![ThreadTimer {
|
||||
id: params.id,
|
||||
thread_id: params.thread_id,
|
||||
source: params.source,
|
||||
client_id: params.client_id,
|
||||
trigger_json: params.trigger_json,
|
||||
content: params.content,
|
||||
instructions: params.instructions,
|
||||
meta_json: params.meta_json,
|
||||
delivery: params.delivery,
|
||||
created_at: params.created_at,
|
||||
next_run_at: Some(120),
|
||||
last_run_at: Some(110),
|
||||
pending_run: false,
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn recurring_idle_claim_rejects_stale_last_run_at_even_when_pending_stays_true() {
|
||||
let runtime = test_runtime().await;
|
||||
let mut params = timer_params("timer-1", "thread-1");
|
||||
params.pending_run = true;
|
||||
params.last_run_at = Some(100);
|
||||
runtime
|
||||
.create_thread_timer(¶ms)
|
||||
.await
|
||||
.expect("create pending timer");
|
||||
let update = ThreadTimerUpdateParams {
|
||||
trigger_json: params.trigger_json.clone(),
|
||||
content: params.content.clone(),
|
||||
instructions: params.instructions.clone(),
|
||||
meta_json: params.meta_json.clone(),
|
||||
delivery: params.delivery.clone(),
|
||||
next_run_at: Some(120),
|
||||
last_run_at: Some(110),
|
||||
pending_run: true,
|
||||
};
|
||||
|
||||
assert!(
|
||||
runtime
|
||||
.claim_recurring_thread_timer(
|
||||
"thread-1",
|
||||
"timer-1",
|
||||
/*due_at*/ 110,
|
||||
/*expected_last_run_at*/ Some(100),
|
||||
&update,
|
||||
)
|
||||
.await
|
||||
.expect("claim recurring idle timer")
|
||||
);
|
||||
assert!(
|
||||
!runtime
|
||||
.claim_recurring_thread_timer(
|
||||
"thread-1",
|
||||
"timer-1",
|
||||
/*due_at*/ 110,
|
||||
/*expected_last_run_at*/ Some(100),
|
||||
&update,
|
||||
)
|
||||
.await
|
||||
.expect("claim recurring idle timer again")
|
||||
);
|
||||
let timers = runtime
|
||||
.list_thread_timers("thread-1")
|
||||
.await
|
||||
.expect("list timers");
|
||||
assert_eq!(timers.len(), 1);
|
||||
assert_eq!(timers[0].last_run_at, Some(110));
|
||||
assert!(timers[0].pending_run);
|
||||
}
|
||||
}
|
||||
@@ -15,6 +15,7 @@ mod mcp_tool;
|
||||
mod plan_tool;
|
||||
mod request_user_input_tool;
|
||||
mod responses_api;
|
||||
mod timer_tool;
|
||||
mod tool_config;
|
||||
mod tool_definition;
|
||||
mod tool_discovery;
|
||||
@@ -89,6 +90,9 @@ pub use responses_api::dynamic_tool_to_responses_api_tool;
|
||||
pub use responses_api::mcp_tool_to_deferred_responses_api_tool;
|
||||
pub use responses_api::mcp_tool_to_responses_api_tool;
|
||||
pub use responses_api::tool_definition_to_responses_api_tool;
|
||||
pub use timer_tool::create_delete_timer_tool;
|
||||
pub use timer_tool::create_list_timers_tool;
|
||||
pub use timer_tool::create_timer_tool;
|
||||
pub use tool_config::ShellCommandBackendConfig;
|
||||
pub use tool_config::ToolUserShellType;
|
||||
pub use tool_config::ToolsConfig;
|
||||
|
||||
125
codex-rs/tools/src/timer_tool.rs
Normal file
125
codex-rs/tools/src/timer_tool.rs
Normal file
@@ -0,0 +1,125 @@
|
||||
//! Responses API tool specs for thread-local persistent timer management.
|
||||
//!
|
||||
//! These specs expose the `create_timer`, `delete_timer`, and `list_timers`
|
||||
//! built-in tools.
|
||||
|
||||
use crate::JsonSchema;
|
||||
use crate::ResponsesApiTool;
|
||||
use crate::ToolSpec;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
pub fn create_timer_tool() -> ToolSpec {
|
||||
let trigger_properties = BTreeMap::from([
|
||||
(
|
||||
"kind".to_string(),
|
||||
JsonSchema::string(Some(
|
||||
"Trigger kind. Use `delay` or `schedule`.".to_string(),
|
||||
)),
|
||||
),
|
||||
(
|
||||
"seconds".to_string(),
|
||||
JsonSchema::number(Some(
|
||||
"Delay trigger seconds from creation time.".to_string(),
|
||||
)),
|
||||
),
|
||||
(
|
||||
"repeat".to_string(),
|
||||
JsonSchema::boolean(Some(
|
||||
"Delay trigger recurrence flag. With seconds 0, repeat means run whenever the thread is idle."
|
||||
.to_string(),
|
||||
)),
|
||||
),
|
||||
(
|
||||
"dtstart".to_string(),
|
||||
JsonSchema::string(Some(
|
||||
"Schedule trigger floating local datetime in YYYY-MM-DDTHH:MM:SS format."
|
||||
.to_string(),
|
||||
)),
|
||||
),
|
||||
(
|
||||
"rrule".to_string(),
|
||||
JsonSchema::string(Some("Schedule trigger RRULE string.".to_string())),
|
||||
),
|
||||
]);
|
||||
let properties = BTreeMap::from([
|
||||
(
|
||||
"trigger".to_string(),
|
||||
JsonSchema::object(
|
||||
trigger_properties,
|
||||
Some(vec!["kind".to_string()]),
|
||||
Some(false.into()),
|
||||
),
|
||||
),
|
||||
(
|
||||
"content".to_string(),
|
||||
JsonSchema::string(Some(
|
||||
"Message content to execute when the timer fires.".to_string(),
|
||||
)),
|
||||
),
|
||||
(
|
||||
"meta".to_string(),
|
||||
JsonSchema::object(BTreeMap::new(), /*required*/ None, Some(true.into())),
|
||||
),
|
||||
(
|
||||
"delivery".to_string(),
|
||||
JsonSchema::string(Some(
|
||||
"Delivery mode for the timer. Use `after-turn` or `steer-current-turn`."
|
||||
.to_string(),
|
||||
)),
|
||||
),
|
||||
]);
|
||||
|
||||
ToolSpec::Function(ResponsesApiTool {
|
||||
name: "create_timer".to_string(),
|
||||
description:
|
||||
"Create a thread timer using a structured trigger, message content, and delivery mode."
|
||||
.to_string(),
|
||||
strict: false,
|
||||
defer_loading: None,
|
||||
parameters: JsonSchema::object(
|
||||
properties,
|
||||
Some(vec![
|
||||
"trigger".to_string(),
|
||||
"content".to_string(),
|
||||
"delivery".to_string(),
|
||||
]),
|
||||
Some(false.into()),
|
||||
),
|
||||
output_schema: None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn create_delete_timer_tool() -> ToolSpec {
|
||||
let properties = BTreeMap::from([(
|
||||
"id".to_string(),
|
||||
JsonSchema::string(Some("Identifier of the timer to delete.".to_string())),
|
||||
)]);
|
||||
|
||||
ToolSpec::Function(ResponsesApiTool {
|
||||
name: "delete_timer".to_string(),
|
||||
description: "Delete a thread timer by id.".to_string(),
|
||||
strict: false,
|
||||
defer_loading: None,
|
||||
parameters: JsonSchema::object(
|
||||
properties,
|
||||
Some(vec!["id".to_string()]),
|
||||
Some(false.into()),
|
||||
),
|
||||
output_schema: None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn create_list_timers_tool() -> ToolSpec {
|
||||
ToolSpec::Function(ResponsesApiTool {
|
||||
name: "list_timers".to_string(),
|
||||
description: "List thread timers for the current thread.".to_string(),
|
||||
strict: false,
|
||||
defer_loading: None,
|
||||
parameters: JsonSchema::object(BTreeMap::new(), /*required*/ None, Some(false.into())),
|
||||
output_schema: None,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[path = "timer_tool_tests.rs"]
|
||||
mod tests;
|
||||
43
codex-rs/tools/src/timer_tool_tests.rs
Normal file
43
codex-rs/tools/src/timer_tool_tests.rs
Normal file
@@ -0,0 +1,43 @@
|
||||
use crate::ResponsesApiTool;
|
||||
use crate::ToolSpec;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::create_delete_timer_tool;
|
||||
use super::create_list_timers_tool;
|
||||
use super::create_timer_tool;
|
||||
|
||||
#[test]
|
||||
fn timer_create_tool_uses_expected_name() {
|
||||
let ToolSpec::Function(ResponsesApiTool { name, .. }) = create_timer_tool() else {
|
||||
panic!("expected function tool");
|
||||
};
|
||||
assert_eq!(name, "create_timer");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn timer_create_tool_exposes_only_core_timer_payload_fields() {
|
||||
let ToolSpec::Function(ResponsesApiTool { parameters, .. }) = create_timer_tool() else {
|
||||
panic!("expected function tool");
|
||||
};
|
||||
let properties = parameters.properties.expect("create_timer properties");
|
||||
|
||||
assert!(properties.contains_key("content"));
|
||||
assert!(!properties.contains_key("prompt"));
|
||||
assert!(!properties.contains_key("instructions"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn timer_delete_tool_uses_expected_name() {
|
||||
let ToolSpec::Function(ResponsesApiTool { name, .. }) = create_delete_timer_tool() else {
|
||||
panic!("expected function tool");
|
||||
};
|
||||
assert_eq!(name, "delete_timer");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn timer_list_tool_uses_expected_name() {
|
||||
let ToolSpec::Function(ResponsesApiTool { name, .. }) = create_list_timers_tool() else {
|
||||
panic!("expected function tool");
|
||||
};
|
||||
assert_eq!(name, "list_timers");
|
||||
}
|
||||
@@ -105,6 +105,7 @@ pub struct ToolsConfig {
|
||||
pub collab_tools: bool,
|
||||
pub multi_agent_v2: bool,
|
||||
pub hide_spawn_agent_metadata: bool,
|
||||
pub timer_scheduler: bool,
|
||||
pub spawn_agent_usage_hint: bool,
|
||||
pub spawn_agent_usage_hint_text: Option<String>,
|
||||
pub default_mode_request_user_input: bool,
|
||||
@@ -146,6 +147,7 @@ impl ToolsConfig {
|
||||
let include_collab_tools = features.enabled(Feature::Collab);
|
||||
let include_multi_agent_v2 = features.enabled(Feature::MultiAgentV2);
|
||||
let include_agent_jobs = features.enabled(Feature::SpawnCsv);
|
||||
let include_timers = features.enabled(Feature::Timers);
|
||||
let include_default_mode_request_user_input =
|
||||
features.enabled(Feature::DefaultModeRequestUserInput);
|
||||
let include_search_tool =
|
||||
@@ -226,6 +228,7 @@ impl ToolsConfig {
|
||||
collab_tools: include_collab_tools,
|
||||
multi_agent_v2: include_multi_agent_v2,
|
||||
hide_spawn_agent_metadata: false,
|
||||
timer_scheduler: include_timers,
|
||||
spawn_agent_usage_hint: true,
|
||||
spawn_agent_usage_hint_text: None,
|
||||
default_mode_request_user_input: include_default_mode_request_user_input,
|
||||
|
||||
@@ -22,6 +22,7 @@ use crate::create_apply_patch_json_tool;
|
||||
use crate::create_close_agent_tool_v1;
|
||||
use crate::create_close_agent_tool_v2;
|
||||
use crate::create_code_mode_tool;
|
||||
use crate::create_delete_timer_tool;
|
||||
use crate::create_exec_command_tool;
|
||||
use crate::create_followup_task_tool;
|
||||
use crate::create_image_generation_tool;
|
||||
@@ -31,6 +32,7 @@ use crate::create_list_agents_tool;
|
||||
use crate::create_list_dir_tool;
|
||||
use crate::create_list_mcp_resource_templates_tool;
|
||||
use crate::create_list_mcp_resources_tool;
|
||||
use crate::create_list_timers_tool;
|
||||
use crate::create_local_shell_tool;
|
||||
use crate::create_read_mcp_resource_tool;
|
||||
use crate::create_report_agent_job_result_tool;
|
||||
@@ -45,6 +47,7 @@ use crate::create_spawn_agent_tool_v1;
|
||||
use crate::create_spawn_agent_tool_v2;
|
||||
use crate::create_spawn_agents_on_csv_tool;
|
||||
use crate::create_test_sync_tool;
|
||||
use crate::create_timer_tool;
|
||||
use crate::create_tool_search_tool;
|
||||
use crate::create_tool_suggest_tool;
|
||||
use crate::create_update_plan_tool;
|
||||
@@ -247,6 +250,27 @@ pub fn build_tool_registry_plan(
|
||||
plan.register_handler("request_permissions", ToolHandlerKind::RequestPermissions);
|
||||
}
|
||||
|
||||
if config.timer_scheduler {
|
||||
plan.push_spec(
|
||||
create_timer_tool(),
|
||||
/*supports_parallel_tool_calls*/ false,
|
||||
config.code_mode_enabled,
|
||||
);
|
||||
plan.push_spec(
|
||||
create_delete_timer_tool(),
|
||||
/*supports_parallel_tool_calls*/ false,
|
||||
config.code_mode_enabled,
|
||||
);
|
||||
plan.push_spec(
|
||||
create_list_timers_tool(),
|
||||
/*supports_parallel_tool_calls*/ false,
|
||||
config.code_mode_enabled,
|
||||
);
|
||||
plan.register_handler("create_timer", ToolHandlerKind::CreateTimer);
|
||||
plan.register_handler("delete_timer", ToolHandlerKind::DeleteTimer);
|
||||
plan.register_handler("list_timers", ToolHandlerKind::ListTimers);
|
||||
}
|
||||
|
||||
if config.search_tool
|
||||
&& let Some(deferred_mcp_tools) = params.deferred_mcp_tools
|
||||
{
|
||||
|
||||
@@ -11,6 +11,9 @@ use std::collections::HashMap;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ToolHandlerKind {
|
||||
CreateTimer,
|
||||
DeleteTimer,
|
||||
ListTimers,
|
||||
AgentJobs,
|
||||
ApplyPatch,
|
||||
CloseAgentV1,
|
||||
|
||||
@@ -6735,6 +6735,17 @@ impl ChatWidget {
|
||||
self.on_user_message_event(ev);
|
||||
}
|
||||
}
|
||||
EventMsg::InjectedMessage(ev) => {
|
||||
if !ev.content.trim().is_empty() {
|
||||
self.add_to_history(history_cell::new_user_prompt(
|
||||
ev.content,
|
||||
Vec::new(),
|
||||
Vec::new(),
|
||||
Vec::new(),
|
||||
));
|
||||
self.needs_final_message_separator = false;
|
||||
}
|
||||
}
|
||||
EventMsg::EnteredReviewMode(review_request) => {
|
||||
self.on_entered_review_mode(review_request, from_replay)
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user