Compare commits

...

10 Commits

Author SHA1 Message Date
Ryan Vogel
d6962522a4 discord: ignore local env and data artifacts 2026-02-14 16:04:18 -05:00
Ryan Vogel
292ff126c4 discord: simplify Docker workflow with Makefile and update documentation
Replace complex Docker commands with simple Make targets for building,
running, and managing the Discord bot container. This makes it easier
for developers to get started without memorizing lengthy Docker flags.

Also removes outdated CLAUDE.md and adds AGENTS.md files to guide AI
agents working on conversation, database, actors, and sandbox modules.
2026-02-14 15:39:09 -05:00
Kit Langton
46cc9e7567 refactor: restructure conversation runtime around thread entities 2026-02-14 13:40:52 -05:00
Kit Langton
1885db3d8b discord: simplify ConversationLedger to offsets + in-memory dedup
Replace the full inbox state machine (admit/start/setTarget/setPrompt/
setResponse/complete/retry/prune/replayPending) with three methods:
dedup (bounded in-memory Set), getOffset, and setOffset.

Discord is already the durable inbox — on startup we just resume from
persisted offsets per source. Within a session, in-memory dedup prevents
double-processing. This removes ~565 lines of inbox lifecycle code,
the conversation_inbox SQLite table, MessageState, ReliabilityError,
and the prune schedule.
2026-02-14 10:37:47 -05:00
Kit Langton
3c22e16386 discord: idiomatic Effect refactor with conversation service, durable ledger, and split sandbox architecture
Refactor the Discord bot to idiomatic Effect TypeScript:

- Branded types (ThreadId, ChannelId, etc.) and Schema.Class for all data
- Split SandboxManager into SandboxProvisioner (stateless lifecycle) + ThreadAgentPool (per-thread orchestration)
- Pure Conversation service with port interfaces (Inbox/Outbox/History/Threads)
- ConversationLedger for message dedup, at-least-once delivery, and replay on restart
- Per-thread serialized execution via ActorMap with idle timeouts
- Discord slash commands (/status, /reset) and in-thread commands (!status, !reset)
- Catch-up on missed messages at startup via offset tracking
- Typed errors (Schema.TaggedError) with retriable/non-retriable classification
- Local CLI (conversation:cli) and automation CLI (conversation:ctl)
- Test coverage for conversation service, ledger, session store, and actors
2026-02-14 00:51:01 -05:00
Ryan Vogel
ef92226c33 core: let teams deploy discord bot as a single container 2026-02-12 18:20:13 -05:00
Ryan Vogel
c332258f54 docs: add setup guide and env variable reference to discord bot README 2026-02-12 13:15:17 -05:00
Ryan Vogel
bbab5b10d3 core: replace Neon Postgres with bun:sqlite to eliminate external DB signup 2026-02-12 13:09:26 -05:00
Ryan Vogel
9444c95eb2 wip: remove bun.lock and rename package to @opencode/discord 2026-02-12 12:55:55 -05:00
Ryan Vogel
b0e49eb1ac wip: discord bot that provisions daytona sandboxes for opencode sessions in threads 2026-02-12 12:51:38 -05:00
76 changed files with 8548 additions and 22 deletions

356
bun.lock
View File

@@ -213,6 +213,31 @@
"vite": "catalog:",
},
},
"packages/discord": {
"name": "@opencode/discord",
"version": "0.1.0",
"dependencies": {
"@daytonaio/sdk": "latest",
"@effect/ai": "0.33.2",
"@effect/ai-anthropic": "0.23.0",
"@effect/cluster": "0.56.4",
"@effect/experimental": "0.58.0",
"@effect/platform": "0.94.5",
"@effect/platform-bun": "0.87.1",
"@effect/rpc": "0.73.1",
"@effect/sql-sqlite-bun": "0.50.2",
"@opencode-ai/sdk": "latest",
"discord.js": "^14",
"effect": "3.19.17",
},
"devDependencies": {
"@effect/language-service": "0.73.1",
"@effect/sql": "0.49.0",
"@types/bun": "latest",
"@types/node": "^22",
"typescript": "^5",
},
},
"packages/enterprise": {
"name": "@opencode-ai/enterprise",
"version": "1.1.61",
@@ -496,6 +521,7 @@
},
},
"trustedDependencies": [
"protobufjs",
"esbuild",
"web-tree-sitter",
"tree-sitter-bash",
@@ -617,6 +643,8 @@
"@anthropic-ai/sdk": ["@anthropic-ai/sdk@0.71.2", "", { "dependencies": { "json-schema-to-ts": "^3.1.1" }, "peerDependencies": { "zod": "^3.25.0 || ^4.0.0" }, "optionalPeers": ["zod"], "bin": { "anthropic-ai-sdk": "bin/cli" } }, "sha512-TGNDEUuEstk/DKu0/TflXAEt+p+p/WhTlFzEnoosvbaDU2LTjm42igSdlL0VijrKpWejtOKxX0b8A7uc+XiSAQ=="],
"@anthropic-ai/tokenizer": ["@anthropic-ai/tokenizer@0.0.4", "", { "dependencies": { "@types/node": "^18.11.18", "tiktoken": "^1.0.10" } }, "sha512-EHRKbxlxlc8W4KCBEseByJ7YwyYCmgu9OyN59H9+IYIGPoKv8tXyQXinkeGDI+cI8Tiuz9wk2jZb/kK7AyvL7g=="],
"@anycable/core": ["@anycable/core@0.9.2", "", { "dependencies": { "nanoevents": "^7.0.1" } }, "sha512-x5ZXDcW/N4cxWl93CnbHs/u7qq4793jS2kNPWm+duPrXlrva+ml2ZGT7X9tuOBKzyIHf60zWCdIK7TUgMPAwXA=="],
"@astrojs/check": ["@astrojs/check@0.9.6", "", { "dependencies": { "@astrojs/language-server": "^2.16.1", "chokidar": "^4.0.1", "kleur": "^4.1.5", "yargs": "^17.7.2" }, "peerDependencies": { "typescript": "^5.0.0" }, "bin": { "astro-check": "bin/astro-check.js" } }, "sha512-jlaEu5SxvSgmfGIFfNgcn5/f+29H61NJzEMfAZ82Xopr4XBchXB1GVlcJsE+elUlsYSbXlptZLX+JMG3b/wZEA=="],
@@ -683,6 +711,8 @@
"@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.933.0", "", { "dependencies": { "@aws-sdk/core": "3.932.0", "@aws-sdk/nested-clients": "3.933.0", "@aws-sdk/types": "3.930.0", "@smithy/property-provider": "^4.2.5", "@smithy/shared-ini-file-loader": "^4.4.0", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-c7Eccw2lhFx2/+qJn3g+uIDWRuWi2A6Sz3PVvckFUEzPsP0dPUo19hlvtarwP5GzrsXn0yEPRVhpewsIaSCGaQ=="],
"@aws-sdk/lib-storage": ["@aws-sdk/lib-storage@3.990.0", "", { "dependencies": { "@smithy/abort-controller": "^4.2.8", "@smithy/middleware-endpoint": "^4.4.14", "@smithy/smithy-client": "^4.11.3", "buffer": "5.6.0", "events": "3.3.0", "stream-browserify": "3.0.0", "tslib": "^2.6.2" }, "peerDependencies": { "@aws-sdk/client-s3": "^3.990.0" } }, "sha512-SHog6kMWXwLBTeVwFAd+EHwr1874Ei5ob1DTL5mLmJDwbmkFog2VDKN+9BmI4di0yxY057Ps2vhhWLhKy89wuA=="],
"@aws-sdk/middleware-bucket-endpoint": ["@aws-sdk/middleware-bucket-endpoint@3.930.0", "", { "dependencies": { "@aws-sdk/types": "3.930.0", "@aws-sdk/util-arn-parser": "3.893.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-cnCLWeKPYgvV4yRYPFH6pWMdUByvu2cy2BAlfsPpvnm4RaVioztyvxmQj5PmVN5fvWs5w/2d6U7le8X9iye2sA=="],
"@aws-sdk/middleware-expect-continue": ["@aws-sdk/middleware-expect-continue@3.930.0", "", { "dependencies": { "@aws-sdk/types": "3.930.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-5HEQ+JU4DrLNWeY27wKg/jeVa8Suy62ivJHOSUf6e6hZdVIMx0h/kXS1fHEQNNiLu2IzSEP/bFXsKBaW7x7s0g=="],
@@ -851,12 +881,54 @@
"@ctrl/tinycolor": ["@ctrl/tinycolor@4.2.0", "", {}, "sha512-kzyuwOAQnXJNLS9PSyrk0CWk35nWJW/zl/6KvnTBMFK65gm7U1/Z5BqjxeapjZCIhQcM/DsrEmcbRwDyXyXK4A=="],
"@daytonaio/api-client": ["@daytonaio/api-client@0.143.0", "", { "dependencies": { "axios": "^1.6.1" } }, "sha512-LcrvExGcwyngt1JnVoxuBMD19fL9F+vwGnp18Hav5HJEbICM0pbrJc2FJ242sQJquNIF5mq+5jgznWg6wshZJA=="],
"@daytonaio/sdk": ["@daytonaio/sdk@0.143.0", "", { "dependencies": { "@aws-sdk/client-s3": "^3.787.0", "@aws-sdk/lib-storage": "^3.798.0", "@daytonaio/api-client": "0.143.0", "@daytonaio/toolbox-api-client": "0.143.0", "@iarna/toml": "^2.2.5", "@opentelemetry/api": "^1.9.0", "@opentelemetry/exporter-trace-otlp-http": "^0.207.0", "@opentelemetry/instrumentation-http": "^0.207.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-node": "^0.207.0", "@opentelemetry/sdk-trace-base": "^2.2.0", "@opentelemetry/semantic-conventions": "^1.37.0", "axios": "^1.13.5", "busboy": "^1.0.0", "dotenv": "^17.0.1", "expand-tilde": "^2.0.2", "fast-glob": "^3.3.0", "form-data": "^4.0.4", "isomorphic-ws": "^5.0.0", "pathe": "^2.0.3", "shell-quote": "^1.8.2", "tar": "^7.5.7" } }, "sha512-itQ409W+WuuqLd8edIRVUWeRUyujSlOKInnN6GeC3OqyOWT27wyD/KzkesXDJtQ6HthGtqa4uRtHqnayd5wRCQ=="],
"@daytonaio/toolbox-api-client": ["@daytonaio/toolbox-api-client@0.143.0", "", { "dependencies": { "axios": "^1.6.1" } }, "sha512-E6+yHPnFygNqRIctoDheISHCpzQkHydAU7fiBfsA5pnElfB/yLTOXQlnZfP8gfvOmUkRNU8QCXKzaualRTlI7w=="],
"@dimforge/rapier2d-simd-compat": ["@dimforge/rapier2d-simd-compat@0.17.3", "", {}, "sha512-bijvwWz6NHsNj5e5i1vtd3dU2pDhthSaTUZSh14DUGGKJfw8eMnlWZsxwHBxB/a3AXVNDjL9abuHw1k9FGR+jg=="],
"@discordjs/builders": ["@discordjs/builders@1.13.1", "", { "dependencies": { "@discordjs/formatters": "^0.6.2", "@discordjs/util": "^1.2.0", "@sapphire/shapeshift": "^4.0.0", "discord-api-types": "^0.38.33", "fast-deep-equal": "^3.1.3", "ts-mixer": "^6.0.4", "tslib": "^2.6.3" } }, "sha512-cOU0UDHc3lp/5nKByDxkmRiNZBpdp0kx55aarbiAfakfKJHlxv/yFW1zmIqCAmwH5CRlrH9iMFKJMpvW4DPB+w=="],
"@discordjs/collection": ["@discordjs/collection@1.5.3", "", {}, "sha512-SVb428OMd3WO1paV3rm6tSjM4wC+Kecaa1EUGX7vc6/fddvw/6lg90z4QtCqm21zvVe92vMMDt9+DkIvjXImQQ=="],
"@discordjs/formatters": ["@discordjs/formatters@0.6.2", "", { "dependencies": { "discord-api-types": "^0.38.33" } }, "sha512-y4UPwWhH6vChKRkGdMB4odasUbHOUwy7KL+OVwF86PvT6QVOwElx+TiI1/6kcmcEe+g5YRXJFiXSXUdabqZOvQ=="],
"@discordjs/rest": ["@discordjs/rest@2.6.0", "", { "dependencies": { "@discordjs/collection": "^2.1.1", "@discordjs/util": "^1.1.1", "@sapphire/async-queue": "^1.5.3", "@sapphire/snowflake": "^3.5.3", "@vladfrangu/async_event_emitter": "^2.4.6", "discord-api-types": "^0.38.16", "magic-bytes.js": "^1.10.0", "tslib": "^2.6.3", "undici": "6.21.3" } }, "sha512-RDYrhmpB7mTvmCKcpj+pc5k7POKszS4E2O9TYc+U+Y4iaCP+r910QdO43qmpOja8LRr1RJ0b3U+CqVsnPqzf4w=="],
"@discordjs/util": ["@discordjs/util@1.2.0", "", { "dependencies": { "discord-api-types": "^0.38.33" } }, "sha512-3LKP7F2+atl9vJFhaBjn4nOaSWahZ/yWjOvA4e5pnXkt2qyXRCHLxoBQy81GFtLGCq7K9lPm9R517M1U+/90Qg=="],
"@discordjs/ws": ["@discordjs/ws@1.2.3", "", { "dependencies": { "@discordjs/collection": "^2.1.0", "@discordjs/rest": "^2.5.1", "@discordjs/util": "^1.1.0", "@sapphire/async-queue": "^1.5.2", "@types/ws": "^8.5.10", "@vladfrangu/async_event_emitter": "^2.2.4", "discord-api-types": "^0.38.1", "tslib": "^2.6.2", "ws": "^8.17.0" } }, "sha512-wPlQDxEmlDg5IxhJPuxXr3Vy9AjYq5xCvFWGJyD7w7Np8ZGu+Mc+97LCoEc/+AYCo2IDpKioiH0/c/mj5ZR9Uw=="],
"@dot/log": ["@dot/log@0.1.5", "", { "dependencies": { "chalk": "^4.1.2", "loglevelnext": "^6.0.0", "p-defer": "^3.0.0" } }, "sha512-ECraEVJWv2f2mWK93lYiefUkphStVlKD6yKDzisuoEmxuLKrxO9iGetHK2DoEAkj7sxjE886n0OUVVCUx0YPNg=="],
"@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="],
"@effect/ai": ["@effect/ai@0.33.2", "", { "dependencies": { "find-my-way-ts": "^0.1.6" }, "peerDependencies": { "@effect/experimental": "^0.58.0", "@effect/platform": "^0.94.1", "@effect/rpc": "^0.73.0", "effect": "^3.19.14" } }, "sha512-iJ6pz9qiKFXhcnriJJSBQ5+Bz3oEg+6hVQ7OPmmTa0dVkKUPIgX9nHqHfstcwLnDfdXBj/zCl79U+iMpGtQBnA=="],
"@effect/ai-anthropic": ["@effect/ai-anthropic@0.23.0", "", { "dependencies": { "@anthropic-ai/tokenizer": "^0.0.4" }, "peerDependencies": { "@effect/ai": "^0.33.0", "@effect/experimental": "^0.58.0", "@effect/platform": "^0.94.0", "effect": "^3.19.13" } }, "sha512-ftkyQpTY4OCPTe9hlB53WCXseWm11lOf+zXDNCJG2nAqW5mZ+4kd/PPiqV7jd2zDukAtLc0I+R3EGs1fOLCG5A=="],
"@effect/cluster": ["@effect/cluster@0.56.4", "", { "dependencies": { "kubernetes-types": "^1.30.0" }, "peerDependencies": { "@effect/platform": "^0.94.5", "@effect/rpc": "^0.73.1", "@effect/sql": "^0.49.0", "@effect/workflow": "^0.16.0", "effect": "^3.19.17" } }, "sha512-7Je5/JlbZOlsSxsbKjr97dJed2cNGWsb+TLNgMcr5mRDbcWlFOTUGvsrisEJV6waosYLIg+2omPdvnvRoYKdhA=="],
"@effect/experimental": ["@effect/experimental@0.58.0", "", { "dependencies": { "uuid": "^11.0.3" }, "peerDependencies": { "@effect/platform": "^0.94.0", "effect": "^3.19.13", "ioredis": "^5", "lmdb": "^3" }, "optionalPeers": ["ioredis", "lmdb"] }, "sha512-IEP9sapjF6rFy5TkoqDPc86st/fnqUfjT7Xa3pWJrFGr1hzaMXHo+mWsYOZS9LAOVKnpHuVziDK97EP5qsCHVA=="],
"@effect/language-service": ["@effect/language-service@0.73.1", "", { "bin": { "effect-language-service": "cli.js" } }, "sha512-FbOKzXmP1QM6/YMvDFZGTZ0Gk0AjKqRSY48dpAN0zUdVzq5xV/Us/6vZ5FcdmG6GjgSWP2rpESy59OMvfPeylw=="],
"@effect/platform": ["@effect/platform@0.94.5", "", { "dependencies": { "find-my-way-ts": "^0.1.6", "msgpackr": "^1.11.4", "multipasta": "^0.2.7" }, "peerDependencies": { "effect": "^3.19.17" } }, "sha512-z05APUiDDPbodhTkH/RJqOLoCU11bU2IZLfcwLFrld03+ob1VeqRnELQlmueLIYm6NZifHAtjl32V+GRt34y4A=="],
"@effect/platform-bun": ["@effect/platform-bun@0.87.1", "", { "dependencies": { "@effect/platform-node-shared": "^0.57.1", "multipasta": "^0.2.7" }, "peerDependencies": { "@effect/cluster": "^0.56.1", "@effect/platform": "^0.94.2", "@effect/rpc": "^0.73.0", "@effect/sql": "^0.49.0", "effect": "^3.19.15" } }, "sha512-I88d0YqWbvLY2GGeIxK3r5k0l/MoUCCnxiHJG+X6gqaHu+pIs0djDtJ+ORhw/3qha9ojcVu6pyaBmnUjgzQHWQ=="],
"@effect/platform-node-shared": ["@effect/platform-node-shared@0.57.1", "", { "dependencies": { "@parcel/watcher": "^2.5.1", "multipasta": "^0.2.7", "ws": "^8.18.2" }, "peerDependencies": { "@effect/cluster": "^0.56.1", "@effect/platform": "^0.94.2", "@effect/rpc": "^0.73.0", "@effect/sql": "^0.49.0", "effect": "^3.19.15" } }, "sha512-oX/bApMdoKsyrDiNdJxo7U9Rz1RXsjRv+ecfAPp1qGlSdGIo32wVRvJ2XCHqYj0sqaYJS0pU0/GCulRfVGuJag=="],
"@effect/rpc": ["@effect/rpc@0.73.1", "", { "dependencies": { "msgpackr": "^1.11.4" }, "peerDependencies": { "@effect/platform": "^0.94.5", "effect": "^3.19.17" } }, "sha512-1Pn6GJl+gCdVl0O38zK8wU/I5UsG6RArDKd8Ndx8BhntZAPIBbuUFgG7KlAKTJjumBTO/YOEgKYdwma0rJq/Tw=="],
"@effect/sql": ["@effect/sql@0.49.0", "", { "dependencies": { "uuid": "^11.0.3" }, "peerDependencies": { "@effect/experimental": "^0.58.0", "@effect/platform": "^0.94.0", "effect": "^3.19.13" } }, "sha512-9UEKR+z+MrI/qMAmSvb/RiD9KlgIazjZUCDSpwNgm0lEK9/Q6ExEyfziiYFVCPiptp52cBw8uBHRic8hHnwqXA=="],
"@effect/sql-sqlite-bun": ["@effect/sql-sqlite-bun@0.50.2", "", { "peerDependencies": { "@effect/experimental": "^0.58.0", "@effect/platform": "^0.94.4", "@effect/sql": "^0.49.0", "effect": "^3.19.16" } }, "sha512-bd+rwFMjZ53OZhOnzQ0Zdef9IK2lgd0xP3M/553Y+aZjqOctMjhZmd3PVi8JjR+pRgSzN5XBu9Gly97t0aPsug=="],
"@effect/workflow": ["@effect/workflow@0.16.0", "", { "peerDependencies": { "@effect/experimental": "^0.58.0", "@effect/platform": "^0.94.0", "@effect/rpc": "^0.73.0", "effect": "^3.19.13" } }, "sha512-MiAdlxx3TixkgHdbw+Yf1Z3tHAAE0rOQga12kIydJqj05Fnod+W/I+kQGRMY/XWRg+QUsVxhmh1qTr7Ype6lrw=="],
"@emmetio/abbreviation": ["@emmetio/abbreviation@2.3.3", "", { "dependencies": { "@emmetio/scanner": "^1.0.4" } }, "sha512-mgv58UrU3rh4YgbE/TzgLQwJ3pFsHHhCLqY20aJq+9comytTXUDNGG/SMtSeMJdkpxgXSXunBGLD8Boka3JyVA=="],
"@emmetio/css-abbreviation": ["@emmetio/css-abbreviation@2.1.8", "", { "dependencies": { "@emmetio/scanner": "^1.0.4" } }, "sha512-s9yjhJ6saOO/uk1V74eifykk2CBYi01STTK3WlXWGOepyKa23ymJ053+DNQjpFcy1ingpaO7AxCcwLvHFY9tuw=="],
@@ -979,6 +1051,10 @@
"@graphql-typed-document-node/core": ["@graphql-typed-document-node/core@3.2.0", "", { "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ=="],
"@grpc/grpc-js": ["@grpc/grpc-js@1.14.3", "", { "dependencies": { "@grpc/proto-loader": "^0.8.0", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA=="],
"@grpc/proto-loader": ["@grpc/proto-loader@0.8.0", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.5.3", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ=="],
"@happy-dom/global-registrator": ["@happy-dom/global-registrator@20.0.11", "", { "dependencies": { "@types/node": "^20.0.0", "happy-dom": "^20.0.11" } }, "sha512-GqNqiShBT/lzkHTMC/slKBrvN0DsD4Di8ssBk4aDaVgEn+2WMzE6DXxq701ndSXj7/0cJ8mNT71pM7Bnrr6JRw=="],
"@hey-api/codegen-core": ["@hey-api/codegen-core@0.5.5", "", { "dependencies": { "@hey-api/types": "0.1.2", "ansi-colors": "4.1.3", "c12": "3.3.3", "color-support": "1.1.3" }, "peerDependencies": { "typescript": ">=5.5.3" } }, "sha512-f2ZHucnA2wBGAY8ipB4wn/mrEYW+WUxU2huJmUvfDO6AE2vfILSHeF3wCO39Pz4wUYPoAWZByaauftLrOfC12Q=="],
@@ -995,6 +1071,8 @@
"@hono/zod-validator": ["@hono/zod-validator@0.4.2", "", { "peerDependencies": { "hono": ">=3.9.0", "zod": "^3.19.1" } }, "sha512-1rrlBg+EpDPhzOV4hT9pxr5+xDVmKuz6YJl+la7VCwK6ass5ldyKm5fD+umJdV2zhHD6jROoCCv8NbTwyfhT0g=="],
"@iarna/toml": ["@iarna/toml@2.2.5", "", {}, "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg=="],
"@ibm/plex": ["@ibm/plex@6.4.1", "", { "dependencies": { "@ibm/telemetry-js": "^1.5.1" } }, "sha512-fnsipQywHt3zWvsnlyYKMikcVI7E2fEwpiPnIHFqlbByXVfQfANAAeJk1IV4mNnxhppUIDlhU0TzwYwL++Rn2g=="],
"@ibm/telemetry-js": ["@ibm/telemetry-js@1.11.0", "", { "bin": { "ibmtelemetry": "dist/collect.js" } }, "sha512-RO/9j+URJnSfseWg9ZkEX9p+a3Ousd33DBU7rOafoZB08RqdzxFVYJ2/iM50dkBuD0o7WX7GYt1sLbNgCoE+pA=="],
@@ -1117,6 +1195,8 @@
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
"@js-sdsl/ordered-map": ["@js-sdsl/ordered-map@4.4.2", "", {}, "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="],
"@jsdevtools/ono": ["@jsdevtools/ono@7.1.3", "", {}, "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg=="],
"@jsx-email/all": ["@jsx-email/all@2.2.3", "", { "dependencies": { "@jsx-email/body": "1.0.2", "@jsx-email/button": "1.0.4", "@jsx-email/column": "1.0.3", "@jsx-email/container": "1.0.2", "@jsx-email/font": "1.0.3", "@jsx-email/head": "1.0.2", "@jsx-email/heading": "1.0.2", "@jsx-email/hr": "1.0.2", "@jsx-email/html": "1.0.2", "@jsx-email/img": "1.0.2", "@jsx-email/link": "1.0.2", "@jsx-email/markdown": "2.0.4", "@jsx-email/preview": "1.0.2", "@jsx-email/render": "1.1.1", "@jsx-email/row": "1.0.2", "@jsx-email/section": "1.0.2", "@jsx-email/tailwind": "2.4.4", "@jsx-email/text": "1.0.2" }, "peerDependencies": { "react": "^18.2.0" } }, "sha512-OBvLe/hVSQc0LlMSTJnkjFoqs3bmxcC4zpy/5pT5agPCSKMvAKQjzmsc2xJ2wO73jSpRV1K/g38GmvdCfrhSoQ=="],
@@ -1189,6 +1269,18 @@
"@motionone/utils": ["@motionone/utils@10.18.0", "", { "dependencies": { "@motionone/types": "^10.17.1", "hey-listen": "^1.0.8", "tslib": "^2.3.1" } }, "sha512-3XVF7sgyTSI2KWvTf6uLlBJ5iAgRgmvp3bpuOiQJvInd4nZ19ET8lX5unn30SlmRH7hXbBbH+Gxd0m0klJ3Xtw=="],
"@msgpackr-extract/msgpackr-extract-darwin-arm64": ["@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw=="],
"@msgpackr-extract/msgpackr-extract-darwin-x64": ["@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw=="],
"@msgpackr-extract/msgpackr-extract-linux-arm": ["@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3", "", { "os": "linux", "cpu": "arm" }, "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw=="],
"@msgpackr-extract/msgpackr-extract-linux-arm64": ["@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg=="],
"@msgpackr-extract/msgpackr-extract-linux-x64": ["@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3", "", { "os": "linux", "cpu": "x64" }, "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg=="],
"@msgpackr-extract/msgpackr-extract-win32-x64": ["@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3", "", { "os": "win32", "cpu": "x64" }, "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ=="],
"@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@1.1.1", "", { "dependencies": { "@emnapi/core": "^1.7.1", "@emnapi/runtime": "^1.7.1", "@tybys/wasm-util": "^0.10.1" } }, "sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A=="],
"@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
@@ -1273,12 +1365,70 @@
"@opencode-ai/web": ["@opencode-ai/web@workspace:packages/web"],
"@opencode/discord": ["@opencode/discord@workspace:packages/discord"],
"@openrouter/ai-sdk-provider": ["@openrouter/ai-sdk-provider@1.5.4", "", { "dependencies": { "@openrouter/sdk": "^0.1.27" }, "peerDependencies": { "ai": "^5.0.0", "zod": "^3.24.1 || ^v4" } }, "sha512-xrSQPUIH8n9zuyYZR0XK7Ba0h2KsjJcMkxnwaYfmv13pKs3sDkjPzVPPhlhzqBGddHb5cFEwJ9VFuFeDcxCDSw=="],
"@openrouter/sdk": ["@openrouter/sdk@0.1.27", "", { "dependencies": { "zod": "^3.25.0 || ^4.0.0" } }, "sha512-RH//L10bSmc81q25zAZudiI4kNkLgxF2E+WU42vghp3N6TEvZ6F0jK7uT3tOxkEn91gzmMw9YVmDENy7SJsajQ=="],
"@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="],
"@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-lAb0jQRVyleQQGiuuvCOTDVspc14nx6XJjP4FspJ1sNARo3Regq4ZZbrc3rN4b1TYSuUCvgH+UXUPug4SLOqEQ=="],
"@opentelemetry/context-async-hooks": ["@opentelemetry/context-async-hooks@2.2.0", "", { "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-qRkLWiUEZNAmYapZ7KGS5C4OmBLcP/H2foXeOEaowYCR0wi89fHejrfYfbuLVCMLp/dWZXKvQusdbUEZjERfwQ=="],
"@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="],
"@opentelemetry/exporter-logs-otlp-grpc": ["@opentelemetry/exporter-logs-otlp-grpc@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-grpc-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/sdk-logs": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-K92RN+kQGTMzFDsCzsYNGqOsXRUnko/Ckk+t/yPJao72MewOLgBUTWVHhebgkNfRCYqDz1v3K0aPT9OJkemvgg=="],
"@opentelemetry/exporter-logs-otlp-http": ["@opentelemetry/exporter-logs-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/sdk-logs": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-JpOh7MguEUls8eRfkVVW3yRhClo5b9LqwWTOg8+i4gjr/+8eiCtquJnC7whvpTIGyff06cLZ2NsEj+CVP3Mjeg=="],
"@opentelemetry/exporter-logs-otlp-proto": ["@opentelemetry/exporter-logs-otlp-proto@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-RQJEV/K6KPbQrIUbsrRkEe0ufks1o5OGLHy6jbDD8tRjeCsbFHWfg99lYBRqBV33PYZJXsigqMaAbjWGTFYzLw=="],
"@opentelemetry/exporter-metrics-otlp-grpc": ["@opentelemetry/exporter-metrics-otlp-grpc@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/exporter-metrics-otlp-http": "0.207.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-grpc-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-6flX89W54gkwmqYShdcTBR1AEF5C1Ob0O8pDgmLPikTKyEv27lByr9yBmO5WrP0+5qJuNPHrLfgFQFYi6npDGA=="],
"@opentelemetry/exporter-metrics-otlp-http": ["@opentelemetry/exporter-metrics-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-fG8FAJmvXOrKXGIRN8+y41U41IfVXxPRVwyB05LoMqYSjugx/FSBkMZUZXUT/wclTdmBKtS5MKoi0bEKkmRhSw=="],
"@opentelemetry/exporter-metrics-otlp-proto": ["@opentelemetry/exporter-metrics-otlp-proto@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/exporter-metrics-otlp-http": "0.207.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-kDBxiTeQjaRlUQzS1COT9ic+et174toZH6jxaVuVAvGqmxOkgjpLOjrI5ff8SMMQE69r03L3Ll3nPKekLopLwg=="],
"@opentelemetry/exporter-prometheus": ["@opentelemetry/exporter-prometheus@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-metrics": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-Y5p1s39FvIRmU+F1++j7ly8/KSqhMmn6cMfpQqiDCqDjdDHwUtSq0XI0WwL3HYGnZeaR/VV4BNmsYQJ7GAPrhw=="],
"@opentelemetry/exporter-trace-otlp-grpc": ["@opentelemetry/exporter-trace-otlp-grpc@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-grpc-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-7u2ZmcIx6D4KG/+5np4X2qA0o+O0K8cnUDhR4WI/vr5ZZ0la9J9RG+tkSjC7Yz+2XgL6760gSIM7/nyd3yaBLA=="],
"@opentelemetry/exporter-trace-otlp-http": ["@opentelemetry/exporter-trace-otlp-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-HSRBzXHIC7C8UfPQdu15zEEoBGv0yWkhEwxqgPCHVUKUQ9NLHVGXkVrf65Uaj7UwmAkC1gQfkuVYvLlD//AnUQ=="],
"@opentelemetry/exporter-trace-otlp-proto": ["@opentelemetry/exporter-trace-otlp-proto@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-ruUQB4FkWtxHjNmSXjrhmJZFvyMm+tBzHyMm7YPQshApy4wvZUTcrpPyP/A/rCl/8M4BwoVIZdiwijMdbZaq4w=="],
"@opentelemetry/exporter-zipkin": ["@opentelemetry/exporter-zipkin@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": "^1.0.0" } }, "sha512-VV4QzhGCT7cWrGasBWxelBjqbNBbyHicWWS/66KoZoe9BzYwFB72SH2/kkc4uAviQlO8iwv2okIJy+/jqqEHTg=="],
"@opentelemetry/instrumentation": ["@opentelemetry/instrumentation@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "import-in-the-middle": "^2.0.0", "require-in-the-middle": "^8.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-y6eeli9+TLKnznrR8AZlQMSJT7wILpXH+6EYq5Vf/4Ao+huI7EedxQHwRgVUOMLFbe7VFDvHJrX9/f4lcwnJsA=="],
"@opentelemetry/instrumentation-http": ["@opentelemetry/instrumentation-http@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/instrumentation": "0.207.0", "@opentelemetry/semantic-conventions": "^1.29.0", "forwarded-parse": "2.1.2" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-FC4i5hVixTzuhg4SV2ycTEAYx+0E2hm+GwbdoVPSA6kna0pPVI4etzaA9UkpJ9ussumQheFXP6rkGIaFJjMxsw=="],
"@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.207.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-transformer": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-4RQluMVVGMrHok/3SVeSJ6EnRNkA2MINcX88sh+d/7DjGUrewW/WT88IsMEci0wUM+5ykTpPPNbEOoW+jwHnbw=="],
"@opentelemetry/otlp-grpc-exporter-base": ["@opentelemetry/otlp-grpc-exporter-base@0.207.0", "", { "dependencies": { "@grpc/grpc-js": "^1.7.1", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.207.0", "@opentelemetry/otlp-transformer": "0.207.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-eKFjKNdsPed4q9yYqeI5gBTLjXxDM/8jwhiC0icw3zKxHVGBySoDsed5J5q/PGY/3quzenTr3FiTxA3NiNT+nw=="],
"@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-+6DRZLqM02uTIY5GASMZWUwr52sLfNiEe20+OEaZKhztCs3+2LxoTjb6JxFRd9q1qNqckXKYlUKjbH/AhG8/ZA=="],
"@opentelemetry/propagator-b3": ["@opentelemetry/propagator-b3@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-9CrbTLFi5Ee4uepxg2qlpQIozoJuoAZU5sKMx0Mn7Oh+p7UrgCiEV6C02FOxxdYVRRFQVCinYR8Kf6eMSQsIsw=="],
"@opentelemetry/propagator-jaeger": ["@opentelemetry/propagator-jaeger@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FfeOHOrdhiNzecoB1jZKp2fybqmqMPJUXe2ZOydP7QzmTPYcfPeuaclTLYVhK3HyJf71kt8sTl92nV4YIaLaKA=="],
"@opentelemetry/resources": ["@opentelemetry/resources@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A=="],
"@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-4MEQmn04y+WFe6cyzdrXf58hZxilvY59lzZj2AccuHW/+BxLn/rGVN/Irsi/F0qfBOpMOrrCLKTExoSL2zoQmg=="],
"@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="],
"@opentelemetry/sdk-node": ["@opentelemetry/sdk-node@0.207.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.207.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/exporter-logs-otlp-grpc": "0.207.0", "@opentelemetry/exporter-logs-otlp-http": "0.207.0", "@opentelemetry/exporter-logs-otlp-proto": "0.207.0", "@opentelemetry/exporter-metrics-otlp-grpc": "0.207.0", "@opentelemetry/exporter-metrics-otlp-http": "0.207.0", "@opentelemetry/exporter-metrics-otlp-proto": "0.207.0", "@opentelemetry/exporter-prometheus": "0.207.0", "@opentelemetry/exporter-trace-otlp-grpc": "0.207.0", "@opentelemetry/exporter-trace-otlp-http": "0.207.0", "@opentelemetry/exporter-trace-otlp-proto": "0.207.0", "@opentelemetry/exporter-zipkin": "2.2.0", "@opentelemetry/instrumentation": "0.207.0", "@opentelemetry/propagator-b3": "2.2.0", "@opentelemetry/propagator-jaeger": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.207.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "@opentelemetry/sdk-trace-node": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-hnRsX/M8uj0WaXOBvFenQ8XsE8FLVh2uSnn1rkWu4mx+qu7EKGUZvZng6y/95cyzsqOfiaDDr08Ek4jppkIDNg=="],
"@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.5.1", "", { "dependencies": { "@opentelemetry/core": "2.5.1", "@opentelemetry/resources": "2.5.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-iZH3Gw8cxQn0gjpOjJMmKLd9GIaNh/E3v3ST67vyzLSxHBs14HsG4dy7jMYyC5WXGdBVEcM7U/XTF5hCQxjDMw=="],
"@opentelemetry/sdk-trace-node": ["@opentelemetry/sdk-trace-node@2.2.0", "", { "dependencies": { "@opentelemetry/context-async-hooks": "2.2.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-+OaRja3f0IqGG2kptVeYsrZQK9nKRSpfFrKtRBq4uh6nIB8bTBgaGvYQrQoRrQWQMA5dK5yLhDMDc0dvYvCOIQ=="],
"@opentelemetry/semantic-conventions": ["@opentelemetry/semantic-conventions@1.39.0", "", {}, "sha512-R5R9tb2AXs2IRLNKLBJDynhkfmx7mX0vi8NkhZb3gUkPWHn6HXk5J8iQ/dql0U3ApfWym4kXXmBDRGO+oeOfjg=="],
"@opentui/core": ["@opentui/core@0.1.79", "", { "dependencies": { "bun-ffi-structs": "0.1.2", "diff": "8.0.2", "jimp": "1.6.0", "marked": "17.0.1", "yoga-layout": "3.2.1" }, "optionalDependencies": { "@dimforge/rapier2d-simd-compat": "^0.17.3", "@opentui/core-darwin-arm64": "0.1.79", "@opentui/core-darwin-x64": "0.1.79", "@opentui/core-linux-arm64": "0.1.79", "@opentui/core-linux-x64": "0.1.79", "@opentui/core-win32-arm64": "0.1.79", "@opentui/core-win32-x64": "0.1.79", "bun-webgpu": "0.1.4", "planck": "^1.4.2", "three": "0.177.0" }, "peerDependencies": { "web-tree-sitter": "0.25.10" } }, "sha512-job/t09w8A/aHb/WuaVbimu5fIffyN+PCuVO5cYhXEg/NkOkC/WdFi80B8bwncR/DBPyLAh6oJ3EG86grOVo5g=="],
"@opentui/core-darwin-arm64": ["@opentui/core-darwin-arm64@0.1.79", "", { "os": "darwin", "cpu": "arm64" }, "sha512-kgsGniV+DM5G1P3GideyJhvfnthNKcVCAm2mPTIr9InQ3L0gS/Feh7zgwOS/jxDvdlQbOWGKMk2Z3JApeC1MLw=="],
@@ -1433,6 +1583,26 @@
"@protobuf-ts/runtime-rpc": ["@protobuf-ts/runtime-rpc@2.11.1", "", { "dependencies": { "@protobuf-ts/runtime": "^2.11.1" } }, "sha512-4CqqUmNA+/uMz00+d3CYKgElXO9VrEbucjnBFEjqI4GuDrEQ32MaI3q+9qPBvIGOlL4PmHXrzM32vBPWRhQKWQ=="],
"@protobufjs/aspromise": ["@protobufjs/aspromise@1.1.2", "", {}, "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="],
"@protobufjs/base64": ["@protobufjs/base64@1.1.2", "", {}, "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="],
"@protobufjs/codegen": ["@protobufjs/codegen@2.0.4", "", {}, "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="],
"@protobufjs/eventemitter": ["@protobufjs/eventemitter@1.1.0", "", {}, "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="],
"@protobufjs/fetch": ["@protobufjs/fetch@1.1.0", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" } }, "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ=="],
"@protobufjs/float": ["@protobufjs/float@1.0.2", "", {}, "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="],
"@protobufjs/inquire": ["@protobufjs/inquire@1.1.0", "", {}, "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="],
"@protobufjs/path": ["@protobufjs/path@1.1.2", "", {}, "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="],
"@protobufjs/pool": ["@protobufjs/pool@1.1.0", "", {}, "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="],
"@protobufjs/utf8": ["@protobufjs/utf8@1.1.0", "", {}, "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="],
"@radix-ui/colors": ["@radix-ui/colors@1.0.1", "", {}, "sha512-xySw8f0ZVsAEP+e7iLl3EvcBXX7gsIlC1Zso/sPBW9gIWerBTgz6axrjU+MZ39wD+WFi5h5zdWpsg3+hwt2Qsg=="],
"@radix-ui/primitive": ["@radix-ui/primitive@1.0.1", "", { "dependencies": { "@babel/runtime": "^7.13.10" } }, "sha512-yQ8oGX2GVsEYMWGxcovu1uGWPCxV5BFfeeYxqPmuAzUyLT9qmaMXSAhXpb0WrspIeqYzdJpkh2vHModJPgRIaw=="],
@@ -1551,6 +1721,12 @@
"@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.57.0", "", { "os": "win32", "cpu": "x64" }, "sha512-Zv7v6q6aV+VslnpwzqKAmrk5JdVkLUzok2208ZXGipjb+msxBr/fJPZyeEXiFgH7k62Ak0SLIfxQRZQvTuf7rQ=="],
"@sapphire/async-queue": ["@sapphire/async-queue@1.5.5", "", {}, "sha512-cvGzxbba6sav2zZkH8GPf2oGk9yYoD5qrNWdu9fRehifgnFZJMV+nuy2nON2roRO4yQQ+v7MK/Pktl/HgfsUXg=="],
"@sapphire/shapeshift": ["@sapphire/shapeshift@4.0.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "lodash": "^4.17.21" } }, "sha512-d9dUmWVA7MMiKobL3VpLF8P2aeanRTu6ypG2OIaEv/ZHH/SUQ2iHOVyi5wAPjQ+HmnMuL0whK9ez8I/raWbtIg=="],
"@sapphire/snowflake": ["@sapphire/snowflake@3.5.3", "", {}, "sha512-jjmJywLAFoWeBi1W7994zZyiNWPIiqRRNAmSERxyg93xRGzNYvGjlZ0gR6x0F4gPRi2+0O6S71kOZYyr3cxaIQ=="],
"@selderee/plugin-htmlparser2": ["@selderee/plugin-htmlparser2@0.11.0", "", { "dependencies": { "domhandler": "^5.0.3", "selderee": "^0.11.0" } }, "sha512-P33hHGdldxGabLFjPPpaTxVolMrzrcegejx+0GxjrIb9Zv48D8yAIA/QTDR2dFl7Uz7urX8aX6+5bCZslr+gWQ=="],
"@shikijs/core": ["@shikijs/core@3.9.2", "", { "dependencies": { "@shikijs/types": "3.9.2", "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4", "hast-util-to-html": "^9.0.5" } }, "sha512-3q/mzmw09B2B6PgFNeiaN8pkNOixWS726IHmJEpjDAcneDPMQmUg2cweT9cWXY4XcyQS3i6mOOUgQz9RRUP6HA=="],
@@ -1983,6 +2159,8 @@
"@vitest/utils": ["@vitest/utils@4.0.18", "", { "dependencies": { "@vitest/pretty-format": "4.0.18", "tinyrainbow": "^3.0.3" } }, "sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA=="],
"@vladfrangu/async_event_emitter": ["@vladfrangu/async_event_emitter@2.4.7", "", {}, "sha512-Xfe6rpCTxSxfbswi/W/Pz7zp1WWSNn4A0eW4mLkQUewCrXXtMj31lCg+iQyTkh/CkusZSq9eDflu7tjEDXUY6g=="],
"@volar/kit": ["@volar/kit@2.4.28", "", { "dependencies": { "@volar/language-service": "2.4.28", "@volar/typescript": "2.4.28", "typesafe-path": "^0.2.2", "vscode-languageserver-textdocument": "^1.0.11", "vscode-uri": "^3.0.8" }, "peerDependencies": { "typescript": "*" } }, "sha512-cKX4vK9dtZvDRaAzeoUdaAJEew6IdxHNCRrdp5Kvcl6zZOqb6jTOfk3kXkIkG3T7oTFXguEMt5+9ptyqYR84Pg=="],
"@volar/language-core": ["@volar/language-core@2.4.28", "", { "dependencies": { "@volar/source-map": "2.4.28" } }, "sha512-w4qhIJ8ZSitgLAkVay6AbcnC7gP3glYM3fYwKV3srj8m494E3xtrCv6E+bWviiK/8hs6e6t1ij1s2Endql7vzQ=="],
@@ -2013,6 +2191,8 @@
"acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="],
"acorn-import-attributes": ["acorn-import-attributes@1.9.5", "", { "peerDependencies": { "acorn": "^8" } }, "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ=="],
"acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="],
"acorn-walk": ["acorn-walk@8.3.2", "", {}, "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A=="],
@@ -2195,6 +2375,8 @@
"bundle-name": ["bundle-name@4.1.0", "", { "dependencies": { "run-applescript": "^7.0.0" } }, "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q=="],
"busboy": ["busboy@1.6.0", "", { "dependencies": { "streamsearch": "^1.1.0" } }, "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA=="],
"bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="],
"c12": ["c12@3.3.3", "", { "dependencies": { "chokidar": "^5.0.0", "confbox": "^0.2.2", "defu": "^6.1.4", "dotenv": "^17.2.3", "exsolve": "^1.0.8", "giget": "^2.0.0", "jiti": "^2.6.1", "ohash": "^2.0.11", "pathe": "^2.0.3", "perfect-debounce": "^2.0.0", "pkg-types": "^2.3.0", "rc9": "^2.1.2" }, "peerDependencies": { "magicast": "*" }, "optionalPeers": ["magicast"] }, "sha512-750hTRvgBy5kcMNPdh95Qo+XUBeGo8C7nsKSmedDmaQI+E0r82DwHeM6vBewDe4rGFbnxoa4V9pw+sPh5+Iz8Q=="],
@@ -2243,6 +2425,8 @@
"citty": ["citty@0.1.6", "", { "dependencies": { "consola": "^3.2.3" } }, "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ=="],
"cjs-module-lexer": ["cjs-module-lexer@2.2.0", "", {}, "sha512-4bHTS2YuzUvtoLjdy+98ykbNB5jS0+07EvFNXerqZQJ89F7DI6ET7OQo/HJuW6K0aVsKA9hj9/RVb2kQVOrPDQ=="],
"classnames": ["classnames@2.3.2", "", {}, "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw=="],
"clean-css": ["clean-css@5.3.3", "", { "dependencies": { "source-map": "~0.6.0" } }, "sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg=="],
@@ -2393,6 +2577,10 @@
"direction": ["direction@2.0.1", "", { "bin": { "direction": "cli.js" } }, "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA=="],
"discord-api-types": ["discord-api-types@0.38.39", "", {}, "sha512-XRdDQvZvID1XvcFftjSmd4dcmMi/RL/jSy5sduBDAvCGFcNFHThdIQXCEBDZFe52lCNEzuIL0QJoKYAmRmxLUA=="],
"discord.js": ["discord.js@14.25.1", "", { "dependencies": { "@discordjs/builders": "^1.13.0", "@discordjs/collection": "1.5.3", "@discordjs/formatters": "^0.6.2", "@discordjs/rest": "^2.6.0", "@discordjs/util": "^1.2.0", "@discordjs/ws": "^1.2.3", "@sapphire/snowflake": "3.5.3", "discord-api-types": "^0.38.33", "fast-deep-equal": "3.1.3", "lodash.snakecase": "4.1.1", "magic-bytes.js": "^1.10.0", "tslib": "^2.6.3", "undici": "6.21.3" } }, "sha512-2l0gsPOLPs5t6GFZfQZKnL1OJNYFcuC/ETWsW4VtKVD/tg4ICa9x+jb9bkPffkMdRpRpuUaO/fKkHCBeiCKh8g=="],
"dlv": ["dlv@1.1.3", "", {}, "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA=="],
"dns-packet": ["dns-packet@5.6.1", "", { "dependencies": { "@leichtgewicht/ip-codec": "^2.0.1" } }, "sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw=="],
@@ -2429,6 +2617,8 @@
"ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="],
"effect": ["effect@3.19.17", "", { "dependencies": { "@standard-schema/spec": "^1.0.0", "fast-check": "^3.23.1" } }, "sha512-MChgn9z0y3KJ2DJ+VsZt1bdVDJ0bDIZt9zm4s3iPpRLzgcEGDOIP8pwADjAWifaP0S672nlRE5rZPBbyDcjn1g=="],
"electron-to-chromium": ["electron-to-chromium@1.5.282", "", {}, "sha512-FCPkJtpst28UmFzd903iU7PdeVTfY0KAeJy+Lk0GLZRwgwYHn/irRcaCbQQOmr5Vytc/7rcavsYLvTM8RiHYhQ=="],
"emmet": ["emmet@2.4.11", "", { "dependencies": { "@emmetio/abbreviation": "^2.3.3", "@emmetio/css-abbreviation": "^2.1.8" } }, "sha512-23QPJB3moh/U9sT4rQzGgeyyGIrcM+GH5uVYg2C6wZIxAIJq7Ng3QLT79tl8FUwDXhyq9SusfknOrofAKqvgyQ=="],
@@ -2523,6 +2713,8 @@
"exit-hook": ["exit-hook@2.2.1", "", {}, "sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw=="],
"expand-tilde": ["expand-tilde@2.0.2", "", { "dependencies": { "homedir-polyfill": "^1.0.1" } }, "sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw=="],
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
"express": ["express@4.22.1", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "~1.20.3", "content-disposition": "~0.5.4", "content-type": "~1.0.4", "cookie": "~0.7.1", "cookie-signature": "~1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "~1.3.1", "fresh": "~0.5.2", "http-errors": "~2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "~2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "~0.1.12", "proxy-addr": "~2.0.7", "qs": "~6.14.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "~0.19.0", "serve-static": "~1.16.2", "setprototypeof": "1.2.0", "statuses": "~2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g=="],
@@ -2537,6 +2729,8 @@
"extend-shallow": ["extend-shallow@2.0.1", "", { "dependencies": { "is-extendable": "^0.1.0" } }, "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug=="],
"fast-check": ["fast-check@3.23.2", "", { "dependencies": { "pure-rand": "^6.1.0" } }, "sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A=="],
"fast-content-type-parse": ["fast-content-type-parse@3.0.0", "", {}, "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg=="],
"fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
@@ -2575,6 +2769,8 @@
"find-my-way": ["find-my-way@9.4.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-querystring": "^1.0.0", "safe-regex2": "^5.0.0" } }, "sha512-5Ye4vHsypZRYtS01ob/iwHzGRUDELlsoCftI/OZFhcLs1M0tkGPcXldE80TAZC5yYuJMBPJQQ43UHlqbJWiX2w=="],
"find-my-way-ts": ["find-my-way-ts@0.1.6", "", {}, "sha512-a85L9ZoXtNAey3Y6Z+eBWW658kO/MwR7zIafkIUPUMf3isZG0NCs2pjW2wtjxAKuJPxMAsHUIP4ZPGv0o5gyTA=="],
"find-up": ["find-up@4.1.0", "", { "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" } }, "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw=="],
"finity": ["finity@0.5.4", "", {}, "sha512-3l+5/1tuw616Lgb0QBimxfdd2TqaDGpfCBpfX6EqtFmqUV3FtQnVEX4Aa62DagYEqnsTIjZcTfbq9msDbXYgyA=="],
@@ -2601,6 +2797,8 @@
"forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="],
"forwarded-parse": ["forwarded-parse@2.1.2", "", {}, "sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw=="],
"fraction.js": ["fraction.js@5.3.4", "", {}, "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ=="],
"framer-motion": ["framer-motion@8.5.5", "", { "dependencies": { "@motionone/dom": "^10.15.3", "hey-listen": "^1.0.8", "tslib": "^2.4.0" }, "optionalDependencies": { "@emotion/is-prop-valid": "^0.8.2" }, "peerDependencies": { "react": "^18.0.0", "react-dom": "^18.0.0" } }, "sha512-5IDx5bxkjWHWUF3CVJoSyUVOtrbAxtzYBBowRE2uYI/6VYhkEBD+rbTHEGuUmbGHRj6YqqSfoG7Aa1cLyWCrBA=="],
@@ -2749,6 +2947,8 @@
"hey-listen": ["hey-listen@1.0.8", "", {}, "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q=="],
"homedir-polyfill": ["homedir-polyfill@1.0.3", "", { "dependencies": { "parse-passwd": "^1.0.0" } }, "sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA=="],
"hono": ["hono@4.10.7", "", {}, "sha512-icXIITfw/07Q88nLSkB9aiUrd8rYzSweK681Kjo/TSggaGbOX4RRyxxm71v+3PC8C/j+4rlxGeoTRxQDkaJkUw=="],
"hono-openapi": ["hono-openapi@1.1.2", "", { "peerDependencies": { "@hono/standard-validator": "^0.2.0", "@standard-community/standard-json": "^0.3.5", "@standard-community/standard-openapi": "^0.2.9", "@types/json-schema": "^7.0.15", "hono": "^4.8.3", "openapi-types": "^12.1.3" }, "optionalPeers": ["@hono/standard-validator", "hono"] }, "sha512-toUcO60MftRBxqcVyxsHNYs2m4vf4xkQaiARAucQx3TiBPDtMNNkoh+C4I1vAretQZiGyaLOZNWn1YxfSyUA5g=="],
@@ -2793,6 +2993,8 @@
"image-q": ["image-q@4.0.0", "", { "dependencies": { "@types/node": "16.9.1" } }, "sha512-PfJGVgIfKQJuq3s0tTDOKtztksibuUEbJQIYT3by6wctQo+Rdlh7ef4evJ5NCdxY4CfMbvFkocEwbl4BF8RlJw=="],
"import-in-the-middle": ["import-in-the-middle@2.0.6", "", { "dependencies": { "acorn": "^8.15.0", "acorn-import-attributes": "^1.9.5", "cjs-module-lexer": "^2.2.0", "module-details-from-path": "^1.0.4" } }, "sha512-3vZV3jX0XRFW3EJDTwzWoZa+RH1b8eTTx6YOCjglrLyPuepwoBti1k3L2dKwdCUrnVEfc5CuRuGstaC/uQJJaw=="],
"import-local": ["import-local@3.2.0", "", { "dependencies": { "pkg-dir": "^4.2.0", "resolve-cwd": "^3.0.0" }, "bin": { "import-local-fixture": "fixtures/cli.js" } }, "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA=="],
"import-meta-resolve": ["import-meta-resolve@4.2.0", "", {}, "sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg=="],
@@ -2971,6 +3173,8 @@
"klona": ["klona@2.0.6", "", {}, "sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA=="],
"kubernetes-types": ["kubernetes-types@1.30.0", "", {}, "sha512-Dew1okvhM/SQcIa2rcgujNndZwU8VnSapDgdxlYoB84ZlpAD43U6KLAFqYo17ykSFGHNPrg0qry0bP+GJd9v7Q=="],
"lang-map": ["lang-map@0.4.0", "", { "dependencies": { "language-map": "^1.1.0" } }, "sha512-oiSqZIEUnWdFeDNsp4HId4tAxdFbx5iMBOwA3666Fn2L8Khj8NiD9xRvMsGmKXopPVkaDFtSv3CJOmXFUB0Hcg=="],
"language-map": ["language-map@1.5.0", "", {}, "sha512-n7gFZpe+DwEAX9cXVTw43i3wiudWDDtSn28RmdnS/HCPr284dQI/SztsamWanRr75oSlKSaGbV2nmWCTzGCoVg=="],
@@ -3011,6 +3215,8 @@
"lodash": ["lodash@4.17.23", "", {}, "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w=="],
"lodash.camelcase": ["lodash.camelcase@4.3.0", "", {}, "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="],
"lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="],
"lodash.isboolean": ["lodash.isboolean@3.0.3", "", {}, "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="],
@@ -3025,6 +3231,8 @@
"lodash.once": ["lodash.once@4.1.1", "", {}, "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="],
"lodash.snakecase": ["lodash.snakecase@4.1.1", "", {}, "sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw=="],
"loglevelnext": ["loglevelnext@6.0.0", "", {}, "sha512-FDl1AI2sJGjHHG3XKJd6sG3/6ncgiGCQ0YkW46nxe7SfqQq6hujd9CvFXIXtkGBUN83KPZ2KSOJK8q5P0bSSRQ=="],
"long": ["long@5.3.2", "", {}, "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="],
@@ -3043,6 +3251,8 @@
"luxon": ["luxon@3.6.1", "", {}, "sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ=="],
"magic-bytes.js": ["magic-bytes.js@1.13.0", "", {}, "sha512-afO2mnxW7GDTXMm5/AoN1WuOcdoKhtgXjIvHmobqTD1grNplhGdv3PFOyjCVmrnOZBIT/gD/koDKpYG+0mvHcg=="],
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
"magicast": ["magicast@0.3.5", "", { "dependencies": { "@babel/parser": "^7.25.4", "@babel/types": "^7.25.4", "source-map-js": "^1.2.0" } }, "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ=="],
@@ -3187,9 +3397,9 @@
"mime": ["mime@3.0.0", "", { "bin": { "mime": "cli.js" } }, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="],
"mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="],
"mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"mime-types": ["mime-types@3.0.2", "", { "dependencies": { "mime-db": "^1.54.0" } }, "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A=="],
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"mimic-fn": ["mimic-fn@4.0.0", "", {}, "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw=="],
@@ -3205,16 +3415,24 @@
"mkdirp": ["mkdirp@0.5.6", "", { "dependencies": { "minimist": "^1.2.6" }, "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw=="],
"module-details-from-path": ["module-details-from-path@1.0.4", "", {}, "sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w=="],
"morphdom": ["morphdom@2.7.8", "", {}, "sha512-D/fR4xgGUyVRbdMGU6Nejea1RFzYxYtyurG4Fbv2Fi/daKlWKuXGLOdXtl+3eIwL110cI2hz1ZojGICjjFLgTg=="],
"mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
"msgpackr": ["msgpackr@1.11.8", "", { "optionalDependencies": { "msgpackr-extract": "^3.0.2" } }, "sha512-bC4UGzHhVvgDNS7kn9tV8fAucIYUBuGojcaLiz7v+P63Lmtm0Xeji8B/8tYKddALXxJLpwIeBmUN3u64C4YkRA=="],
"msgpackr-extract": ["msgpackr-extract@3.0.3", "", { "dependencies": { "node-gyp-build-optional-packages": "5.2.2" }, "optionalDependencies": { "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" }, "bin": { "download-msgpackr-prebuilds": "bin/download-prebuilds.js" } }, "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA=="],
"muggle-string": ["muggle-string@0.4.1", "", {}, "sha512-VNTrAak/KhO2i8dqqnqnAHOa3cYBwXEZe9h+D5h/1ZqFSTEFHdM65lR7RoIqq3tBBYavsOXV84NoHXZ0AkPyqQ=="],
"multicast-dns": ["multicast-dns@7.2.5", "", { "dependencies": { "dns-packet": "^5.2.2", "thunky": "^1.0.2" }, "bin": { "multicast-dns": "cli.js" } }, "sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg=="],
"multipasta": ["multipasta@0.2.7", "", {}, "sha512-KPA58d68KgGil15oDqXjkUBEBYc00XvbPj5/X+dyzeo/lWm9Nc25pQRlf1D+gv4OpK7NM0J1odrbu9JNNGvynA=="],
"mustache": ["mustache@4.2.0", "", { "bin": { "mustache": "bin/mustache" } }, "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ=="],
"mysql2": ["mysql2@3.14.4", "", { "dependencies": { "aws-ssl-profiles": "^1.1.1", "denque": "^2.1.0", "generate-function": "^2.3.1", "iconv-lite": "^0.7.0", "long": "^5.2.1", "lru.min": "^1.0.0", "named-placeholders": "^1.1.3", "seq-queue": "^0.0.5", "sqlstring": "^2.3.2" } }, "sha512-Cs/jx3WZPNrYHVz+Iunp9ziahaG5uFMvD2R8Zlmc194AqXNxt9HBNu7ZsPYrUtmJsF0egETCWIdMIYAwOGjL1w=="],
@@ -3249,6 +3467,8 @@
"node-gyp-build": ["node-gyp-build@4.8.4", "", { "bin": { "node-gyp-build": "bin.js", "node-gyp-build-optional": "optional.js", "node-gyp-build-test": "build-test.js" } }, "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ=="],
"node-gyp-build-optional-packages": ["node-gyp-build-optional-packages@5.2.2", "", { "dependencies": { "detect-libc": "^2.0.1" }, "bin": { "node-gyp-build-optional-packages": "bin.js", "node-gyp-build-optional-packages-optional": "optional.js", "node-gyp-build-optional-packages-test": "build-test.js" } }, "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw=="],
"node-html-parser": ["node-html-parser@7.0.2", "", { "dependencies": { "css-select": "^5.1.0", "he": "1.2.0" } }, "sha512-DxodLVh7a6JMkYzWyc8nBX9MaF4M0lLFYkJHlWOiu7+9/I6mwNK9u5TbAMC7qfqDJEPX9OIoWA2A9t4C2l1mUQ=="],
"node-mock-http": ["node-mock-http@1.0.4", "", {}, "sha512-8DY+kFsDkNXy1sJglUfuODx1/opAGJGyrTuFqEoN90oRc2Vk0ZbD4K2qmKXBBEhZQzdKHIVfEJpDU8Ak2NJEvQ=="],
@@ -3353,6 +3573,8 @@
"parse-latin": ["parse-latin@7.0.0", "", { "dependencies": { "@types/nlcst": "^2.0.0", "@types/unist": "^3.0.0", "nlcst-to-string": "^4.0.0", "unist-util-modify-children": "^4.0.0", "unist-util-visit-children": "^3.0.0", "vfile": "^6.0.0" } }, "sha512-mhHgobPPua5kZ98EF4HWiH167JWBfl4pvAIXXdbaVohtK7a6YBOy56kvhCqduqyo/f3yrHFWmqmiMg/BkBkYYQ=="],
"parse-passwd": ["parse-passwd@1.0.0", "", {}, "sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q=="],
"parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="],
"parse5-htmlparser2-tree-adapter": ["parse5-htmlparser2-tree-adapter@7.1.0", "", { "dependencies": { "domhandler": "^5.0.3", "parse5": "^7.0.0" } }, "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g=="],
@@ -3463,12 +3685,16 @@
"proto-list": ["proto-list@1.2.4", "", {}, "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA=="],
"protobufjs": ["protobufjs@7.5.4", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.4", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.0", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg=="],
"proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="],
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
"punycode": ["punycode@1.3.2", "", {}, "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw=="],
"pure-rand": ["pure-rand@6.1.0", "", {}, "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA=="],
"qs": ["qs@6.14.1", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ=="],
"quansync": ["quansync@0.2.11", "", {}, "sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA=="],
@@ -3573,6 +3799,8 @@
"require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="],
"require-in-the-middle": ["require-in-the-middle@8.0.1", "", { "dependencies": { "debug": "^4.3.5", "module-details-from-path": "^1.0.3" } }, "sha512-QT7FVMXfWOYFbeRBF6nu+I6tr2Tf3u0q8RIEjNob/heKY/nh7drD/k7eeMFmSQgnTtCzLDcCu/XEnpW2wk4xCQ=="],
"reselect": ["reselect@4.1.8", "", {}, "sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ=="],
"resolve": ["resolve@1.22.11", "", { "dependencies": { "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ=="],
@@ -3765,8 +3993,12 @@
"stoppable": ["stoppable@1.1.0", "", {}, "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw=="],
"stream-browserify": ["stream-browserify@3.0.0", "", { "dependencies": { "inherits": "~2.0.4", "readable-stream": "^3.5.0" } }, "sha512-H73RAHsVBapbim0tU2JwwOiXUj+fikfiaoYAKHF3VJfA0pe2BCzkhAHBlLG6REzE+2WNZcxOXjK7lkso+9euLA=="],
"stream-replace-string": ["stream-replace-string@2.0.0", "", {}, "sha512-TlnjJ1C0QrmxRNrON00JvaFFlNh5TTG00APw23j74ET7gkQpTASi6/L2fuiav8pzK715HXtUeClpBTw2NPSn6w=="],
"streamsearch": ["streamsearch@1.1.0", "", {}, "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg=="],
"streamx": ["streamx@2.23.0", "", { "dependencies": { "events-universal": "^1.0.0", "fast-fifo": "^1.3.2", "text-decoder": "^1.1.0" } }, "sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg=="],
"string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="],
@@ -3835,6 +4067,8 @@
"thunky": ["thunky@1.1.0", "", {}, "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA=="],
"tiktoken": ["tiktoken@1.0.22", "", {}, "sha512-PKvy1rVF1RibfF3JlXBSP0Jrcw2uq3yXdgcEXtKTYn3QJ/cBRBHDnrJ5jHky+MENZ6DIPwNUGWpkVx+7joCpNA=="],
"tiny-inflate": ["tiny-inflate@1.0.3", "", {}, "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw=="],
"tiny-invariant": ["tiny-invariant@1.3.3", "", {}, "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg=="],
@@ -3875,6 +4109,8 @@
"ts-interface-checker": ["ts-interface-checker@0.1.13", "", {}, "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="],
"ts-mixer": ["ts-mixer@6.0.4", "", {}, "sha512-ufKpbmrugz5Aou4wcr5Wc1UUFWOLhq+Fm6qa6P0w0K5Qw2yhaUoiWszhCVuNQyNwrlGiscHOmqYoAox1PtvgjA=="],
"tsconfck": ["tsconfck@3.1.6", "", { "peerDependencies": { "typescript": "^5.0.0" }, "optionalPeers": ["typescript"], "bin": { "tsconfck": "bin/tsconfck.js" } }, "sha512-ks6Vjr/jEw0P1gmOVwutM3B7fWxoWBL2KRDb1JfqGVawBmO5UsvmWOQFGHBPl5yxYz4eERr19E6L7NMv+Fej4w=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
@@ -3991,7 +4227,7 @@
"utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="],
"uuid": ["uuid@8.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw=="],
"uuid": ["uuid@11.1.0", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A=="],
"vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="],
@@ -4105,7 +4341,7 @@
"y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="],
"yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
"yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
"yaml": ["yaml@2.8.2", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A=="],
@@ -4223,6 +4459,12 @@
"@aws-sdk/client-sts/@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.782.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.782.0", "@aws-sdk/types": "3.775.0", "@smithy/node-config-provider": "^4.0.2", "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-dMFkUBgh2Bxuw8fYZQoH/u3H4afQ12VSkzEi//qFiDTwbKYq+u+RYjc8GLDM6JSK1BShMu5AVR7HD4ap1TYUnA=="],
"@aws-sdk/lib-storage/@smithy/middleware-endpoint": ["@smithy/middleware-endpoint@4.4.14", "", { "dependencies": { "@smithy/core": "^3.23.0", "@smithy/middleware-serde": "^4.2.9", "@smithy/node-config-provider": "^4.3.8", "@smithy/shared-ini-file-loader": "^4.4.3", "@smithy/types": "^4.12.0", "@smithy/url-parser": "^4.2.8", "@smithy/util-middleware": "^4.2.8", "tslib": "^2.6.2" } }, "sha512-FUFNE5KVeaY6U/GL0nzAAHkaCHzXLZcY1EhtQnsAqhD8Du13oPKtMB9/0WK4/LK6a/T5OZ24wPoSShff5iI6Ag=="],
"@aws-sdk/lib-storage/@smithy/smithy-client": ["@smithy/smithy-client@4.11.3", "", { "dependencies": { "@smithy/core": "^3.23.0", "@smithy/middleware-endpoint": "^4.4.14", "@smithy/middleware-stack": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "@smithy/util-stream": "^4.5.12", "tslib": "^2.6.2" } }, "sha512-Q7kY5sDau8OoE6Y9zJoRGgje8P4/UY0WzH8R2ok0PDh+iJ+ZnEKowhjEqYafVcubkbYxQVaqwm3iufktzhprGg=="],
"@aws-sdk/lib-storage/buffer": ["buffer@5.6.0", "", { "dependencies": { "base64-js": "^1.0.2", "ieee754": "^1.1.4" } }, "sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw=="],
"@aws-sdk/xml-builder/fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="],
"@azure/core-http/@azure/abort-controller": ["@azure/abort-controller@1.1.0", "", { "dependencies": { "tslib": "^2.2.0" } }, "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw=="],
@@ -4247,8 +4489,24 @@
"@cspotcode/source-map-support/@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.9", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ=="],
"@daytonaio/api-client/axios": ["axios@1.13.5", "", { "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q=="],
"@daytonaio/sdk/axios": ["axios@1.13.5", "", { "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q=="],
"@daytonaio/toolbox-api-client/axios": ["axios@1.13.5", "", { "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q=="],
"@discordjs/rest/@discordjs/collection": ["@discordjs/collection@2.1.1", "", {}, "sha512-LiSusze9Tc7qF03sLCujF5iZp7K+vRNEDBZ86FT9aQAv3vxMLihUvKvpsCWiQ2DJq1tVckopKm1rxomgNUc9hg=="],
"@discordjs/rest/undici": ["undici@6.21.3", "", {}, "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw=="],
"@discordjs/ws/@discordjs/collection": ["@discordjs/collection@2.1.1", "", {}, "sha512-LiSusze9Tc7qF03sLCujF5iZp7K+vRNEDBZ86FT9aQAv3vxMLihUvKvpsCWiQ2DJq1tVckopKm1rxomgNUc9hg=="],
"@discordjs/ws/ws": ["ws@8.19.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg=="],
"@dot/log/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="],
"@effect/platform-node-shared/ws": ["ws@8.19.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg=="],
"@esbuild-kit/core-utils/esbuild": ["esbuild@0.18.20", "", { "optionalDependencies": { "@esbuild/android-arm": "0.18.20", "@esbuild/android-arm64": "0.18.20", "@esbuild/android-x64": "0.18.20", "@esbuild/darwin-arm64": "0.18.20", "@esbuild/darwin-x64": "0.18.20", "@esbuild/freebsd-arm64": "0.18.20", "@esbuild/freebsd-x64": "0.18.20", "@esbuild/linux-arm": "0.18.20", "@esbuild/linux-arm64": "0.18.20", "@esbuild/linux-ia32": "0.18.20", "@esbuild/linux-loong64": "0.18.20", "@esbuild/linux-mips64el": "0.18.20", "@esbuild/linux-ppc64": "0.18.20", "@esbuild/linux-riscv64": "0.18.20", "@esbuild/linux-s390x": "0.18.20", "@esbuild/linux-x64": "0.18.20", "@esbuild/netbsd-x64": "0.18.20", "@esbuild/openbsd-x64": "0.18.20", "@esbuild/sunos-x64": "0.18.20", "@esbuild/win32-arm64": "0.18.20", "@esbuild/win32-ia32": "0.18.20", "@esbuild/win32-x64": "0.18.20" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA=="],
"@fastify/proxy-addr/ipaddr.js": ["ipaddr.js@2.3.0", "", {}, "sha512-Zv/pA+ciVFbCSBBjGfaKUya/CcGmUHzTydLMaTwrUUEM2DIEO3iZvueGxmacvmN50fGpGVKeTXpb2LcYQxeVdg=="],
@@ -4257,6 +4515,8 @@
"@gitlab/gitlab-ai-provider/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
"@grpc/proto-loader/yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="],
"@hey-api/openapi-ts/open": ["open@11.0.0", "", { "dependencies": { "default-browser": "^5.4.0", "define-lazy-prop": "^3.0.0", "is-in-ssh": "^1.0.0", "is-inside-container": "^1.0.0", "powershell-utils": "^0.1.0", "wsl-utils": "^0.3.0" } }, "sha512-smsWv2LzFjP03xmvFoJ331ss6h+jixfA4UUV/Bsiyuu4YJPfN+FIQGOIiv4w9/+MoHkfkJ22UIaQWRVFRfH6Vw=="],
"@hono/zod-validator/zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
@@ -4381,6 +4641,28 @@
"@opencode-ai/web/@shikijs/transformers": ["@shikijs/transformers@3.20.0", "", { "dependencies": { "@shikijs/core": "3.20.0", "@shikijs/types": "3.20.0" } }, "sha512-PrHHMRr3Q5W1qB/42kJW6laqFyWdhrPF2hNR9qjOm1xcSiAO3hAHo7HaVyHE6pMyevmy3i51O8kuGGXC78uK3g=="],
"@opencode/discord/@opencode-ai/sdk": ["@opencode-ai/sdk@1.2.1", "", {}, "sha512-K5e15mIXTyAykBw0GX+8O28IJHlPMw1jI/m3SDu+hgUHjmg2refqLPqyuqv8hE2nRcuGi8HajhpDJjkO7H2S0A=="],
"@opentelemetry/exporter-logs-otlp-proto/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],
"@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],
"@opentelemetry/exporter-trace-otlp-http/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],
"@opentelemetry/exporter-trace-otlp-proto/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],
"@opentelemetry/exporter-zipkin/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],
"@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],
"@opentelemetry/sdk-node/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],
"@opentelemetry/sdk-trace-base/@opentelemetry/core": ["@opentelemetry/core@2.5.1", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-Dwlc+3HAZqpgTYq0MUyZABjFkcrKTePwuiFVLjahGD8cx3enqihmpAmdgNFO1R4m/sIe5afjJrA25Prqy4NXlA=="],
"@opentelemetry/sdk-trace-base/@opentelemetry/resources": ["@opentelemetry/resources@2.5.1", "", { "dependencies": { "@opentelemetry/core": "2.5.1", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-BViBCdE/GuXRlp9k7nS1w6wJvY5fnFX5XvuEtWsTAOQFIO89Eru7lGW3WbfbxtCuZ/GbrJfAziXG0w0dpxL7eQ=="],
"@opentelemetry/sdk-trace-node/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],
"@opentui/solid/@babel/core": ["@babel/core@7.28.0", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.0", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.6", "@babel/parser": "^7.28.0", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.0", "@babel/types": "^7.28.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ=="],
"@opentui/solid/babel-preset-solid": ["babel-preset-solid@1.9.9", "", { "dependencies": { "babel-plugin-jsx-dom-expressions": "^0.40.1" }, "peerDependencies": { "@babel/core": "^7.0.0", "solid-js": "^1.9.8" }, "optionalPeers": ["solid-js"] }, "sha512-pCnxWrciluXCeli/dj5PIEHgbNzim3evtTn12snjqqg8QZWJNMjH1AWIp4iG/tbVjqQ72aBEymMSagvmgxubXw=="],
@@ -4451,8 +4733,6 @@
"@vscode/emmet-helper/jsonc-parser": ["jsonc-parser@2.3.1", "", {}, "sha512-H8jvkz1O50L3dMZCsLqiuB2tA7muqbSg1AtGEkN0leAqGjsUzDJir3Zwr02BhqdcITPg3ei3mZ+HjMocAknhhg=="],
"accepts/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"ai-gateway-provider/@ai-sdk/anthropic": ["@ai-sdk/anthropic@2.0.58", "", { "dependencies": { "@ai-sdk/provider": "2.0.1", "@ai-sdk/provider-utils": "3.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-CkNW5L1Arv8gPtPlEmKd+yf/SG9ucJf0XQdpMG8OiYEtEMc2smuCA+tyCp8zI7IBVg/FE7nUfFHntQFaOjRwJQ=="],
"ai-gateway-provider/@ai-sdk/google-vertex": ["@ai-sdk/google-vertex@3.0.90", "", { "dependencies": { "@ai-sdk/anthropic": "2.0.56", "@ai-sdk/google": "2.0.46", "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.19", "google-auth-library": "^10.5.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-C9MLe1KZGg1ZbupV2osygHtL5qngyCDA6ATatunyfTbIe8TXKG8HGni/3O6ifbnI5qxTidIn150Ox7eIFZVMYg=="],
@@ -4483,6 +4763,8 @@
"aws-sdk/events": ["events@1.1.1", "", {}, "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw=="],
"aws-sdk/uuid": ["uuid@8.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw=="],
"babel-plugin-jsx-dom-expressions/@babel/helper-module-imports": ["@babel/helper-module-imports@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA=="],
"babel-plugin-module-resolver/glob": ["glob@9.3.5", "", { "dependencies": { "fs.realpath": "^1.0.0", "minimatch": "^8.0.2", "minipass": "^4.2.4", "path-scurry": "^1.6.1" } }, "sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q=="],
@@ -4503,6 +4785,8 @@
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
"discord.js/undici": ["undici@6.21.3", "", {}, "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw=="],
"dot-prop/type-fest": ["type-fest@3.13.1", "", {}, "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g=="],
"drizzle-kit/esbuild": ["esbuild@0.19.12", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.19.12", "@esbuild/android-arm": "0.19.12", "@esbuild/android-arm64": "0.19.12", "@esbuild/android-x64": "0.19.12", "@esbuild/darwin-arm64": "0.19.12", "@esbuild/darwin-x64": "0.19.12", "@esbuild/freebsd-arm64": "0.19.12", "@esbuild/freebsd-x64": "0.19.12", "@esbuild/linux-arm": "0.19.12", "@esbuild/linux-arm64": "0.19.12", "@esbuild/linux-ia32": "0.19.12", "@esbuild/linux-loong64": "0.19.12", "@esbuild/linux-mips64el": "0.19.12", "@esbuild/linux-ppc64": "0.19.12", "@esbuild/linux-riscv64": "0.19.12", "@esbuild/linux-s390x": "0.19.12", "@esbuild/linux-x64": "0.19.12", "@esbuild/netbsd-x64": "0.19.12", "@esbuild/openbsd-x64": "0.19.12", "@esbuild/sunos-x64": "0.19.12", "@esbuild/win32-arm64": "0.19.12", "@esbuild/win32-ia32": "0.19.12", "@esbuild/win32-x64": "0.19.12" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg=="],
@@ -4531,8 +4815,6 @@
"finalhandler/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="],
"form-data/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"gaxios/node-fetch": ["node-fetch@3.3.2", "", { "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", "formdata-polyfill": "^4.0.10" } }, "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA=="],
"glob/minimatch": ["minimatch@10.1.1", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ=="],
@@ -4555,6 +4837,8 @@
"lightningcss/detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"lru-cache/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
"md-to-react-email/marked": ["marked@7.0.4", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-t8eP0dXRJMtMvBojtkcsA7n48BkauktUKzfkPSCq85ZMTJ0v76Rke4DYz01omYpPTUh4p/f7HePgRo3ebG8+QQ=="],
"mdast-util-find-and-replace/escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="],
@@ -4569,6 +4853,8 @@
"nitro/h3": ["h3@2.0.1-rc.5", "", { "dependencies": { "rou3": "^0.7.9", "srvx": "^0.9.1" }, "peerDependencies": { "crossws": "^0.4.1" }, "optionalPeers": ["crossws"] }, "sha512-qkohAzCab0nLzXNm78tBjZDvtKMTmtygS8BJLT3VPczAQofdqlFXDPkXdLMJN4r05+xqneG8snZJ0HgkERCZTg=="],
"node-gyp-build-optional-packages/detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"nypm/citty": ["citty@0.2.0", "", {}, "sha512-8csy5IBFI2ex2hTVpaHN2j+LNE199AgiI7y4dMintrr8i0lQiFn+0AWMZrWdHKIgMOer65f8IThysYhoReqjWA=="],
@@ -4645,6 +4931,8 @@
"sst/jose": ["jose@5.2.3", "", {}, "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA=="],
"stream-browserify/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
"string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
"string-width-cjs/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
@@ -4653,8 +4941,6 @@
"sucrase/commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="],
"tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
"terser/commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="],
"token-types/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
@@ -4665,8 +4951,6 @@
"tw-to-css/tailwindcss": ["tailwindcss@3.3.2", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", "chokidar": "^3.5.3", "didyoumean": "^1.2.2", "dlv": "^1.1.3", "fast-glob": "^3.2.12", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", "jiti": "^1.18.2", "lilconfig": "^2.1.0", "micromatch": "^4.0.5", "normalize-path": "^3.0.0", "object-hash": "^3.0.0", "picocolors": "^1.0.0", "postcss": "^8.4.23", "postcss-import": "^15.1.0", "postcss-js": "^4.0.1", "postcss-load-config": "^4.0.1", "postcss-nested": "^6.0.1", "postcss-selector-parser": "^6.0.11", "postcss-value-parser": "^4.2.0", "resolve": "^1.22.2", "sucrase": "^3.32.0" }, "bin": { "tailwind": "lib/cli.js", "tailwindcss": "lib/cli.js" } }, "sha512-9jPkMiIBXvPc2KywkraqsUfbfj+dHDb+JPWtSJa9MLFdrPyazI7q6WX2sUrm7R9eVR7qqv3Pas7EvQFzxKnI6w=="],
"type-is/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"unifont/ofetch": ["ofetch@1.5.1", "", { "dependencies": { "destr": "^2.0.5", "node-fetch-native": "^1.6.7", "ufo": "^1.6.1" } }, "sha512-2W4oUZlVaqAPAil6FUg/difl6YhqhUR7x2eZY4bQCko22UXg3hptq9KLQdqFClV+Wu85UX7hNtdGTngi/1BxcA=="],
"utif2/pako": ["pako@1.0.11", "", {}, "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw=="],
@@ -4741,6 +5025,14 @@
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.782.0", "", { "dependencies": { "@aws-sdk/core": "3.775.0", "@aws-sdk/nested-clients": "3.782.0", "@aws-sdk/types": "3.775.0", "@smithy/property-provider": "^4.0.2", "@smithy/types": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-xCna0opVPaueEbJoclj5C6OpDNi0Gynj+4d7tnuXGgQhTHPyAz8ZyClkVqpi5qvHTgxROdUEDxWqEO5jqRHZHQ=="],
"@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core": ["@smithy/core@3.23.0", "", { "dependencies": { "@smithy/middleware-serde": "^4.2.9", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-stream": "^4.5.12", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" } }, "sha512-Yq4UPVoQICM9zHnByLmG8632t2M0+yap4T7ANVw482J0W7HW0pOuxwVmeOwzJqX2Q89fkXz0Vybz55Wj2Xzrsg=="],
"@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/core": ["@smithy/core@3.23.0", "", { "dependencies": { "@smithy/middleware-serde": "^4.2.9", "@smithy/protocol-http": "^5.3.8", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-middleware": "^4.2.8", "@smithy/util-stream": "^4.5.12", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" } }, "sha512-Yq4UPVoQICM9zHnByLmG8632t2M0+yap4T7ANVw482J0W7HW0pOuxwVmeOwzJqX2Q89fkXz0Vybz55Wj2Xzrsg=="],
"@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream": ["@smithy/util-stream@4.5.12", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.9", "@smithy/node-http-handler": "^4.4.10", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-D8tgkrmhAX/UNeCZbqbEO3uqyghUnEmmoO9YEvRuwxjlkKKUE7FOgCJnqpTlQPe9MApdWPky58mNQQHbnCzoNg=="],
"@aws-sdk/lib-storage/buffer/ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
"@aws-sdk/xml-builder/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"@azure/core-xml/fast-xml-parser/strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
@@ -4791,6 +5083,10 @@
"@esbuild-kit/core-utils/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.18.20", "", { "os": "win32", "cpu": "x64" }, "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ=="],
"@grpc/proto-loader/yargs/cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="],
"@grpc/proto-loader/yargs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
"@isaacs/cliui/string-width/emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="],
"@isaacs/cliui/wrap-ansi/ansi-styles": ["ansi-styles@6.2.3", "", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="],
@@ -4869,6 +5165,8 @@
"@modelcontextprotocol/sdk/express/merge-descriptors": ["merge-descriptors@2.0.0", "", {}, "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g=="],
"@modelcontextprotocol/sdk/express/mime-types": ["mime-types@3.0.2", "", { "dependencies": { "mime-db": "^1.54.0" } }, "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A=="],
"@modelcontextprotocol/sdk/express/send": ["send@1.2.1", "", { "dependencies": { "debug": "^4.4.3", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", "http-errors": "^2.0.1", "mime-types": "^3.0.2", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", "statuses": "^2.0.2" } }, "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ=="],
"@modelcontextprotocol/sdk/express/serve-static": ["serve-static@2.2.1", "", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw=="],
@@ -4973,8 +5271,6 @@
"@pierre/diffs/@shikijs/transformers/@shikijs/types": ["@shikijs/types@3.20.0", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.2", "@types/hast": "^3.0.4" } }, "sha512-lhYAATn10nkZcBQ0BlzSbJA3wcmL5MXUUF8d2Zzon6saZDlToKaiRX60n2+ZaHJCmXEcZRWNzn+k9vplr8Jhsw=="],
"@slack/web-api/form-data/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"@slack/web-api/p-queue/eventemitter3": ["eventemitter3@4.0.7", "", {}, "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="],
"@slack/web-api/p-queue/p-timeout": ["p-timeout@3.2.0", "", { "dependencies": { "p-finally": "^1.0.0" } }, "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg=="],
@@ -4993,8 +5289,6 @@
"@tailwindcss/oxide-wasm32-wasi/@napi-rs/wasm-runtime/@tybys/wasm-util": ["@tybys/wasm-util@0.10.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg=="],
"accepts/mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"ai-gateway-provider/@ai-sdk/google-vertex/@ai-sdk/anthropic": ["@ai-sdk/anthropic@2.0.56", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.19" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-XHJKu0Yvfu9SPzRfsAFESa+9T7f2YJY6TxykKMfRsAwpeWAiX/Gbx5J5uM15AzYC3Rw8tVP3oH+j7jEivENirQ=="],
"ai-gateway-provider/@ai-sdk/google-vertex/@ai-sdk/google": ["@ai-sdk/google@2.0.46", "", { "dependencies": { "@ai-sdk/provider": "2.0.0", "@ai-sdk/provider-utils": "3.0.19" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-8PK6u4sGE/kXebd7ZkTp+0aya4kNqzoqpS5m7cHY2NfTK6fhPc6GNvE+MZIZIoHQTp5ed86wGBdeBPpFaaUtyg=="],
@@ -5085,8 +5379,6 @@
"finalhandler/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
"form-data/mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"gray-matter/js-yaml/argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="],
"js-beautify/glob/jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="],
@@ -5133,8 +5425,6 @@
"tw-to-css/tailwindcss/postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
"type-is/mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"wrangler/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.4", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1VCICWypeQKhVbE9oW/sJaAmjLxhVqacdkvPLEjwlttjfwENRSClS8EjBz0KzRyFSCPDIkuXW34Je/vk7zdB7Q=="],
"wrangler/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.25.4", "", { "os": "android", "cpu": "arm" }, "sha512-QNdQEps7DfFwE3hXiU4BZeOV68HHzYwGd0Nthhd3uCkkEKK7/R6MTgM0P7H7FAs5pU/DIWsviMmEGxEoxIZ+ZQ=="],
@@ -5217,6 +5507,18 @@
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-web-identity/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.782.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.775.0", "@aws-sdk/middleware-host-header": "3.775.0", "@aws-sdk/middleware-logger": "3.775.0", "@aws-sdk/middleware-recursion-detection": "3.775.0", "@aws-sdk/middleware-user-agent": "3.782.0", "@aws-sdk/region-config-resolver": "3.775.0", "@aws-sdk/types": "3.775.0", "@aws-sdk/util-endpoints": "3.782.0", "@aws-sdk/util-user-agent-browser": "3.775.0", "@aws-sdk/util-user-agent-node": "3.782.0", "@smithy/config-resolver": "^4.1.0", "@smithy/core": "^3.2.0", "@smithy/fetch-http-handler": "^5.0.2", "@smithy/hash-node": "^4.0.2", "@smithy/invalid-dependency": "^4.0.2", "@smithy/middleware-content-length": "^4.0.2", "@smithy/middleware-endpoint": "^4.1.0", "@smithy/middleware-retry": "^4.1.0", "@smithy/middleware-serde": "^4.0.3", "@smithy/middleware-stack": "^4.0.2", "@smithy/node-config-provider": "^4.0.2", "@smithy/node-http-handler": "^4.0.4", "@smithy/protocol-http": "^5.1.0", "@smithy/smithy-client": "^4.2.0", "@smithy/types": "^4.2.0", "@smithy/url-parser": "^4.0.2", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.8", "@smithy/util-defaults-mode-node": "^4.0.8", "@smithy/util-endpoints": "^3.0.2", "@smithy/util-middleware": "^4.0.2", "@smithy/util-retry": "^4.0.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-QOYC8q7luzHFXrP0xYAqBctoPkynjfV0r9dqntFu4/IWMTyC1vlo1UTxFAjIPyclYw92XJyEkVCVg9v/nQnsUA=="],
"@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream": ["@smithy/util-stream@4.5.12", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.9", "@smithy/node-http-handler": "^4.4.10", "@smithy/types": "^4.12.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-D8tgkrmhAX/UNeCZbqbEO3uqyghUnEmmoO9YEvRuwxjlkKKUE7FOgCJnqpTlQPe9MApdWPky58mNQQHbnCzoNg=="],
"@aws-sdk/lib-storage/@smithy/smithy-client/@smithy/util-stream/@smithy/node-http-handler": ["@smithy/node-http-handler@4.4.10", "", { "dependencies": { "@smithy/abort-controller": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/querystring-builder": "^4.2.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-u4YeUwOWRZaHbWaebvrs3UhwQwj+2VNmcVCwXcYTvPIuVyM7Ex1ftAj+fdbG/P4AkBwLq/+SKn+ydOI4ZJE9PA=="],
"@grpc/proto-loader/yargs/cliui/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
"@grpc/proto-loader/yargs/cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
"@grpc/proto-loader/yargs/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
"@grpc/proto-loader/yargs/string-width/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
"@jsx-email/cli/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"@jsx-email/cli/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],
@@ -5267,6 +5569,8 @@
"@modelcontextprotocol/sdk/express/accepts/negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="],
"@modelcontextprotocol/sdk/express/mime-types/mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="],
"@modelcontextprotocol/sdk/express/type-is/media-typer": ["media-typer@1.1.0", "", {}, "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw=="],
"@octokit/auth-app/@octokit/request-error/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@27.0.0", "", {}, "sha512-whrdktVs1h6gtR+09+QsNk2+FO+49j6ga1c55YZudfEG+oKJVvJLQi3zkOm5JjiUXAagWK2tI2kTGKJ2Ys7MGA=="],
@@ -5289,8 +5593,6 @@
"@opencode-ai/desktop/@actions/artifact/@actions/http-client/undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="],
"@slack/web-api/form-data/mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"@solidjs/start/shiki/@shikijs/engine-javascript/oniguruma-to-es": ["oniguruma-to-es@2.3.0", "", { "dependencies": { "emoji-regex-xs": "^1.0.0", "regex": "^5.1.1", "regex-recursion": "^5.1.1" } }, "sha512-bwALDxriqfKGfUufKGGepCzu9x7nJQuoRoAFp4AnwehhC2crqrDIAP/uN2qdlsAvSMpeRC3+Yzhqc7hLmle5+g=="],
"ansi-align/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
@@ -5327,6 +5629,8 @@
"opencontrol/@modelcontextprotocol/sdk/express/merge-descriptors": ["merge-descriptors@2.0.0", "", {}, "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g=="],
"opencontrol/@modelcontextprotocol/sdk/express/mime-types": ["mime-types@3.0.2", "", { "dependencies": { "mime-db": "^1.54.0" } }, "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A=="],
"opencontrol/@modelcontextprotocol/sdk/express/send": ["send@1.2.1", "", { "dependencies": { "debug": "^4.4.3", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", "http-errors": "^2.0.1", "mime-types": "^3.0.2", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", "statuses": "^2.0.2" } }, "sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ=="],
"opencontrol/@modelcontextprotocol/sdk/express/serve-static": ["serve-static@2.2.1", "", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw=="],
@@ -5349,6 +5653,12 @@
"@aws-sdk/client-sts/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.782.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.775.0", "@aws-sdk/middleware-host-header": "3.775.0", "@aws-sdk/middleware-logger": "3.775.0", "@aws-sdk/middleware-recursion-detection": "3.775.0", "@aws-sdk/middleware-user-agent": "3.782.0", "@aws-sdk/region-config-resolver": "3.775.0", "@aws-sdk/types": "3.775.0", "@aws-sdk/util-endpoints": "3.782.0", "@aws-sdk/util-user-agent-browser": "3.775.0", "@aws-sdk/util-user-agent-node": "3.782.0", "@smithy/config-resolver": "^4.1.0", "@smithy/core": "^3.2.0", "@smithy/fetch-http-handler": "^5.0.2", "@smithy/hash-node": "^4.0.2", "@smithy/invalid-dependency": "^4.0.2", "@smithy/middleware-content-length": "^4.0.2", "@smithy/middleware-endpoint": "^4.1.0", "@smithy/middleware-retry": "^4.1.0", "@smithy/middleware-serde": "^4.0.3", "@smithy/middleware-stack": "^4.0.2", "@smithy/node-config-provider": "^4.0.2", "@smithy/node-http-handler": "^4.0.4", "@smithy/protocol-http": "^5.1.0", "@smithy/smithy-client": "^4.2.0", "@smithy/types": "^4.2.0", "@smithy/url-parser": "^4.0.2", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.8", "@smithy/util-defaults-mode-node": "^4.0.8", "@smithy/util-endpoints": "^3.0.2", "@smithy/util-middleware": "^4.0.2", "@smithy/util-retry": "^4.0.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-QOYC8q7luzHFXrP0xYAqBctoPkynjfV0r9dqntFu4/IWMTyC1vlo1UTxFAjIPyclYw92XJyEkVCVg9v/nQnsUA=="],
"@aws-sdk/lib-storage/@smithy/middleware-endpoint/@smithy/core/@smithy/util-stream/@smithy/node-http-handler": ["@smithy/node-http-handler@4.4.10", "", { "dependencies": { "@smithy/abort-controller": "^4.2.8", "@smithy/protocol-http": "^5.3.8", "@smithy/querystring-builder": "^4.2.8", "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-u4YeUwOWRZaHbWaebvrs3UhwQwj+2VNmcVCwXcYTvPIuVyM7Ex1ftAj+fdbG/P4AkBwLq/+SKn+ydOI4ZJE9PA=="],
"@grpc/proto-loader/yargs/cliui/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
"@grpc/proto-loader/yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
"@jsx-email/cli/tailwindcss/chokidar/readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"@solidjs/start/shiki/@shikijs/engine-javascript/oniguruma-to-es/regex": ["regex@5.1.1", "", { "dependencies": { "regex-utilities": "^2.3.0" } }, "sha512-dN5I359AVGPnwzJm2jN1k0W9LPZ+ePvoOeVMMfqIMFz53sSwXkxaJoxr50ptnsC771lK95BnTrVSZxq0b9yCGw=="],
@@ -5357,6 +5667,8 @@
"opencontrol/@modelcontextprotocol/sdk/express/accepts/negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="],
"opencontrol/@modelcontextprotocol/sdk/express/mime-types/mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="],
"opencontrol/@modelcontextprotocol/sdk/express/type-is/media-typer": ["media-typer@1.1.0", "", {}, "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw=="],
"pkg-up/find-up/locate-path/p-locate/p-limit": ["p-limit@2.3.0", "", { "dependencies": { "p-try": "^2.0.0" } }, "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w=="],

View File

@@ -0,0 +1,12 @@
node_modules
dist
.turbo
.env
.env.*
*.sqlite
*.sqlite-shm
*.sqlite-wal
.git

View File

@@ -0,0 +1,32 @@
# Discord
DISCORD_TOKEN=
DATABASE_PATH=discord.sqlite # SQLite file path (use /data/discord.sqlite in Docker)
ALLOWED_CHANNEL_IDS= # Comma-separated Discord channel IDs
DISCORD_ROLE_ID= # Role ID that triggers the bot (optional, for @role mentions)
DISCORD_CATEGORY_ID= # Optional category ID that is allowed
DISCORD_REQUIRED_ROLE_ID= # Optional role required to talk to bot
DISCORD_COMMAND_GUILD_ID= # Optional guild ID for instant slash command updates (dev only)
# Daytona
DAYTONA_API_KEY=
DAYTONA_SNAPSHOT= # Optional; prebuilt snapshot name for faster sandbox creation
# OpenCode (injected into sandboxes)
OPENCODE_ZEN_API_KEY=
GITHUB_TOKEN= # Optional; enables authenticated gh CLI inside sandbox
# Observability
LOG_LEVEL=info
LOG_PRETTY=false
HEALTH_HOST=0.0.0.0
HEALTH_PORT=8787
TURN_ROUTING_MODE=ai # off | heuristic | ai
TURN_ROUTING_MODEL=claude-haiku-4-5
# Bot behavior
SANDBOX_REUSE_POLICY=resume_preferred
SANDBOX_TIMEOUT_MINUTES=30
PAUSED_TTL_MINUTES=180
RESUME_HEALTH_TIMEOUT_MS=120000
SANDBOX_CREATION_TIMEOUT=180
OPENCODE_MODEL=opencode/claude-sonnet-4-5

41
packages/discord/.gitignore vendored Normal file
View File

@@ -0,0 +1,41 @@
# Dependencies
node_modules/
# Build output
dist/
# Environment
.env
.env.local
.env.*.local
.env.*
!.env.example
# Bun
*.lockb
# Turbo
.turbo/
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# OS
.DS_Store
Thumbs.db
# Logs
*.log
# Local SQLite
*.sqlite
*.sqlite-shm
*.sqlite-wal
data/
# Sensitive
.censitive

1
packages/discord/AGENTS.md Symbolic link
View File

@@ -0,0 +1 @@
CLAUDE.md

View File

@@ -0,0 +1,20 @@
# Runtime image for the Discord bot: Bun on Alpine, single-stage.
FROM oven/bun:1.3.9-alpine
# All application files live under /app.
WORKDIR /app
# Copy only the manifest first so the dependency-install layer is cached
# until package.json changes.
# NOTE(review): no lockfile is copied, so installs are not pinned to exact
# versions — confirm whether bun.lock should be copied alongside package.json.
COPY package.json ./
RUN bun install --production
COPY src ./src
ENV NODE_ENV=production
# Defaults match the /data volume and the exposed health port below;
# all three can be overridden at `docker run` time.
ENV DATABASE_PATH=/data/discord.sqlite
ENV HEALTH_HOST=0.0.0.0
ENV HEALTH_PORT=8787
# SQLite state persists across container restarts via the /data volume.
RUN mkdir -p /data
VOLUME ["/data"]
# Health HTTP server (GET /healthz, GET /readyz).
EXPOSE 8787
CMD ["bun", "run", "src/index.ts"]

33
packages/discord/Makefile Normal file
View File

@@ -0,0 +1,33 @@
# Local Docker workflow for the Discord bot.
# All variables use ?= so they can be overridden on the command line,
# e.g. `make docker-run HOST_PORT=9000 ENV_FILE=.env.local`.
IMAGE ?= opencode-discord:local
CONTAINER ?= opencode-discord-local
ENV_FILE ?= .env
DATA_DIR ?= data
HOST_PORT ?= 8787
.PHONY: docker-build docker-run docker-stop docker-restart docker-logs docker-status
# Build the image from the local Dockerfile.
docker-build:
docker build -t $(IMAGE) -f Dockerfile .
# Start (or replace) the container; any existing container with the same name
# is force-removed first. SQLite data persists under $(DATA_DIR) on the host.
docker-run:
mkdir -p $(DATA_DIR)
docker rm -f $(CONTAINER) >/dev/null 2>&1 || true
docker run -d \
--name $(CONTAINER) \
--env-file $(ENV_FILE) \
-e DATABASE_PATH=/data/discord.sqlite \
-p $(HOST_PORT):8787 \
-v $(CURDIR)/$(DATA_DIR):/data \
$(IMAGE)
# Stop and remove the container; "not found" errors are ignored.
docker-stop:
docker stop $(CONTAINER) >/dev/null 2>&1 || true
docker rm $(CONTAINER) >/dev/null 2>&1 || true
docker-restart: docker-stop docker-run
# Tail container logs.
docker-logs:
docker logs -f $(CONTAINER)
# Show container status and port mappings.
docker-status:
docker ps --filter "name=$(CONTAINER)" --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"

185
packages/discord/README.md Normal file
View File

@@ -0,0 +1,185 @@
# OpenCord
Discord bot that provisions [Daytona](https://daytona.io) sandboxes running [OpenCode](https://opencode.ai) sessions. Each Discord thread gets its own isolated sandbox with full conversational context.
## How It Works
1. Mention the bot in an allowed channel
2. Bot creates a Discord thread and provisions a Daytona sandbox
3. OpenCode runs inside the sandbox, responding to messages in the thread
4. Inactive threads pause their sandbox automatically; activity resumes them
5. Conversational context is preserved across bot restarts
## Setup
### Prerequisites
- [Bun](https://bun.sh) installed
- A Discord bot application (see below)
- A [Daytona](https://daytona.io) account with API access
- An [OpenCode](https://opencode.ai) API key
### 1. Create a Discord Bot
1. Go to the [Discord Developer Portal](https://discord.com/developers/applications)
2. Create a new application
3. Go to **Bot** and click **Reset Token** — save this as `DISCORD_TOKEN`
4. Enable **Message Content Intent** under **Privileged Gateway Intents**
5. Go to **OAuth2 > URL Generator**, select scopes `bot` and `applications.commands` with permissions: **Send Messages**, **Create Public Threads**, **Send Messages in Threads**, **Read Message History**
6. Use the generated URL to invite the bot to your server
### 2. Get Your API Keys
- **Daytona**: Sign up at [daytona.io](https://daytona.io) and generate an API key from your dashboard
- **OpenCode**: Get an API key from [opencode.ai](https://opencode.ai)
- **GitHub Token** (optional): A personal access token — enables authenticated `gh` CLI inside sandboxes
### 3. Configure and Run
```bash
bun install
cp .env.example .env
# Fill in required values (see below)
bun run db:init
bun run dev
```
### 4. Run with Docker
From the repo root, use the built-in Make targets:
```bash
cp packages/discord/.env.example packages/discord/.env
# Fill in required values in packages/discord/.env
make -C packages/discord docker-build
make -C packages/discord docker-run
make -C packages/discord docker-status
```
SQLite data is persisted locally in `packages/discord/data`.
Useful commands:
```bash
make -C packages/discord docker-logs
make -C packages/discord docker-stop
```
If you prefer plain Docker commands instead of Make, run:
```bash
docker build -t opencode-discord packages/discord
docker run --name opencode-discord-local \
--env-file packages/discord/.env \
-e DATABASE_PATH=/data/discord.sqlite \
-p 8787:8787 \
-v $(pwd)/packages/discord/data:/data \
opencode-discord
```
This image does not require Docker Compose or any special network wiring; it needs only outbound access to the Discord, Daytona, and OpenCode APIs.
### Environment Variables
#### Required
| Variable | Description |
| ---------------------- | ------------------------------------------- |
| `DISCORD_TOKEN` | Bot token from the Discord Developer Portal |
| `DAYTONA_API_KEY` | API key from your Daytona dashboard |
| `OPENCODE_ZEN_API_KEY` | API key from OpenCode |
#### Optional — Discord
| Variable | Default | Description |
| -------------------------- | --------- | ----------------------------------------------------------------------- |
| `ALLOWED_CHANNEL_IDS` | _(empty)_ | Comma-separated channel IDs where the bot listens. Empty = all channels |
| `DISCORD_CATEGORY_ID` | _(empty)_ | Restrict the bot to a specific channel category |
| `DISCORD_ROLE_ID` | _(empty)_ | Role ID that triggers the bot via @role mentions |
| `DISCORD_REQUIRED_ROLE_ID` | _(empty)_ | Role users must have to interact with the bot |
| `DISCORD_COMMAND_GUILD_ID` | _(empty)_ | Register slash commands in one guild for instant updates (dev-friendly) |
#### Optional — Storage & Runtime
| Variable | Default | Description |
| ---------------- | ---------------------------- | -------------------------------------------------- |
| `DATABASE_PATH` | `discord.sqlite` | Path to the local SQLite file |
| `GITHUB_TOKEN` | _(empty)_ | Injected into sandboxes for authenticated `gh` CLI |
| `DAYTONA_SNAPSHOT` | _(empty)_ | Prebuilt Daytona snapshot name for faster startup |
| `OPENCODE_MODEL` | `opencode/claude-sonnet-4-5` | Model used inside OpenCode sessions |
#### Optional — Bot Behavior
| Variable | Default | Description |
| -------------------------- | ------------------ | ------------------------------------------------------------------------------ |
| `SANDBOX_REUSE_POLICY` | `resume_preferred` | `resume_preferred` or `recreate` |
| `SANDBOX_TIMEOUT_MINUTES` | `30` | Minutes of inactivity before pausing a sandbox |
| `PAUSED_TTL_MINUTES` | `180` | Minutes a paused sandbox lives before being destroyed |
| `RESUME_HEALTH_TIMEOUT_MS` | `120000` | Timeout (ms) when waiting for a sandbox to resume |
| `SANDBOX_CREATION_TIMEOUT` | `180` | Timeout (s) for sandbox creation |
| `TURN_ROUTING_MODE` | `ai` | How the bot decides if a message needs a response: `off`, `heuristic`, or `ai` |
| `TURN_ROUTING_MODEL` | `claude-haiku-4-5` | Model used for AI turn routing |
#### Optional — Observability
| Variable | Default | Description |
| ------------- | --------- | ----------------------------------- |
| `LOG_LEVEL` | `info` | `debug`, `info`, `warn`, or `error` |
| `LOG_PRETTY` | `false` | Pretty-print JSON logs |
| `HEALTH_HOST` | `0.0.0.0` | Host for the health HTTP server |
| `HEALTH_PORT` | `8787` | Port for the health HTTP server |
## Commands
| Command | Description |
| ------------------- | --------------------------- |
| `bun run dev` | Watch mode |
| `bun run start` | Production run |
| `bun run db:init` | Initialize/migrate database |
| `bun run snapshot:create` | Build/activate a Daytona snapshot |
| `bun run typecheck` | TypeScript checks |
| `bun run build` | Bundle for deployment |
| `bun run check` | Typecheck + build |
### Faster Sandbox Startup (Snapshot)
Build and activate a reusable Daytona snapshot once:
```bash
bun run snapshot:create opencode-discord-v1
```
Then set this in `.env`:
```bash
DAYTONA_SNAPSHOT=opencode-discord-v1
```
### Discord Slash Commands
- `/status` — show current sandbox session for the thread
- `/reset` — destroy session so next message provisions a fresh sandbox
These map to the existing `!status` / `!reset` behavior.
## Health Endpoints
- `GET /healthz` — Liveness check (uptime, Discord status, active sessions)
- `GET /readyz` — Readiness check (200 when Discord connected, 503 otherwise)
## Architecture
```
Discord / CLI
└─ Conversation service (Inbox → turn logic → Outbox)
├─ IngressDedup (message-id dedup in conversation path)
├─ OffsetStore (durable Discord catch-up offsets)
└─ ThreadChatCluster.send(threadId)
└─ ThreadEntity (cluster actor per thread)
├─ active? → health check → reuse
├─ paused? → SandboxProvisioner.resume() → reattach session
└─ missing? → SandboxProvisioner.provision() → new sandbox + session
```
Sessions are persisted in a local SQLite file. Sandbox filesystem (including OpenCode session state) survives pause/resume cycles via Daytona stop/start.

View File

@@ -0,0 +1,41 @@
{
"name": "@opencode/discord",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"db:init": "bun run src/db/init.ts",
"dev": "bun run --watch src/index.ts",
"dev:setup": "bun run db:init",
"start": "bun run src/index.ts",
"conversation:cli": "bun run src/control/cli.ts",
"conversation:controller": "bun run src/control/controller.ts",
"conversation:ctl": "bun run src/control/controller.ts",
"snapshot:create": "bun run src/sandbox/daytona/snapshot.ts",
"build": "bun build src/index.ts --target=bun --outdir=dist",
"typecheck": "tsc --noEmit",
"check": "bun run typecheck && bun run build",
"prepare": "effect-language-service patch"
},
"dependencies": {
"@daytonaio/sdk": "latest",
"@effect/ai": "0.33.2",
"@effect/ai-anthropic": "0.23.0",
"@effect/cluster": "0.56.4",
"@effect/experimental": "0.58.0",
"@effect/platform": "0.94.5",
"@effect/platform-bun": "0.87.1",
"@effect/rpc": "0.73.1",
"@effect/sql-sqlite-bun": "0.50.2",
"@opencode-ai/sdk": "latest",
"discord.js": "^14",
"effect": "3.19.17"
},
"devDependencies": {
"@effect/language-service": "0.73.1",
"@effect/sql": "0.49.0",
"@types/bun": "latest",
"@types/node": "^22",
"typescript": "^5"
}
}

View File

@@ -0,0 +1,60 @@
you are an engineering assistant for the opencode repo, running inside a discord workflow.
your job is to help people solve real code and operations problems quickly.
## communication style
- default to lowercase developer style.
- mirror the user's phrasing and level of formality.
- be concise by default. expand only when the question needs it.
- skip fluff and generic preambles. answer directly.
- if unsure, say so and state exactly what you need to verify.
## operating process
1. understand the request and goal.
2. inspect the codebase and runtime signals first (files, logs, tests, git history).
3. use tools to gather evidence before concluding.
4. give a concrete answer with file references and next action.
5. if asked to implement, make the change and verify.
## tool usage
- you can use repo tools and shell commands.
- prefer fast code search (`rg`) and direct file reads.
- use git commands for context and diffs.
- use github cli (`gh`) for issues/prs when asked, or when explicitly instructed to file findings.
- use web lookup when external, time-sensitive, or non-repo facts are needed.
## github identity and capabilities
- your github account name is `opendude`.
- your github identity is the account authenticated in the sandbox via `GH_TOKEN` / `GITHUB_TOKEN`.
- if the user asks who you are on github, check with `gh auth status` or `gh api user`.
- you can create branches, push commits, open pull requests, open issues, and post issue/pr comments when repo permissions allow.
- default to creating pull requests against `dev` unless the user specifies another base branch.
- do not merge pull requests unless the user explicitly asks and permissions allow it.
## github issue workflow
when creating an issue, include:
- summary
- impact
- reproduction steps
- expected vs actual behavior
- suspected files/components
- proposed next step
## quality bar
- do not invent behavior; verify from code or logs.
- include file paths and line references for technical claims.
- summarize command outputs; do not dump noisy logs unless requested.
- do not edit files unless explicitly asked.
- avoid risky/destructive actions unless explicitly approved.
## continuous improvement (for later expansion)
- if a recurring workflow appears, propose a reusable skill/process.
- present a short skill spec: trigger, inputs, steps, outputs.
- do not self-modify prompt or automation config without explicit user approval.

View File

@@ -0,0 +1,79 @@
import { AnthropicClient, AnthropicLanguageModel } from "@effect/ai-anthropic"
import { SingleRunner, TestRunner } from "@effect/cluster"
import { FetchHttpClient } from "@effect/platform"
import { BunContext } from "@effect/platform-bun"
import { Effect, Layer, LogLevel, Logger } from "effect"
import { Conversation } from "../conversation/conversation"
import { IngressDedup } from "../conversation/dedup"
import { History } from "../conversation/history"
import { OffsetStore } from "../conversation/offsets"
import { ThreadChatClusterLive, ThreadControlClusterLive, ThreadEntityLive } from "../conversation/thread/cluster"
import { AppConfig } from "../config"
import { SqliteDb } from "../db/client"
import { DiscordConversationServicesLive } from "../discord/adapter"
import { DiscordClient } from "../discord/client"
import { TurnRouter } from "../discord/turn-routing"
import { LoggerLive } from "../observability/logger"
import { DaytonaService } from "../sandbox/daytona/service"
import { OpenCodeClient } from "../sandbox/opencode/client"
import { SandboxProvisioner } from "../sandbox/provisioner"
import { SessionStore } from "../session/store"
export const AnthropicLayer = Layer.unwrapEffect(
Effect.gen(function* () {
const config = yield* AppConfig
return AnthropicLanguageModel.layer({ model: config.turnRoutingModel }).pipe(
Layer.provide(AnthropicClient.layer({
apiKey: config.openCodeZenApiKey,
apiUrl: "https://opencode.ai/zen",
})),
Layer.provide(FetchHttpClient.layer),
)
}),
)
// Shared foundation for every entrypoint: config, fetch-backed HTTP client,
// and the Bun platform services.
const Base = Layer.mergeAll(AppConfig.layer, FetchHttpClient.layer, BunContext.layer)
// Production stack for the Discord bot. Each provideMerge step satisfies the
// new layer's requirements from the stack below it AND re-exposes everything,
// so later layers may depend on any earlier service. The chain order is
// load-bearing: a layer must appear above every service it consumes.
const AppBase = Layer.mergeAll(Base, LoggerLive)
const AppWithSqlite = Layer.provideMerge(SqliteDb.layer, AppBase)
const AppWithAnthropic = Layer.provideMerge(AnthropicLayer, AppWithSqlite)
const AppWithDaytona = Layer.provideMerge(DaytonaService.layer, AppWithAnthropic)
const AppWithOpenCode = Layer.provideMerge(OpenCodeClient.layer, AppWithDaytona)
const AppWithRouting = Layer.provideMerge(TurnRouter.layer, AppWithOpenCode)
const AppWithSessions = Layer.provideMerge(SessionStore.layer, AppWithRouting)
const AppWithProvisioner = Layer.provideMerge(SandboxProvisioner.layer, AppWithSessions)
const AppWithOffsets = Layer.provideMerge(OffsetStore.layer, AppWithProvisioner)
const AppWithDedup = Layer.provideMerge(IngressDedup.layer, AppWithOffsets)
const AppWithDiscordClient = Layer.provideMerge(DiscordClient.layer, AppWithDedup)
const AppWithDiscordAdapters = Layer.provideMerge(DiscordConversationServicesLive, AppWithDiscordClient)
// Single-process cluster runner backed by in-memory runner storage.
const AppWithRunner = Layer.provideMerge(SingleRunner.layer({ runnerStorage: "memory" }), AppWithDiscordAdapters)
const AppWithThreadEntity = Layer.provideMerge(ThreadEntityLive, AppWithRunner)
const AppWithThreadChat = Layer.provideMerge(ThreadChatClusterLive, AppWithThreadEntity)
// Full conversation runtime consumed by the bot's main entrypoint.
export const AppConversationLayer = Layer.provideMerge(Conversation.layer, AppWithThreadChat)
// Controller stack: logging fully silenced, the in-memory TestRunner cluster,
// and History.passthrough instead of the live Discord-backed history service.
// No Discord client or adapters are wired in here.
const ControlBase = Layer.mergeAll(Base, Logger.minimumLogLevel(LogLevel.None))
const ControlWithSqlite = Layer.provideMerge(SqliteDb.layer, ControlBase)
const ControlWithDaytona = Layer.provideMerge(DaytonaService.layer, ControlWithSqlite)
const ControlWithOpenCode = Layer.provideMerge(OpenCodeClient.layer, ControlWithDaytona)
const ControlWithSessions = Layer.provideMerge(SessionStore.layer, ControlWithOpenCode)
const ControlWithProvisioner = Layer.provideMerge(SandboxProvisioner.layer, ControlWithSessions)
const ControlWithRunner = Layer.provideMerge(TestRunner.layer, ControlWithProvisioner)
const ControlWithHistory = Layer.provideMerge(History.passthrough, ControlWithRunner)
const ControlWithThreadEntity = Layer.provideMerge(ThreadEntityLive, ControlWithHistory)
const ControlWithThreadChat = Layer.provideMerge(ThreadChatClusterLive, ControlWithThreadEntity)
// Thread-control cluster entrypoint used by the controller process.
export const ControlThreadLayer = Layer.provideMerge(ThreadControlClusterLive, ControlWithThreadChat)
// CLI stack: warnings-only logging, the Anthropic routing layer enabled,
// the in-memory TestRunner cluster, History.passthrough in place of Discord
// history, and the no-op ingress-dedup layer instead of the live one.
const CliBase = Layer.mergeAll(Base, Logger.minimumLogLevel(LogLevel.Warning))
const CliWithSqlite = Layer.provideMerge(SqliteDb.layer, CliBase)
const CliWithAnthropic = Layer.provideMerge(AnthropicLayer, CliWithSqlite)
const CliWithDaytona = Layer.provideMerge(DaytonaService.layer, CliWithAnthropic)
const CliWithOpenCode = Layer.provideMerge(OpenCodeClient.layer, CliWithDaytona)
const CliWithRouting = Layer.provideMerge(TurnRouter.layer, CliWithOpenCode)
const CliWithSessions = Layer.provideMerge(SessionStore.layer, CliWithRouting)
const CliWithProvisioner = Layer.provideMerge(SandboxProvisioner.layer, CliWithSessions)
const CliWithRunner = Layer.provideMerge(TestRunner.layer, CliWithProvisioner)
const CliWithHistory = Layer.provideMerge(History.passthrough, CliWithRunner)
const CliWithThreadEntity = Layer.provideMerge(ThreadEntityLive, CliWithHistory)
const CliWithThreadChat = Layer.provideMerge(ThreadChatClusterLive, CliWithThreadEntity)
const CliWithThreadControl = Layer.provideMerge(ThreadControlClusterLive, CliWithThreadChat)
// Static portion of the CLI runtime; the CLI composes its TUI adapter on top.
export const CliConversationStaticLayer = Layer.provideMerge(IngressDedup.noop, CliWithThreadControl)

View File

@@ -0,0 +1,40 @@
import { describe, expect } from "bun:test"
import { ConfigProvider, Duration, Effect, Redacted } from "effect"
import { AppConfig } from "./config"
import { effectTest } from "./test/effect"
// Builds a ConfigProvider preloaded with the three required secrets.
// Entries from `input` are appended after the defaults, so a test can
// override a required key or add optional ones.
const provider = (input?: ReadonlyArray<readonly [string, string]>) => {
  const entries: Array<readonly [string, string]> = [
    ["DISCORD_TOKEN", "discord-token"],
    ["DAYTONA_API_KEY", "daytona-token"],
    ["OPENCODE_ZEN_API_KEY", "zen-token"],
  ]
  for (const pair of input ?? []) {
    entries.push(pair)
  }
  return ConfigProvider.fromMap(new Map(entries))
}
// Resolves the AppConfig service through its live layer, reading values
// from the test ConfigProvider built above.
const load = (input?: ReadonlyArray<readonly [string, string]>) =>
  AppConfig.pipe(
    Effect.provide(AppConfig.layer),
    Effect.withConfigProvider(provider(input)),
  )
describe("AppConfig", () => {
// SANDBOX_TIMEOUT accepts a human-readable Duration string ("45 minutes").
effectTest("parses SANDBOX_TIMEOUT as Duration", () =>
Effect.gen(function* () {
const config = yield* load([["SANDBOX_TIMEOUT", "45 minutes"]])
expect(Duration.toMinutes(config.sandboxTimeout)).toBe(45)
// Required secrets from the base provider arrive as Redacted values.
expect(Redacted.value(config.discordToken)).toBe("discord-token")
}),
)
// When SANDBOX_TIMEOUT is unset, the minute-count variable is used instead.
effectTest("falls back to SANDBOX_TIMEOUT_MINUTES", () =>
Effect.gen(function* () {
const config = yield* load([["SANDBOX_TIMEOUT_MINUTES", "31"]])
expect(Duration.toMinutes(config.sandboxTimeout)).toBe(31)
}),
)
})

View File

@@ -0,0 +1,194 @@
import { Config, Context, Duration, Effect, Layer, Redacted, Schema } from "effect"
// Closed sets of accepted string values for mode-style environment variables.
const TurnRoutingMode = Schema.Literal("off", "heuristic", "ai")
type TurnRoutingMode = typeof TurnRoutingMode.Type
const SandboxReusePolicy = Schema.Literal("resume_preferred", "recreate")
type SandboxReusePolicy = typeof SandboxReusePolicy.Type
const LogLevel = Schema.Literal("debug", "info", "warn", "error")
type LogLevel = typeof LogLevel.Type
// TCP port parsed from an env string; must be an integer in 1..65535.
const Port = Schema.NumberFromString.pipe(
Schema.int(),
Schema.between(1, 65535),
)
// Branded positive-integer unit types so minute, millisecond, and second
// values cannot be passed where another unit is expected.
export const Minutes = Schema.NumberFromString.pipe(
Schema.int(),
Schema.positive(),
Schema.brand("Minutes"),
)
export type Minutes = typeof Minutes.Type
export const Milliseconds = Schema.NumberFromString.pipe(
Schema.int(),
Schema.positive(),
Schema.brand("Milliseconds"),
)
export type Milliseconds = typeof Milliseconds.Type
export const Seconds = Schema.NumberFromString.pipe(
Schema.int(),
Schema.positive(),
Schema.brand("Seconds"),
)
export type Seconds = typeof Seconds.Type
// Decodes "a, b ,c" into ["a", "b", "c"]: segments are trimmed and blank
// segments are dropped. Encoding joins with bare commas, so surrounding
// whitespace from the original string is not restored.
const CommaSeparatedList = Schema.transform(
  Schema.String,
  Schema.Array(Schema.String),
  {
    decode: (raw) => {
      const ids: Array<string> = []
      for (const piece of raw.split(",")) {
        const trimmed = piece.trim()
        if (trimmed.length > 0) {
          ids.push(trimmed)
        }
      }
      return ids
    },
    encode: (ids) => ids.join(","),
  },
)
// Service shape for AppConfig. Secrets are Redacted so they never appear in
// logs; duration and branded-number fields are parsed once at load time so
// consumers never re-parse environment strings.
export declare namespace AppConfig {
export interface Service {
// Discord gateway credentials and message-gating identifiers.
readonly discordToken: Redacted.Redacted
readonly allowedChannelIds: ReadonlyArray<string>
readonly discordCategoryId: string
readonly discordRoleId: string
readonly discordRequiredRoleId: string
readonly discordCommandGuildId: string
// Local SQLite file path.
readonly databasePath: string
// Daytona sandbox provisioning.
readonly daytonaApiKey: Redacted.Redacted
readonly daytonaSnapshot: string
// OpenCode credentials injected into sandboxes.
readonly openCodeZenApiKey: Redacted.Redacted
readonly githubToken: string
// Observability: logging level and health HTTP server binding.
readonly logLevel: LogLevel
readonly healthHost: string
readonly healthPort: number
// Turn routing (how the bot decides whether a message needs a response).
readonly turnRoutingMode: TurnRoutingMode
readonly turnRoutingModel: string
// Sandbox lifecycle tuning.
readonly sandboxReusePolicy: SandboxReusePolicy
readonly sandboxTimeout: Duration.Duration
readonly cleanupInterval: Duration.Duration
readonly staleActiveGraceMinutes: Minutes
readonly pausedTtlMinutes: Minutes
readonly activeHealthCheckTimeoutMs: Milliseconds
readonly startupHealthTimeoutMs: Milliseconds
readonly resumeHealthTimeoutMs: Milliseconds
readonly sandboxCreationTimeout: Seconds
readonly openCodeModel: string
}
}
// Context tag plus live layer for application configuration. Every value is
// read from the environment when the layer is built; Layer.orDie at the end
// turns any missing-required-value or parse error into a startup defect so
// the process fails fast instead of running half-configured.
export class AppConfig extends Context.Tag("@discord/AppConfig")<AppConfig, AppConfig.Service>() {
static readonly layer = Layer.effect(
AppConfig,
Effect.gen(function* () {
// Required secrets — no defaults; absence is a configuration error.
const discordToken = yield* Config.redacted("DISCORD_TOKEN")
// Empty list means "no channel restriction".
const allowedChannelIds = yield* Schema.Config("ALLOWED_CHANNEL_IDS", CommaSeparatedList).pipe(
Config.orElse(() => Config.succeed([] as ReadonlyArray<string>)),
)
// Optional Discord gating IDs; empty string means "not configured".
const discordCategoryId = yield* Config.string("DISCORD_CATEGORY_ID").pipe(
Config.withDefault(""),
)
const discordRoleId = yield* Config.string("DISCORD_ROLE_ID").pipe(
Config.withDefault(""),
)
const discordRequiredRoleId = yield* Config.string("DISCORD_REQUIRED_ROLE_ID").pipe(
Config.withDefault(""),
)
const discordCommandGuildId = yield* Config.string("DISCORD_COMMAND_GUILD_ID").pipe(
Config.withDefault(""),
)
const databasePath = yield* Config.string("DATABASE_PATH").pipe(
Config.withDefault("discord.sqlite"),
)
const daytonaApiKey = yield* Config.redacted("DAYTONA_API_KEY")
const daytonaSnapshot = yield* Config.string("DAYTONA_SNAPSHOT").pipe(
Config.withDefault(""),
)
const openCodeZenApiKey = yield* Config.redacted("OPENCODE_ZEN_API_KEY")
const githubToken = yield* Config.string("GITHUB_TOKEN").pipe(
Config.withDefault(""),
)
// Schema-validated values use Config.orElse + Config.succeed for defaults
// so the fallback bypasses schema decoding of an absent variable.
const logLevel = yield* Schema.Config("LOG_LEVEL", LogLevel).pipe(
Config.orElse(() => Config.succeed("info" as const)),
)
const healthHost = yield* Config.string("HEALTH_HOST").pipe(
Config.withDefault("0.0.0.0"),
)
const healthPort = yield* Schema.Config("HEALTH_PORT", Port).pipe(
Config.orElse(() => Config.succeed(8787)),
)
const turnRoutingMode = yield* Schema.Config("TURN_ROUTING_MODE", TurnRoutingMode).pipe(
Config.orElse(() => Config.succeed("ai" as const)),
)
const turnRoutingModel = yield* Config.string("TURN_ROUTING_MODEL").pipe(
Config.withDefault("claude-haiku-4-5"),
)
const sandboxReusePolicy = yield* Schema.Config("SANDBOX_REUSE_POLICY", SandboxReusePolicy).pipe(
Config.orElse(() => Config.succeed("resume_preferred" as const)),
)
// SANDBOX_TIMEOUT (a Duration string like "45 minutes") wins; the legacy
// SANDBOX_TIMEOUT_MINUTES integer is the fallback; final default is 30m.
const sandboxTimeout = yield* Config.duration("SANDBOX_TIMEOUT").pipe(
Config.orElse(() =>
Schema.Config("SANDBOX_TIMEOUT_MINUTES", Minutes).pipe(
Config.map((n) => Duration.minutes(n)),
),
),
Config.withDefault(Duration.minutes(30)),
)
const cleanupInterval = yield* Config.duration("SANDBOX_CLEANUP_INTERVAL").pipe(
Config.withDefault(Duration.minutes(5)),
)
const staleActiveGraceMinutes = yield* Schema.Config("STALE_ACTIVE_GRACE_MINUTES", Minutes).pipe(
Config.orElse(() => Config.succeed(Minutes.make(5))),
)
const pausedTtlMinutes = yield* Schema.Config("PAUSED_TTL_MINUTES", Minutes).pipe(
Config.orElse(() => Config.succeed(Minutes.make(180))),
)
const activeHealthCheckTimeoutMs = yield* Schema.Config("ACTIVE_HEALTH_CHECK_TIMEOUT_MS", Milliseconds).pipe(
Config.orElse(() => Config.succeed(Milliseconds.make(15000))),
)
const startupHealthTimeoutMs = yield* Schema.Config("STARTUP_HEALTH_TIMEOUT_MS", Milliseconds).pipe(
Config.orElse(() => Config.succeed(Milliseconds.make(120000))),
)
const resumeHealthTimeoutMs = yield* Schema.Config("RESUME_HEALTH_TIMEOUT_MS", Milliseconds).pipe(
Config.orElse(() => Config.succeed(Milliseconds.make(120000))),
)
const sandboxCreationTimeout = yield* Schema.Config("SANDBOX_CREATION_TIMEOUT", Seconds).pipe(
Config.orElse(() => Config.succeed(Seconds.make(180))),
)
const openCodeModel = yield* Config.string("OPENCODE_MODEL").pipe(
Config.withDefault("opencode/claude-sonnet-4-5"),
)
return AppConfig.of({
discordToken,
allowedChannelIds,
discordCategoryId,
discordRoleId,
discordRequiredRoleId,
discordCommandGuildId,
databasePath,
daytonaApiKey,
daytonaSnapshot,
openCodeZenApiKey,
githubToken,
logLevel,
healthHost,
healthPort,
turnRoutingMode,
turnRoutingModel,
sandboxReusePolicy,
sandboxTimeout,
cleanupInterval,
staleActiveGraceMinutes,
pausedTtlMinutes,
activeHealthCheckTimeoutMs,
startupHealthTimeoutMs,
resumeHealthTimeoutMs,
sandboxCreationTimeout,
openCodeModel,
})
}),
).pipe(Layer.orDie)
}

View File

@@ -0,0 +1,371 @@
import { stdin, stdout } from "node:process"
import readline from "node:readline/promises"
import { BunRuntime } from "@effect/platform-bun"
import { Effect, Layer, LogLevel, Logger, Option, Stream } from "effect"
import { CliConversationStaticLayer } from "../app/layers"
import { ThreadChatCluster, ThreadControlCluster } from "../conversation/thread/cluster"
import { ThreadId } from "../types"
import type { Action } from "../conversation/model/schema"
import { makeTui } from "./local-adapter"
import { Conversation } from "../conversation/conversation"
import { autoThread, base, channelFrom, parse, prompt, scopeText, threadFrom } from "./state"
// ANSI SGR escape codes used to style TUI output (reset restores defaults).
const colors = {
  reset: "\x1b[0m",
  dim: "\x1b[2m",
  cyan: "\x1b[36m",
  green: "\x1b[32m",
  yellow: "\x1b[33m",
  blue: "\x1b[34m",
  red: "\x1b[31m",
} as const
const now = () => new Date().toLocaleTimeString("en-US", { hour12: false })
/**
 * Local interactive TUI entrypoint: drives the Conversation runtime from a
 * readline prompt so the bot can be exercised without a Discord connection.
 *
 * NOTE(review): UI state (scope, pending, last, seen) lives in plain mutable
 * locals shared between the REPL fiber and the forked action-printer fiber;
 * looks single-consumer in practice — confirm before adding more concurrency.
 */
const run = Effect.gen(function* () {
  const tui = yield* makeTui
  // Conversation wired onto the in-memory TUI ports plus static CLI config.
  const layer = Conversation.layer.pipe(
    Layer.provideMerge(tui.layer),
    Layer.provideMerge(CliConversationStaticLayer),
  )
  yield* Effect.gen(function* () {
    const conversation = yield* Conversation
    const threadChat = yield* ThreadChatCluster
    const threadControl = yield* ThreadControlCluster
    const rl = readline.createInterface({ input: stdin, output: stdout, terminal: true })
    // scope: where plain (non-command) input is routed — channel or a thread.
    let scope = base()
    // pending: messages queued but not yet answered (drives the waiting hint).
    let pending = 0
    // last: most recently observed thread; seen: insertion-ordered set backing
    // the 1-based indexes used by /pick and /threads.
    let last: ThreadId | null = null
    const seen = new Set<ThreadId>()
    yield* Effect.addFinalizer(() =>
      Effect.sync(() => {
        rl.close()
      }),
    )
    // Repaint helper: clear the prompt line, print `line`, and (when keep=true)
    // restore the prompt together with any partially typed input.
    const draw = (line: string, keep = true): Effect.Effect<void> =>
      Effect.sync(() => {
        stdout.write(`\r\x1b[2K${line}\n`)
        if (keep) {
          stdout.write(`${prompt(scope)}${rl.line}`)
        }
      })
    // Timestamped, colorized one-line log entry.
    const stamp = (label: string, color: string, text: string) =>
      `${colors.dim}${now()}${colors.reset} ${color}${label}${colors.reset} ${text}`
    const info = (text: string): Effect.Effect<void> => draw(stamp("info", colors.blue, text), false)
    // Multi-line output (e.g. logs): header + body, then restore the prompt.
    const block = (head: string, body: string): Effect.Effect<void> =>
      Effect.sync(() => {
        stdout.write(`\r\x1b[2K${head}\n${body}\n`)
        stdout.write(`${prompt(scope)}${rl.line}`)
      })
    const noteThread = (threadId: ThreadId): Effect.Effect<void> =>
      Effect.sync(() => {
        seen.add(threadId)
        last = threadId
      })
    // Resolve a command's optional thread argument: explicit id wins, then the
    // current thread scope, then the last thread we saw.
    const pick = (threadId: ThreadId | null): ThreadId | null => {
      if (threadId) return threadId
      if (scope.kind === "thread") return scope.threadId
      return last
    }
    const list = () => Array.from(seen)
    const byIndex = (index: number) => list().at(index - 1) ?? null
    // Accept either a literal thread id or a 1-based index into `seen`
    // (purely numeric references are treated as indexes).
    const fromRef = (threadId: ThreadId | null) => {
      if (!threadId) return null
      const raw = `${threadId}`.trim()
      if (!/^\d+$/.test(raw)) return threadId
      const index = Number(raw)
      if (!Number.isInteger(index) || index <= 0) return null
      return byIndex(index)
    }
    // Best-effort session lookup; cluster errors are flattened to null.
    const tracked = (threadId: ThreadId) =>
      threadChat.status(threadId).pipe(
        Effect.map((row) => Option.isSome(row) ? row.value : null),
        Effect.catchAll(() => Effect.succeed(null)),
      )
    // One-line /status rendering; long errors are truncated to 120 chars.
    const sessionText = (threadId: ThreadId, session: {
      status: string
      sandboxId: string
      sessionId: string
      resumeFailCount: number
      lastError: string | null
    }) =>
      `${colors.dim}${threadId}${colors.reset} status=${session.status} sandbox=${session.sandboxId} session=${session.sessionId} resume_failures=${session.resumeFailCount}${session.lastError ? ` error=${session.lastError.slice(0, 120)}` : ""}`
    const render = (action: Action) => {
      if (action.kind === "typing") {
        return stamp("typing", colors.yellow, `${colors.dim}[${action.threadId}]${colors.reset}`)
      }
      return stamp("assistant", colors.cyan, `${colors.dim}[${action.threadId}]${colors.reset} ${action.text}`)
    }
    yield* draw(
      stamp(
        "ready",
        colors.yellow,
        `${colors.dim}Type messages. /thread [id|n], /pick [n], /channel, /threads, /status, /logs, /pause, /recreate, /resume, /active, /help, /exit${colors.reset}`,
      ),
      false,
    )
    // Background printer: renders every outbound action, auto-switches scope
    // into a newly created thread, and decrements `pending` on replies.
    yield* Effect.forkScoped(
      Stream.runForEach(
        tui.actions,
        (action) =>
          Effect.gen(function* () {
            const known = seen.has(action.threadId)
            yield* noteThread(action.threadId)
            const next = autoThread(scope, action, known)
            const switched = scope.kind === "channel" && next.kind === "thread"
            scope = next
            if (switched) {
              yield* info(`${colors.dim}using ${scopeText(scope)} (/channel to go back)${colors.reset}`)
            }
            if ((action.kind === "send" || action.kind === "reply") && pending > 0) {
              pending -= 1
            }
            yield* draw(render(action))
          }),
      ),
    )
    yield* Effect.forkScoped(conversation.run)
    // Enqueue a user message to the current scope; after 2s show a "waiting"
    // hint if no reply arrived yet (forked so the REPL is not blocked).
    const queue = (text: string) =>
      Effect.gen(function* () {
        const target = scopeText(scope)
        if (scope.kind === "channel") {
          yield* tui.send(text)
        } else {
          yield* tui.sendTo(scope.threadId, text)
        }
        pending += 1
        yield* draw(stamp("queued", colors.green, `${colors.dim}[${target}]${colors.reset} ${text}`), false)
        yield* Effect.fork(
          Effect.suspend(() =>
            pending > 0
              ? draw(stamp("waiting", colors.yellow, `${colors.dim}[${target}] preparing sandbox/session...${colors.reset}`), false)
              : Effect.void,
          ).pipe(
            Effect.delay("2 seconds"),
          ),
        )
      })
    // Slash-command dispatcher; resolves to false when `text` is not a
    // command so the caller treats it as a chat message.
    const command = (text: string) =>
      Effect.gen(function* () {
        const cmd = parse(text)
        if (!cmd) return false
        if (cmd.kind === "help") {
          yield* info(
            `${colors.dim}/thread [id|n], /pick [n], /channel, /threads, /status [thread], /logs [lines] [thread], /pause [thread], /recreate [thread], /resume [thread], /active, /exit${colors.reset}`,
          )
          return true
        }
        if (cmd.kind === "threads") {
          if (seen.size === 0) {
            yield* info(`${colors.dim}no known threads yet${colors.reset}`)
            return true
          }
          yield* info(`${colors.dim}${list().map((id, i) => `${i + 1}:${id}`).join(", ")}${colors.reset}`)
          return true
        }
        if (cmd.kind === "pick") {
          if (seen.size === 0) {
            yield* info(`${colors.dim}no known threads yet${colors.reset}`)
            return true
          }
          // Without an index, list the options instead of picking.
          if (!cmd.index) {
            yield* info(`${colors.dim}${list().map((id, i) => `${i + 1}:${id}`).join(", ")}${colors.reset}`)
            yield* info(`${colors.dim}pick one with /pick <n>${colors.reset}`)
            return true
          }
          const threadId = byIndex(cmd.index)
          if (!threadId) {
            yield* info(`${colors.dim}invalid thread index ${cmd.index}${colors.reset}`)
            return true
          }
          scope = threadFrom(scope, threadId)
          yield* info(`${colors.dim}using ${scopeText(scope)}${colors.reset}`)
          return true
        }
        if (cmd.kind === "active") {
          yield* threadControl.active.pipe(
            Effect.matchEffect({
              onFailure: (error) =>
                info(`${colors.red}active query failed${colors.reset} ${String(error)}`),
              onSuccess: (active) =>
                active.length === 0
                  ? info(`${colors.dim}no active sessions${colors.reset}`)
                  : info(`${colors.dim}${active.map((s) => `${s.threadId}(${s.status})`).join(", ")}${colors.reset}`),
            }),
          )
          return true
        }
        if (cmd.kind === "channel") {
          scope = channelFrom(scope)
          yield* info(`${colors.dim}using ${scopeText(scope)}${colors.reset}`)
          return true
        }
        if (cmd.kind === "thread") {
          // /thread <ref> -> switch; bad ref -> error; bare /thread -> last seen.
          const selected = fromRef(cmd.threadId)
          if (selected) {
            scope = threadFrom(scope, selected)
            yield* noteThread(selected)
            yield* info(`${colors.dim}using ${scopeText(scope)}${colors.reset}`)
            return true
          }
          if (cmd.threadId) {
            yield* info(`${colors.dim}invalid thread id/index${colors.reset}`)
            return true
          }
          if (last) {
            scope = threadFrom(scope, last)
            yield* info(`${colors.dim}using ${scopeText(scope)}${colors.reset}`)
            return true
          }
          yield* info(`${colors.dim}no thread id yet. use /thread <id>${colors.reset}`)
          return true
        }
        if (cmd.kind === "status") {
          const threadId = pick(cmd.threadId)
          if (!threadId) {
            yield* info(`${colors.dim}no thread selected. use /thread <id>${colors.reset}`)
            return true
          }
          yield* noteThread(threadId)
          const session = yield* tracked(threadId)
          if (!session) {
            yield* info(`${colors.dim}no tracked session for ${threadId}${colors.reset}`)
            return true
          }
          yield* info(sessionText(threadId, session))
          return true
        }
        if (cmd.kind === "logs") {
          const threadId = pick(cmd.threadId)
          if (!threadId) {
            yield* info(`${colors.dim}no thread selected. use /thread <id>${colors.reset}`)
            return true
          }
          yield* noteThread(threadId)
          const row = yield* threadControl.logs({ threadId, lines: cmd.lines }).pipe(
            Effect.catchAll(() => Effect.succeed(Option.none())),
          )
          if (Option.isNone(row)) {
            yield* info(`${colors.dim}no tracked session for ${threadId}${colors.reset}`)
            return true
          }
          yield* block(
            stamp("logs", colors.blue, `${colors.dim}[${threadId}]${colors.reset}`),
            row.value.output.trim() || "(empty log)",
          )
          return true
        }
        if (cmd.kind === "pause") {
          const threadId = pick(cmd.threadId)
          if (!threadId) {
            yield* info(`${colors.dim}no thread selected. use /thread <id>${colors.reset}`)
            return true
          }
          yield* noteThread(threadId)
          const row = yield* threadControl.pause({ threadId, reason: "manual-cli" }).pipe(
            Effect.catchAll(() => Effect.succeed(Option.none())),
          )
          if (Option.isNone(row)) {
            yield* info(`${colors.dim}no tracked session for ${threadId}${colors.reset}`)
            return true
          }
          yield* info(`${colors.dim}paused ${threadId}${colors.reset}`)
          return true
        }
        if (cmd.kind === "recreate") {
          const threadId = pick(cmd.threadId)
          if (!threadId) {
            yield* info(`${colors.dim}no thread selected. use /thread <id>${colors.reset}`)
            return true
          }
          yield* noteThread(threadId)
          yield* threadChat.recreate(threadId).pipe(
            Effect.matchEffect({
              onFailure: (error) =>
                info(`${colors.red}recreate failed${colors.reset} ${String(error)}`),
              onSuccess: () =>
                info(`${colors.dim}recreated ${threadId}${colors.reset}`),
            }),
          )
          return true
        }
        if (cmd.kind === "resume") {
          const threadId = pick(cmd.threadId)
          if (!threadId) {
            yield* info(`${colors.dim}no thread selected. use /thread <id>${colors.reset}`)
            return true
          }
          // Resume needs channel/guild from a tracked session.
          const session = yield* tracked(threadId)
          if (session === null) {
            yield* info(`${colors.dim}no tracked session for ${threadId}${colors.reset}`)
            return true
          }
          yield* noteThread(threadId)
          yield* threadControl.resume({
            threadId,
            channelId: session.channelId,
            guildId: session.guildId,
          }).pipe(
            Effect.matchEffect({
              onFailure: (error) =>
                info(`${colors.red}resume failed${colors.reset} ${String(error)}`),
              onSuccess: (current) =>
                info(
                  `${colors.dim}resumed ${threadId} sandbox=${current.sandboxId} session=${current.sessionId}${colors.reset}`,
                ),
            }),
          )
          return true
        }
        yield* info(`${colors.dim}unknown command: /${cmd.name}${colors.reset}`)
        return true
      })
    // REPL loop: empty input loops, /exit returns, commands are dispatched,
    // anything else is queued as chat. Recursion through Effect is stack-safe.
    const loop: Effect.Effect<void> = Effect.gen(function* () {
      const text = (yield* Effect.promise(() => rl.question(prompt(scope)))).trim()
      if (!text) return yield* loop
      if (text === "/exit" || text === "exit" || text === "quit") return
      const handled = yield* command(text)
      if (handled) return yield* loop
      yield* queue(text)
      return yield* loop
    })
    yield* loop
  }).pipe(
    Effect.provide(layer),
    Effect.scoped,
  )
})
// Entry point: suppress logs below Warning so they don't fight the TUI
// repaints, then hand the program to the Bun runtime.
run.pipe(
  Logger.withMinimumLogLevel(LogLevel.Warning),
  BunRuntime.runMain,
)

View File

@@ -0,0 +1,392 @@
import { BunRuntime } from "@effect/platform-bun"
import { Duration, Effect, Exit, Fiber, Option, Ref, Schema } from "effect"
import { ControlThreadLayer } from "../app/layers"
import { ThreadChatCluster, ThreadControlCluster } from "../conversation/thread/cluster"
import { ChannelId, GuildId, SandboxId, SessionInfo, ThreadId } from "../types"
// Parsed CLI option bag: `--key value` pairs as strings, bare `--key` as true.
type Opt = Record<string, string | boolean>
// Operator error (bad arguments / unknown command); surfaced as ok:false JSON.
class CtlUsageError extends Schema.TaggedError<CtlUsageError>()("CtlUsageError", {
  message: Schema.String,
}) {}
// Unexpected failure from the thread clusters; preserves the original cause.
class CtlInternalError extends Schema.TaggedError<CtlInternalError>()("CtlInternalError", {
  message: Schema.String,
  cause: Schema.Defect,
}) {}
const usage = (message: string) => CtlUsageError.make({ message })
// Default message is derived from the cause at call time via `text` (declared
// below — safe because default parameter expressions evaluate per call).
const internal = (cause: unknown, message = text(cause)) => CtlInternalError.make({ message, cause })
/**
 * Render an unknown failure as a short human-readable message.
 *
 * Preference order: Error#message; `Tag: message` for tagged errors (e.g.
 * Effect TaggedError-shaped objects); JSON for other objects; String() for
 * primitives and anything JSON cannot encode.
 *
 * Fix: the original had two identical trailing `String(cause)` branches, so
 * plain objects rendered as the useless "[object Object]". They are now
 * JSON-encoded, falling back to String() for circular/non-serializable values.
 */
const text = (cause: unknown) => {
  if (cause instanceof Error) return cause.message
  if (typeof cause === "object" && cause !== null) {
    const tag = (cause as { _tag?: unknown })._tag
    const message = (cause as { message?: unknown }).message
    if (typeof tag === "string" && typeof message === "string") return `${tag}: ${message}`
    try {
      // JSON.stringify can return undefined (e.g. toJSON yielding undefined).
      return JSON.stringify(cause) ?? String(cause)
    } catch {
      // Circular structures and BigInt values throw; degrade gracefully.
      return String(cause)
    }
  }
  return String(cause)
}
/**
 * Parse process argv into { cmd, opts, args }.
 *
 * The first token after the node/script pair is the command (default "help").
 * `--key value` pairs become string options; a `--key` immediately followed by
 * another `--key` (or at the end) becomes a boolean true; a bare `--` token is
 * ignored; everything else accumulates as positional args.
 */
const parse = (argv: ReadonlyArray<string>) => {
  const input = argv.slice(2)
  const cmd = input.at(0)?.toLowerCase() ?? "help"
  const opts: Opt = {}
  const args: Array<string> = []
  let pending: string | null = null
  for (const token of input.slice(1)) {
    if (token.startsWith("--")) {
      const name = token.slice(2)
      // A bare "--" carries no key; skip it without disturbing state.
      if (name.length === 0) continue
      // Previous key had no value: record it as a boolean flag.
      if (pending) opts[pending] = true
      pending = name
      continue
    }
    if (pending) {
      opts[pending] = token
      pending = null
      continue
    }
    args.push(token)
  }
  // Trailing key with no value is also a boolean flag.
  if (pending) opts[pending] = true
  return { cmd, opts, args }
}
/** Read a string option; returns null for missing, boolean, or blank values. */
const value = (opts: Opt, key: string) => {
  const raw = opts[key]
  if (typeof raw !== "string") return null
  const trimmed = raw.trim()
  return trimmed.length > 0 ? trimmed : null
}
/** Read a positive-integer option; anything else yields the fallback. */
const number = (opts: Opt, key: string, fallback: number) => {
  const raw = value(opts, key)
  if (raw === null) return fallback
  const parsed = Number(raw)
  return Number.isInteger(parsed) && parsed > 0 ? parsed : fallback
}
/** Read a boolean option: bare `--key`, or "1"/"true"/"yes" (case-insensitive). */
const flag = (opts: Opt, key: string) => {
  const raw = opts[key]
  if (raw === true) return true
  if (typeof raw !== "string") return false
  const normalized = raw.toLowerCase()
  return raw === "1" || normalized === "true" || normalized === "yes"
}
// Monotonic counters for ids minted within this ctl process.
let ctlSeq = 0
let msgSeq = 0
/**
 * Resolve the target thread: an explicit --thread wins; otherwise the single
 * active session is used; multiple active sessions require disambiguation
 * (usage error); with none we mint a fresh ctl-local thread id.
 */
const pick = (opts: Opt, active: ReadonlyArray<{ threadId: ThreadId }>) => {
  const explicit = value(opts, "thread")
  if (explicit) return Effect.succeed(ThreadId.make(explicit))
  switch (active.length) {
    case 1:
      return Effect.succeed(active[0].threadId)
    case 0: {
      ctlSeq += 1
      return Effect.succeed(ThreadId.make(`ctl-${ctlSeq}`))
    }
    default:
      return Effect.fail(usage("missing --thread (multiple active sessions)"))
  }
}
/** Unique-enough message id for ctl-originated sends (time + counter). */
const messageId = () => {
  msgSeq += 1
  return `ctl-msg-${Date.now()}-${msgSeq}`
}
/** Emit the final pretty-printed JSON result for a ctl invocation. */
const print = (ok: boolean, command: string, payload: Record<string, unknown>) =>
  Effect.sync(() => {
    const body = JSON.stringify({ ok, command, ...payload }, null, 2)
    process.stdout.write(`${body}\n`)
  })
/** Emit one compact NDJSON progress event (used by `send --follow`). */
const event = (command: string, name: string, payload: Record<string, unknown>) =>
  Effect.sync(() => {
    const body = JSON.stringify({ ok: true, command, event: name, ...payload })
    process.stdout.write(`${body}\n`)
  })
/**
 * One-shot control CLI: parses argv, talks to the thread clusters, and prints
 * a single JSON result (or NDJSON progress events for `send --follow`).
 * Any failure is caught at the bottom, printed as ok:false, and sets exit 1.
 */
const run = Effect.gen(function* () {
  const ctl = parse(process.argv)
  const threadChat = yield* ThreadChatCluster
  const threadControl = yield* ThreadControlCluster
  const active = yield* threadControl.active
  // Status lookup with cluster errors wrapped as CtlInternalError.
  // (Tagged errors are yieldable, so `yield* internal(...)` below fails.)
  const status = (threadId: ThreadId) =>
    threadChat.status(threadId).pipe(
      Effect.catchAll((cause) => internal(cause)),
    )
  // Channel/guild context for a send: tracked session wins, then the
  // --channel/--guild flags, then local defaults.
  const context = (threadId: ThreadId, opts: Opt) =>
    Effect.gen(function* () {
      const row = yield* status(threadId)
      if (Option.isSome(row)) {
        return {
          channelId: row.value.channelId,
          guildId: row.value.guildId,
        }
      }
      const channel = value(opts, "channel") ?? "ctl"
      const guild = value(opts, "guild") ?? "local"
      return {
        channelId: ChannelId.make(channel),
        guildId: GuildId.make(guild),
      }
    })
  if (ctl.cmd === "help") {
    return yield* print(true, ctl.cmd, {
      usage: [
        "conversation:ctl active",
        "conversation:ctl status --thread <id>",
        "conversation:ctl logs --thread <id> [--lines 120]",
        "conversation:ctl pause --thread <id>",
        "conversation:ctl recreate --thread <id>",
        "conversation:ctl resume --thread <id> [--channel <id> --guild <id>]",
        "conversation:ctl send --thread <id> --text <message> [--follow --wait-ms 180000 --logs-every-ms 2000 --lines 80]",
      ],
    })
  }
  if (ctl.cmd === "active") {
    return yield* print(true, ctl.cmd, {
      count: active.length,
      sessions: active.map((row) => ({
        threadId: row.threadId,
        channelId: row.channelId,
        guildId: row.guildId,
        sandboxId: row.sandboxId,
        sessionId: row.sessionId,
        status: row.status,
        resumeFailCount: row.resumeFailCount,
        lastError: row.lastError,
      })),
    })
  }
  if (ctl.cmd === "status") {
    const threadId = yield* pick(ctl.opts, active)
    const row = yield* status(threadId)
    return yield* print(true, ctl.cmd, {
      threadId: threadId,
      tracked: Option.isSome(row),
      session: Option.isSome(row)
        ? {
          threadId: row.value.threadId,
          channelId: row.value.channelId,
          guildId: row.value.guildId,
          sandboxId: row.value.sandboxId,
          sessionId: row.value.sessionId,
          status: row.value.status,
          resumeFailCount: row.value.resumeFailCount,
          lastError: row.value.lastError,
        }
        : null,
    })
  }
  if (ctl.cmd === "logs") {
    const threadId = yield* pick(ctl.opts, active)
    const lines = number(ctl.opts, "lines", 120)
    const row = yield* threadControl.logs({ threadId, lines }).pipe(
      Effect.catchAll((cause) => internal(cause)),
    )
    if (Option.isNone(row)) return yield* usage(`no tracked session for thread ${threadId}`)
    return yield* print(true, ctl.cmd, {
      threadId: threadId,
      sandboxId: row.value.sandboxId,
      lines,
      output: row.value.output,
    })
  }
  if (ctl.cmd === "pause") {
    const threadId = yield* pick(ctl.opts, active)
    const row = yield* threadControl.pause({ threadId, reason: "manual-ctl" }).pipe(
      Effect.catchAll((cause) => internal(cause)),
    )
    if (Option.isNone(row)) return yield* usage(`no tracked session for thread ${threadId}`)
    return yield* print(true, ctl.cmd, { threadId: threadId })
  }
  // "destroy" is an alias kept for operator muscle-memory.
  if (ctl.cmd === "recreate" || ctl.cmd === "destroy") {
    const threadId = yield* pick(ctl.opts, active)
    yield* threadChat.recreate(threadId)
    return yield* print(true, ctl.cmd, { threadId: threadId })
  }
  if (ctl.cmd === "resume") {
    const threadId = yield* pick(ctl.opts, active)
    const channel = value(ctl.opts, "channel")
    const guild = value(ctl.opts, "guild")
    const row = yield* threadControl.resume({
      threadId,
      channelId: channel ? ChannelId.make(channel) : null,
      guildId: guild ? GuildId.make(guild) : null,
    }).pipe(
      Effect.catchAll((cause) => internal(cause)),
    )
    return yield* print(true, ctl.cmd, {
      threadId: threadId,
      session: {
        sandboxId: row.sandboxId,
        sessionId: row.sessionId,
        status: row.status,
      },
    })
  }
  if (ctl.cmd === "send") {
    const threadId = yield* pick(ctl.opts, active)
    // Message text comes from --text, falling back to the positional args.
    const message = value(ctl.opts, "text") ?? ctl.args.join(" ").trim()
    if (!message) {
      return yield* usage("missing message text (pass --text \"...\")")
    }
    const wait = number(ctl.opts, "wait-ms", 0)
    const every = number(ctl.opts, "logs-every-ms", 2000)
    const lines = number(ctl.opts, "lines", 80)
    // Any positive --wait-ms implies follow mode.
    const follow = flag(ctl.opts, "follow") || wait > 0
    if (!follow) {
      // Fire-and-wait: single blocking send, print the reply.
      const input = yield* context(threadId, ctl.opts)
      const reply = yield* threadChat.send({
        threadId,
        channelId: input.channelId,
        guildId: input.guildId,
        messageId: messageId(),
        text: message,
      })
      return yield* print(true, ctl.cmd, {
        threadId: threadId,
        sandboxId: reply.session.sandboxId,
        sessionId: reply.session.sessionId,
        reply: reply.text,
      })
    }
    // Follow mode: run the send on a forked fiber and poll it, streaming
    // sandbox logs as NDJSON progress events until the reply lands.
    const known = yield* threadChat.status(threadId).pipe(
      Effect.catchAll(() => Effect.succeed(Option.none())),
    )
    const sandbox = yield* Ref.make<SandboxId | null>(Option.isSome(known) ? known.value.sandboxId : null)
    const last = yield* Ref.make<string>("")
    const started = Date.now()
    const fiber = yield* Effect.fork(
      Effect.gen(function* () {
        const input = yield* context(threadId, ctl.opts)
        const reply = yield* threadChat.send({
          threadId,
          channelId: input.channelId,
          guildId: input.guildId,
          messageId: messageId(),
          text: message,
        })
        yield* Ref.set(sandbox, reply.session.sandboxId)
        return { row: reply.session, reply: reply.text }
      }),
    )
    yield* event(ctl.cmd, "started", {
      threadId: threadId,
      waitMs: wait,
      logsEveryMs: every,
      lines,
    })
    const waitTick = Effect.void.pipe(Effect.delay(Duration.millis(every)))
    // Poll loop: finish when the send fiber completes, time out after
    // --wait-ms (interrupting the send), otherwise emit a progress event
    // (with logs once a sandbox id is known) every tick.
    const loop = (): Effect.Effect<{ row: SessionInfo; reply: string }, unknown> =>
      Effect.gen(function* () {
        const done = yield* Fiber.poll(fiber)
        if (Option.isSome(done)) {
          if (Exit.isSuccess(done.value)) return done.value.value
          return yield* Effect.failCause(done.value.cause)
        }
        const elapsed = Date.now() - started
        if (wait > 0 && elapsed >= wait) {
          yield* Fiber.interrupt(fiber)
          return yield* usage(`send timed out after ${wait}ms`)
        }
        const sandboxId = yield* Ref.get(sandbox)
        if (!sandboxId) {
          yield* event(ctl.cmd, "progress", {
            threadId: threadId,
            elapsedMs: elapsed,
            stage: "resolving-session",
          })
          yield* waitTick
          return yield* loop()
        }
        const output = yield* threadControl.logs({ threadId, lines }).pipe(
          Effect.map((row) => Option.isSome(row) ? row.value.output : "(no tracked session)"),
          Effect.catchAll((cause) => Effect.succeed(`(log read failed: ${text(cause)})`)),
        )
        // Only ship full log text when it changed since the last tick.
        const previous = yield* Ref.get(last)
        if (output !== previous) {
          yield* Ref.set(last, output)
          yield* event(ctl.cmd, "progress", {
            threadId: threadId,
            elapsedMs: elapsed,
            sandboxId,
            logs: output,
          })
        } else {
          yield* event(ctl.cmd, "progress", {
            threadId: threadId,
            elapsedMs: elapsed,
            sandboxId,
            logs: "(no change)",
          })
        }
        yield* waitTick
        return yield* loop()
      })
    const result = yield* loop()
    return yield* print(true, ctl.cmd, {
      threadId: threadId,
      sandboxId: result.row.sandboxId,
      sessionId: result.row.sessionId,
      reply: result.reply,
    })
  }
  return yield* usage(`unknown command: ${ctl.cmd}`)
}).pipe(
  Effect.catchAll((cause) => {
    // Re-parse argv so the failure JSON is labeled with the attempted command.
    const command = parse(process.argv).cmd
    return print(false, command, { error: text(cause) }).pipe(
      Effect.zipRight(Effect.sync(() => {
        process.exitCode = 1
      })),
    )
  }),
)
// Entry point: provide the control-plane layer, scope resource cleanup, and
// run under the Bun runtime (which manages exit/interrupt handling).
run.pipe(
  Effect.provide(ControlThreadLayer),
  Effect.scoped,
  BunRuntime.runMain,
)

View File

@@ -0,0 +1,161 @@
import { describe, expect } from "bun:test"
import { Deferred, Effect, Layer, Option } from "effect"
import { ThreadChatCluster, ThreadChatError } from "../conversation/thread/cluster"
import { TurnRouter, TurnRoutingDecision } from "../discord/turn-routing"
import { effectTest, testConfigLayer } from "../test/effect"
import { ChannelId, GuildId, SandboxId, SessionId, SessionInfo, ThreadId } from "../types"
import { Conversation } from "../conversation/conversation"
import { IngressDedup } from "../conversation/dedup"
import { makeTui } from "./local-adapter"
// Canned healthy/active SessionInfo fixture for the local-channel thread;
// only the session id varies per test.
const makeSession = (id: string) =>
  SessionInfo.make({
    threadId: ThreadId.make("thread-local-channel"),
    channelId: ChannelId.make("local-channel"),
    guildId: GuildId.make("local"),
    sandboxId: SandboxId.make("sb1"),
    sessionId: SessionId.make(id),
    previewUrl: "https://preview",
    previewToken: null,
    status: "active",
    lastError: null,
    resumeFailCount: 0,
  })
// TurnRouter stub that always decides to respond, so these tests never
// depend on real routing/model calls.
const routerLayer = Layer.succeed(
  TurnRouter,
  TurnRouter.of({
    shouldRespond: () =>
      Effect.succeed(TurnRoutingDecision.make({ shouldRespond: true, reason: "test" })),
    generateThreadName: () => Effect.succeed("unused"),
  }),
)
// In-memory ThreadChatCluster stub:
// - send: optionally blocks on `gate` (lets tests assert typing happens before
//   the reply), records the prompt into `seen`, and echoes "local:<text>"
//   unless a custom responder is supplied.
// - status: always reports the canned active session.
// - recreate: no-op.
const makeThreadChatLayer = (opts: {
  send?: (prompt: string) => string
  seen?: Array<string>
  gate?: Deferred.Deferred<void>
}) => {
  const session = makeSession("s1")
  const seen = opts.seen ?? []
  return Layer.succeed(ThreadChatCluster, ThreadChatCluster.of({
    send: (input) =>
      Effect.gen(function* () {
        if (opts.gate) yield* Deferred.await(opts.gate)
        seen.push(input.text)
        return {
          text: opts.send ? opts.send(input.text) : `local:${input.text}`,
          session,
          changedSession: false,
        }
      }).pipe(
        // NOTE(review): the gen body has no visible failure channel, so this
        // mapError appears to exist only to satisfy the cluster signature.
        Effect.mapError((cause) =>
          ThreadChatError.make({
            threadId: input.threadId,
            cause,
            retriable: false,
          })),
      ),
    status: () => Effect.succeed(Option.some(session)),
    recreate: () => Effect.void,
  }))
}
describe("makeTui", () => {
  // End-to-end through the conversation runtime: a channel message should
  // yield a typing action, then a send carrying the stubbed reply, on a
  // freshly generated thread.
  effectTest("drives conversation locally without Discord", () =>
    Effect.gen(function* () {
      const seen: Array<string> = []
      const tui = yield* makeTui
      const threadChatLayer = makeThreadChatLayer({ seen })
      const live = Conversation.layer.pipe(
        Layer.provideMerge(tui.layer),
        Layer.provideMerge(IngressDedup.noop),
        Layer.provideMerge(routerLayer),
        Layer.provideMerge(threadChatLayer),
        Layer.provideMerge(testConfigLayer),
      )
      yield* Effect.gen(function* () {
        const conversation = yield* Conversation
        yield* Effect.forkScoped(conversation.run)
        yield* tui.send("hello local")
        const first = yield* tui.take
        const second = yield* tui.take
        expect(seen).toEqual(["hello local"])
        expect(first.kind).toBe("typing")
        expect(second.kind).toBe("send")
        // Generated names look like "thread-<adjective>-<animal>-<n>".
        expect(/^thread-[a-z]+-[a-z]+-\d+$/.test(String(second.threadId))).toBe(true)
        if (second.kind === "send") expect(second.text).toBe("local:hello local")
      }).pipe(Effect.provide(live))
    }),
  )
  // With the chat stub gated, the typing action must still arrive before the
  // reply — typing is not deferred until the session resolves.
  effectTest("publishes typing before session resolution completes", () =>
    Effect.gen(function* () {
      const gate = yield* Deferred.make<void>()
      const tui = yield* makeTui
      const threadChatLayer = makeThreadChatLayer({ gate })
      const live = Conversation.layer.pipe(
        Layer.provideMerge(tui.layer),
        Layer.provideMerge(IngressDedup.noop),
        Layer.provideMerge(routerLayer),
        Layer.provideMerge(threadChatLayer),
        Layer.provideMerge(testConfigLayer),
      )
      yield* Effect.gen(function* () {
        const conversation = yield* Conversation
        yield* Effect.forkScoped(conversation.run)
        yield* tui.send("hello local")
        const first = yield* tui.take
        expect(first.kind).toBe("typing")
        yield* Deferred.succeed(gate, undefined)
        const second = yield* tui.take
        expect(second.kind).toBe("send")
      }).pipe(Effect.provide(live))
    }),
  )
  // Two separate channel messages must not be folded into one thread.
  effectTest("channel messages create distinct threads", () =>
    Effect.gen(function* () {
      const tui = yield* makeTui
      const threadChatLayer = makeThreadChatLayer({})
      const live = Conversation.layer.pipe(
        Layer.provideMerge(tui.layer),
        Layer.provideMerge(IngressDedup.noop),
        Layer.provideMerge(routerLayer),
        Layer.provideMerge(threadChatLayer),
        Layer.provideMerge(testConfigLayer),
      )
      yield* Effect.gen(function* () {
        const conversation = yield* Conversation
        yield* Effect.forkScoped(conversation.run)
        yield* tui.send("one")
        const firstTyping = yield* tui.take
        const firstSend = yield* tui.take
        yield* tui.send("two")
        const secondTyping = yield* tui.take
        const secondSend = yield* tui.take
        expect(firstTyping.kind).toBe("typing")
        expect(firstSend.kind).toBe("send")
        expect(secondTyping.kind).toBe("typing")
        expect(secondSend.kind).toBe("send")
        if (firstTyping.kind === "typing" && secondTyping.kind === "typing") {
          expect(firstTyping.threadId === secondTyping.threadId).toBe(false)
        }
      }).pipe(Effect.provide(live))
    }),
  )
})

View File

@@ -0,0 +1,169 @@
import { Effect, Layer, Queue, Schedule, Stream } from "effect"
import { TYPING_INTERVAL } from "../discord/constants"
import { ChannelId, GuildId, ThreadId } from "../types"
import { ChannelMessage, Mention, ThreadMessage, ThreadRef, Typing, type Action, type Inbound } from "../conversation/model/schema"
import { History, Inbox, Outbox, Threads } from "../conversation"
/**
 * Handle returned by makeTui: a Layer implementing the conversation ports
 * (Inbox/Outbox/History/Threads) over in-memory queues, plus helpers for the
 * CLI/tests to inject user messages and observe outbound actions.
 */
export type Tui = {
  layer: Layer.Layer<Inbox | Outbox | History | Threads, never, never>
  // Post a channel-level message (may spawn a new synthetic thread).
  send: (text: string) => Effect.Effect<void>
  // Post into an existing thread.
  sendTo: (threadId: ThreadId, text: string) => Effect.Effect<void>
  // Dequeue one outbound action (suspends until one is available).
  take: Effect.Effect<Action>
  // Streaming view over the same outbound action queue.
  actions: Stream.Stream<Action>
}
/**
 * Build the in-memory TUI adapter: unbounded queues stand in for Discord's
 * inbound/outbound channels, and Maps hold per-thread transcripts and the
 * channel each thread was spawned from.
 */
export const makeTui = Effect.gen(function* () {
  const input = yield* Queue.unbounded<Inbound>()
  const output = yield* Queue.unbounded<Action>()
  // threadId -> transcript lines ("user: ..." / "assistant: ...").
  const history = new Map<string, Array<string>>()
  // root channel messageId -> thread minted for it (so re-seen messages
  // resolve to the same thread).
  const roots = new Map<string, ThreadId>()
  // threadId -> parent channel id.
  const parents = new Map<string, ChannelId>()
  // Word lists for readable generated thread names.
  const words = {
    a: ["brisk", "calm", "dapper", "eager", "fuzzy", "gentle", "jolly", "mellow", "nimble", "sunny"],
    b: ["otter", "falcon", "panda", "badger", "fox", "heron", "lemur", "raven", "tiger", "whale"],
  } as const
  let seq = 0
  // Deterministic name generator: cycles adjective x animal; the numeric
  // suffix increments once per full a*b cycle. (?? fallbacks satisfy
  // noUncheckedIndexedAccess; the modulo math keeps indexes in range.)
  const name = () => {
    const i = seq
    seq += 1
    const x = words.a[i % words.a.length] ?? "brisk"
    const y = words.b[Math.floor(i / words.a.length) % words.b.length] ?? "otter"
    const z = Math.floor(i / (words.a.length * words.b.length)) + 1
    return ThreadId.make(`thread-${x}-${y}-${z}`)
  }
  // Append a transcript line, creating the thread's history on first use.
  const remember = (threadId: ThreadId, line: string) => {
    const current = history.get(threadId)
    if (current) {
      current.push(line)
      return
    }
    history.set(threadId, [line])
  }
  // Inject a user message into an existing thread.
  const sendTo = (threadId: ThreadId, text: string) =>
    Effect.gen(function* () {
      remember(threadId, `user: ${text}`)
      const channelId = parents.get(threadId) ?? ChannelId.make(`channel-${threadId}`)
      yield* input.offer(
        ThreadMessage.make({
          kind: "thread_message",
          threadId,
          channelId,
          messageId: crypto.randomUUID(),
          guildId: GuildId.make("local"),
          botUserId: "local-bot",
          botRoleId: "",
          authorId: "local-user",
          authorIsBot: false,
          mentionsEveryone: false,
          mentions: Mention.make({ userIds: [], roleIds: [] }),
          content: text,
        }),
      ).pipe(Effect.asVoid)
    })
  // Inject a channel-level message; the bot is mentioned so the conversation
  // runtime treats it as addressed to the bot.
  const send = (text: string) =>
    Effect.gen(function* () {
      const channelId = ChannelId.make("local-channel")
      yield* input.offer(
        ChannelMessage.make({
          kind: "channel_message",
          channelId,
          messageId: crypto.randomUUID(),
          guildId: GuildId.make("local"),
          botUserId: "local-bot",
          botRoleId: "",
          authorId: "local-user",
          authorIsBot: false,
          mentionsEveryone: false,
          mentions: Mention.make({ userIds: ["local-bot"], roleIds: [] }),
          content: text,
        }),
      ).pipe(Effect.asVoid)
    })
  // Port implementations backed by the queues/maps above.
  const layer = Layer.mergeAll(
    Layer.succeed(
      Inbox,
      Inbox.of({
        // shutdown: false keeps the queue usable after the stream ends.
        events: Stream.fromQueue(input, { shutdown: false }),
      }),
    ),
    Layer.succeed(
      Outbox,
      Outbox.of({
        // Record assistant output in the transcript, then surface the action.
        publish: (action) =>
          Effect.gen(function* () {
            if (action.kind === "send" || action.kind === "reply") {
              remember(action.threadId, `assistant: ${action.text}`)
            }
            yield* output.offer(action).pipe(Effect.asVoid)
          }),
        // Emit one typing pulse immediately, then keep pulsing on a scoped
        // fiber every TYPING_INTERVAL while `self` runs; the fiber dies with
        // the scope when `self` completes.
        withTyping: <A, E, R>(threadId: ThreadId, self: Effect.Effect<A, E, R>) =>
          Effect.scoped(
            Effect.gen(function* () {
              const pulse = output.offer(
                Typing.make({
                  kind: "typing",
                  threadId,
                }),
              ).pipe(Effect.asVoid)
              yield* pulse
              yield* Effect.forkScoped(
                Effect.repeat(pulse, Schedule.spaced(TYPING_INTERVAL)).pipe(
                  Effect.delay(TYPING_INTERVAL),
                ),
              )
              return yield* self
            }),
          ),
      }),
    ),
    Layer.succeed(
      History,
      History.of({
        // Rebuild a prompt with prior transcript context. If the latest user
        // message is already the transcript tail (recorded by sendTo), drop it
        // so it isn't duplicated in the prompt.
        rehydrate: (threadId, latest: string) =>
          Effect.sync(() => {
            const lines = history.get(threadId) ?? []
            const prior = lines.at(-1) === `user: ${latest}` ? lines.slice(0, -1) : lines
            if (prior.length === 0) return latest
            return [
              "Conversation history from this same thread (oldest to newest):",
              prior.join("\n"),
              "",
              "Continue the same conversation and respond to the latest user message:",
              latest,
            ].join("\n")
          }),
      }),
    ),
    Layer.succeed(
      Threads,
      Threads.of({
        // Thread messages resolve to themselves; channel messages get (or
        // reuse, keyed by messageId) a freshly named thread.
        ensure: (event) =>
          Effect.sync(() => {
            if (event.kind === "thread_message") {
              parents.set(event.threadId, event.channelId)
              return ThreadRef.make({ threadId: event.threadId, channelId: event.channelId })
            }
            const known = roots.get(event.messageId)
            if (known) return ThreadRef.make({ threadId: known, channelId: event.channelId })
            const threadId = name()
            roots.set(event.messageId, threadId)
            parents.set(threadId, event.channelId)
            return ThreadRef.make({ threadId, channelId: event.channelId })
          }),
      }),
    ),
  )
  return {
    layer,
    send,
    sendTo,
    take: output.take,
    actions: Stream.fromQueue(output, { shutdown: false }),
  } satisfies Tui
})

View File

@@ -0,0 +1,52 @@
import { describe, expect, it } from "bun:test"
import { ThreadId } from "../types"
import { Send, Typing } from "../conversation/model/schema"
import { autoThread, base, channelFrom, parse, prompt, queueTarget, scopeText, threadFrom } from "./state"
describe("cli-state", () => {
  // Pin the parsed shape of every slash command, including numeric-vs-id
  // disambiguation for /logs and the /destroy -> recreate alias.
  it("parses commands", () => {
    expect(parse("hello")).toBeNull()
    expect(parse("/help")).toEqual({ kind: "help" })
    expect(parse("/channel")).toEqual({ kind: "channel" })
    expect(parse("/threads")).toEqual({ kind: "threads" })
    expect(parse("/pick")).toEqual({ kind: "pick", index: null })
    expect(parse("/pick 2")).toEqual({ kind: "pick", index: 2 })
    expect(parse("/active")).toEqual({ kind: "active" })
    expect(parse("/thread")).toEqual({ kind: "thread", threadId: null })
    expect(parse("/thread abc")).toEqual({ kind: "thread", threadId: ThreadId.make("abc") })
    expect(parse("/status")).toEqual({ kind: "status", threadId: null })
    expect(parse("/status abc")).toEqual({ kind: "status", threadId: ThreadId.make("abc") })
    expect(parse("/logs")).toEqual({ kind: "logs", lines: 120, threadId: null })
    expect(parse("/logs 80")).toEqual({ kind: "logs", lines: 80, threadId: null })
    expect(parse("/logs abc")).toEqual({ kind: "logs", lines: 120, threadId: ThreadId.make("abc") })
    expect(parse("/logs 80 abc")).toEqual({ kind: "logs", lines: 80, threadId: ThreadId.make("abc") })
    expect(parse("/pause")).toEqual({ kind: "pause", threadId: null })
    expect(parse("/recreate")).toEqual({ kind: "recreate", threadId: null })
    expect(parse("/destroy")).toEqual({ kind: "recreate", threadId: null })
    expect(parse("/resume")).toEqual({ kind: "resume", threadId: null })
    expect(parse("/nope")).toEqual({ kind: "unknown", name: "nope" })
  })
  // Scope rendering helpers and the channel/thread round-trip.
  it("formats scope and prompt", () => {
    const a = base()
    const b = threadFrom(a, ThreadId.make("t1"))
    expect(scopeText(a)).toBe("channel:local-channel")
    expect(scopeText(b)).toBe("thread:t1")
    expect(prompt(a)).toBe("channel> ")
    expect(prompt(b)).toBe("thread:t1> ")
    expect(queueTarget(a)).toBe("channel")
    expect(queueTarget(b)).toBe("thread")
    expect(channelFrom(b)).toEqual(a)
  })
  // autoThread only hops channel->thread for unseen threads; a known thread
  // (third arg true) or an existing thread scope is left alone.
  it("auto switches from channel to thread on action", () => {
    const a = base()
    const typing = Typing.make({ kind: "typing", threadId: ThreadId.make("t-a") })
    const send = Send.make({ kind: "send", threadId: ThreadId.make("t-b"), text: "ok" })
    expect(autoThread(a, typing)).toEqual(threadFrom(a, ThreadId.make("t-a")))
    expect(autoThread(a, send)).toEqual(threadFrom(a, ThreadId.make("t-b")))
    expect(autoThread(a, send, true)).toEqual(a)
    expect(autoThread(threadFrom(a, ThreadId.make("t0")), send)).toEqual(threadFrom(a, ThreadId.make("t0")))
  })
})

View File

@@ -0,0 +1,102 @@
import { ThreadId } from "../types"
import type { Action } from "../conversation/model/schema"
// The local CLI has no real Discord channel; every scope is rooted in this
// single synthetic channel id.
const LOCAL_CHANNEL = "local-channel" as const
// Where user input is currently directed: the local channel itself, or a
// specific thread inside it.
export type Scope =
  | { kind: "channel"; channelId: typeof LOCAL_CHANNEL }
  | { kind: "thread"; threadId: ThreadId; channelId: typeof LOCAL_CHANNEL }
// Parsed representation of a "/command" input line. A null threadId means
// "use the currently scoped thread"; `unknown` captures unrecognized names.
export type Command =
  | { kind: "channel" }
  | { kind: "help" }
  | { kind: "threads" }
  | { kind: "pick"; index: number | null }
  | { kind: "active" }
  | { kind: "thread"; threadId: ThreadId | null }
  | { kind: "status"; threadId: ThreadId | null }
  | { kind: "logs"; threadId: ThreadId | null; lines: number }
  | { kind: "pause"; threadId: ThreadId | null }
  // Note: "/destroy" also parses to kind "recreate" (alias).
  | { kind: "recreate"; threadId: ThreadId | null }
  | { kind: "resume"; threadId: ThreadId | null }
  | { kind: "unknown"; name: string }
// Initial scope: the synthetic local channel.
export const base = (): Scope => {
  return { kind: "channel", channelId: LOCAL_CHANNEL }
}
// Normalize an optional command argument into a ThreadId, or null when the
// argument is missing or blank.
const target = (value: string | undefined) => {
  const trimmed = (value ?? "").trim()
  return trimmed ? ThreadId.make(trimmed) : null
}
// Parse a line-count argument; null unless it is a positive whole number.
const parseLines = (raw: string | undefined) => {
  const count = Number(raw)
  // Number.isInteger already rejects NaN and ±Infinity, so no separate
  // finiteness check is needed.
  return Number.isInteger(count) && count > 0 ? count : null
}
// Parse a 1-based pick index; null unless it is a positive integer.
const parseIndex = (raw: string | undefined) => {
  const index = Number(raw)
  if (!Number.isInteger(index)) return null
  return index > 0 ? index : null
}
// Parse one input line into a Command; returns null for plain (non-"/") text.
export const parse = (line: string): Command | null => {
  const text = line.trim()
  if (!text.startsWith("/")) return null
  // split always yields at least one element, so `first` only defaults for
  // type-narrowing purposes.
  const [first = "", ...args] = text.slice(1).split(/\s+/)
  const head = first.toLowerCase()
  switch (head) {
    case "channel":
      return { kind: "channel" }
    case "help":
      return { kind: "help" }
    case "threads":
      return { kind: "threads" }
    case "pick":
      return { kind: "pick", index: parseIndex(args.at(0)) }
    case "active":
      return { kind: "active" }
    case "thread":
      return { kind: "thread", threadId: target(args.at(0)) }
    case "status":
      return { kind: "status", threadId: target(args.at(0)) }
    case "logs": {
      // "/logs [n] [thread]" — a non-numeric first argument is treated as the
      // thread id and the line count falls back to 120.
      const lines = parseLines(args.at(0))
      if (lines === null) return { kind: "logs", lines: 120, threadId: target(args.at(0)) }
      return { kind: "logs", lines, threadId: target(args.at(1)) }
    }
    case "pause":
      return { kind: "pause", threadId: target(args.at(0)) }
    case "recreate":
    case "destroy":
      // "/destroy" is an alias for "/recreate".
      return { kind: "recreate", threadId: target(args.at(0)) }
    case "resume":
      return { kind: "resume", threadId: target(args.at(0)) }
    default:
      return { kind: "unknown", name: head }
  }
}
// Human-readable label for a scope, e.g. "channel:local-channel" / "thread:t1".
export const scopeText = (scope: Scope) => {
  if (scope.kind === "thread") return `thread:${scope.threadId}`
  return `channel:${scope.channelId}`
}
// Readline prompt string for the current scope.
export const prompt = (scope: Scope) => {
  if (scope.kind === "thread") return `thread:${scope.threadId}> `
  return "channel> "
}
// Which queue an input line is routed to for this scope.
export const queueTarget = (scope: Scope) => {
  return scope.kind === "thread" ? "thread" : "channel"
}
// Narrow a scope to a specific thread, keeping the same channel.
export const threadFrom = (scope: Scope, threadId: ThreadId): Scope => {
  return { kind: "thread", channelId: scope.channelId, threadId }
}
// Widen a scope back to its channel, dropping any thread.
export const channelFrom = (scope: Scope): Scope => {
  return { kind: "channel", channelId: scope.channelId }
}
// When a thread-directed action (typing/send/reply) arrives while the user is
// at channel scope, follow it into that thread — unless the thread is already
// known or we are already inside a thread.
export const autoThread = (scope: Scope, action: Action, known = false): Scope => {
  const followable = action.kind === "typing" || action.kind === "send" || action.kind === "reply"
  if (scope.kind === "thread" || !followable || known) return scope
  return threadFrom(scope, action.threadId)
}

View File

@@ -0,0 +1,83 @@
# Conversation Module
Transport-agnostic conversation engine. Discord-specific code lives in `implementations/discord/`, not here.
## Hexagonal Architecture (Ports & Adapters)
The conversation service depends on 5 port interfaces, NOT concrete implementations:
- `Inbox` — a `Stream.Stream<Inbound>` of incoming events
- `Outbox` — publishes `Action` (send/reply/typing) and wraps effects with typing indicators
- `History` — rehydrates thread context when sessions change
- `Threads` — resolves channel messages to thread targets (creates Discord threads)
- `ConversationLedger` — durable dedup, state checkpointing, offset tracking
The `Conversation` service (`services/conversation.ts`) consumes these ports. Implementations are swapped at the Layer level:
- `implementations/discord/` provides all 5 ports for production via `DiscordConversationServices.portLayer`
- `implementations/local/` provides all 5 for the local CLI via `makeTui`
- Tests use `ConversationLedger.noop` and `Outbox.noop` in-memory stubs
## Event Flow (Non-Obvious)
1. Discord `messageCreate``onMessage` callback → `Runtime.runPromise(runtime)(ingestMessage(msg))`
- This bridges callback-land into Effect. The runtime is captured once at Layer construction.
2. `ingestMessage``ledger.admit(event)` (dedup) → `input.offer(event)` (Queue)
3. `Inbox.events` = `Stream.fromQueue(input)` — consumed by `Conversation.run`
4. `Conversation.run` maps each event through `turn()` with `{ concurrency: "unbounded", unordered: true }`
5. `turn()` serializes per-key via `ActorMap` (`keyOf` = `thread:<id>` or `channel:<id>`)
6. Key insight: **unbounded concurrency across threads, serial within each thread**
## Ledger Checkpointing (Crash Recovery)
The `ConversationLedger` stores intermediate state so retries don't re-call the LLM:
- `admit` → inserts with status `pending`, returns `false` if already seen (dedup)
- `start` → atomically moves `pending``processing`, increments `attempts`, returns snapshot
- `setTarget` → caches resolved `thread_id`/`channel_id`
- `setPrompt` → caches the (possibly rehydrated) prompt text + `session_id`
- `setResponse` → caches the LLM response text
- `complete` → marks `completed`
- `retry` → resets to `pending` with `last_error`
On restart: `replayPending()` resets `processing``pending` and returns all pending events.
On recovery: if `response_text` is already set, the turn skips the LLM call and just re-publishes.
## Offset Tracking
`ConversationLedger.getOffset`/`setOffset` persist the last-seen Discord message ID per source (`channel:<id>` or `thread:<id>`). On startup, `recoverMissedMessages` in the Discord adapter fetches messages after the stored offset to catch anything missed while offline.
## Error Union Pattern
`ConversationError` is a `Schema.Union` of 6 tagged errors, each with a `retriable: boolean` field. The retry schedule (`turnRetry`) checks `error.retriable` via `Schedule.whileInput`. Non-retriable errors trigger a user-visible "try again" message before failing.
## `portLayer` Pattern (Multi-Service Layer)
`DiscordConversationServices.portLayer` uses `Layer.scopedContext` to provide **4 services in a single Layer** by building a `Context` manually:
```ts
return Context.empty().pipe(
Context.add(Inbox, inbox),
Context.add(Outbox, outbox),
Context.add(History, history),
Context.add(Threads, threads),
)
```
This is the pattern for providing multiple related ports from one implementation module.
## Turn Routing
`TurnRouter` in `src/discord/turn-routing.ts` decides whether to respond to unmentioned thread messages:
- Mode `off`: always respond
- Mode `heuristic`: regex-based rules, default respond on uncertainty
- Mode `ai`: calls Haiku via `@effect/ai-anthropic` with `max_tokens: 10` for RESPOND/SKIP
- Heuristic runs first in `ai` mode; AI is only called when heuristic returns `null`
## Files That Must Change Together
- Adding a new `Inbound` event kind → `model/schema.ts` + `implementations/discord/index.ts` + `implementations/local/index.ts`
- Adding a new `Action` kind → `model/schema.ts` + both implementations' `publish`/outbox handling
- Adding a new error type → `model/errors.ts` + update `ConversationError` union + handle in `conversation.ts`
- Adding a new port service → `services/` interface + both `implementations/` + wire in `src/index.ts` layer chain

View File

@@ -0,0 +1,25 @@
# Conversation Runtime
This folder contains the active conversation runtime used by the Discord bot.
## Active layout
- `model/*`: normalized inbound events + outbound actions and conversation error model.
- `conversation.ts`: core orchestration loop.
- `inbox.ts`, `outbox.ts`, `threads.ts`, `history.ts`, `dedup.ts`, `offsets.ts`: service contracts used by the runtime.
- `thread/cluster/*`: thread-scoped cluster runtime (`ThreadEntity`, `ThreadChatCluster`, `ThreadControlCluster`) for send/status/recreate/control.
- `../discord/adapter.ts`: Discord adapter implementation for inbox/outbox/thread/history ports.
- `../control/*`: local CLI/controller tooling.
## Current execution path
1. `Conversation.run` consumes `Inbox.events`.
2. `Conversation.turn` routes and resolves a thread target.
3. `Conversation` calls `ThreadChatCluster.send`.
4. `ThreadEntity` handles per-thread lifecycle/recovery/idempotent send.
5. `Outbox` publishes `typing` / `send` actions.
## Notes
- This module is wired from `packages/discord/src/index.ts`.
- Per-thread send idempotency is keyed by `messageId` in `thread/cluster` `send` RPC.

View File

@@ -0,0 +1,777 @@
import { describe, expect } from "bun:test"
import { Deferred, Effect, Fiber, Layer, Option, Predicate, Stream } from "effect"
import { ThreadChatCluster, ThreadChatError } from "./thread/cluster"
import { TurnRouter, TurnRoutingDecision } from "../discord/turn-routing"
import { classifyOpenCodeFailure, DatabaseError, OpenCodeClientError, SandboxDeadError } from "../errors"
import { effectTest, testConfigLayer } from "../test/effect"
import { ChannelId, GuildId, SandboxId, SessionId, SessionInfo, ThreadId } from "../types"
import { Mention, ThreadMessage, ThreadRef, Typing, type Action, type Inbound } from "./model/schema"
import { Inbox } from "./inbox"
import { IngressDedup } from "./dedup"
import { Outbox } from "./outbox"
import { Threads } from "./threads"
import { Conversation } from "./conversation"
// SessionInfo fixture; thread/channel default to the suite's primary ids.
const makeSession = (id: string, threadId = "t1", channelId = "c1") => {
  return SessionInfo.make({
    threadId: ThreadId.make(threadId),
    channelId: ChannelId.make(channelId),
    guildId: GuildId.make("g1"),
    sandboxId: SandboxId.make("sb1"),
    sessionId: SessionId.make(id),
    previewUrl: "https://preview",
    previewToken: null,
    status: "active",
    lastError: null,
    resumeFailCount: 0,
  })
}
const makeThreadEvent = (props: {
threadId: string
channelId: string
messageId: string
content: string
}) =>
ThreadMessage.make({
kind: "thread_message",
threadId: ThreadId.make(props.threadId),
channelId: ChannelId.make(props.channelId),
messageId: props.messageId,
guildId: GuildId.make("g1"),
botUserId: "bot-1",
botRoleId: "role-1",
authorId: "u1",
authorIsBot: false,
mentionsEveryone: false,
mentions: Mention.make({ userIds: [], roleIds: [] }),
content: props.content,
})
// Shorthand: a thread message on the default thread (t1/c1) with id m1.
const makeEvent = (content: string) => {
  return makeThreadEvent({ threadId: "t1", channelId: "c1", messageId: "m1", content })
}
// Channel (non-thread) message fixture whose mentions include the bot user.
const makeChannelEvent = (content: string) => {
  return {
    kind: "channel_message" as const,
    channelId: ChannelId.make("c-root"),
    messageId: "m-root",
    guildId: GuildId.make("g1"),
    botUserId: "bot-1",
    botRoleId: "role-1",
    authorId: "u1",
    authorIsBot: false,
    mentionsEveryone: false,
    mentions: Mention.make({ userIds: ["bot-1"], roleIds: [] }),
    content,
  }
}
// TurnRouter stub: every routing query returns the fixed decision;
// thread-name generation is never exercised by these tests.
const makeRouterLayer = (shouldRespond: boolean) => {
  const router = TurnRouter.of({
    shouldRespond: () =>
      Effect.succeed(TurnRoutingDecision.make({ shouldRespond, reason: "test" })),
    generateThreadName: () => Effect.succeed("unused"),
  })
  return Layer.succeed(TurnRouter, router)
}
/**
 * Builds a live Conversation layer in which every dependency is an in-memory
 * stub driven by `props`:
 * - `events` feeds the fake Inbox stream
 * - `tracked` pre-seeds the per-thread session state
 * - `resolves` / `resolve` control which SessionInfo each send resolves to
 * - `send` simulates the agent call (success or typed failure)
 * - `actions` / `prompts` collect published actions and prompt texts for asserts
 * - `recreateCalls` (optional) records thread ids passed to `recreate`
 *
 * NOTE(review): `rehydrate` is accepted but never referenced in this body —
 * presumably kept so existing call sites stay uniform; confirm before removing.
 */
const makeConversationLayer = (props: {
  events: ReadonlyArray<Inbound>
  tracked: Option.Option<SessionInfo>
  resolves: ReadonlyArray<SessionInfo>
  resolve?: (threadId: ThreadId, channelId: ChannelId, guildId: GuildId) => SessionInfo
  send: (
    session: SessionInfo,
    text: string,
  ) => Effect.Effect<string, OpenCodeClientError | SandboxDeadError | DatabaseError>
  rehydrate: (threadId: ThreadId, latest: string) => Effect.Effect<string>
  shouldRespond?: boolean
  actions: Array<Action>
  prompts: Array<string>
  recreateCalls?: Array<ThreadId>
}) => {
  // Cursor into props.resolves; clamped to the final entry once exhausted.
  const resolveIndex = { value: 0 }
  // threadId -> last session stored by a successful send.
  const state = new Map<string, SessionInfo>()
  if (Option.isSome(props.tracked)) state.set(props.tracked.value.threadId, props.tracked.value)
  const inboxLayer = Layer.succeed(
    Inbox,
    Inbox.of({
      events: Stream.fromIterable(props.events),
    }),
  )
  const outboxLayer = Layer.succeed(
    Outbox,
    Outbox.of({
      publish: (action) =>
        Effect.sync(() => {
          props.actions.push(action)
        }),
      // Records a synthetic `typing` action, then runs the wrapped effect.
      withTyping: <A, E, R>(threadId: ThreadId, self: Effect.Effect<A, E, R>) =>
        Effect.gen(function* () {
          props.actions.push(
            Typing.make({
              kind: "typing",
              threadId,
            }),
          )
          return yield* self
        }),
    }),
  )
  // Thread messages keep their own thread; channel messages are "ensured"
  // into a fixed fresh thread t-new.
  const threadsLayer = Layer.succeed(
    Threads,
    Threads.of({
      ensure: (event) => {
        if (event.kind === "thread_message") {
          return Effect.succeed(ThreadRef.make({ threadId: event.threadId, channelId: event.channelId }))
        }
        return Effect.succeed(ThreadRef.make({ threadId: ThreadId.make("t-new"), channelId: event.channelId }))
      },
    }),
  )
  const resolveSession = (threadId: ThreadId, channelId: ChannelId, guildId: GuildId): SessionInfo => {
    if (props.resolve) return props.resolve(threadId, channelId, guildId)
    const ix = Math.min(resolveIndex.value, props.resolves.length - 1)
    const session = props.resolves[ix]!
    resolveIndex.value += 1
    return session
  }
  // Wraps arbitrary failures in ThreadChatError; only SandboxDeadError and
  // OpenCodeClientError are marked retriable.
  const toThreadError = (threadId: ThreadId, cause: unknown): ThreadChatError => {
    if (cause instanceof ThreadChatError) return cause
    return ThreadChatError.make({
      threadId,
      cause,
      retriable: Predicate.isTagged(cause, "SandboxDeadError") || Predicate.isTagged(cause, "OpenCodeClientError"),
    })
  }
  const threadChatLayer = Layer.succeed(
    ThreadChatCluster,
    ThreadChatCluster.of({
      send: (input) =>
        Effect.sync(() => {
          // Prompt is recorded before the simulated send runs (or fails).
          props.prompts.push(input.text)
          const session = resolveSession(input.threadId, input.channelId, input.guildId)
          return session
        }).pipe(
          Effect.flatMap((session) =>
            props.send(session, input.text).pipe(
              Effect.map((text) => {
                // changedSession is true on first contact or when the resolved
                // session id differs from the previously tracked one.
                const prev = state.get(input.threadId)
                state.set(input.threadId, session)
                return {
                  text,
                  session,
                  changedSession: prev ? prev.sessionId !== session.sessionId : true,
                }
              }),
              Effect.mapError((cause) => toThreadError(input.threadId, cause)),
            )),
        ),
      // Unknown threads fall back to a synthetic "s-tracked" session so
      // status queries always render something.
      status: (threadId) =>
        Effect.succeed(
          Option.fromNullable(
            state.get(threadId) ?? SessionInfo.make({
              threadId,
              channelId: ChannelId.make("c1"),
              guildId: GuildId.make("g1"),
              sandboxId: SandboxId.make("sb1"),
              sessionId: SessionId.make("s-tracked"),
              previewUrl: "https://preview",
              previewToken: null,
              status: "active",
              lastError: null,
              resumeFailCount: 0,
            }),
          ),
        ),
      recreate: (threadId) =>
        Effect.sync(() => {
          props.recreateCalls?.push(threadId)
          state.delete(threadId)
        }),
    }),
  )
  // NOTE(review): despite its name, IngressDedup.noop still deduplicates here
  // (see the "noop dedup service deduplicates" test below) — confirm naming.
  return Conversation.layer.pipe(
    Layer.provideMerge(inboxLayer),
    Layer.provideMerge(outboxLayer),
    Layer.provideMerge(IngressDedup.noop),
    Layer.provideMerge(threadsLayer),
    Layer.provideMerge(threadChatLayer),
    Layer.provideMerge(makeRouterLayer(props.shouldRespond ?? true)),
    Layer.provideMerge(testConfigLayer),
  )
}
// Core Conversation behavior against fully stubbed ports: routing, sends,
// retries, command handling, error sanitization, and cross-thread concurrency.
describe("Conversation", () => {
  // Happy path through `run`: one inbound event produces typing then send.
  effectTest("run consumes fake inbox and publishes typing + send", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const live = makeConversationLayer({
      events: [makeEvent("hello")],
      tracked: Option.none(),
      resolves: [makeSession("s1")],
      send: (_session, text) => Effect.succeed(`echo:${text}`),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.run
      expect(prompts).toEqual(["hello"])
      expect(actions.map((x) => x.kind)).toEqual(["typing", "send"])
      const sent = actions[1]
      if (!sent) throw new Error("missing send action")
      expect(sent.kind).toBe("send")
      expect(sent.threadId).toBe(ThreadId.make("t1"))
      if (sent.kind === "send") expect(sent.text).toBe("echo:hello")
    }).pipe(Effect.provide(live))
  })
  // Direct `turn` call on a thread event routes through ThreadChatCluster.
  effectTest("turn sends thread prompt through ThreadChatCluster", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const live = makeConversationLayer({
      events: [],
      tracked: Option.some(makeSession("s-old")),
      resolves: [makeSession("s-new")],
      send: () => Effect.succeed("ok"),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.turn(makeEvent("help me"))
      expect(prompts).toEqual(["help me"])
      expect(actions.map((x) => x.kind)).toEqual(["typing", "send"])
      const sent = actions[1]
      if (!sent) throw new Error("missing send action")
      expect(sent.kind).toBe("send")
      expect(sent.threadId).toBe(ThreadId.make("t1"))
      if (sent.kind === "send") expect(sent.text).toBe("ok")
    }).pipe(Effect.provide(live))
  })
  // A dead-sandbox failure surfaces from `turn` as a Left; no send is published.
  effectTest("turn returns retriable error for dead sandbox from ThreadChatCluster", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const calls = { value: 0 }
    const live = makeConversationLayer({
      events: [],
      tracked: Option.none(),
      resolves: [makeSession("s-a"), makeSession("s-b")],
      send: (_session, text) => {
        // First call dies; a second call (not reached via bare `turn`) would succeed.
        if (calls.value === 0) {
          calls.value += 1
          return Effect.fail(
            SandboxDeadError.make({
              threadId: ThreadId.make("t1"),
              reason: "dead",
            }),
          )
        }
        calls.value += 1
        return Effect.succeed(`ok:${text}`)
      },
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      const out = yield* conversation.turn(makeEvent("fix build")).pipe(Effect.either)
      expect(out._tag).toBe("Left")
      expect(prompts).toEqual(["fix build"])
      expect(actions.map((x) => x.kind)).toEqual(["typing"])
    }).pipe(Effect.provide(live))
  })
  // Channel messages get promoted into the thread the Threads port "ensures" (t-new).
  effectTest("turn handles a channel message by using ensured thread target", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const live = makeConversationLayer({
      events: [],
      tracked: Option.none(),
      resolves: [makeSession("s1")],
      send: (_session, text) => Effect.succeed(`echo:${text}`),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.turn(makeChannelEvent("from channel"))
      expect(prompts).toEqual(["from channel"])
      expect(actions.map((x) => x.kind)).toEqual(["typing", "send"])
      const sent = actions[1]
      if (!sent) throw new Error("missing send action")
      expect(sent.kind).toBe("send")
      expect(sent.threadId).toBe(ThreadId.make("t-new"))
      if (sent.kind === "send") expect(sent.text).toBe("echo:from channel")
    }).pipe(Effect.provide(live))
  })
  // In-thread "!status" is intercepted: the agent never sees a prompt.
  effectTest("!status is handled as a command without sending to agent", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const live = makeConversationLayer({
      events: [],
      tracked: Option.some(makeSession("s1")),
      resolves: [makeSession("s1")],
      send: (_session, text) => Effect.succeed(`echo:${text}`),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.turn(makeEvent("!status"))
      expect(prompts).toEqual([])
      expect(actions.filter((action) => action.kind === "send").length).toBe(1)
      const sent = actions.find((action) => action.kind === "send")
      if (!sent || sent.kind !== "send") throw new Error("missing status output")
      expect(sent.text.includes("**Status:**")).toBe(true)
      expect(sent.text.includes("**Sandbox:**")).toBe(true)
      expect(sent.text.includes("**Session:**")).toBe(true)
    }).pipe(Effect.provide(live))
  })
  // In-thread "!recreate" calls the cluster's recreate and skips the agent.
  effectTest("!recreate invokes recreate command and does not call agent", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const recreateCalls: Array<ThreadId> = []
    const live = makeConversationLayer({
      events: [],
      tracked: Option.some(makeSession("s1")),
      resolves: [makeSession("s1")],
      send: (_session, text) => Effect.succeed(`echo:${text}`),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
      recreateCalls,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.turn(makeEvent("!recreate"))
      expect(prompts).toEqual([])
      expect(recreateCalls).toEqual([ThreadId.make("t1")])
      const sent = actions.find((action) => action.kind === "send")
      if (!sent || sent.kind !== "send") throw new Error("missing recreate output")
      expect(sent.text.includes("Session recreated")).toBe(true)
    }).pipe(Effect.provide(live))
  })
  // A retriable 502 on the first attempt is retried by `run` itself:
  // two prompts, two typings, then the successful send.
  effectTest("run retries retriable failures in-process", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const calls = { value: 0 }
    const live = makeConversationLayer({
      events: [makeEvent("retry now")],
      tracked: Option.none(),
      resolves: [makeSession("s1")],
      send: (_session, text) =>
        Effect.gen(function* () {
          calls.value += 1
          if (calls.value === 1) {
            return yield* new OpenCodeClientError({
              operation: "sendPrompt",
              statusCode: 502,
              body: "StatusCode: non 2xx status code (502 POST https://proxy.daytona.works/session/s1/message)",
              kind: classifyOpenCodeFailure(
                502,
                "StatusCode: non 2xx status code (502 POST https://proxy.daytona.works/session/s1/message)",
              ),
            })
          }
          return `ok:${text}`
        }),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.run
      expect(prompts).toEqual(["retry now", "retry now"])
      expect(actions.map((x) => x.kind)).toEqual(["typing", "typing", "send"])
      const sent = actions[2]
      if (!sent) throw new Error("missing send action")
      expect(sent.kind).toBe("send")
      expect(sent.threadId).toBe(ThreadId.make("t1"))
      if (sent.kind === "send") expect(sent.text).toBe("ok:retry now")
    }).pipe(Effect.provide(live))
  })
  // Non-retriable failure: user sees a generic message, with no internal
  // details (URLs etc.) leaked into the reply.
  effectTest("turn sends generic non-retriable error text", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const live = makeConversationLayer({
      events: [],
      tracked: Option.none(),
      resolves: [makeSession("s1")],
      send: () =>
        Effect.fail(new DatabaseError({
          cause: new Error("StatusCode: non 2xx status code (502 POST https://proxy.daytona.works/session/s1/message)"),
        })),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.turn(makeEvent("oops")).pipe(Effect.either)
      expect(prompts).toEqual(["oops"])
      expect(actions.map((x) => x.kind)).toEqual(["typing", "send"])
      const sent = actions[1]
      if (!sent) throw new Error("missing send action")
      expect(sent.kind).toBe("send")
      expect(sent.threadId).toBe(ThreadId.make("t1"))
      if (sent.kind === "send") {
        expect(sent.text).toBe("Something went wrong. Please try again in a moment.")
        expect(sent.text.includes("proxy.daytona.works")).toBe(false)
      }
    }).pipe(Effect.provide(live))
  })
  // Concurrency shape: t1 blocks on `gate` while t2 completes — proving
  // different thread keys are processed in parallel. Timeout fails the test
  // if t2 was queued behind t1.
  effectTest("run processes different thread keys concurrently", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const gate = Effect.runSync(Deferred.make<void>())
    const fast = Effect.runSync(Deferred.make<void>())
    const live = makeConversationLayer({
      events: [
        makeThreadEvent({ threadId: "t1", channelId: "c1", messageId: "m1", content: "slow" }),
        makeThreadEvent({ threadId: "t2", channelId: "c2", messageId: "m2", content: "fast" }),
      ],
      tracked: Option.none(),
      resolves: [makeSession("s-a", "t1", "c1"), makeSession("s-b", "t2", "c2")],
      resolve: (threadId, channelId) => {
        if (threadId === "t2") return makeSession("s-b", "t2", channelId)
        return makeSession("s-a", "t1", channelId)
      },
      send: (session, text) =>
        Effect.gen(function* () {
          if (session.threadId === ThreadId.make("t2")) {
            yield* Deferred.succeed(fast, undefined)
            return `ok:${text}`
          }
          yield* Deferred.await(gate)
          return `ok:${text}`
        }),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      const fiber = yield* Effect.forkScoped(conversation.run)
      yield* Deferred.await(fast).pipe(
        Effect.timeoutFail({ duration: "1 second", onTimeout: () => "thread-concurrency-blocked" }),
      )
      yield* Deferred.succeed(gate, undefined)
      yield* Fiber.join(fiber)
      expect(prompts.sort()).toEqual(["fast", "slow"])
      expect(actions.filter((x) => x.kind === "typing").length).toBe(2)
    }).pipe(Effect.provide(live))
  })
})
// --- Duplicate processing tests ---
/**
 * In-memory IngressDedup stub that also records every dedup call, so tests
 * can assert both outcomes (first-seen vs duplicate) and call counts.
 */
const makeTrackingDedup = () => {
  const seen = new Set<string>()
  const dedupCalls: Array<string> = []
  const service: IngressDedup.Service = {
    dedup: (messageId) =>
      Effect.sync(() => {
        dedupCalls.push(messageId)
        // true = first sighting (caller should process); false = duplicate.
        const firstTime = !seen.has(messageId)
        if (firstTime) seen.add(messageId)
        return firstTime
      }),
  }
  return { service, seen, dedupCalls }
}
/**
 * Same stub wiring as makeConversationLayer but with an injectable
 * IngressDedup service (`props.dedup`) instead of IngressDedup.noop, and
 * without `recreateCalls` tracking.
 *
 * NOTE(review): this is a near line-for-line duplicate of
 * makeConversationLayer — consider extracting the shared port stubs.
 * NOTE(review): `rehydrate` is accepted but never referenced here either.
 */
const makeConversationLayerWithDedup = (props: {
  events: ReadonlyArray<Inbound>
  tracked: Option.Option<SessionInfo>
  resolves: ReadonlyArray<SessionInfo>
  resolve?: (threadId: ThreadId, channelId: ChannelId, guildId: GuildId) => SessionInfo
  send: (
    session: SessionInfo,
    text: string,
  ) => Effect.Effect<string, OpenCodeClientError | SandboxDeadError | DatabaseError>
  rehydrate: (threadId: ThreadId, latest: string) => Effect.Effect<string>
  shouldRespond?: boolean
  actions: Array<Action>
  prompts: Array<string>
  dedup: IngressDedup.Service
}) => {
  // Cursor into props.resolves; clamped to the final entry once exhausted.
  const resolveIndex = { value: 0 }
  // threadId -> last session stored by a successful send.
  const state = new Map<string, SessionInfo>()
  if (Option.isSome(props.tracked)) state.set(props.tracked.value.threadId, props.tracked.value)
  const inboxLayer = Layer.succeed(
    Inbox,
    Inbox.of({
      events: Stream.fromIterable(props.events),
    }),
  )
  const outboxLayer = Layer.succeed(
    Outbox,
    Outbox.of({
      publish: (action) =>
        Effect.sync(() => {
          props.actions.push(action)
        }),
      // Records a synthetic `typing` action, then runs the wrapped effect.
      withTyping: <A, E, R>(threadId: ThreadId, self: Effect.Effect<A, E, R>) =>
        Effect.gen(function* () {
          props.actions.push(
            Typing.make({
              kind: "typing",
              threadId,
            }),
          )
          return yield* self
        }),
    }),
  )
  // Thread messages keep their own thread; channel messages go to t-new.
  const threadsLayer = Layer.succeed(
    Threads,
    Threads.of({
      ensure: (event) => {
        if (event.kind === "thread_message") {
          return Effect.succeed(ThreadRef.make({ threadId: event.threadId, channelId: event.channelId }))
        }
        return Effect.succeed(ThreadRef.make({ threadId: ThreadId.make("t-new"), channelId: event.channelId }))
      },
    }),
  )
  const resolveSession = (threadId: ThreadId, channelId: ChannelId, guildId: GuildId): SessionInfo => {
    if (props.resolve) return props.resolve(threadId, channelId, guildId)
    const ix = Math.min(resolveIndex.value, props.resolves.length - 1)
    const session = props.resolves[ix]!
    resolveIndex.value += 1
    return session
  }
  // Only SandboxDeadError and OpenCodeClientError are marked retriable.
  const toThreadError = (threadId: ThreadId, cause: unknown): ThreadChatError => {
    if (cause instanceof ThreadChatError) return cause
    return ThreadChatError.make({
      threadId,
      cause,
      retriable: Predicate.isTagged(cause, "SandboxDeadError") || Predicate.isTagged(cause, "OpenCodeClientError"),
    })
  }
  const threadChatLayer = Layer.succeed(
    ThreadChatCluster,
    ThreadChatCluster.of({
      send: (input) =>
        Effect.sync(() => {
          props.prompts.push(input.text)
          const session = resolveSession(input.threadId, input.channelId, input.guildId)
          return session
        }).pipe(
          Effect.flatMap((session) =>
            props.send(session, input.text).pipe(
              Effect.map((text) => {
                const prev = state.get(input.threadId)
                state.set(input.threadId, session)
                return {
                  text,
                  session,
                  changedSession: prev ? prev.sessionId !== session.sessionId : true,
                }
              }),
              Effect.mapError((cause) => toThreadError(input.threadId, cause)),
            )),
        ),
      // Unknown threads fall back to a synthetic "s-tracked" session.
      status: (threadId) =>
        Effect.succeed(
          Option.fromNullable(
            state.get(threadId) ?? SessionInfo.make({
              threadId,
              channelId: ChannelId.make("c1"),
              guildId: GuildId.make("g1"),
              sandboxId: SandboxId.make("sb1"),
              sessionId: SessionId.make("s-tracked"),
              previewUrl: "https://preview",
              previewToken: null,
              status: "active",
              lastError: null,
              resumeFailCount: 0,
            }),
          ),
        ),
      recreate: (threadId) =>
        Effect.sync(() => {
          state.delete(threadId)
        }),
    }),
  )
  const dedupLayer = Layer.succeed(IngressDedup, IngressDedup.of(props.dedup))
  return Conversation.layer.pipe(
    Layer.provideMerge(inboxLayer),
    Layer.provideMerge(outboxLayer),
    Layer.provideMerge(dedupLayer),
    Layer.provideMerge(threadsLayer),
    Layer.provideMerge(threadChatLayer),
    Layer.provideMerge(makeRouterLayer(props.shouldRespond ?? true)),
    Layer.provideMerge(testConfigLayer),
  )
}
// Dedup semantics: the same Discord message may be delivered twice (real-time
// event racing the startup catch-up fetch); only one turn may reach the agent.
describe("Conversation duplicate processing", () => {
  effectTest("same message_id queued twice via run is only sent once", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const dedup = makeTrackingDedup()
    const event = makeEvent("hello")
    const live = makeConversationLayerWithDedup({
      // Feed the same event twice to simulate catch-up + real-time race
      events: [event, event],
      tracked: Option.none(),
      resolves: [makeSession("s1")],
      send: (_session, text) => Effect.succeed(`echo:${text}`),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
      dedup: dedup.service,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.run
      // The dedup service should be consulted for both deliveries of m1
      expect(dedup.dedupCalls).toEqual(["m1", "m1"])
      // The agent should have received the prompt only once
      expect(prompts).toEqual(["hello"])
      // Only one typing + one send
      expect(actions.filter((x) => x.kind === "send").length).toBe(1)
    }).pipe(Effect.provide(live))
  })
  // Uses makeConversationLayer (IngressDedup.noop) — which, despite the name,
  // still deduplicates by message id within a session.
  effectTest("noop dedup service deduplicates", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const event = makeEvent("hello")
    // The noop dedup service tracks seen message_ids
    const live = makeConversationLayer({
      events: [event, event],
      tracked: Option.none(),
      resolves: [makeSession("s1")],
      send: (_session, text) => Effect.succeed(`echo:${text}`),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.run
      // Same event fed twice but only processed once
      expect(prompts).toEqual(["hello"])
      expect(actions.filter((x) => x.kind === "send").length).toBe(1)
    }).pipe(Effect.provide(live))
  })
  // Dedup also guards direct `turn` calls, not just the `run` stream.
  effectTest("turn called twice with same event only processes once with tracking dedup service", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const dedup = makeTrackingDedup()
    const event = makeEvent("help me")
    const live = makeConversationLayerWithDedup({
      events: [],
      tracked: Option.none(),
      resolves: [makeSession("s1")],
      send: (_session, text) => Effect.succeed(`echo:${text}`),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
      dedup: dedup.service,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      // Call turn twice with the same event (simulating race between real-time and catch-up)
      yield* conversation.turn(event)
      yield* conversation.turn(event)
      expect(prompts).toEqual(["help me"])
      expect(actions.filter((x) => x.kind === "send").length).toBe(1)
    }).pipe(Effect.provide(live))
  })
  // Distinct message ids on one thread must both survive dedup and be
  // processed in order — dedup keys on message id, not thread id.
  effectTest("two different messages on same thread are processed sequentially (not lost)", () => {
    const actions: Array<Action> = []
    const prompts: Array<string> = []
    const dedup = makeTrackingDedup()
    const event1 = makeThreadEvent({ threadId: "t1", channelId: "c1", messageId: "m1", content: "first" })
    const event2 = makeThreadEvent({ threadId: "t1", channelId: "c1", messageId: "m2", content: "second" })
    const live = makeConversationLayerWithDedup({
      events: [event1, event2],
      tracked: Option.none(),
      resolves: [makeSession("s1")],
      send: (_session, text) => Effect.succeed(`echo:${text}`),
      rehydrate: (_threadId, latest) => Effect.succeed(`rehydrated:${latest}`),
      actions,
      prompts,
      dedup: dedup.service,
    })
    return Effect.gen(function* () {
      const conversation = yield* Conversation
      yield* conversation.run
      // Both messages should be processed (different message_ids)
      expect(prompts).toEqual(["first", "second"])
      expect(actions.filter((x) => x.kind === "send").length).toBe(2)
    }).pipe(Effect.provide(live))
  })
})

View File

@@ -0,0 +1,270 @@
import { Context, Effect, Layer, Option, Predicate, Schedule, Stream } from "effect"
import { ThreadChatCluster, ThreadChatError } from "./thread/cluster"
import { AppConfig } from "../config"
import { TurnRouter } from "../discord/turn-routing"
import type { HealthCheckError, OpenCodeClientError, SandboxDeadError, SandboxStartError } from "../errors"
import { ActorMap } from "../lib/actors/keyed"
import type { ChannelId, SessionInfo, ThreadId } from "../types"
import { type ConversationError, messageOf, RoutingError, SandboxSendError } from "./model/errors"
import { Send, type Inbound } from "./model/schema"
import { Inbox } from "./inbox"
import { IngressDedup } from "./dedup"
import { Outbox } from "./outbox"
import { Threads } from "./threads"
export declare namespace Conversation {
  export interface Service {
    /** Processes a single inbound event end-to-end (dedup + routed turn). */
    readonly turn: (event: Inbound) => Effect.Effect<void, ConversationError>
    /** Long-running loop that drains the Inbox stream until it ends. */
    readonly run: Effect.Effect<void>
  }
}
/**
 * Conversation orchestrator: decides whether the bot should respond to an
 * inbound Discord event, resolves the target thread, handles in-thread
 * commands, forwards the message to the thread's sandbox via ThreadChatCluster,
 * and publishes the reply through the Outbox. Turns are deduplicated by
 * message id and serialized per thread/channel via a keyed ActorMap.
 */
export class Conversation extends Context.Tag("@discord/conversation/Conversation")<
  Conversation,
  Conversation.Service
>() {
  static readonly layer = Layer.scoped(
    Conversation,
    Effect.gen(function* () {
      const inbox = yield* Inbox
      const outbox = yield* Outbox
      const threads = yield* Threads
      const dedup = yield* IngressDedup
      const config = yield* AppConfig
      const threadChat = yield* ThreadChatCluster
      const router = yield* TurnRouter
      const actors = yield* ActorMap.make<string>()
      // User-facing text published when a turn fails non-retriably.
      const RETRY_MESSAGE = "Something went wrong. Please try again in a moment."
      // Exponential backoff starting at 500ms, at most 2 retries, and only
      // while the failure is marked retriable (used by `run`, not by `turn`).
      const turnRetry = Schedule.exponential("500 millis").pipe(
        Schedule.intersect(Schedule.recurs(2)),
        Schedule.whileInput((error: ConversationError) => error.retriable),
      )
      // Infra-level failures that justify a retry of the whole turn.
      type RetriableCause = SandboxDeadError | OpenCodeClientError | HealthCheckError | SandboxStartError
      const isRetriableCause = (cause: unknown): cause is RetriableCause =>
        Predicate.isTagged(cause, "SandboxDeadError") ||
        Predicate.isTagged(cause, "OpenCodeClientError") ||
        Predicate.isTagged(cause, "HealthCheckError") ||
        Predicate.isTagged(cause, "SandboxStartError")
      // Normalizes any failure from the cluster into a SandboxSendError,
      // preserving the retriable flag carried by ThreadChatError.
      const asSendError =
        (threadId: ThreadId) =>
        (cause: unknown): SandboxSendError => {
          if (cause instanceof ThreadChatError) {
            return SandboxSendError.make({
              threadId,
              message: messageOf(cause.cause),
              retriable: cause.retriable,
            })
          }
          return SandboxSendError.make({
            threadId,
            message: messageOf(cause),
            retriable: isRetriableCause(cause),
          })
        }
      // Shorthand for publishing a plain text Send action to a thread.
      const publishText = (threadId: ThreadId, text: string) =>
        outbox.publish(
          Send.make({
            kind: "send",
            threadId,
            text,
          }),
        )
      // Renders the `!status` reply as Discord markdown; optional lines
      // (resume failures, last error) are dropped when not applicable.
      const renderStatus = (session: SessionInfo) => {
        const model = config.openCodeModel.replace("opencode/", "")
        return [
          `**Status:** ${session.status}`,
          `**Model:** \`${model}\``,
          `**Sandbox:** \`${session.sandboxId}\``,
          `**Session:** \`${session.sessionId}\``,
          session.resumeFailCount > 0 ? `**Resume failures:** ${session.resumeFailCount}` : null,
          session.lastError ? `**Last error:** ${session.lastError.slice(0, 200)}` : null,
        ]
          .filter((line): line is string => line !== null)
          .join("\n")
      }
      // Retriable errors propagate untouched (the outer retry schedule will
      // re-run the turn); non-retriable ones notify the user first — the
      // notification itself is best-effort — then propagate.
      const reportFailure = (threadId: ThreadId) => (error: ConversationError) => {
        if (error.retriable) return Effect.fail(error)
        return publishText(threadId, RETRY_MESSAGE).pipe(
          Effect.catchAll(() => Effect.void),
          Effect.zipRight(Effect.fail(error)),
        )
      }
      // Decides whether the bot should respond: ignore bots, @everyone, and
      // empty content; channel messages require a direct mention; thread
      // messages respond on mention, or — when the bot owns the thread's
      // session — defer to the TurnRouter's shouldRespond decision.
      const route = Effect.fn("Conversation.route")(function* (event: Inbound) {
        if (event.authorIsBot) return false
        if (event.mentionsEveryone) return false
        if (!event.content.trim()) return false
        const mentioned =
          event.mentions.userIds.includes(event.botUserId) ||
          (event.botRoleId.length > 0 && event.mentions.roleIds.includes(event.botRoleId))
        if (event.kind === "channel_message") return mentioned
        if (mentioned) return true
        // "Owned" = the cluster has a tracked session for this thread.
        const owned = yield* threadChat
          .status(event.threadId)
          .pipe(
            Effect.map((session) => Option.isSome(session)),
            Effect.mapError(asSendError(event.threadId)),
          )
        if (!owned) return false
        const decision = yield* router
          .shouldRespond({
            content: event.content,
            botUserId: event.botUserId,
            botRoleId: event.botRoleId,
            mentionedUserIds: event.mentions.userIds,
            mentionedRoleIds: event.mentions.roleIds,
          })
          .pipe(
            Effect.mapError((cause) =>
              RoutingError.make({
                message: messageOf(cause),
                retriable: false,
              }),
            ),
          )
        return decision.shouldRespond
      })
      // Thread messages already target a thread; channel messages get a
      // router-generated thread name and a thread ensured via Threads.
      const resolve = Effect.fn("Conversation.resolve")(function* (event: Inbound) {
        if (event.kind === "thread_message") {
          return { threadId: event.threadId, channelId: event.channelId }
        }
        const name = yield* router.generateThreadName(event.content)
        return yield* threads.ensure(event, name)
      })
      // `!status`: show the tracked session, or a placeholder when none exists.
      const commandStatus = (threadId: ThreadId) =>
        Effect.gen(function* () {
          const tracked = yield* threadChat
            .status(threadId)
            .pipe(Effect.catchAll(() => Effect.succeed(Option.none())))
          if (Option.isNone(tracked)) {
            yield* publishText(threadId, "*No active session for this thread.*")
            return
          }
          yield* publishText(threadId, renderStatus(tracked.value))
        })
      // `!reset` / `!recreate`: best-effort recreate, then confirm to the user.
      const commandRecreate = (threadId: ThreadId) =>
        threadChat.recreate(threadId).pipe(
          Effect.catchAll(() => Effect.void),
          Effect.zipRight(
            publishText(threadId, "*☠️ Session recreated. Next message will provision a fresh sandbox.*"),
          ),
        )
      // In-thread text commands, matched against the trimmed/lowercased body.
      const commands = {
        "!status": commandStatus,
        "!reset": commandRecreate,
        "!recreate": commandRecreate,
      } as const
      type CommandName = keyof typeof commands
      const isCommand = (text: string): text is CommandName => text in commands
      // Returns true when the event was consumed as a command.
      const command = (event: Inbound, target: { threadId: ThreadId; channelId: ChannelId }) =>
        Effect.gen(function* () {
          const text = event.content.trim().toLowerCase()
          if (!isCommand(text)) return false
          yield* commands[text](target.threadId)
          return true
        })
      // One full turn, without dedup or per-key serialization:
      // route → resolve target → commands → send to sandbox → publish reply.
      const turnRaw = Effect.fn("Conversation.turnRaw")(function* (event: Inbound) {
        if (!(yield* route(event))) return
        const target = yield* resolve(event)
        if (yield* command(event, target)) return
        yield* Effect.logInfo("User message").pipe(
          Effect.annotateLogs({
            event: "conversation.user.message",
            thread_id: target.threadId,
            author_id: event.authorId,
            content: event.content.slice(0, 200),
          }),
        )
        // The typing indicator wraps the whole sandbox round-trip.
        yield* outbox
          .withTyping(
            target.threadId,
            Effect.gen(function* () {
              const reply = yield* threadChat.send({
                threadId: target.threadId,
                channelId: target.channelId,
                guildId: event.guildId,
                messageId: event.messageId,
                text: event.content,
              }).pipe(
                Effect.map((out) => out.text),
                Effect.mapError(asSendError(target.threadId)),
              )
              yield* Effect.logInfo("Bot reply").pipe(
                Effect.annotateLogs({
                  event: "conversation.bot.reply",
                  thread_id: target.threadId,
                  content: reply.slice(0, 200),
                }),
              )
              yield* publishText(target.threadId, reply)
            }),
          )
          .pipe(Effect.catchAll(reportFailure(target.threadId)))
      })
      // Serialization key: one actor per thread (or per channel for
      // channel messages), so turns on the same key run in order.
      const keyOf = (event: Inbound) =>
        event.kind === "thread_message" ? `thread:${event.threadId}` : `channel:${event.channelId}`
      // touch: false — presumably avoids refreshing the actor's idle timer
      // for queued work; NOTE(review): confirm against ActorMap semantics.
      const processEvent = (event: Inbound) => actors.run(keyOf(event), turnRaw(event), { touch: false })
      // Dedup gate shared by `turn`: only fresh message ids reach the actor.
      const processFresh = (event: Inbound) =>
        dedup.dedup(event.messageId).pipe(
          Effect.flatMap((fresh) => {
            if (!fresh) return Effect.void
            return processEvent(event)
          }),
        )
      const turn = Effect.fn("Conversation.turn")(function* (event: Inbound) {
        yield* processFresh(event)
      })
      // Drain the inbox with unbounded, unordered concurrency — ordering per
      // thread is still guaranteed by the keyed actors. Retriable failures are
      // retried per `turnRetry`; terminal failures are logged, never rethrown,
      // so one bad turn cannot stop the loop.
      const run = inbox.events.pipe(
        Stream.mapEffect(
          (event) =>
            dedup.dedup(event.messageId).pipe(
              Effect.flatMap((fresh) => {
                if (!fresh) return Effect.void
                return processEvent(event).pipe(
                  Effect.retry(turnRetry),
                  Effect.catchAll((error) =>
                    Effect.logError("Conversation turn failed").pipe(
                      Effect.annotateLogs({
                        event: "conversation.turn.failed",
                        tag: error._tag,
                        retriable: error.retriable,
                        message: error.message,
                      }),
                    ),
                  ),
                )
              }),
            ),
          { concurrency: "unbounded", unordered: true },
        ),
        Stream.runDrain,
      )
      return Conversation.of({ turn, run })
    }),
  )
}

View File

@@ -0,0 +1,25 @@
import { describe, expect } from "bun:test"
import { Effect } from "effect"
import { effectTest } from "../test/effect"
import { IngressDedup } from "./dedup"
describe("IngressDedup", () => {
  // A fresh message id reports true exactly once, then false on repeats.
  effectTest("dedup returns true first time, false second time", () =>
    Effect.gen(function* () {
      const service = yield* IngressDedup
      expect(yield* service.dedup("m1")).toBe(true)
      expect(yield* service.dedup("m1")).toBe(false)
      expect(yield* service.dedup("m2")).toBe(true)
      expect(yield* service.dedup("m2")).toBe(false)
    }).pipe(Effect.provide(IngressDedup.noop)),
  )
  // `layer` is the same bounded in-memory dedup as `noop`.
  effectTest("layer mode behaves as memory dedup", () =>
    Effect.gen(function* () {
      const service = yield* IngressDedup
      expect(yield* service.dedup("m1")).toBe(true)
      expect(yield* service.dedup("m1")).toBe(false)
      expect(yield* service.dedup("m2")).toBe(true)
    }).pipe(Effect.provide(IngressDedup.layer)),
  )
})

View File

@@ -0,0 +1,35 @@
import { Context, Effect, Layer } from "effect"
/** Maximum number of message ids retained for duplicate detection. */
const DEDUP_LIMIT = 4_000
/**
 * Creates a bounded "first seen?" checker over message ids.
 *
 * The returned function reports `true` the first time an id is observed and
 * `false` on any repeat. Once more than DEDUP_LIMIT ids are tracked, the
 * oldest id is evicted, so a long-evicted id can read as fresh again —
 * acceptable for session-scoped dedup where Discord is the durable inbox.
 *
 * JS Sets iterate in insertion order, so the oldest entry is reachable in
 * O(1) via the set's own iterator; this replaces the previous parallel
 * `order` array whose `shift()` cost O(n) per eviction and duplicated state.
 */
const makeDedupSet = () => {
  const seen = new Set<string>()
  return (messageId: string): boolean => {
    if (seen.has(messageId)) return false
    seen.add(messageId)
    if (seen.size > DEDUP_LIMIT) {
      // Evict the oldest insertion in O(1).
      const oldest = seen.values().next().value
      if (oldest !== undefined) seen.delete(oldest)
    }
    return true
  }
}
export declare namespace IngressDedup {
  export interface Service {
    /** Returns true if `messageId` has not been seen before (i.e. process it). */
    readonly dedup: (messageId: string) => Effect.Effect<boolean>
  }
}
/**
 * In-session message-id dedup service.
 *
 * NOTE(review): despite the name, `noop` is a real bounded in-memory dedup
 * (backed by makeDedupSet), and `layer` is just an alias for it. Layer.sync
 * ensures each layer build gets its own fresh dedup set.
 */
export class IngressDedup extends Context.Tag("@discord/conversation/IngressDedup")<IngressDedup, IngressDedup.Service>() {
  static readonly noop = Layer.sync(IngressDedup, () => {
    const check = makeDedupSet()
    return IngressDedup.of({
      dedup: (messageId) => Effect.sync(() => check(messageId)),
    })
  })
  static readonly layer = IngressDedup.noop
}

View File

@@ -0,0 +1,18 @@
import { Context, Effect, Layer } from "effect"
import type { ThreadId } from "../types"
import type { HistoryError } from "./model/errors"
export declare namespace History {
  export interface Service {
    // Transforms the latest user message before it is sent to the sandbox.
    // NOTE(review): the passthrough returns `latest` unchanged; richer
    // implementations presumably fold in prior thread context — confirm.
    readonly rehydrate: (threadId: ThreadId, latest: string) => Effect.Effect<string, HistoryError>
  }
}
/** Port for message rehydration; `passthrough` returns the input unchanged. */
export class History extends Context.Tag("@discord/conversation/History")<History, History.Service>() {
  static readonly passthrough = Layer.succeed(
    History,
    History.of({
      rehydrate: (_threadId: ThreadId, latest: string) => Effect.succeed(latest),
    }),
  )
}

View File

@@ -0,0 +1,17 @@
import { Context, Layer, Stream } from "effect"
import type { Inbound } from "./model/schema"
export declare namespace Inbox {
  export interface Service {
    /** Stream of inbound Discord events to process. */
    readonly events: Stream.Stream<Inbound>
  }
}
/** Port for the inbound event source; `empty` yields no events (tests). */
export class Inbox extends Context.Tag("@discord/conversation/Inbox")<Inbox, Inbox.Service>() {
  static readonly empty = Layer.succeed(
    Inbox,
    Inbox.of({
      // NOTE(review): Stream.empty is Stream<never>; the cast widens the
      // element type — likely unnecessary if Stream is covariant; confirm.
      events: Stream.empty as Stream.Stream<Inbound>,
    }),
  )
}

View File

@@ -0,0 +1,7 @@
// Public surface of the conversation module: orchestrator, ports
// (History/Inbox/Outbox/Threads), ingress dedup, and offset persistence.
export { Conversation } from "./conversation"
export { History } from "./history"
export { Inbox } from "./inbox"
export { IngressDedup } from "./dedup"
export { OffsetStore } from "./offsets"
export { Outbox } from "./outbox"
export { Threads } from "./threads"

View File

@@ -0,0 +1,88 @@
import { Schema } from "effect"
import { ChannelId, ThreadId } from "../../types"
// Which Outbox operation failed (used by DeliveryError).
const DeliveryAction = Schema.Literal("typing", "send", "reply")
/** Failure creating/resolving the Discord thread for a channel message. */
export class ThreadEnsureError extends Schema.TaggedError<ThreadEnsureError>()(
  "ThreadEnsureError",
  {
    channelId: ChannelId,
    message: Schema.String,
    retriable: Schema.Boolean,
  },
) {}
/** Failure while rehydrating a message with thread history. */
export class HistoryError extends Schema.TaggedError<HistoryError>()(
  "HistoryError",
  {
    threadId: ThreadId,
    message: Schema.String,
    retriable: Schema.Boolean,
  },
) {}
/** Failure publishing to Discord (typing indicator, send, or reply). */
export class DeliveryError extends Schema.TaggedError<DeliveryError>()(
  "DeliveryError",
  {
    threadId: ThreadId,
    action: DeliveryAction,
    message: Schema.String,
    retriable: Schema.Boolean,
  },
) {}
/** Failure in the should-respond routing decision. */
export class RoutingError extends Schema.TaggedError<RoutingError>()(
  "RoutingError",
  {
    message: Schema.String,
    retriable: Schema.Boolean,
  },
) {}
/** Failure forwarding a message to the thread's sandbox. */
export class SandboxSendError extends Schema.TaggedError<SandboxSendError>()(
  "SandboxSendError",
  {
    threadId: ThreadId,
    message: Schema.String,
    retriable: Schema.Boolean,
  },
) {}
// Closed union of all conversation-level failures; every member carries a
// `retriable` flag consumed by the retry schedule in Conversation.
export const ConversationError = Schema.Union(
  ThreadEnsureError,
  HistoryError,
  DeliveryError,
  RoutingError,
  SandboxSendError,
)
export type ConversationError = typeof ConversationError.Type
/** Maximum number of nested `cause` links followed before falling back to String(). */
const MAX_CAUSE_DEPTH = 4
/** Joins an outer message with its nested cause, skipping empty or duplicate text. */
const combine = (outer: string, inner: string): string =>
  inner.length === 0 || inner === outer ? outer : `${outer}: ${inner}`
// Walks a value's `cause` chain, building a "outer: inner: ..." message.
// Handles strings, Error instances, and plain objects carrying
// message/cause fields; anything else is stringified.
const messageFrom = (value: unknown, depth: number): string => {
  if (depth > MAX_CAUSE_DEPTH) return String(value)
  if (typeof value === "string") return value
  if (value instanceof Error) {
    const nested = Reflect.get(value, "cause")
    if (nested === undefined) return value.message
    return combine(value.message, messageFrom(nested, depth + 1))
  }
  if (typeof value === "object" && value !== null) {
    const message = Reflect.get(value, "message")
    const nested = Reflect.get(value, "cause")
    if (typeof message === "string") {
      if (nested === undefined) return message
      return combine(message, messageFrom(nested, depth + 1))
    }
    if (nested !== undefined) return messageFrom(nested, depth + 1)
  }
  return String(value)
}
/** Produces a human-readable message for an arbitrary thrown/failed value. */
export const messageOf = (cause: unknown): string => messageFrom(cause, 0)

View File

@@ -0,0 +1,73 @@
import { Schema } from "effect"
import { ThreadId, ChannelId, GuildId } from "../../types"
/** User and role ids mentioned in a Discord message. */
export class Mention extends Schema.Class<Mention>("Mention")({
  userIds: Schema.Array(Schema.String),
  roleIds: Schema.Array(Schema.String),
}) {}
/** Inbound message posted inside an existing thread. */
export class ThreadMessage extends Schema.Class<ThreadMessage>("ThreadMessage")({
  kind: Schema.Literal("thread_message"),
  threadId: ThreadId,
  channelId: ChannelId,
  messageId: Schema.String,
  guildId: GuildId,
  botUserId: Schema.String,
  botRoleId: Schema.String,
  authorId: Schema.String,
  authorIsBot: Schema.Boolean,
  mentionsEveryone: Schema.Boolean,
  mentions: Mention,
  content: Schema.String,
}) {}
/** Inbound message posted in a top-level channel (no thread yet). */
export class ChannelMessage extends Schema.Class<ChannelMessage>("ChannelMessage")({
  kind: Schema.Literal("channel_message"),
  channelId: ChannelId,
  messageId: Schema.String,
  guildId: GuildId,
  botUserId: Schema.String,
  botRoleId: Schema.String,
  authorId: Schema.String,
  authorIsBot: Schema.Boolean,
  mentionsEveryone: Schema.Boolean,
  mentions: Mention,
  content: Schema.String,
}) {}
// Discriminated union over `kind` of everything the Inbox can emit.
export const Inbound = Schema.Union(
  ThreadMessage,
  ChannelMessage,
)
export type Inbound = typeof Inbound.Type
/** Resolved target of a turn: the thread and its parent channel. */
export class ThreadRef extends Schema.Class<ThreadRef>("ThreadRef")({
  threadId: ThreadId,
  channelId: ChannelId,
}) {}
/** Outbound action: post `text` to the thread. */
export class Send extends Schema.Class<Send>("Send")({
  kind: Schema.Literal("send"),
  threadId: ThreadId,
  text: Schema.String,
}) {}
/** Outbound action: post `text` as a reply in the thread. */
export class Reply extends Schema.Class<Reply>("Reply")({
  kind: Schema.Literal("reply"),
  threadId: ThreadId,
  text: Schema.String,
}) {}
/** Outbound action: show the typing indicator in the thread. */
export class Typing extends Schema.Class<Typing>("Typing")({
  kind: Schema.Literal("typing"),
  threadId: ThreadId,
}) {}
// Discriminated union over `kind` of everything the Outbox can publish.
export const Action = Schema.Union(
  Send,
  Reply,
  Typing,
)
export type Action = typeof Action.Type

View File

@@ -0,0 +1,74 @@
import { describe, expect } from "bun:test"
import { Duration, Effect, Layer, Option, Redacted } from "effect"
import { AppConfig } from "../config"
import { SqliteDb } from "../db/client"
import { effectTest, withTempSqliteFile } from "../test/effect"
import { OffsetStore } from "./offsets"
// Minimal AppConfig for OffsetStore tests: only `databasePath` matters here;
// every other field is a placeholder satisfying the config shape. The `as`
// casts coerce plain numbers into AppConfig's branded numeric types.
const makeConfig = (databasePath: string) =>
  AppConfig.of({
    discordToken: Redacted.make("token"),
    allowedChannelIds: [],
    discordCategoryId: "",
    discordRoleId: "",
    discordRequiredRoleId: "",
    discordCommandGuildId: "",
    databasePath,
    daytonaApiKey: Redacted.make("daytona"),
    daytonaSnapshot: "",
    openCodeZenApiKey: Redacted.make("zen"),
    githubToken: "",
    logLevel: "info",
    healthHost: "127.0.0.1",
    healthPort: 8787,
    turnRoutingMode: "off",
    turnRoutingModel: "claude-haiku-4-5",
    sandboxReusePolicy: "resume_preferred",
    sandboxTimeout: Duration.minutes(30),
    cleanupInterval: Duration.minutes(5),
    staleActiveGraceMinutes: 5 as AppConfig.Service["staleActiveGraceMinutes"],
    pausedTtlMinutes: 180 as AppConfig.Service["pausedTtlMinutes"],
    activeHealthCheckTimeoutMs: 15000 as AppConfig.Service["activeHealthCheckTimeoutMs"],
    startupHealthTimeoutMs: 120000 as AppConfig.Service["startupHealthTimeoutMs"],
    resumeHealthTimeoutMs: 120000 as AppConfig.Service["resumeHealthTimeoutMs"],
    sandboxCreationTimeout: 180 as AppConfig.Service["sandboxCreationTimeout"],
    openCodeModel: "opencode/claude-sonnet-4-5",
  })
// Runs `run` against an OffsetStore backed by a throwaway SQLite file,
// wiring config → sqlite → store layers for the duration of the test.
const withOffsets = <A, E, R>(
  run: (offsets: OffsetStore.Service) => Effect.Effect<A, E, R>,
) =>
  withTempSqliteFile(
    (databasePath) =>
      Effect.gen(function* () {
        const configLayer = Layer.succeed(AppConfig, makeConfig(databasePath))
        const sqliteLayer = SqliteDb.layer.pipe(Layer.provide(configLayer))
        const storeLayer = OffsetStore.layer.pipe(
          Layer.provide(Layer.merge(sqliteLayer, configLayer)),
        )
        return yield* OffsetStore.pipe(
          Effect.flatMap(run),
          Effect.provide(storeLayer),
        )
      }),
    "discord-offsets-",
  )
describe("OffsetStore", () => {
  effectTest("stores and updates source offsets", () =>
    withOffsets((store) =>
      Effect.gen(function* () {
        // Unknown source starts with no offset.
        expect(Option.isNone(yield* store.getOffset("thread:t1"))).toBe(true)
        // First write persists; a second write overwrites in place.
        yield* store.setOffset("thread:t1", "m1")
        expect(yield* store.getOffset("thread:t1")).toEqual(Option.some("m1"))
        yield* store.setOffset("thread:t1", "m9")
        expect(yield* store.getOffset("thread:t1")).toEqual(Option.some("m9"))
      }),
    ),
  )
  effectTest("noop offsets always return none", () =>
    Effect.gen(function* () {
      const store = yield* OffsetStore
      expect(Option.isNone(yield* store.getOffset("thread:t1"))).toBe(true)
      // Writes are silently discarded by the noop implementation.
      yield* store.setOffset("thread:t1", "m1")
      expect(Option.isNone(yield* store.getOffset("thread:t1"))).toBe(true)
    }).pipe(Effect.provide(OffsetStore.noop)),
  )
})

View File

@@ -0,0 +1,57 @@
import * as Client from "@effect/sql/SqlClient"
import { Context, Effect, Layer, Option } from "effect"
import { SqliteDb } from "../db/client"
import { initializeSchema } from "../db/init"
import { DatabaseError } from "../errors"
// Maps any SQL-level failure into the domain DatabaseError.
const db = <A, E, R>(effect: Effect.Effect<A, E, R>) =>
  effect.pipe(Effect.mapError((cause) => new DatabaseError({ cause })))
export declare namespace OffsetStore {
  export interface Service {
    // Last processed message id for a source, or none if never recorded.
    // NOTE(review): source_id keys look like "thread:<id>" — confirm with callers.
    readonly getOffset: (source_id: string) => Effect.Effect<Option.Option<string>, DatabaseError>
    /** Upserts the last processed message id for a source. */
    readonly setOffset: (source_id: string, messageId: string) => Effect.Effect<void, DatabaseError>
  }
}
/**
 * Durable per-source resume offsets backed by the `conversation_offsets`
 * SQLite table. `noop` never persists (always returns none); `layer`
 * initializes the schema on construction, then reads/upserts rows.
 */
export class OffsetStore extends Context.Tag("@discord/conversation/OffsetStore")<OffsetStore, OffsetStore.Service>() {
  static readonly noop = Layer.succeed(
    OffsetStore,
    OffsetStore.of({
      getOffset: () => Effect.succeed(Option.none()),
      setOffset: () => Effect.void,
    }),
  )
  static readonly layer = Layer.effect(
    OffsetStore,
    Effect.gen(function* () {
      const sql = yield* SqliteDb
      // Ensure tables exist before serving queries.
      yield* db(initializeSchema.pipe(Effect.provideService(Client.SqlClient, sql)))
      const getOffset = Effect.fn("OffsetStore.getOffset")(function* (source_id: string) {
        const rows = yield* db(
          sql<{ last_message_id: string }>`SELECT last_message_id
            FROM conversation_offsets
            WHERE source_id = ${source_id}
            LIMIT 1`,
        )
        const row = rows[0]
        if (!row) return Option.none<string>()
        return Option.some(row.last_message_id)
      })
      const setOffset = Effect.fn("OffsetStore.setOffset")(function* (source_id: string, messageId: string) {
        // Upsert keyed on source_id; updated_at tracks the latest write.
        yield* db(
          sql`INSERT INTO conversation_offsets (source_id, last_message_id, updated_at)
            VALUES (${source_id}, ${messageId}, CURRENT_TIMESTAMP)
            ON CONFLICT(source_id) DO UPDATE SET
              last_message_id = excluded.last_message_id,
              updated_at = CURRENT_TIMESTAMP`,
        )
      })
      return OffsetStore.of({ getOffset, setOffset })
    }),
  )
}

View File

@@ -0,0 +1,21 @@
import { Context, Effect, Layer } from "effect"
import type { ThreadId } from "../types"
import type { DeliveryError } from "./model/errors"
import type { Action } from "./model/schema"
export declare namespace Outbox {
  export interface Service {
    /** Delivers an outbound action (send/reply/typing) to Discord. */
    readonly publish: (action: Action) => Effect.Effect<void, DeliveryError>
    /** Runs `self` while a typing indicator is shown for the thread. */
    readonly withTyping: <A, E, R>(threadId: ThreadId, self: Effect.Effect<A, E, R>) => Effect.Effect<A, E | DeliveryError, R>
  }
}
/** Port for outbound delivery; `noop` drops actions and runs effects bare. */
export class Outbox extends Context.Tag("@discord/conversation/Outbox")<Outbox, Outbox.Service>() {
  static readonly noop = Layer.succeed(
    Outbox,
    Outbox.of({
      publish: () => Effect.void,
      withTyping: (_threadId, self) => self,
    }),
  )
}

View File

@@ -0,0 +1,61 @@
import { Effect, Layer, Option } from "effect"
import { DatabaseError } from "../../../errors"
import type { ThreadId } from "../../../types"
import { SendInput, ThreadChatCluster, ThreadChatError } from "./contracts"
import { ThreadEntity } from "./entity"
// Normalizes an arbitrary failure into a ThreadChatError for `threadId`;
// already-typed errors pass through, everything else is wrapped non-retriable.
const asThreadError = (threadId: ThreadId, cause: unknown): ThreadChatError =>
  cause instanceof ThreadChatError
    ? cause
    : ThreadChatError.make({ threadId, cause, retriable: false })
// Wraps unknown failures as DatabaseError, passing existing ones through.
const asDatabaseError = (cause: unknown): DatabaseError =>
  cause instanceof DatabaseError ? cause : DatabaseError.make({ cause })
/**
 * ThreadChatCluster backed by per-thread entities: each call addresses the
 * entity keyed by threadId and normalizes RPC failures into domain errors.
 */
export const ThreadChatClusterLive = Layer.effect(
  ThreadChatCluster,
  Effect.gen(function* () {
    const make = yield* ThreadEntity.client
    // Forward a user message to the thread's entity and unwrap its reply.
    const send = Effect.fn("ThreadChatCluster.send")(function* (input) {
      const rpc = make(input.threadId)
      const out = yield* rpc
        .send(
          SendInput.make({
            channelId: input.channelId,
            guildId: input.guildId,
            messageId: input.messageId,
            text: input.text,
          }),
        )
        .pipe(
          Effect.mapError((cause) => asThreadError(input.threadId, cause)),
        )
      return {
        text: out.text,
        session: out.session,
        changedSession: out.changedSession,
      }
    })
    // Null from the entity means "no session row" — surfaced as Option.none.
    const status = Effect.fn("ThreadChatCluster.status")(function* (threadId) {
      const rpc = make(threadId)
      const row = yield* rpc.status(undefined).pipe(Effect.mapError(asDatabaseError))
      if (row === null) return Option.none()
      return Option.some(row)
    })
    const recreate = Effect.fn("ThreadChatCluster.recreate")(function* (threadId) {
      const rpc = make(threadId)
      yield* rpc.recreate(undefined).pipe(Effect.mapError(asDatabaseError))
    })
    return ThreadChatCluster.of({ send, status, recreate })
  }),
)

View File

@@ -0,0 +1,97 @@
import { Context, Effect, Option, PrimaryKey, Schema } from "effect"
import { DatabaseError } from "../../../errors"
import { ChannelId, GuildId, SandboxId, SessionInfo, ThreadId } from "../../../types"
/** Failure from a thread entity; `retriable` guides the caller's retry policy. */
export class ThreadChatError extends Schema.TaggedError<ThreadChatError>()(
  "ThreadChatError",
  {
    threadId: ThreadId,
    cause: Schema.Defect,
    retriable: Schema.Boolean,
  },
) {}
/** Payload for the `send` RPC; keyed by messageId for idempotent persistence. */
export class SendInput extends Schema.Class<SendInput>("ClusterMode/SendInput")({
  channelId: ChannelId,
  guildId: GuildId,
  messageId: Schema.String,
  text: Schema.String,
}) {
  // PrimaryKey = messageId, so duplicate sends share one persisted request.
  [PrimaryKey.symbol]() {
    return this.messageId
  }
}
/** Result of a `send`: reply text plus the (possibly new) session. */
export class SendOutput extends Schema.Class<SendOutput>("ClusterMode/SendOutput")({
  text: Schema.String,
  session: SessionInfo,
  changedSession: Schema.Boolean,
}) {}
/** Payload for the `pause` RPC. */
export class PauseInput extends Schema.Class<PauseInput>("ClusterMode/PauseInput")({
  reason: Schema.String,
}) {}
/** Payload for the `resume` RPC; ids may be null when unknown. */
export class ResumeInput extends Schema.Class<ResumeInput>("ClusterMode/ResumeInput")({
  channelId: Schema.NullOr(ChannelId),
  guildId: Schema.NullOr(GuildId),
}) {}
/** Payload for the `logs` RPC; line count clamped to 1..500 by schema. */
export class LogsInput extends Schema.Class<LogsInput>("ClusterMode/LogsInput")({
  lines: Schema.Number.pipe(
    Schema.int(),
    Schema.between(1, 500),
  ),
}) {}
/** Result of the `logs` RPC. */
export class LogsOutput extends Schema.Class<LogsOutput>("ClusterMode/LogsOutput")({
  sandboxId: SandboxId,
  output: Schema.String,
}) {}
export declare namespace ThreadChatCluster {
  export interface Service {
    /** Sends a user message to the thread's entity and returns its reply. */
    readonly send: (input: {
      threadId: ThreadId
      channelId: ChannelId
      guildId: GuildId
      messageId: string
      text: string
    }) => Effect.Effect<{
      text: string
      session: SessionInfo
      changedSession: boolean
    }, ThreadChatError>
    /** Current session for the thread, or none if no row exists. */
    readonly status: (threadId: ThreadId) => Effect.Effect<Option.Option<SessionInfo>, DatabaseError>
    /** Drops the thread's session so the next message provisions fresh. */
    readonly recreate: (threadId: ThreadId) => Effect.Effect<void, DatabaseError>
  }
}
/** Chat-facing entry point to the per-thread entity cluster. */
export class ThreadChatCluster extends Context.Tag("@discord/conversation/thread/cluster/ThreadChatCluster")<
  ThreadChatCluster,
  ThreadChatCluster.Service
>() {}
export declare namespace ThreadControlCluster {
  export interface Service {
    /** All currently-active sessions from the store. */
    readonly active: Effect.Effect<ReadonlyArray<SessionInfo>, DatabaseError>
    /** Pauses a thread's sandbox; none when there is nothing to pause. */
    readonly pause: (input: {
      threadId: ThreadId
      reason: string
    }) => Effect.Effect<Option.Option<SessionInfo>, ThreadChatError>
    /** Resumes (or re-provisions) the thread's sandbox. */
    readonly resume: (input: {
      threadId: ThreadId
      channelId: ChannelId | null
      guildId: GuildId | null
    }) => Effect.Effect<SessionInfo, ThreadChatError>
    /** Tails sandbox logs; none when the thread has no sandbox. */
    readonly logs: (input: {
      threadId: ThreadId
      lines: number
    }) => Effect.Effect<Option.Option<{ sandboxId: SandboxId; output: string }>, ThreadChatError>
  }
}
/** Operational/administrative entry point to the per-thread entity cluster. */
export class ThreadControlCluster extends Context.Tag("@discord/conversation/thread/cluster/ThreadControlCluster")<
  ThreadControlCluster,
  ThreadControlCluster.Service
>() {}

View File

@@ -0,0 +1,65 @@
import { Effect, Layer, Option } from "effect"
import { DatabaseError } from "../../../errors"
import { SessionStore } from "../../../session/store"
import type { ThreadId } from "../../../types"
import { LogsInput, PauseInput, ResumeInput, ThreadChatError, ThreadControlCluster } from "./contracts"
import { ThreadEntity } from "./entity"
// Normalizes an arbitrary failure into a ThreadChatError for `threadId`;
// already-typed errors pass through, everything else is wrapped non-retriable.
const asThreadError = (threadId: ThreadId, cause: unknown): ThreadChatError =>
  cause instanceof ThreadChatError
    ? cause
    : ThreadChatError.make({ threadId, cause, retriable: false })
// Wraps unknown failures as DatabaseError, passing existing ones through.
const asDatabaseError = (cause: unknown): DatabaseError =>
  cause instanceof DatabaseError ? cause : DatabaseError.make({ cause })
/**
 * ThreadControlCluster backed by per-thread entities plus the SessionStore:
 * `active` reads the store directly; pause/resume/logs address the entity
 * keyed by threadId and normalize RPC failures into ThreadChatError.
 */
export const ThreadControlClusterLive = Layer.effect(
  ThreadControlCluster,
  Effect.gen(function* () {
    const make = yield* ThreadEntity.client
    const store = yield* SessionStore
    const active = store.listActive().pipe(Effect.mapError(asDatabaseError))
    // Null from the entity means "nothing to pause" — surfaced as Option.none.
    const pause = Effect.fn("ThreadControlCluster.pause")(function* (input) {
      const rpc = make(input.threadId)
      const row = yield* rpc.pause(PauseInput.make({ reason: input.reason })).pipe(
        Effect.mapError((cause) => asThreadError(input.threadId, cause)),
      )
      if (row === null) return Option.none()
      return Option.some(row)
    })
    const resume = Effect.fn("ThreadControlCluster.resume")(function* (input) {
      const rpc = make(input.threadId)
      return yield* rpc.resume(
        ResumeInput.make({
          channelId: input.channelId,
          guildId: input.guildId,
        }),
      ).pipe(
        Effect.mapError((cause) => asThreadError(input.threadId, cause)),
      )
    })
    // Null from the entity means "no sandbox for this thread".
    const logs = Effect.fn("ThreadControlCluster.logs")(function* (input) {
      const rpc = make(input.threadId)
      const row = yield* rpc.logs(LogsInput.make({ lines: input.lines })).pipe(
        Effect.mapError((cause) => asThreadError(input.threadId, cause)),
      )
      if (row === null) return Option.none()
      return Option.some({
        sandboxId: row.sandboxId,
        output: row.output,
      })
    })
    return ThreadControlCluster.of({ active, pause, resume, logs })
  }),
)

View File

@@ -0,0 +1,150 @@
import { describe, expect } from "bun:test"
import { TestRunner } from "@effect/cluster"
import { Effect, Layer, Option } from "effect"
import { History } from "../../history"
import { ThreadChatCluster, ThreadChatClusterLive, ThreadEntityLive } from "."
import { OpenCodeClient, OpenCodeSessionSummary } from "../../../sandbox/opencode/client"
import { SandboxProvisioner } from "../../../sandbox/provisioner"
import { SessionStore } from "../../../session/store"
import { ChannelId, GuildId, PreviewAccess, SandboxId, SessionId, SessionInfo, ThreadId } from "../../../types"
import { DaytonaService } from "../../../sandbox/daytona/service"
import { OpenCodeClientError } from "../../../errors"
import { SqliteDb } from "../../../db/client"
import { effectTest, testConfigLayer } from "../../../test/effect"
// Builds an active SessionInfo with fixed preview fields for test fixtures.
const makeSession = (input: { threadId: ThreadId; channelId: ChannelId; guildId: GuildId; sandboxId: string; sessionId: string }) =>
  SessionInfo.make({
    threadId: input.threadId,
    channelId: input.channelId,
    guildId: input.guildId,
    sandboxId: SandboxId.make(input.sandboxId),
    sessionId: SessionId.make(input.sessionId),
    previewUrl: "https://preview",
    previewToken: null,
    status: "active",
    lastError: null,
    resumeFailCount: 0,
  })
describe("ThreadEntity", () => {
  // Scenario: the first sendPrompt fails with a "sandbox-down" error. The
  // entity is expected to recover the failed session, re-ensure a fresh one,
  // rehydrate the prompt through History, and retry the send successfully.
  effectTest("retries after sandbox-down by re-ensuring session and rehydrating prompt", () => {
    const threadId = ThreadId.make("t-dead")
    const channelId = ChannelId.make("c-dead")
    const guildId = GuildId.make("g-dead")
    const first = makeSession({ threadId, channelId, guildId, sandboxId: "sb-1", sessionId: "s-1" })
    const second = makeSession({ threadId, channelId, guildId, sandboxId: "sb-2", sessionId: "s-2" })
    // Call recorders: each stub pushes what it was invoked with.
    const ensured: Array<string> = []
    const recovered: Array<string> = []
    const sent: Array<string> = []
    const rehydrated: Array<string> = []
    // History stub: records the prompt and returns a marked rewrite.
    const historyLayer = Layer.succeed(
      History,
      History.of({
        rehydrate: (_threadId, latest) =>
          Effect.sync(() => {
            rehydrated.push(latest)
            return `rehydrated:${latest}`
          }),
      }),
    )
    // Provisioner stub: first ensure yields session s-1, the next yields s-2;
    // recoverSendFailure marks the failed session as errored.
    const provisionerLayer = Layer.succeed(
      SandboxProvisioner,
      SandboxProvisioner.of({
        provision: () => Effect.dieMessage("unused"),
        resume: () => Effect.dieMessage("unused"),
        ensureActive: (input) =>
          Effect.sync(() => {
            ensured.push(Option.isSome(input.current) ? String(input.current.value.sessionId) : "none")
            return ensured.length === 1 ? first : second
          }),
        ensureHealthy: () => Effect.succeed(true),
        recoverSendFailure: (_threadId, session) =>
          Effect.sync(() => {
            recovered.push(String(session.sessionId))
            return SessionInfo.make({
              ...session,
              status: "error",
              lastError: "send-failed",
            })
          }),
        pause: (_threadId, session) => Effect.succeed(session),
        destroy: (_threadId, session) => Effect.succeed(session),
      }),
    )
    // OpenCode stub: first sendPrompt fails with a sandbox-down 502,
    // subsequent calls echo the prompt.
    const openCodeLayer = Layer.succeed(
      OpenCodeClient,
      OpenCodeClient.of({
        waitForHealthy: () => Effect.succeed(true),
        createSession: () => Effect.succeed(SessionId.make("unused")),
        sessionExists: () => Effect.succeed(true),
        listSessions: () => Effect.succeed([] as ReadonlyArray<OpenCodeSessionSummary>),
        sendPrompt: (_preview, _sessionId, text) =>
          Effect.gen(function* () {
            sent.push(text)
            if (sent.length === 1) {
              return yield* OpenCodeClientError.make({
                operation: "sendPrompt",
                statusCode: 502,
                body: "bad gateway",
                kind: "sandbox-down",
              })
            }
            return `ok:${text}`
          }),
        abortSession: () => Effect.void,
      }),
    )
    // Daytona stub: only destroy/getPreview are expected to be reachable.
    const daytonaLayer = Layer.succeed(
      DaytonaService,
      DaytonaService.of({
        create: () => Effect.dieMessage("unused"),
        exec: () => Effect.dieMessage("unused"),
        start: () => Effect.dieMessage("unused"),
        stop: () => Effect.dieMessage("unused"),
        destroy: () => Effect.void,
        getPreview: () =>
          Effect.succeed(
            PreviewAccess.make({
              previewUrl: "https://preview",
              previewToken: null,
            }),
          ),
      }),
    )
    // Full wiring: cluster client over the test runner, real SessionStore on
    // a real SqliteDb, and the stubs above for everything external.
    const live = ThreadChatClusterLive.pipe(
      Layer.provideMerge(ThreadEntityLive),
      Layer.provideMerge(TestRunner.layer),
      Layer.provideMerge(SessionStore.layer),
      Layer.provideMerge(SqliteDb.layer),
      Layer.provideMerge(testConfigLayer),
      Layer.provideMerge(daytonaLayer),
      Layer.provideMerge(openCodeLayer),
      Layer.provideMerge(provisionerLayer),
      Layer.provideMerge(historyLayer),
    )
    return Effect.gen(function* () {
      const threadChat = yield* ThreadChatCluster
      const out = yield* threadChat.send({
        threadId,
        channelId,
        guildId,
        messageId: "m-1",
        text: "hello",
      })
      // Ensured twice: once from scratch, once after s-1's send failed.
      expect(ensured).toEqual(["none", "s-1"])
      expect(recovered).toEqual(["s-1"])
      // The retried prompt went through History.rehydrate.
      expect(rehydrated).toEqual(["hello"])
      expect(sent).toEqual(["hello", "rehydrated:hello"])
      expect(out.text).toBe("ok:rehydrated:hello")
      expect(out.session.sessionId).toBe(SessionId.make("s-2"))
    }).pipe(Effect.provide(live))
  })
})

View File

@@ -0,0 +1,315 @@
import { ClusterSchema, Entity } from "@effect/cluster"
import { Rpc } from "@effect/rpc"
import { Effect, Option, Predicate, Schema } from "effect"
import {
DatabaseError,
type HealthCheckError,
isOpenCodeSandboxUnavailable,
OpenCodeClientError,
SandboxDeadError,
type SandboxStartError,
} from "../../../errors"
import { StatefulActor } from "../../../lib/actors/stateful"
import { DaytonaService } from "../../../sandbox/daytona/service"
import { OpenCodeClient } from "../../../sandbox/opencode/client"
import { SandboxProvisioner } from "../../../sandbox/provisioner"
import { SessionStore } from "../../../session/store"
import { ChannelId, GuildId, PreviewAccess, SessionInfo, ThreadId } from "../../../types"
import { History } from "../../history"
import {
LogsInput,
LogsOutput,
PauseInput,
ResumeInput,
SendInput,
SendOutput,
ThreadChatError,
} from "./contracts"
// Parameterless RPCs take a void payload.
const StatusInput = Schema.Void
const RecreateInput = Schema.Void
type RetriableCause = SandboxDeadError | OpenCodeClientError | HealthCheckError | SandboxStartError
// Failure tags that represent infrastructure faults callers may retry.
const retriableTags = [
  "SandboxDeadError",
  "OpenCodeClientError",
  "HealthCheckError",
  "SandboxStartError",
] as const
/** True when the failure carries one of the retriable infrastructure tags. */
const isRetriableCause = (cause: unknown): cause is RetriableCause =>
  retriableTags.some((tag) => Predicate.isTagged(cause, tag))
/** Wrap an arbitrary failure in the public ThreadChatError envelope for this thread. */
const toThreadError = (threadId: ThreadId, cause: unknown): ThreadChatError => {
  const retriable = isRetriableCause(cause)
  return ThreadChatError.make({ threadId, cause, retriable })
}
class ThreadState extends Schema.Class<ThreadState>("ClusterMode/ThreadState")({
loaded: Schema.Boolean,
session: Schema.NullOr(SessionInfo),
}) {
static empty() {
return ThreadState.make({ loaded: false, session: null })
}
option() {
return Option.fromNullable(this.session)
}
hydrate(row: Option.Option<SessionInfo>) {
if (Option.isNone(row)) return ThreadState.make({ loaded: true, session: null })
return ThreadState.make({ loaded: true, session: row.value })
}
with(session: SessionInfo) {
return ThreadState.make({ loaded: true, session })
}
clear() {
return ThreadState.make({ loaded: true, session: null })
}
}
// RPC contracts served by the ThreadChat entity.
// Only `send` carries the ClusterSchema.Persisted annotation (durable request
// handling); the remaining RPCs are plain, non-persisted calls.
const SendRpc = Rpc.make("send", {
  payload: SendInput,
  success: SendOutput,
  error: ThreadChatError,
}).annotate(ClusterSchema.Persisted, true)
// Read-only query of the current session (null when none exists).
const StatusRpc = Rpc.make("status", {
  payload: StatusInput,
  success: Schema.NullOr(SessionInfo),
  error: DatabaseError,
})
// Destroy sandbox resources and clear state so the next send re-provisions.
const RecreateRpc = Rpc.make("recreate", {
  payload: RecreateInput,
  success: Schema.Void,
  error: DatabaseError,
})
const PauseRpc = Rpc.make("pause", {
  payload: PauseInput,
  success: Schema.NullOr(SessionInfo),
  error: ThreadChatError,
})
const ResumeRpc = Rpc.make("resume", {
  payload: ResumeInput,
  success: SessionInfo,
  error: ThreadChatError,
})
const LogsRpc = Rpc.make("logs", {
  payload: LogsInput,
  success: Schema.NullOr(LogsOutput),
  error: ThreadChatError,
})
// One entity per Discord thread; the entityId is the raw thread id.
export const ThreadEntity = Entity.make("ThreadChat", [
  SendRpc,
  StatusRpc,
  RecreateRpc,
  PauseRpc,
  ResumeRpc,
  LogsRpc,
])
/**
 * Live handler layer for the ThreadChat entity.
 *
 * Each entity instance owns one Discord thread's session: state is held in a
 * StatefulActor, bootstrapped lazily from SessionStore, and persisted back on
 * every change so a crashed runner can recover. Entities idle out after 30
 * minutes (see `maxIdleTime` below).
 */
export const ThreadEntityLive = ThreadEntity.toLayer(
  Effect.gen(function* () {
    const oc = yield* OpenCodeClient
    const daytona = yield* DaytonaService
    const store = yield* SessionStore
    const history = yield* History
    const provisioner = yield* SandboxProvisioner
    // The cluster address carries the entity id; it must be the bare threadId.
    const entityId = String((yield* Entity.CurrentAddress).entityId)
    if (entityId.includes("/")) {
      return yield* Effect.dieMessage(`ThreadEntity expected raw threadId entityId, got "${entityId}"`)
    }
    const threadId = ThreadId.make(entityId)
    // Map any internal failure into the public ThreadChatError envelope.
    const thread = <A, E, R>(effect: Effect.Effect<A, E, R>) =>
      effect.pipe(Effect.mapError((cause) => toThreadError(threadId, cause)))
    const state = StatefulActor.make(ThreadState.empty())
    const stateNow = () => state.get().option()
    const setState = (session: SessionInfo) => {
      state.update((current) => current.with(session))
    }
    const clearState = () => {
      state.update((current) => current.clear())
    }
    /** Bootstrap state from DB once, then serve from in-memory state. */
    const load = Effect.fn("ThreadEntity.load")(function* () {
      const current = state.get()
      if (current.loaded) return current.option()
      const row = yield* store.getByThread(threadId)
      const hydrated = current.hydrate(row)
      state.set(hydrated)
      return hydrated.option()
    })
    /** Persist the latest session snapshot for crash recovery. */
    const saveSession = Effect.fnUntraced(function* (session: SessionInfo) {
      yield* store.upsert(session)
    })
    /** Update in-memory state and persist it as one operation. */
    const commitSession = Effect.fnUntraced(function* (session: SessionInfo) {
      setState(session)
      yield* saveSession(session)
    })
    /** Ensure there is an active session for this thread (resume or create). */
    const ensureSession = Effect.fnUntraced(function* (
      channelId: ChannelId,
      guildId: GuildId,
      current: Option.Option<SessionInfo>,
    ) {
      return yield* provisioner.ensureActive({
        threadId,
        channelId,
        guildId,
        current,
      })
    })
    /** Rebuild prompt context when session changed; otherwise keep prompt as-is. */
    const rehydrate = (latest: string, before: Option.Option<SessionInfo>, next: SessionInfo) => {
      if (Option.isNone(before)) return Effect.succeed(latest)
      if (before.value.sessionId === next.sessionId) return Effect.succeed(latest)
      return history.rehydrate(threadId, latest)
    }
    // Translate an OpenCode send failure: sandbox-unavailable status codes become
    // SandboxDeadError (triggering the retry path); anything else passes through.
    const recoverFailure = (session: SessionInfo, error: OpenCodeClientError) =>
      isOpenCodeSandboxUnavailable(error)
        ? SandboxDeadError.make({
            threadId: session.threadId,
            reason: `OpenCode send failed (${error.statusCode})`,
          })
        : error
    // Record recovery state via the provisioner, commit it, then fail with the
    // (possibly translated) error. Yielding the error value fails the effect.
    const recoverSendError = Effect.fnUntraced(function* (session: SessionInfo, error: OpenCodeClientError) {
      const next = yield* provisioner.recoverSendFailure(session.threadId, session, error)
      yield* commitSession(next)
      return yield* recoverFailure(session, error)
    })
    /** Send prompt to OpenCode and record recovery state if send failed. */
    const sendPrompt = Effect.fnUntraced(function* (session: SessionInfo, text: string) {
      yield* store.markActivity(session.threadId)
      return yield* oc
        .sendPrompt(PreviewAccess.from(session), session.sessionId, text)
        .pipe(Effect.catchTag("OpenCodeClientError", (error) => recoverSendError(session, error)))
    })
    /** Retry exactly once after sandbox-dead by re-ensuring and rehydrating. */
    const retrySend = Effect.fnUntraced(function* (payload: SendInput, prior: SessionInfo, text: string) {
      return yield* sendPrompt(prior, text).pipe(
        Effect.catchTag("SandboxDeadError", () =>
          Effect.gen(function* () {
            const resumed = yield* ensureSession(payload.channelId, payload.guildId, stateNow())
            yield* commitSession(resumed)
            const next = yield* rehydrate(payload.text, Option.some(prior), resumed)
            return yield* sendPrompt(resumed, next)
          }),
        ),
      )
    })
    /** Full send lifecycle: load, ensure, persist, rehydrate, send, retry, return output. */
    const sendNow = Effect.fnUntraced(function* (payload: SendInput) {
      return yield* Effect.gen(function* () {
        const before = yield* load()
        const active = yield* ensureSession(payload.channelId, payload.guildId, before)
        yield* commitSession(active)
        const first = yield* rehydrate(payload.text, before, active)
        const text = yield* retrySend(payload, active, first)
        // Retry may have swapped sessions; report whichever is now current.
        const session = Option.getOrElse(stateNow(), () => active)
        return SendOutput.make({
          text,
          session,
          changedSession: Option.isSome(before) ? before.value.sessionId !== session.sessionId : false,
        })
      })
    })
    /** Public `send` handler: sendNow wrapped in the ThreadChatError envelope. */
    const send = Effect.fn("ThreadEntity.send")(function* ({ payload }) {
      return yield* thread(sendNow(payload))
    })
    /** Read current session from actor state (bootstrapping from DB on first access). */
    const status = Effect.fn("ThreadEntity.status")(function* () {
      const row = yield* load()
      if (Option.isNone(row)) return null
      return row.value
    })
    /** Destroy current session resources for this thread and clear actor state. */
    const recreate = Effect.fn("ThreadEntity.recreate")(function* () {
      const row = yield* load()
      if (Option.isNone(row)) {
        clearState()
        return
      }
      const next = yield* provisioner.destroy(threadId, row.value, "cluster-recreate")
      yield* store.upsert(next)
      clearState()
    })
    /** Pause active session if present and commit paused state. */
    const pause = Effect.fn("ThreadEntity.pause")(function* ({ payload }) {
      return yield* thread(Effect.gen(function* () {
        const row = yield* load()
        if (Option.isNone(row)) return null
        const next = yield* provisioner.pause(threadId, row.value, payload.reason)
        yield* commitSession(next)
        return next
      }))
    })
    /** Resume existing session or create one when missing, then commit state. */
    const resume = Effect.fn("ThreadEntity.resume")(function* ({ payload }) {
      return yield* thread(Effect.gen(function* () {
        const before = yield* load()
        // Fall back to the stored row's ids, or placeholder ids when nothing
        // is known (e.g. resume issued from a control surface).
        const channelId = payload.channelId === null
          ? Option.match(before, {
            onNone: () => ChannelId.make("ctl"),
            onSome: (row) => row.channelId,
          })
          : payload.channelId
        const guildId = payload.guildId === null
          ? Option.match(before, {
            onNone: () => GuildId.make("local"),
            onSome: (row) => row.guildId,
          })
          : payload.guildId
        const next = yield* ensureSession(channelId, guildId, before)
        yield* commitSession(next)
        return next
      }))
    })
    /** Read sandbox log tail for active session if present. */
    const logs = Effect.fn("ThreadEntity.logs")(function* ({ payload }) {
      return yield* thread(Effect.gen(function* () {
        const row = yield* load()
        if (Option.isNone(row)) return null
        const out = yield* daytona.exec(
          row.value.sandboxId,
          "read-opencode-log",
          `cat /tmp/opencode.log 2>/dev/null | tail -${payload.lines}`,
        )
        return LogsOutput.make({
          sandboxId: row.value.sandboxId,
          output: out.output,
        })
      }))
    })
    const handlers = { send, status, recreate, pause, resume, logs }
    return ThreadEntity.of(handlers)
  }),
  { maxIdleTime: "30 minutes" },
)

View File

@@ -0,0 +1,14 @@
// Barrel for the thread-chat cluster module: contracts, entity, and live layers.
export {
  LogsInput,
  LogsOutput,
  PauseInput,
  ResumeInput,
  SendInput,
  SendOutput,
  ThreadChatCluster,
  ThreadChatError,
  ThreadControlCluster,
} from "./contracts"
export { ThreadChatClusterLive } from "./chat-live"
export { ThreadControlClusterLive } from "./control-live"
export { ThreadEntity, ThreadEntityLive } from "./entity"

View File

@@ -0,0 +1,29 @@
import { Context, Effect, Layer } from "effect"
import { ThreadEnsureError } from "./model/errors"
import type { Inbound, ThreadRef } from "./model/schema"
export declare namespace Threads {
  export interface Service {
    readonly ensure: (event: Inbound, name: string) => Effect.Effect<ThreadRef, ThreadEnsureError>
  }
}
/**
 * Port for resolving the thread an inbound event belongs to, creating one
 * when necessary.
 */
export class Threads extends Context.Tag("@discord/conversation/Threads")<Threads, Threads.Service>() {
  /**
   * Pass-through adapter: thread messages already carry their ThreadRef;
   * channel messages fail (non-retriable) because no thread can be created here.
   */
  static readonly empty = Layer.succeed(
    Threads,
    Threads.of({
      ensure: (event) =>
        event.kind === "thread_message"
          ? Effect.succeed({ threadId: event.threadId, channelId: event.channelId })
          : Effect.fail(
            ThreadEnsureError.make({
              channelId: event.channelId,
              message: "threads adapter missing for channel message",
              retriable: false,
            }),
          ),
    }),
  )
}

View File

@@ -0,0 +1,54 @@
# Database Module
SQLite via `@effect/sql-sqlite-bun` with Effect's `Migrator` system.
## SqliteDb Tag — Not Just SqlClient
`SqliteDb` (`client.ts`) is a custom `Context.Tag` wrapping `Client.SqlClient`. It's NOT a direct re-export. The layer:
1. Uses `Layer.scopedContext` with an `Effect.gen` that reads `AppConfig.databasePath` at construction time
2. Builds the client via `SqliteClient.make({ filename })` (with `Reactivity.layer` provided), and adds it to the context under both the `SqliteDb` and `Client.SqlClient` tags
3. Sets `PRAGMA busy_timeout = 5000` on initialization
This means `SqliteDb` is what services depend on, not raw `Client.SqlClient`.
## Migration System
Uses `@effect/sql/Migrator` with `Migrator.fromRecord` (not file-based).
Migrations are imported as modules in `init.ts` and keyed by name.
Each migration is idempotent:
- `CREATE TABLE IF NOT EXISTS`
- Checks existing columns via `PRAGMA table_info(...)`, only adds missing ones via `ALTER TABLE`
- Creates indexes with `IF NOT EXISTS`
## Schema Initialization at Service Level
`initializeSchema` is called by BOTH `SessionStore.layer` and `ConversationLedger.layer` individually. It's idempotent, but this means schema init runs multiple times — once per service that needs the DB. The pattern is:
```ts
yield * db(initializeSchema.pipe(Effect.provideService(Client.SqlClient, sql)))
```
Note: `initializeSchema` needs `Client.SqlClient` in its requirements, so each caller provides it manually.
## SqlSchema Typed Queries
`SessionStore` uses `@effect/sql`'s `SqlSchema` module for type-safe queries:
- `SqlSchema.void({ Request, execute })` — for writes (insert/update)
- `SqlSchema.findOne({ Request, Result, execute })` — returns `Option<Result>`
- `SqlSchema.findAll({ Request, Result, execute })` — returns `ReadonlyArray<Result>`
The `Request` and `Result` schemas handle encode/decode automatically. Column aliasing (`thread_id AS threadId`) maps snake_case DB columns to camelCase TS fields.
## Adding a New Migration
1. Create `src/db/migrations/NNNN_name.ts` exporting a default `Effect.gen` that uses `yield* Client.SqlClient`
2. Import and register it in `src/db/init.ts` in the `Migrator.fromRecord({...})` call
3. Both files must change together
## Status Timestamp Pattern
`SessionStore` uses a dynamic `statusSet` helper that updates status-specific timestamp columns (`paused_at`, `resumed_at`, etc.) based on the new status value — a single UPDATE touches the right column via CASE expressions.

View File

@@ -0,0 +1,21 @@
import { SqliteClient } from "@effect/sql-sqlite-bun"
import { Reactivity } from "@effect/experimental"
import * as Client from "@effect/sql/SqlClient"
import { Context, Effect, Layer } from "effect"
import { AppConfig } from "../config"
export class SqliteDb extends Context.Tag("@discord/SqliteDb")<SqliteDb, Client.SqlClient>() {
static readonly layer = Layer.scopedContext(
Effect.gen(function* () {
const config = yield* AppConfig
const db = yield* SqliteClient.make({ filename: config.databasePath }).pipe(
Effect.provide(Reactivity.layer),
)
yield* db`PRAGMA busy_timeout = 5000`
return Context.empty().pipe(
Context.add(SqliteDb, db),
Context.add(Client.SqlClient, db),
)
}),
).pipe(Layer.orDie)
}

View File

@@ -0,0 +1,90 @@
import * as Client from "@effect/sql/SqlClient"
import { describe, expect } from "bun:test"
import { Effect } from "effect"
import { initializeSchema } from "./init"
import { effectTest, withSqlite, withTempSqliteFile } from "../test/effect"
// Expected discord_sessions columns, in CREATE TABLE order (see 0001 migration).
const columns = [
  "thread_id",
  "channel_id",
  "guild_id",
  "sandbox_id",
  "session_id",
  "preview_url",
  "preview_token",
  "status",
  "last_activity",
  "pause_requested_at",
  "paused_at",
  "resume_attempted_at",
  "resumed_at",
  "destroyed_at",
  "last_health_ok_at",
  "last_error",
  "resume_fail_count",
  "created_at",
  "updated_at",
]
// Named indexes the migration creates on discord_sessions.
const indexes = [
  "discord_sessions_status_last_activity_idx",
  "discord_sessions_status_updated_at_idx",
]
// Expected conversation_offsets columns, in CREATE TABLE order.
const offsetColumns = [
  "source_id",
  "last_message_id",
  "updated_at",
]
// Named indexes the migration creates on conversation_offsets.
const offsetIndexes = [
  "conversation_offsets_updated_at_idx",
]
/** Project PRAGMA result rows down to their `name` column. */
const rowNames = (rows: ReadonlyArray<{ name: string }>) => rows.map((row) => row.name)
/** Column names of discord_sessions, in table order. */
const getColumns = (db: Client.SqlClient) =>
  db<{ name: string }>`PRAGMA table_info(discord_sessions)`.pipe(Effect.map(rowNames))
/** Index names on discord_sessions (may include sqlite_ autoindexes). */
const getIndexes = (db: Client.SqlClient) =>
  db<{ name: string }>`PRAGMA index_list(discord_sessions)`.pipe(Effect.map(rowNames))
/** Column names of conversation_offsets, in table order. */
const getOffsetColumns = (db: Client.SqlClient) =>
  db<{ name: string }>`PRAGMA table_info(conversation_offsets)`.pipe(Effect.map(rowNames))
/** Index names on conversation_offsets. */
const getOffsetIndexes = (db: Client.SqlClient) =>
  db<{ name: string }>`PRAGMA index_list(conversation_offsets)`.pipe(Effect.map(rowNames))
describe("initializeSchema", () => {
  // Runs the migrator twice against the same temp DB file and asserts the
  // resulting schema (columns + indexes) is identical both times — i.e. the
  // migration is idempotent and produces exactly the expected shape.
  effectTest("creates schema and is idempotent", () =>
    withTempSqliteFile((filename) =>
      Effect.gen(function* () {
        // First run: create the schema from scratch.
        yield* withSqlite(filename, (db) => initializeSchema.pipe(Effect.provideService(Client.SqlClient, db)))
        const one = yield* withSqlite(filename, getColumns)
        const offsetOne = yield* withSqlite(filename, getOffsetColumns)
        expect(one).toEqual(columns)
        expect(offsetOne).toEqual(offsetColumns)
        // Second run: must be a no-op (no column/index changes).
        yield* withSqlite(filename, (db) => initializeSchema.pipe(Effect.provideService(Client.SqlClient, db)))
        const two = yield* withSqlite(filename, getColumns)
        const offsetTwo = yield* withSqlite(filename, getOffsetColumns)
        expect(two).toEqual(one)
        expect(offsetTwo).toEqual(offsetOne)
        // The ALTER TABLE upgrade path must not introduce duplicate columns.
        const seen = new Set(two)
        expect(seen.size).toBe(two.length)
        // Compare named indexes only; ignore sqlite_ autoindexes.
        const actual = (yield* withSqlite(filename, getIndexes)).filter((name) => !name.startsWith("sqlite_"))
        const offsetActual = (yield* withSqlite(filename, getOffsetIndexes)).filter((name) => !name.startsWith("sqlite_"))
        expect(new Set(actual)).toEqual(new Set(indexes))
        expect(new Set(offsetActual)).toEqual(new Set(offsetIndexes))
      }),
      "discord-sessions-",
    ),
  )
})

View File

@@ -0,0 +1,21 @@
import { Reactivity } from "@effect/experimental"
import { SqliteClient } from "@effect/sql-sqlite-bun"
import * as Migrator from "@effect/sql/Migrator"
import * as Client from "@effect/sql/SqlClient"
import { Effect } from "effect"
import migration0001 from "./migrations/0001_discord_sessions"
// All registered migrations, keyed by name; run against the SqlClient in context.
const run = Migrator.make({})({
  loader: Migrator.fromRecord({
    "0001_discord_sessions": migration0001,
  }),
})
export const initializeSchema = run
/** Open a throwaway client for `filename`, run migrations, and release it. */
export const initializeSchemaForFile = (filename: string) =>
  Effect.scoped(
    Effect.gen(function* () {
      const db = yield* Effect.provide(SqliteClient.make({ filename }), Reactivity.layer)
      return yield* run.pipe(Effect.provideService(Client.SqlClient, db))
    }),
  )

View File

@@ -0,0 +1,83 @@
import * as Client from "@effect/sql/SqlClient"
import { Effect } from "effect"
// Per-thread sandbox session table. IF NOT EXISTS keeps the migration idempotent.
const TABLE = `CREATE TABLE IF NOT EXISTS discord_sessions (
thread_id TEXT PRIMARY KEY,
channel_id TEXT NOT NULL,
guild_id TEXT NOT NULL,
sandbox_id TEXT NOT NULL,
session_id TEXT NOT NULL,
preview_url TEXT NOT NULL,
preview_token TEXT,
status TEXT NOT NULL CHECK (status IN ('creating', 'active', 'pausing', 'paused', 'resuming', 'destroying', 'destroyed', 'error')),
last_activity TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
pause_requested_at TEXT,
paused_at TEXT,
resume_attempted_at TEXT,
resumed_at TEXT,
destroyed_at TEXT,
last_health_ok_at TEXT,
last_error TEXT,
resume_fail_count INTEGER NOT NULL DEFAULT 0,
created_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
)`
// Per-source inbox offsets: last Discord message id processed for each source.
const OFFSETS_TABLE = `CREATE TABLE IF NOT EXISTS conversation_offsets (
source_id TEXT PRIMARY KEY,
last_message_id TEXT NOT NULL,
updated_at TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP
)`
// Columns added via ALTER TABLE when upgrading a pre-existing discord_sessions
// table; the migration only adds those missing from PRAGMA table_info.
const COLUMNS = [
  ["preview_token", "TEXT"],
  ["last_activity", "TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP"],
  ["created_at", "TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP"],
  ["updated_at", "TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP"],
  ["pause_requested_at", "TEXT"],
  ["paused_at", "TEXT"],
  ["resume_attempted_at", "TEXT"],
  ["resumed_at", "TEXT"],
  ["destroyed_at", "TEXT"],
  ["last_health_ok_at", "TEXT"],
  ["last_error", "TEXT"],
  ["resume_fail_count", "INTEGER NOT NULL DEFAULT 0"],
] as const
// Same upgrade path for conversation_offsets.
const OFFSET_COLUMNS = [
  ["last_message_id", "TEXT NOT NULL"],
  ["updated_at", "TEXT NOT NULL DEFAULT CURRENT_TIMESTAMP"],
] as const
// Indexes are created with IF NOT EXISTS, so re-running is safe.
const INDEXES = [
  `CREATE INDEX IF NOT EXISTS discord_sessions_status_last_activity_idx
ON discord_sessions (status, last_activity)`,
  `CREATE INDEX IF NOT EXISTS discord_sessions_status_updated_at_idx
ON discord_sessions (status, updated_at)`,
] as const
const OFFSET_INDEXES = [
  `CREATE INDEX IF NOT EXISTS conversation_offsets_updated_at_idx
ON conversation_offsets (updated_at)`,
] as const
export default Effect.gen(function* () {
const db = yield* Client.SqlClient
yield* db.unsafe(TABLE)
yield* db.unsafe(OFFSETS_TABLE)
const names = new Set((yield* db<{ name: string }>`PRAGMA table_info(discord_sessions)`).map((row) => row.name))
const missing = COLUMNS.filter(([name]) => !names.has(name))
yield* Effect.forEach(missing, ([name, definition]) => db.unsafe(`ALTER TABLE discord_sessions ADD COLUMN ${name} ${definition}`), {
discard: true,
})
const offsetNames = new Set((yield* db<{ name: string }>`PRAGMA table_info(conversation_offsets)`).map((row) => row.name))
const offsetMissing = OFFSET_COLUMNS.filter(([name]) => !offsetNames.has(name))
yield* Effect.forEach(offsetMissing, ([name, definition]) => db.unsafe(`ALTER TABLE conversation_offsets ADD COLUMN ${name} ${definition}`), {
discard: true,
})
yield* Effect.forEach(INDEXES, (index) => db.unsafe(index), { discard: true })
yield* Effect.forEach(OFFSET_INDEXES, (index) => db.unsafe(index), { discard: true })
})

View File

@@ -0,0 +1,669 @@
import type { ChatInputCommandInteraction, GuildMember, Interaction, Message, TextChannel } from "discord.js"
import { ChannelType, MessageFlags } from "discord.js"
import { Context, Effect, Layer, Option, Queue, Ref, Runtime, Schedule, Stream } from "effect"
import { AppConfig } from "../config"
import { DiscordClient } from "./client"
import { TYPING_INTERVAL } from "./constants"
import { cleanResponse, splitForDiscord } from "./format"
import { SessionStore } from "../session/store"
import { ChannelId, GuildId, ThreadId } from "../types"
import { DeliveryError, HistoryError, messageOf, ThreadEnsureError } from "../conversation/model/errors"
import { ChannelMessage, Mention, ThreadMessage, ThreadRef, Typing, type Action, type Inbound } from "../conversation/model/schema"
import { History, Inbox, OffsetStore, Outbox, Threads } from "../conversation"
import {
COMMAND_ACK,
COMMAND_CHANNEL_REPLY,
COMMAND_FORBIDDEN_REPLY,
COMMAND_NOT_THREAD_REPLY,
COMMANDS,
EMPTY_MENTION_REPLY,
SETUP_FAILURE_REPLY,
commandText,
} from "./conversation-commands"
import {
asTextChannel,
asThreadChannel,
type ChatChannel,
hasRequiredRole,
isChannelAllowed,
isMentioned,
} from "./conversation-channels"
import { catchupBenign, deliveryRetriable, deliveryRetry } from "./conversation-delivery"
import { buildHistoryReplayPrompt } from "./conversation-history"
import { catchupFromOffset } from "./catchup"
// Max entries retained in each bounded in-memory cache before evicting oldest.
const CACHE_LIMIT = 4_000
// Page size used when fetching missed messages during catch-up.
const CATCHUP_PAGE_SIZE = 100
export class DiscordConversationServices {
static readonly portLayer = Layer.scopedContext(
Effect.gen(function* () {
const client = yield* DiscordClient
const config = yield* AppConfig
const sessions = yield* SessionStore
const offsets = yield* OffsetStore
const runtime = yield* Effect.runtime<never>()
const input = yield* Queue.unbounded<Inbound>()
const chats = new Map<string, ChatChannel>()
const texts = new Map<string, TextChannel>()
const refs = new Map<string, Message>()
const roots = new Map<string, ThreadId>()
const ref_ids: Array<string> = []
const root_ids: Array<string> = []
const stash = <A>(map: Map<string, A>, keys: Array<string>, key: string, value: A) => {
if (!map.has(key)) keys.push(key)
map.set(key, value)
if (keys.length <= CACHE_LIMIT) return
const oldest = keys.shift()
if (!oldest) return
map.delete(oldest)
}
const sourceChannel = (channelId: string) => `channel:${channelId}`
const sourceThread = (threadId: string) => `thread:${threadId}`
const uniq = <A>(values: ReadonlyArray<A>): Array<A> => [...new Set(values)]
const offer = (event: Inbound, onFresh: Effect.Effect<void>) =>
Effect.logInfo("Message queued").pipe(
Effect.annotateLogs({
event: "conversation.message.queued",
kind: event.kind,
message_id: event.messageId,
author_id: event.authorId,
content: event.content.slice(0, 200),
}),
Effect.zipRight(onFresh),
Effect.zipRight(input.offer(event)),
Effect.asVoid,
)
const memberOf = (message: Message) => {
if (message.member) return Effect.succeed(message.member)
if (config.discordRequiredRoleId.length === 0) return Effect.succeed<GuildMember | null>(null)
const guild = message.guild
if (!guild) return Effect.succeed<GuildMember | null>(null)
return Effect.tryPromise(() => guild.members.fetch(message.author.id)).pipe(
Effect.catchAll(() => Effect.succeed<GuildMember | null>(null)),
)
}
const ingestMessage = Effect.fn("DiscordAdapter.ingestMessage")(function* (message: Message) {
const source = message.channel.type === ChannelType.PublicThread || message.channel.type === ChannelType.PrivateThread
? sourceThread(message.channel.id)
: message.channel.type === ChannelType.GuildText
? sourceChannel(message.channel.id)
: null
if (source === null) return
if (message.author.bot || message.mentions.everyone) {
yield* offsets.setOffset(source, message.id)
return
}
const member = yield* memberOf(message)
if (!hasRequiredRole(member, config)) {
yield* offsets.setOffset(source, message.id)
return
}
const botUserId = client.user?.id ?? ""
const botRoleId = config.discordRoleId
const mentioned = isMentioned(message, botUserId, botRoleId)
const content = message.content.replace(/<@[!&]?\d+>/g, "").trim()
const mentions = Mention.make({
userIds: [...message.mentions.users.keys()],
roleIds: [...message.mentions.roles.keys()],
})
if (!content && mentioned) {
yield* Effect.tryPromise(() => message.reply(EMPTY_MENTION_REPLY)).pipe(Effect.catchAll(() => Effect.void))
yield* offsets.setOffset(source, message.id)
return
}
if (message.channel.type === ChannelType.PublicThread || message.channel.type === ChannelType.PrivateThread) {
const thread = asThreadChannel(message.channel)
if (!thread) return
const threadId = ThreadId.make(thread.id)
const channelId = ChannelId.make(thread.parentId ?? thread.id)
const allowed = isChannelAllowed(thread.parentId ?? "", thread.parent?.parentId ?? null, config)
if (!allowed) {
const owned = yield* sessions.hasTrackedThread(threadId).pipe(
Effect.catchAll(() => Effect.succeed(false)),
)
if (!owned || mentioned) {
yield* offsets.setOffset(source, message.id)
return
}
}
const event = ThreadMessage.make({
kind: "thread_message",
threadId,
channelId,
messageId: message.id,
guildId: GuildId.make(message.guildId ?? ""),
botUserId,
botRoleId,
authorId: message.author.id,
authorIsBot: message.author.bot,
mentionsEveryone: message.mentions.everyone,
mentions,
content,
})
yield* offer(
event,
Effect.sync(() => {
chats.set(event.threadId, thread)
stash(refs, ref_ids, event.messageId, message)
}),
)
yield* offsets.setOffset(source, message.id)
return
}
const channel = asTextChannel(message.channel)
if (!channel) return
if (!isChannelAllowed(channel.id, channel.parentId ?? null, config)) {
yield* offsets.setOffset(source, message.id)
return
}
const event = ChannelMessage.make({
kind: "channel_message",
channelId: ChannelId.make(channel.id),
messageId: message.id,
guildId: GuildId.make(message.guildId ?? ""),
botUserId,
botRoleId,
authorId: message.author.id,
authorIsBot: message.author.bot,
mentionsEveryone: message.mentions.everyone,
mentions,
content,
})
yield* offer(
event,
Effect.sync(() => {
texts.set(event.channelId, channel)
stash(refs, ref_ids, event.messageId, message)
}),
)
yield* offsets.setOffset(source, message.id)
})
const onMessage = (message: Message): void => {
const run = ingestMessage(message).pipe(
Effect.catchAll((error) =>
Effect.logError("Failed ingesting Discord message").pipe(
Effect.annotateLogs({
event: "conversation.ingest.failed",
message_id: message.id,
error: messageOf(error),
}),
)),
)
void Runtime.runPromise(runtime)(run)
}
const catchupSource = (source: string, channel: ChatChannel) =>
catchupFromOffset({
source,
pageSize: CATCHUP_PAGE_SIZE,
offsets,
fetchLatest: Effect.tryPromise(() => channel.messages.fetch({ limit: 1 })).pipe(
Effect.map((page) => {
const latest = page.first()
return latest ? Option.some(latest) : Option.none()
}),
),
fetchAfter: (after) =>
Effect.tryPromise(() =>
channel.messages.fetch({
limit: CATCHUP_PAGE_SIZE,
after,
})
).pipe(
Effect.map((page) => [...page.values()].sort((a, b) => a.createdTimestamp - b.createdTimestamp)),
),
idOf: (message) => message.id,
ingest: (message) => ingestMessage(message),
})
const categoryChannels = () =>
Effect.gen(function* () {
if (config.discordCategoryId.length === 0) return [] as Array<string>
const guilds = [...client.guilds.cache.values()]
const nested = yield* Effect.forEach(
guilds,
(guild) =>
Effect.tryPromise(() => guild.channels.fetch()).pipe(
Effect.map((channels) =>
[...channels.values()].flatMap((channel) => {
const text = asTextChannel(channel)
if (!text) return []
if (text.parentId !== config.discordCategoryId) return []
return [text.id]
}),
),
Effect.catchAll(() => Effect.succeed([] as Array<string>)),
),
{ discard: false, concurrency: "unbounded" },
)
return nested.flat()
})
const fetchText = (channelId: string) =>
Effect.tryPromise(() => client.channels.fetch(channelId)).pipe(
Effect.map((channel) => asTextChannel(channel)),
Effect.catchAll(() => Effect.succeed(null)),
)
const fetchThread = (threadId: string) =>
Effect.tryPromise(() => client.channels.fetch(threadId)).pipe(
Effect.map((channel) => asThreadChannel(channel)),
Effect.catchAll(() => Effect.succeed(null)),
)
const recoverMissedMessages = Effect.gen(function* () {
const channels = config.allowedChannelIds.length > 0
? uniq(config.allowedChannelIds)
: uniq(yield* categoryChannels())
const threads = uniq((yield* sessions.listTrackedThreads()).map((id) => String(id)))
const fromChannels = yield* Effect.forEach(
channels,
(channelId) =>
fetchText(channelId).pipe(
Effect.flatMap((channel) => {
if (!channel) return Effect.succeed(0)
return catchupSource(sourceChannel(channelId), channel)
}),
Effect.catchAll((error) => {
const log = catchupBenign(error) ? Effect.logDebug("Channel catch-up skipped") : Effect.logWarning("Channel catch-up failed")
return log.pipe(
Effect.annotateLogs({
event: "conversation.catchup.channel.failed",
channel_id: channelId,
error: messageOf(error),
}),
Effect.as(0),
)
}),
),
{ discard: false, concurrency: "unbounded" },
)
const fromThreads = yield* Effect.forEach(
threads,
(threadId) =>
fetchThread(threadId).pipe(
Effect.flatMap((thread) => {
if (!thread) return Effect.succeed(0)
return catchupSource(sourceThread(threadId), thread)
}),
Effect.catchAll((error) => {
const log = catchupBenign(error) ? Effect.logDebug("Thread catch-up skipped") : Effect.logWarning("Thread catch-up failed")
return log.pipe(
Effect.annotateLogs({
event: "conversation.catchup.thread.failed",
thread_id: threadId,
error: messageOf(error),
}),
Effect.as(0),
)
}),
),
{ discard: false, concurrency: "unbounded" },
)
const fetched = [...fromChannels, ...fromThreads].reduce((n, x) => n + x, 0)
yield* Effect.logInfo("Discord catch-up complete").pipe(
Effect.annotateLogs({
event: "conversation.catchup.complete",
channels: channels.length,
threads: threads.length,
fetched,
}),
)
})
const acknowledge = (interaction: ChatInputCommandInteraction, content: string) =>
Effect.tryPromise(async () => {
if (interaction.deferred || interaction.replied) {
await interaction.editReply({ content })
} else {
await interaction.reply({ content, flags: MessageFlags.Ephemeral })
}
}).pipe(Effect.catchAll(() => Effect.void))
// Slash-command entrypoint ("interactionCreate"). Validates the interaction
// (thread context, channel allow-list, required role) and converts it into a
// synthetic ThreadMessage on the conversation input queue so commands flow
// through the same pipeline as regular chat messages.
const onInteraction = (interaction: Interaction): void => {
if (!interaction.isChatInputCommand()) return
// Map the slash-command name to its in-thread text form (e.g. "!status");
// unknown commands yield "" and are ignored.
const text = commandText(interaction.commandName)
if (!text) return
const handle = Effect.gen(function* () {
// Defer the reply up-front to beat Discord's interaction deadline; errors
// here are swallowed — acknowledge() falls back to reply() if needed.
yield* Effect.tryPromise(() =>
interaction.deferReply({
flags: MessageFlags.Ephemeral,
})
).pipe(Effect.catchAll(() => Effect.void))
// Commands only make sense inside a thread.
const thread = asThreadChannel(interaction.channel)
if (!thread) {
yield* acknowledge(interaction, COMMAND_NOT_THREAD_REPLY)
return
}
const threadId = ThreadId.make(thread.id)
const channelId = ChannelId.make(thread.parentId ?? thread.id)
// Channel allow-list check; threads the bot already tracks are exempt so
// existing sessions keep working after a configuration change.
const allowed = isChannelAllowed(thread.parentId ?? "", thread.parent?.parentId ?? null, config)
if (!allowed) {
const owned = yield* sessions.hasTrackedThread(threadId).pipe(
Effect.catchAll(() => Effect.succeed(false)),
)
if (!owned) {
yield* acknowledge(interaction, COMMAND_CHANNEL_REPLY)
return
}
}
// Role gate: fetch the guild member (null outside a guild or on fetch
// failure) and verify the configured required role, if any.
const member = yield* Effect.tryPromise(() =>
interaction.guild ? interaction.guild.members.fetch(interaction.user.id) : Promise.resolve(null),
).pipe(Effect.catchAll(() => Effect.succeed(null)))
if (!hasRequiredRole(member, config)) {
yield* acknowledge(interaction, COMMAND_FORBIDDEN_REPLY)
return
}
const botUserId = client.user?.id ?? ""
// Build a synthetic ThreadMessage that mentions the bot so downstream
// routing always treats the command as directed at it.
const event = ThreadMessage.make({
kind: "thread_message",
threadId,
channelId,
messageId: interaction.id,
guildId: GuildId.make(interaction.guildId ?? ""),
botUserId,
botRoleId: config.discordRoleId,
authorId: interaction.user.id,
authorIsBot: false,
mentionsEveryone: false,
mentions: Mention.make({
userIds: botUserId.length > 0 ? [botUserId] : [],
roleIds: [],
}),
content: text,
})
// Cache the thread channel and enqueue the event for the conversation loop.
yield* Effect.sync(() => {
chats.set(event.threadId, thread)
input.unsafeOffer(event)
})
yield* acknowledge(interaction, COMMAND_ACK)
})
// discord.js handlers are synchronous; run the effect on the captured runtime.
void Runtime.runPromise(runtime)(handle)
}
// Registers the bot's slash commands once the client is ready. Registration
// is guild-scoped when discordCommandGuildId is configured, otherwise global.
// Failures are logged and swallowed — the bot still works via mentions and
// in-thread "!" commands even when slash-command registration fails.
const registerCommands = Effect.gen(function* () {
if (!client.isReady()) {
// Wait for the ready event; the returned finalizer removes the listener
// if this effect is interrupted before the client becomes ready.
yield* Effect.async<void, never>((resume) => {
const ready = () => {
resume(Effect.void)
}
client.once("clientReady", ready)
return Effect.sync(() => {
client.off("clientReady", ready)
})
})
}
const app = client.application
if (!app) return
const guild = config.discordCommandGuildId.trim()
// commands.set replaces the full command set for the chosen scope.
const registered = yield* Effect.tryPromise(() =>
guild.length > 0
? app.commands.set([...COMMANDS], guild)
: app.commands.set([...COMMANDS]),
)
yield* Effect.logInfo("Discord slash commands registered").pipe(
Effect.annotateLogs({
event: "discord.commands.registered",
scope: guild.length > 0 ? "guild" : "global",
guild_id: guild.length > 0 ? guild : "global",
count: registered.size,
}),
)
}).pipe(
// Log-and-continue: registration problems must not take the bot down.
Effect.tapError((cause) =>
Effect.logError("Discord slash command registration failed").pipe(
Effect.annotateLogs({
event: "discord.commands.failed",
message: messageOf(cause),
}),
)),
Effect.catchAll(() => Effect.void),
)
yield* registerCommands
client.on("messageCreate", onMessage)
client.on("interactionCreate", onInteraction)
yield* Effect.addFinalizer(() =>
Effect.gen(function* () {
client.off("messageCreate", onMessage)
client.off("interactionCreate", onInteraction)
yield* input.shutdown
}),
)
yield* recoverMissedMessages.pipe(
Effect.catchAll((error) =>
Effect.logError("Discord catch-up failed").pipe(
Effect.annotateLogs({ event: "conversation.catchup.failed", error: messageOf(error) }),
)),
)
// Inbox port: the conversation loop consumes events from the shared queue.
// shutdown: false means ending the stream does not shut the queue down.
const inbox = Inbox.of({
events: Stream.fromQueue(input, { shutdown: false }),
})
// Resolves the ThreadChannel for a thread id, preferring the in-memory cache
// and falling back to a Discord API fetch (caching the result). Failures are
// normalized into DeliveryError; a fetch that returns a non-thread channel
// is a non-retriable "missing-thread-channel".
const channelOf = (threadId: ThreadId, action: Action["kind"]) => {
const channel = chats.get(threadId)
if (channel) return Effect.succeed(channel)
return Effect.tryPromise(() => client.channels.fetch(threadId)).pipe(
Effect.flatMap((fetched) => {
const thread = asThreadChannel(fetched)
if (thread) {
chats.set(threadId, thread)
return Effect.succeed(thread)
}
return DeliveryError.make({
threadId,
action,
message: "missing-thread-channel",
retriable: false,
})
}),
// Fetch rejections are retriable only for rate limits / 5xx responses.
Effect.mapError((cause) =>
DeliveryError.make({
threadId,
action,
message: messageOf(cause),
retriable: deliveryRetriable(cause),
})),
)
}
// Runs a Discord send with retry. Failures are normalized to DeliveryError,
// every attempt bumps a Ref counter and logs a warning, the deliveryRetry
// schedule backs off on retriable errors, and the final failure (after the
// schedule gives up or the error is non-retriable) is logged with the total
// attempt count. The error is still propagated to the caller.
const deliver = (threadId: ThreadId, action: Action["kind"], send: Effect.Effect<unknown, unknown>) =>
Effect.gen(function* () {
const attempts = yield* Ref.make(0)
yield* send.pipe(
Effect.mapError((cause) =>
DeliveryError.make({
threadId,
action,
message: messageOf(cause),
retriable: deliveryRetriable(cause),
})),
// Per-attempt bookkeeping: increment the counter and warn.
Effect.tapError((error) =>
Ref.updateAndGet(attempts, (n) => n + 1).pipe(
Effect.flatMap((attempt) =>
Effect.logWarning("Discord delivery attempt failed").pipe(
Effect.annotateLogs({
event: "conversation.delivery.retry",
thread_id: threadId,
action,
attempt,
retriable: error.retriable,
message: error.message,
}),
)),
),
),
Effect.retry(deliveryRetry),
// Only reached once retries are exhausted: emit the terminal error log.
Effect.tapError((error) =>
Ref.get(attempts).pipe(
Effect.flatMap((attempt) =>
Effect.logError("Discord delivery failed").pipe(
Effect.annotateLogs({
event: "conversation.delivery.failed",
thread_id: threadId,
action,
attempts: attempt,
retriable: error.retriable,
message: error.message,
}),
)),
),
),
)
})
const sendTyping = (threadId: ThreadId) =>
Effect.gen(function* () {
const channel = yield* channelOf(threadId, "typing")
yield* deliver(threadId, "typing", Effect.tryPromise(() => channel.sendTyping()))
})
const sendText = (threadId: ThreadId, action: "send" | "reply", text: string) =>
Effect.gen(function* () {
const channel = yield* channelOf(threadId, action)
yield* Effect.forEach(
splitForDiscord(cleanResponse(text)),
(chunk) => deliver(threadId, action, Effect.tryPromise(() => channel.send(chunk))),
{ discard: true },
)
})
// Routes an outbound Action to the matching sender: typing pulses go to
// sendTyping, "send"/"reply" actions carry text and go to sendText.
const publish = (action: Action) =>
  action.kind === "typing"
    ? sendTyping(action.threadId)
    : sendText(action.threadId, action.kind, action.text)
// Wraps `self` with a typing indicator: one pulse immediately, then a scoped
// background fiber re-pulses every TYPING_INTERVAL (after an initial delay)
// until the scope closes when `self` finishes. Pulse failures are swallowed
// so typing can never affect the wrapped effect's outcome.
const withTyping = <A, E, R>(threadId: ThreadId, self: Effect.Effect<A, E, R>) =>
Effect.scoped(
Effect.gen(function* () {
const pulse = publish(
Typing.make({
kind: "typing",
threadId,
}),
).pipe(Effect.catchAll(() => Effect.void))
yield* pulse
yield* Effect.forkScoped(
Effect.repeat(pulse, Schedule.spaced(TYPING_INTERVAL)).pipe(
Effect.delay(TYPING_INTERVAL),
),
)
return yield* self
}),
)
// Outbox port implementation backed by the Discord delivery pipeline above.
const outbox = Outbox.of({ publish, withTyping })
// History port: rebuilds a replay prompt from recent thread messages so a
// fresh agent session regains conversational context. Without a cached
// channel we cannot fetch history, so the latest message is returned as-is.
const history = History.of({
rehydrate: (threadId, latest: string) =>
Effect.gen(function* () {
const channel = chats.get(threadId)
if (!channel) return latest
return yield* buildHistoryReplayPrompt(channel, latest).pipe(
// History-fetch failures are surfaced as retriable HistoryErrors.
Effect.mapError((cause) =>
HistoryError.make({
threadId,
message: messageOf(cause),
retriable: true,
})),
)
}),
})
// Threads port: resolves or creates the Discord thread for an event.
// - thread_message events already carry their thread: return it directly.
// - channel_message events: reuse a previously created thread for the same
//   root message when known, otherwise create a new thread off the parent
//   text channel (best-effort replying with a failure notice on error).
const threads = Threads.of({
ensure: (event, name: string) => {
if (event.kind === "thread_message") {
return Effect.succeed(ThreadRef.make({ threadId: event.threadId, channelId: event.channelId }))
}
// Dedup: a thread we already created for this root message.
const known = roots.get(event.messageId)
if (known) {
return Effect.succeed(ThreadRef.make({ threadId: known, channelId: event.channelId }))
}
return Effect.gen(function* () {
// Resolve the parent text channel: cache first, then API fetch.
const local = texts.get(event.channelId)
const channel = local
? local
: yield* Effect.tryPromise(() => client.channels.fetch(event.channelId)).pipe(
Effect.map((fetched) => asTextChannel(fetched)),
Effect.mapError((cause) =>
ThreadEnsureError.make({
channelId: event.channelId,
message: messageOf(cause),
retriable: deliveryRetriable(cause),
})),
)
if (!channel) {
return yield* ThreadEnsureError.make({
channelId: event.channelId,
message: "missing-parent-channel",
retriable: false,
})
}
texts.set(event.channelId, channel)
// Create the thread anchored to the original message when we still hold
// a Message reference; otherwise anchor by message id.
const base = refs.get(event.messageId)
const thread = yield* Effect.tryPromise(() =>
channel.threads.create({
name,
startMessage: base ?? event.messageId,
autoArchiveDuration: 60,
}),
).pipe(
// On creation failure, best-effort notify the author in-channel.
Effect.tapError(() =>
Effect.tryPromise(() =>
base
? base.reply(SETUP_FAILURE_REPLY).then(() => undefined)
: Promise.resolve(undefined)
).pipe(
Effect.catchAll(() => Effect.void),
)),
Effect.mapError((cause) =>
ThreadEnsureError.make({
channelId: event.channelId,
message: messageOf(cause),
retriable: deliveryRetriable(cause),
})),
)
const threadId = ThreadId.make(thread.id)
chats.set(threadId, thread)
// Remember the root-message -> thread mapping so retries reuse it.
stash(roots, root_ids, event.messageId, threadId)
return ThreadRef.make({ threadId, channelId: event.channelId })
})
},
})
return Context.empty().pipe(
Context.add(Inbox, inbox),
Context.add(Outbox, outbox),
Context.add(History, history),
Context.add(Threads, threads),
)
}),
)
}
// Convenience alias: the live Layer providing all Discord conversation ports.
export const DiscordConversationServicesLive = DiscordConversationServices.portLayer

View File

@@ -0,0 +1,70 @@
import { describe, expect } from "bun:test"
import { Effect, Option } from "effect"
import type { OffsetStore } from "../conversation/offsets"
import { effectTest } from "../test/effect"
import { catchupFromOffset } from "./catchup"
// In-memory OffsetStore double for these tests: offsets live in a plain Map
// so assertions can inspect exactly what was "persisted".
const makeOffsets = () => {
  const store = new Map<string, string>()
  const service: OffsetStore.Service = {
    getOffset: (source_id) => Effect.succeed(Option.fromNullable(store.get(source_id))),
    setOffset: (source_id, messageId) =>
      Effect.sync(() => {
        store.set(source_id, messageId)
      }),
  }
  return { service, map: store }
}
describe("catchupFromOffset", () => {
// First run for a source: no stored offset. The helper should only seed the
// offset at the newest message and must not replay/ingest anything.
effectTest("no offset seeds latest and does not ingest", () => {
const offsets = makeOffsets()
const ingested: Array<string> = []
return catchupFromOffset({
source: "thread:t1",
pageSize: 2,
offsets: offsets.service,
fetchLatest: Effect.succeed(Option.some({ id: "m9" })),
fetchAfter: () => Effect.succeed([]),
idOf: (message) => message.id,
ingest: (message) =>
Effect.sync(() => {
ingested.push(message.id)
}),
}).pipe(
Effect.tap((count) => Effect.sync(() => {
expect(count).toBe(0)
expect(ingested).toEqual([])
expect(offsets.map.get("thread:t1")).toBe("m9")
})),
)
})
// Resume run: offset "m1" exists. A full page (m2, m3) is followed by a
// short page (m4), so pagination stops after the second fetch and all three
// messages are ingested oldest-first.
effectTest("existing offset replays all pages in order", () => {
const offsets = makeOffsets()
offsets.map.set("thread:t1", "m1")
const ingested: Array<string> = []
const pages = new Map<string, ReadonlyArray<{ id: string }>>([
["m1", [{ id: "m2" }, { id: "m3" }]],
["m3", [{ id: "m4" }]],
])
return catchupFromOffset({
source: "thread:t1",
pageSize: 2,
offsets: offsets.service,
fetchLatest: Effect.succeed(Option.none()),
fetchAfter: (after) => Effect.succeed(pages.get(after) ?? []),
idOf: (message) => message.id,
ingest: (message) =>
Effect.sync(() => {
ingested.push(message.id)
}),
}).pipe(
Effect.tap((count) => Effect.sync(() => {
expect(count).toBe(3)
expect(ingested).toEqual(["m2", "m3", "m4"])
})),
)
})
})

View File

@@ -0,0 +1,42 @@
import { Effect, Option } from "effect"
import { OffsetStore } from "../conversation/offsets"
export const catchupFromOffset = <M>(input: {
source: string
pageSize: number
offsets: OffsetStore.Service
fetchLatest: Effect.Effect<Option.Option<M>, unknown>
fetchAfter: (after: string) => Effect.Effect<ReadonlyArray<M>, unknown>
idOf: (message: M) => string
ingest: (message: M) => Effect.Effect<void, unknown>
}): Effect.Effect<number, unknown> => {
const pull = (after: string): Effect.Effect<number, unknown> =>
input.fetchAfter(after).pipe(
Effect.flatMap((rows) => {
if (rows.length === 0) return Effect.succeed(0)
const last = rows.at(-1)
if (!last) return Effect.succeed(0)
return Effect.forEach(rows, input.ingest, { discard: true }).pipe(
Effect.zipRight(
rows.length < input.pageSize
? Effect.succeed(rows.length)
: pull(input.idOf(last)).pipe(Effect.map((tail) => rows.length + tail)),
),
)
}),
)
return input.offsets.getOffset(input.source).pipe(
Effect.flatMap((offset) => {
if (Option.isNone(offset)) {
return input.fetchLatest.pipe(
Effect.flatMap((latest) => {
if (Option.isNone(latest)) return Effect.succeed(0)
return input.offsets.setOffset(input.source, input.idOf(latest.value)).pipe(Effect.as(0))
}),
)
}
return pull(offset.value)
}),
)
}

View File

@@ -0,0 +1,33 @@
import { Client, GatewayIntentBits, Partials } from "discord.js"
import { Context, Effect, Layer, Redacted } from "effect"
import { AppConfig } from "../config"
// Scoped Discord client service: logs in on acquisition and destroys the
// client when the layer's scope closes.
export class DiscordClient extends Context.Tag("@discord/DiscordClient")<DiscordClient, Client>() {
static readonly layer = Layer.scoped(
DiscordClient,
Effect.gen(function* () {
const config = yield* AppConfig
// Guild/message/content intents let the bot read channel messages; the
// Channel partial allows events for channels not yet in the cache.
const client = new Client({
intents: [
GatewayIntentBits.Guilds,
GatewayIntentBits.GuildMessages,
GatewayIntentBits.MessageContent,
],
partials: [Partials.Channel],
})
yield* Effect.tryPromise(() => client.login(Redacted.value(config.discordToken)))
yield* Effect.logInfo("Discord client logged in").pipe(
Effect.annotateLogs({ event: "discord.login", tag: client.user?.tag ?? "unknown" }),
)
// Tear down the gateway connection when the scope closes.
yield* Effect.addFinalizer(() =>
Effect.sync(() => {
client.destroy()
}),
)
return client
}),
)
}

View File

@@ -0,0 +1,2 @@
// Interval between typing-indicator pulses while a response is in flight.
// NOTE: keep the Duration string and the millisecond value in sync.
export const TYPING_INTERVAL = "8 seconds" as const
export const TYPING_INTERVAL_MS = 8_000

View File

@@ -0,0 +1,40 @@
import { ChannelType } from "discord.js"
import type { GuildMember, Message, TextChannel, ThreadChannel } from "discord.js"
import { AppConfig } from "../config"
export type ChatChannel = TextChannel | ThreadChannel
// A channel qualifies when it is explicitly allow-listed, or when its parent
// category matches the configured category id (empty config disables each check).
export const isChannelAllowed = (channelId: string, categoryId: string | null, config: AppConfig.Service): boolean => {
  const allowListed = config.allowedChannelIds.length > 0 && config.allowedChannelIds.includes(channelId)
  const inCategory = Boolean(config.discordCategoryId) && categoryId === config.discordCategoryId
  return allowListed || inCategory
}
// Role gate: with no required role configured everyone passes; otherwise the
// member must be present and carry the configured role.
export const hasRequiredRole = (member: GuildMember | null, config: AppConfig.Service): boolean => {
  const required = config.discordRequiredRoleId
  if (!required) return true
  return member !== null && member.roles.cache.has(required)
}
// Narrows an unknown fetched channel to a (public or private) thread, or null.
export const asThreadChannel = (value: unknown): ThreadChannel | null => {
  if (typeof value !== "object" || value === null) return null
  const kind = (value as { type?: unknown }).type
  const isThread = kind === ChannelType.PublicThread || kind === ChannelType.PrivateThread
  return isThread ? (value as ThreadChannel) : null
}
// Narrows an unknown fetched channel to a guild text channel, or null.
export const asTextChannel = (value: unknown): TextChannel | null => {
  if (typeof value !== "object" || value === null) return null
  const kind = (value as { type?: unknown }).type
  return kind === ChannelType.GuildText ? (value as TextChannel) : null
}
// True when the message addresses the bot: either via discord.js' resolved
// mention collections, or via raw mention markup in the content
// (<@id>, <@!id> nickname form, <@&roleId>). Empty ids disable their checks.
export const isMentioned = (message: Message, botUserId: string, botRoleId: string): boolean => {
  const hasUser = botUserId.length > 0
  const hasRole = botRoleId.length > 0
  if (hasUser && message.mentions.users.has(botUserId)) return true
  if (hasRole && message.mentions.roles.has(botRoleId)) return true
  const raw = message.content
  if (hasUser && (raw.includes(`<@${botUserId}>`) || raw.includes(`<@!${botUserId}>`))) return true
  return hasRole && raw.includes(`<@&${botRoleId}>`)
}

View File

@@ -0,0 +1,23 @@
// Canned user-facing replies for command handling and thread setup.
export const EMPTY_MENTION_REPLY = "Tag me with a question!"
export const SETUP_FAILURE_REPLY = "Something went wrong setting up the thread."
export const COMMAND_NOT_THREAD_REPLY = "Use this command inside a Discord thread."
export const COMMAND_FORBIDDEN_REPLY = "You don't have the required role for this command."
export const COMMAND_CHANNEL_REPLY = "This thread is not allowed for the bot."
export const COMMAND_ACK = "Running command in this thread..."
// Slash-command definitions registered with Discord at startup.
export const COMMANDS = [
{
name: "status",
description: "Show sandbox status for this thread",
},
{
name: "reset",
description: "Destroy the sandbox session for this thread",
},
] as const
// Maps a registered slash-command name to its in-thread "!" text equivalent;
// returns "" for unknown commands so callers can ignore them.
const COMMAND_TEXTS: Readonly<Record<string, string>> = {
  status: "!status",
  reset: "!reset",
}
export const commandText = (name: string): string => COMMAND_TEXTS[name] ?? ""

View File

@@ -0,0 +1,33 @@
import { Schedule } from "effect"
import { DeliveryError, messageOf } from "../conversation/model/errors"
// Extracts a numeric status from an unknown error shape, preferring `status`
// and falling back to `code`; null when neither is a number.
const statusOf = (cause: unknown): number | null => {
  if (typeof cause !== "object" || cause === null) return null
  const { status, code } = cause as { status?: unknown; code?: unknown }
  if (typeof status === "number") return status
  return typeof code === "number" ? code : null
}
// A delivery is worth retrying on rate limits (429) and server errors (>=500).
export const deliveryRetriable = (cause: unknown): boolean => {
  const status = statusOf(cause)
  return status === 429 || (status !== null && status >= 500)
}
// Error texts/codes meaning the bot simply lost access to a channel during
// catch-up — these are treated as benign so replay skips them instead of failing.
const BENIGN_MARKERS = [
  "missing access",
  "missing permissions",
  "unknown channel",
  "50001", // Discord API error code: Missing Access
  "50013", // Discord API error code: Missing Permissions
]
export const catchupBenign = (cause: unknown): boolean => {
  const text = messageOf(cause).toLowerCase()
  return BENIGN_MARKERS.some((marker) => text.includes(marker))
}
// Delivery retry policy: exponential backoff starting at 200ms, at most 3
// retries, stopping early once an error is marked non-retriable.
export const deliveryRetry = Schedule.exponential("200 millis").pipe(
Schedule.intersect(Schedule.recurs(3)),
Schedule.whileInput((error: DeliveryError) => error.retriable),
)

View File

@@ -0,0 +1,47 @@
import { Effect } from "effect"
import { type ChatChannel } from "./conversation-channels"
const HISTORY_FETCH_LIMIT = 40
const HISTORY_LINE_CHAR_LIMIT = 500
const HISTORY_TOTAL_CHAR_LIMIT = 6000
// Builds a replay prompt from recent thread history so a fresh agent session
// can continue an existing conversation. Fetches up to HISTORY_FETCH_LIMIT
// messages, renders them as "user:"/"assistant:" lines (truncated per line),
// drops the line duplicating `latest`, and keeps the newest lines that fit
// within HISTORY_TOTAL_CHAR_LIMIT.
export const buildHistoryReplayPrompt = Effect.fn("DiscordAdapter.buildHistoryReplayPrompt")(
function* (channel: ChatChannel, latest: string) {
const fetched = yield* Effect.tryPromise(() => channel.messages.fetch({ limit: HISTORY_FETCH_LIMIT }))
// Sort oldest-first by creation time for a readable transcript.
const ordered = [...fetched.values()].sort((a, b) => a.createdTimestamp - b.createdTimestamp)
const lines = ordered
.filter((prior) => !prior.system)
.flatMap((prior) => {
// Collapse whitespace; fall back to an attachment summary for
// messages with files but no text; skip fully empty messages.
const text = prior.content.replace(/\s+/g, " ").trim()
const files = prior.attachments.size > 0
? `[attachments: ${[...prior.attachments.values()].map((att) => att.name ?? "file").join(", ")}]`
: ""
const line = text || files
if (!line) return []
const value = line.length > HISTORY_LINE_CHAR_LIMIT ? `${line.slice(0, HISTORY_LINE_CHAR_LIMIT)}...` : line
return [`${prior.author.bot ? "assistant" : "user"}: ${value}`]
})
// Avoid echoing the message we are about to answer.
const prior = lines.at(-1) === `user: ${latest}` ? lines.slice(0, -1) : lines
if (prior.length === 0) return latest
// Walk newest-to-oldest, keeping lines until the character budget is
// exhausted (the newest line is always kept even if it alone exceeds it).
const selected = prior.reduceRight(
(state, candidate) => {
if (state.stop) return state
if (state.total + candidate.length > HISTORY_TOTAL_CHAR_LIMIT && state.list.length > 0) {
return { ...state, stop: true }
}
return { list: [candidate, ...state.list], total: state.total + candidate.length, stop: false }
},
{ list: [] as ReadonlyArray<string>, total: 0, stop: false },
).list
return [
"Conversation history from this same Discord thread (oldest to newest):",
selected.join("\n"),
"",
"Continue the same conversation and respond to the latest user message:",
latest,
].join("\n")
},
)

View File

@@ -0,0 +1,70 @@
const MAX_MESSAGE_LENGTH = 1900;
/**
 * Splits a long response into Discord-safe message chunks (<2000 chars).
 * Prefers splitting at code-block boundaries, then paragraph breaks, then
 * sentence ends, falling back to a hard cut. A chunk that ends inside a code
 * block gets the fence closed, and the fence is re-opened in the next chunk
 * so each message renders correctly on its own.
 */
export function splitForDiscord(text: string): string[] {
  if (!text || text.length === 0) return ["(No response)"];
  if (text.length <= MAX_MESSAGE_LENGTH) return [text];
  const chunks: string[] = [];
  let rest = text;
  while (rest.length > 0) {
    if (rest.length <= MAX_MESSAGE_LENGTH) {
      chunks.push(rest);
      break;
    }
    // Best boundary: end of a fenced code block within the limit.
    let cut = rest.lastIndexOf("\n```\n", MAX_MESSAGE_LENGTH);
    if (cut !== -1) cut += 4; // keep the closing ``` with this chunk
    // Next best: a paragraph break in the latter half.
    if (cut === -1 || cut < MAX_MESSAGE_LENGTH / 2) {
      const para = rest.lastIndexOf("\n\n", MAX_MESSAGE_LENGTH);
      if (para > MAX_MESSAGE_LENGTH / 2) cut = para;
    }
    // Then: a sentence end in the latter half.
    if (cut === -1 || cut < MAX_MESSAGE_LENGTH / 2) {
      const sentence = rest.lastIndexOf(". ", MAX_MESSAGE_LENGTH);
      if (sentence > MAX_MESSAGE_LENGTH / 2) cut = sentence + 1;
    }
    // No acceptable boundary: hard cut at the limit.
    if (cut === -1 || cut < MAX_MESSAGE_LENGTH / 4) {
      cut = MAX_MESSAGE_LENGTH;
    }
    const piece = rest.slice(0, cut);
    rest = rest.slice(cut).trimStart();
    const fences = (piece.match(/```/g) || []).length;
    if (fences % 2 !== 0) {
      // Odd fence count: the chunk ends inside a code block.
      chunks.push(piece + "\n```");
      rest = "```\n" + rest;
    } else {
      chunks.push(piece);
    }
  }
  return chunks.filter((chunk) => chunk.trim().length > 0);
}
/**
 * Normalizes response text for Discord: strips surrounding whitespace and
 * collapses runs of four or more newlines down to at most three.
 */
export function cleanResponse(text: string): string {
  const collapsed = text.replace(/\n{4,}/g, "\n\n\n");
  return collapsed.trim();
}

View File

@@ -0,0 +1,134 @@
import { LanguageModel } from "@effect/ai"
import { AnthropicLanguageModel } from "@effect/ai-anthropic"
import { Context, Effect, Layer, Schema } from "effect"
import { AppConfig } from "../config"
// Routing verdict: whether the bot should answer, plus a diagnostic reason.
export class TurnRoutingDecision extends Schema.Class<TurnRoutingDecision>("TurnRoutingDecision")({
shouldRespond: Schema.Boolean,
reason: Schema.String,
}) {}
// Inputs the router inspects: the message text, the bot's own user/role ids,
// and which other users/roles the message mentions.
export class TurnRoutingInput extends Schema.Class<TurnRoutingInput>("TurnRoutingInput")({
content: Schema.String,
botUserId: Schema.String,
botRoleId: Schema.String,
mentionedUserIds: Schema.Array(Schema.String),
mentionedRoleIds: Schema.Array(Schema.String),
}) {}
// Matches short acknowledgement/banter messages ("ok", "thanks", "lol", ...)
// that never warrant a bot response.
const QUICK_CHAT_RE = /^(ok|okay|k|kk|thanks|thank you|thx|lol|lmao|haha|nice|cool|yup|yep|nah|nope|got it|sgtm)[!. ]*$/i
// Fast, model-free routing rules. Returns a decision when the message is
// clear-cut, or null when the AI router should make the call.
const heuristicDecision = (input: TurnRoutingInput): TurnRoutingDecision | null => {
  const text = input.content.trim()
  const lower = text.toLowerCase()
  const decide = (shouldRespond: boolean, reason: string) =>
    TurnRoutingDecision.make({ shouldRespond, reason })
  if (!text) return decide(false, "empty-message")
  // Messages directed at someone else are never for the bot.
  if (input.mentionedUserIds.some((id) => id !== input.botUserId)) return decide(false, "mentions-other-user")
  if (input.mentionedRoleIds.some((id) => id !== input.botRoleId)) return decide(false, "mentions-other-role")
  if (text.length <= 40 && QUICK_CHAT_RE.test(text)) return decide(false, "quick-chat")
  if (/\b(opencode|bot)\b/i.test(text)) return decide(true, "bot-keyword")
  if (text.includes("?") && /\b(you|your|can you|could you|would you|please|help)\b/i.test(text)) {
    return decide(true, "direct-question")
  }
  if (text.includes("?") && /\b(how|what|why|where|when|which)\b/i.test(text)) {
    return decide(true, "general-question")
  }
  if (lower.startsWith("do this") || lower.startsWith("run ") || lower.startsWith("fix ")) {
    return decide(true, "instruction")
  }
  // Uncertain: defer to the caller (AI mode or default-respond).
  return null
}
// Derives a thread title directly from the message when AI naming fails:
// the first 95 characters, with an ellipsis when the message was truncated.
const fallbackThreadName = (message: string): string =>
  message.length > 95 ? `${message.slice(0, 95)}...` : message
// TurnRouter contract: decide whether the bot should respond to a message,
// and generate a short thread title for new conversations.
export declare namespace TurnRouter {
export interface Service {
readonly shouldRespond: (input: TurnRoutingInput) => Effect.Effect<TurnRoutingDecision>
readonly generateThreadName: (userMessage: string) => Effect.Effect<string>
}
}
// Live TurnRouter: combines fast heuristics with an optional AI fallback.
// config.turnRoutingMode selects the strategy: "off" always responds,
// "heuristic" responds whenever the heuristics are uncertain, and otherwise
// an Anthropic model makes the final RESPOND/SKIP call.
export class TurnRouter extends Context.Tag("@discord/TurnRouter")<TurnRouter, TurnRouter.Service>() {
static readonly layer = Layer.effect(
TurnRouter,
Effect.gen(function* () {
const config = yield* AppConfig
const model = yield* LanguageModel.LanguageModel
// Asks the configured model for a single RESPOND/SKIP token. Any model
// error or ambiguous output defaults to responding.
const aiDecision = (input: TurnRoutingInput): Effect.Effect<TurnRoutingDecision> => {
const prompt = [
"You route turns for an engineering Discord bot.",
"Decide if the latest message is directed at the bot assistant or is side conversation.",
"Return EXACTLY one token: RESPOND or SKIP.",
"",
`Message: ${input.content}`,
`MentionsOtherUser: ${input.mentionedUserIds.some((id) => id !== input.botUserId)}`,
`MentionsOtherRole: ${input.mentionedRoleIds.some((id) => id !== input.botRoleId)}`,
].join("\n")
return AnthropicLanguageModel.withConfigOverride(
model.generateText({ prompt }).pipe(
Effect.map((response) => {
const output = response.text.trim().toUpperCase()
// SKIP wins if the output somehow contains both tokens.
if (output.includes("SKIP")) return TurnRoutingDecision.make({ shouldRespond: false, reason: "ai-skip" })
return TurnRoutingDecision.make({
shouldRespond: true,
reason: output.includes("RESPOND") ? "ai-respond" : "ai-default-respond",
})
}),
Effect.catchAll(() =>
Effect.succeed(TurnRoutingDecision.make({ shouldRespond: true, reason: "ai-error-default-respond" })),
),
),
{ model: config.turnRoutingModel, max_tokens: 10 },
)
}
const shouldRespond = Effect.fn("TurnRouter.shouldRespond")(function* (input: TurnRoutingInput) {
if (config.turnRoutingMode === "off") {
return TurnRoutingDecision.make({ shouldRespond: true, reason: "routing-off" })
}
// Clear-cut cases are settled without a model call.
const heuristic = heuristicDecision(input)
if (heuristic) return heuristic
if (config.turnRoutingMode === "heuristic") {
return TurnRoutingDecision.make({
shouldRespond: true,
reason: "heuristic-uncertain-default-respond",
})
}
return yield* aiDecision(input)
})
// Generates a short thread title via a small model; falls back to a
// truncated copy of the message on empty output or model failure.
const generateThreadName = Effect.fn("TurnRouter.generateThreadName")(function* (userMessage: string) {
return yield* AnthropicLanguageModel.withConfigOverride(
model.generateText({
prompt: `Generate a short, descriptive thread title (max 90 chars) for this Discord question. Return ONLY the title, no quotes, no explanation.\n\nQuestion: ${userMessage}`,
}).pipe(
Effect.map((response) => {
const title = response.text.trim()
if (!title || title.length === 0) return fallbackThreadName(userMessage)
return title.slice(0, 95) + (title.length > 95 ? "..." : "")
}),
Effect.catchAll(() => Effect.succeed(fallbackThreadName(userMessage))),
),
{ model: "claude-haiku-4-5", max_tokens: 60 },
)
})
return TurnRouter.of({ shouldRespond, generateThreadName })
}),
)
}

View File

@@ -0,0 +1,117 @@
import { Schema } from "effect"
import { SandboxId, SessionId, ThreadId } from "./types"
// -- Sandbox errors (Daytona SDK) --
// Sandbox creation failed; sandboxId is present only when an id was
// allocated before the failure.
export class SandboxCreateError extends Schema.TaggedError<SandboxCreateError>()(
"SandboxCreateError",
{
sandboxId: Schema.optional(SandboxId),
cause: Schema.Defect,
},
) {}
// The referenced sandbox does not exist.
export class SandboxNotFoundError extends Schema.TaggedError<SandboxNotFoundError>()(
"SandboxNotFoundError",
{
sandboxId: SandboxId,
},
) {}
// A command run inside the sandbox exited non-zero; output carries the
// captured command output for diagnostics.
export class SandboxExecError extends Schema.TaggedError<SandboxExecError>()(
"SandboxExecError",
{
sandboxId: SandboxId,
label: Schema.String,
exitCode: Schema.Number,
output: Schema.String,
},
) {}
// Starting (resuming) a stopped sandbox failed.
export class SandboxStartError extends Schema.TaggedError<SandboxStartError>()(
"SandboxStartError",
{
sandboxId: SandboxId,
cause: Schema.Defect,
},
) {}
// -- OpenCode client errors --
// The OpenCode server never became healthy; lastStatus records the final
// health-check status observed.
export class HealthCheckError extends Schema.TaggedError<HealthCheckError>()(
"HealthCheckError",
{
lastStatus: Schema.String,
},
) {}
// Failure taxonomy for OpenCode HTTP calls; drives recovery behavior
// (see classifyOpenCodeFailure below).
export const OpenCodeFailureKind = Schema.Literal("session-missing", "sandbox-down", "non-recoverable")
export type OpenCodeFailureKind = typeof OpenCodeFailureKind.Type
// Classifies an OpenCode HTTP failure so callers can pick a recovery path:
// 404 -> the session is gone; connection failures (status 0), 5xx, or bodies
// mentioning a missing/stopped sandbox -> the sandbox is down; anything
// else -> non-recoverable.
export const classifyOpenCodeFailure = (statusCode: number, body: string): OpenCodeFailureKind => {
  if (statusCode === 404) return "session-missing"
  if (statusCode === 0 || statusCode >= 500) return "sandbox-down"
  const lowered = body.toLowerCase()
  const sandboxGone = lowered.includes("sandbox not found") || lowered.includes("is the sandbox started")
  return sandboxGone ? "sandbox-down" : "non-recoverable"
}
// An OpenCode HTTP call failed; `kind` is derived via classifyOpenCodeFailure
// and tells callers whether the failure is recoverable.
export class OpenCodeClientError extends Schema.TaggedError<OpenCodeClientError>()(
"OpenCodeClientError",
{
operation: Schema.String,
statusCode: Schema.Number,
body: Schema.String,
kind: OpenCodeFailureKind,
},
) {}
// True when the failure indicates the backing session/sandbox is unavailable
// (i.e. recoverable by reprovisioning) rather than a hard, non-recoverable error.
export const isOpenCodeSandboxUnavailable = (error: OpenCodeClientError) =>
  error.kind === "session-missing" || error.kind === "sandbox-down"
// The referenced OpenCode session no longer exists.
export class SessionMissingError extends Schema.TaggedError<SessionMissingError>()(
"SessionMissingError",
{
sessionId: SessionId,
},
) {}
// -- Session lifecycle errors --
// The thread's sandbox is unusable; `reason` is a human-readable summary.
export class SandboxDeadError extends Schema.TaggedError<SandboxDeadError>()(
"SandboxDeadError",
{
threadId: ThreadId,
reason: Schema.String,
},
) {}
// Resuming an existing sandbox for a thread failed.
export class ResumeFailedError extends Schema.TaggedError<ResumeFailedError>()(
"ResumeFailedError",
{
threadId: ThreadId,
sandboxId: SandboxId,
cause: Schema.Defect,
},
) {}
// -- Config errors --
// Encoding a config document failed; `config` names which one.
export class ConfigEncodeError extends Schema.TaggedError<ConfigEncodeError>()(
"ConfigEncodeError",
{
config: Schema.String,
cause: Schema.Defect,
},
) {}
// -- Database errors --
// Wrapper for unexpected database-layer failures.
export class DatabaseError extends Schema.TaggedError<DatabaseError>()(
"DatabaseError",
{
cause: Schema.Defect,
},
) {}

View File

@@ -0,0 +1,72 @@
import { HttpLayerRouter, HttpServerResponse } from "@effect/platform"
import { BunHttpServer } from "@effect/platform-bun"
import { Context, Effect, Layer } from "effect"
import { AppConfig } from "../config"
import { DiscordClient } from "../discord/client"
import { SessionStore } from "../session/store"
// Marker service: the layer's only job is the side effect of starting the
// HTTP health endpoints.
export declare namespace HealthServer {
export interface Service {
readonly started: true
}
}
// Serves /healthz (uptime, Discord readiness, active-session count) and
// /readyz (503 until the Discord client reports ready) on the configured
// host/port. The HTTP server runs as a scoped background fiber and shuts
// down when the layer's scope closes.
export class HealthServer extends Context.Tag("@discord/HealthServer")<HealthServer, HealthServer.Service>() {
static readonly layer = Layer.scoped(
HealthServer,
Effect.gen(function* () {
const config = yield* AppConfig
const client = yield* DiscordClient
const sessions = yield* SessionStore
const startedAt = Date.now()
const routes = HttpLayerRouter.use((router) =>
Effect.all([
router.add(
"GET",
"/healthz",
Effect.gen(function* () {
// Session-store failures degrade to a count of 0 rather than
// failing the liveness probe.
const activeSessions = yield* sessions.listActive().pipe(
Effect.catchAll(() => Effect.succeed([])),
Effect.map((rows) => rows.length),
)
return HttpServerResponse.unsafeJson({
ok: true,
uptimeSec: Math.floor((Date.now() - startedAt) / 1000),
discordReady: client.isReady(),
activeSessions,
})
}),
),
router.add(
"GET",
"/readyz",
Effect.sync(() => {
const ready = client.isReady()
return HttpServerResponse.unsafeJson({ ok: ready, discordReady: ready }, { status: ready ? 200 : 503 })
}),
),
]),
)
const server = HttpLayerRouter.serve(routes, { disableLogger: true, disableListenLog: true }).pipe(
Layer.provide(
BunHttpServer.layer({
hostname: config.healthHost,
port: config.healthPort,
}),
),
)
// Launch the server in the background; torn down with the scope.
yield* Layer.launch(server).pipe(Effect.forkScoped)
yield* Effect.logInfo("Health server started").pipe(
Effect.annotateLogs({ event: "health.server.started", host: config.healthHost, port: config.healthPort }),
)
return {
started: true as const,
}
}),
)
}

View File

@@ -0,0 +1,28 @@
import { BunRuntime } from "@effect/platform-bun"
import { Effect, Layer } from "effect"
import { AppConversationLayer } from "./app/layers"
import { Conversation } from "./conversation/conversation"
import { DiscordClient } from "./discord/client"
import { HealthServer } from "./http/health"
// Composition root: health server layered on top of the conversation stack.
const AppLayer = Layer.provideMerge(HealthServer.layer, AppConversationLayer)
const main = Effect.gen(function* () {
const client = yield* DiscordClient
const conversation = yield* Conversation
// Requesting HealthServer forces its layer (and the HTTP listener) to start.
yield* HealthServer
// Run the conversation loop in the background for the life of the scope.
yield* Effect.forkScoped(conversation.run)
yield* Effect.logInfo("Discord bot ready").pipe(
Effect.annotateLogs({ event: "discord.ready", tag: client.user?.tag }),
)
yield* Effect.logInfo("Discord bot started")
// Keep the process alive; finalizers run on interruption/shutdown.
return yield* Effect.never
})
main.pipe(
Effect.provide(AppLayer),
Effect.scoped,
BunRuntime.runMain,
)

View File

@@ -0,0 +1,41 @@
# ActorMap
Per-key serialized execution primitive. Think of it as a `Map<K, SerialQueue>` with optional idle timeouts and persistent state.
## Core Semantics
- `run(key, effect)` enqueues work onto the key's serial queue. Creates the actor (fiber + queue) on first access.
- Effects for the **same key** execute sequentially (FIFO). Effects for **different keys** run concurrently.
- `run` returns a `Deferred` result — the caller suspends until the work completes on the actor's fiber.
- `touch: false` option skips resetting the idle timer (used for bookkeeping reads that shouldn't extend session lifetime)
## State Management
`ActorMap<K, S>` supports optional per-key state (`Ref<Option<S>>`):
- `load(key)` hook runs on actor creation to hydrate from persistence (e.g. `SessionStore`)
- `save(key, state)` hook runs after `run` completes if state changed (reference equality check: `stateBefore !== stateAfter`)
- `run` can accept a function `(state: Ref<Option<S>>) => Effect<A, E>` instead of a bare Effect — this gives the callback access to the actor's state ref
## Idle Timeout Mechanics
When `idleTimeout` + `onIdle` are configured:
- Each `run` (with `touch: true`, the default) replaces the key's timer fiber in a `FiberMap`
- Timer fires `onIdle(key)` after the idle duration — typically pauses the sandbox and calls `actors.remove(key)`
- `cancelIdle(key)` cancels the timer without removing the actor
## Internal Structure
- `FiberMap<K>` for worker fibers (one per actor)
- `FiberMap<K>` for idle timer fibers (one per actor)
- `SynchronizedRef<Map<K, Entry<S>>>` for the actor registry
- `Queue.unbounded<Job>` per actor for the serial work queue
- Jobs use `Effect.uninterruptibleMask` + `Deferred` for safe completion signaling
## Gotchas
- `remove(key)` cancels all pending work (interrupts via `Deferred.interrupt`) and shuts down the queue. The key can be re-created by a subsequent `run`.
- State save is best-effort: `options.save` errors are silently caught (`Effect.catchAll(() => Effect.void)`)
- `load` errors are also silently caught — returns `Option.none()` on failure
- The `run` overload detection uses `Effect.isEffect(effectOrFn)` to distinguish bare effects from state-accessing functions

View File

@@ -0,0 +1,338 @@
import { describe, expect } from "bun:test"
import { Effect, Either, Exit, Option, Ref, Schema } from "effect"
import { effectTest } from "../../test/effect"
import { ActorMap } from "./keyed"
// Tagged error fixture for this ActorMap test suite — presumably used by the
// load/save hydration tests further down (outside this view); confirm usage.
class LoadTestError extends Schema.TaggedError<LoadTestError>()("LoadTestError", {
message: Schema.String,
}) {}
// Behavioral tests for ActorMap: per-key serialization, cross-key concurrency,
// idle-timer lifecycle, and teardown semantics. Several tests rely on short
// wall-clock sleeps (tens of millis) to observe ordering, so they are
// timing-sensitive by design — margins are kept generous.
describe("ActorMap", () => {
  // Jobs submitted to the same key must run strictly in submission order,
  // even when both are started concurrently from the caller's side.
  effectTest("serializes work for the same key", () =>
    Effect.gen(function* () {
      const log: Array<string> = []
      const keyed = yield* ActorMap.make<string>()
      const one = keyed.run(
        "t1",
        Effect.gen(function* () {
          log.push("one:start")
          yield* Effect.sleep("40 millis")
          log.push("one:end")
          return "one"
        }),
      )
      const two = keyed.run(
        "t1",
        Effect.gen(function* () {
          log.push("two:start")
          log.push("two:end")
          return "two"
        }),
      )
      // Submit both concurrently; the map must still serialize them.
      const out = yield* Effect.all([one, two], { concurrency: "unbounded" })
      expect(out).toEqual(["one", "two"])
      expect(log).toEqual(["one:start", "one:end", "two:start", "two:end"])
    }),
  )
  // Distinct keys get independent queues: the fast "b" job finishes while
  // the slow "a" job is still sleeping.
  effectTest("allows different keys to run concurrently", () =>
    Effect.gen(function* () {
      const log: Array<string> = []
      const keyed = yield* ActorMap.make<string>()
      const slow = keyed.run(
        "a",
        Effect.gen(function* () {
          log.push("a:start")
          yield* Effect.sleep("50 millis")
          log.push("a:end")
        }),
      )
      const fast = keyed.run(
        "b",
        Effect.gen(function* () {
          log.push("b:start")
          log.push("b:end")
        }),
      )
      yield* Effect.all([slow, fast], { concurrency: "unbounded" })
      expect(log.indexOf("b:end")).toBeLessThan(log.indexOf("a:end"))
    }),
  )
  effectTest("triggers idle callback once after inactivity", () =>
    Effect.gen(function* () {
      const n = yield* Ref.make(0)
      const keyed = yield* ActorMap.make<string>({
        idleTimeout: "30 millis",
        onIdle: () => Ref.update(n, (x) => x + 1),
      })
      yield* keyed.run("t1", Effect.void)
      // Wait well past the timeout; onIdle must fire exactly once.
      yield* Effect.sleep("80 millis")
      expect(yield* Ref.get(n)).toBe(1)
    }),
  )
  // touch() before expiry restarts the timer; onIdle only fires after a full
  // idle window passes with no further touches.
  effectTest("touch extends idle deadline", () =>
    Effect.gen(function* () {
      const n = yield* Ref.make(0)
      const keyed = yield* ActorMap.make<string>({
        idleTimeout: "40 millis",
        onIdle: () => Ref.update(n, (x) => x + 1),
      })
      yield* keyed.run("t1", Effect.void)
      yield* Effect.sleep("25 millis")
      yield* keyed.touch("t1")
      yield* Effect.sleep("25 millis")
      // 50ms total elapsed, but only 25ms since the touch — no fire yet.
      expect(yield* Ref.get(n)).toBe(0)
      yield* Effect.sleep("40 millis")
      expect(yield* Ref.get(n)).toBe(1)
    }),
  )
  // run({ touch: false }) must not arm the idle timer; a later explicit
  // touch() arms it as usual.
  effectTest("run can skip idle touch", () =>
    Effect.gen(function* () {
      const n = yield* Ref.make(0)
      const keyed = yield* ActorMap.make<string>({
        idleTimeout: "25 millis",
        onIdle: () => Ref.update(n, (x) => x + 1),
      })
      yield* keyed.run("t1", Effect.void, { touch: false })
      yield* Effect.sleep("40 millis")
      expect(yield* Ref.get(n)).toBe(0)
      yield* keyed.touch("t1")
      yield* Effect.sleep("40 millis")
      expect(yield* Ref.get(n)).toBe(1)
    }),
  )
  effectTest("remove clears entry and allows recreation", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string>()
      yield* keyed.run("t1", Effect.void)
      expect(yield* keyed.size).toBe(1)
      yield* keyed.remove("t1")
      expect(yield* keyed.size).toBe(0)
      // A removed key can be transparently re-created by the next run.
      yield* keyed.run("t1", Effect.succeed("ok"))
      expect(yield* keyed.size).toBe(1)
    }),
  )
  // A failed effect must surface its error to the caller without leaving the
  // key's worker in a broken state.
  effectTest("failure does not poison the key queue", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string>()
      const first = yield* keyed.run("t1", Effect.fail("boom")).pipe(Effect.either)
      expect(Either.isLeft(first)).toBe(true)
      if (Either.isLeft(first)) {
        expect(first.left).toBe("boom")
      }
      const second = yield* keyed.run("t1", Effect.succeed("ok"))
      expect(second).toBe("ok")
    }),
  )
  effectTest("cancelIdle cancels the pending idle timer", () =>
    Effect.gen(function* () {
      const n = yield* Ref.make(0)
      const keyed = yield* ActorMap.make<string>({
        idleTimeout: "25 millis",
        onIdle: () => Ref.update(n, (x) => x + 1),
      })
      yield* keyed.run("t1", Effect.void)
      yield* keyed.cancelIdle("t1")
      yield* Effect.sleep("60 millis")
      expect(yield* Ref.get(n)).toBe(0)
    }),
  )
  effectTest("stop removes all keys and cancels all idle timers", () =>
    Effect.gen(function* () {
      const n = yield* Ref.make(0)
      const keyed = yield* ActorMap.make<string>({
        idleTimeout: "30 millis",
        onIdle: () => Ref.update(n, (x) => x + 1),
      })
      yield* Effect.all(
        [
          keyed.run("t1", Effect.void),
          keyed.run("t2", Effect.void),
        ],
        { concurrency: "unbounded" },
      )
      expect(yield* keyed.size).toBe(2)
      yield* keyed.stop
      expect(yield* keyed.size).toBe(0)
      // No pending timer may fire after stop.
      yield* Effect.sleep("70 millis")
      expect(yield* Ref.get(n)).toBe(0)
      // The map remains usable after stop.
      yield* keyed.run("t1", Effect.void)
      expect(yield* keyed.size).toBe(1)
    }),
  )
  effectTest("touch and cancelIdle on unknown key are no-ops", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string>({
        idleTimeout: "20 millis",
        onIdle: () => Effect.void,
      })
      yield* keyed.touch("missing")
      yield* keyed.cancelIdle("missing")
      expect(yield* keyed.size).toBe(0)
    }),
  )
  effectTest("remove on unknown key is a no-op", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string>()
      yield* keyed.remove("missing")
      expect(yield* keyed.size).toBe(0)
    }),
  )
  // Removing a key must interrupt the job currently executing on its worker,
  // completing the caller's fiber with an interrupted exit.
  effectTest("remove interrupts in-flight run calls", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string>()
      const fiber = yield* keyed
        .run(
          "t1",
          Effect.gen(function* () {
            yield* Effect.sleep("5 seconds")
            return "should not reach"
          }),
        )
        .pipe(Effect.fork)
      // Give the job time to start executing on the worker fiber
      yield* Effect.sleep("20 millis")
      yield* keyed.remove("t1")
      expect(yield* keyed.size).toBe(0)
      const exit = yield* fiber.await
      expect(Exit.isInterrupted(exit)).toBe(true)
    }),
  )
})
// Tests for the optional per-key state feature: `load` hydration on first
// activation, `save` on change, and the state-ref overload of `run`.
describe("ActorMap (stateful)", () => {
  effectTest("load hydrates state on first activation", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string, number>({
        load: (key) => Effect.succeed(key === "a" ? Option.some(42) : Option.none()),
      })
      const result = yield* keyed.run("a", (state) =>
        Ref.get(state).pipe(Effect.map((s) => Option.isSome(s) ? s.value : -1)),
      )
      expect(result).toBe(42)
      // Keys with no persisted state start as None.
      const result2 = yield* keyed.run("b", (state) =>
        Ref.get(state).pipe(Effect.map((s) => Option.isSome(s) ? s.value : -1)),
      )
      expect(result2).toBe(-1)
    }),
  )
  effectTest("save is called when state changes during run", () =>
    Effect.gen(function* () {
      const saved: Array<[string, number]> = []
      const keyed = yield* ActorMap.make<string, number>({
        save: (key, value) =>
          Effect.sync(() => {
            saved.push([key, value])
          }),
      })
      yield* keyed.run("a", (state) => Ref.set(state, Option.some(10)))
      expect(saved).toEqual([["a", 10]])
    }),
  )
  // Change detection is by reference on the Option held in the Ref, so a run
  // that never writes the Ref must not trigger save.
  effectTest("save is not called when state is unchanged", () =>
    Effect.gen(function* () {
      const saved: Array<[string, number]> = []
      const keyed = yield* ActorMap.make<string, number>({
        load: () => Effect.succeed(Option.some(5)),
        save: (key, value) =>
          Effect.sync(() => {
            saved.push([key, value])
          }),
      })
      // Run without touching state
      yield* keyed.run("a", (_state) => Effect.succeed("noop"))
      expect(saved).toEqual([])
    }),
  )
  effectTest("getState returns current state for existing key", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string, number>({
        load: () => Effect.succeed(Option.some(99)),
      })
      yield* keyed.run("a", Effect.void)
      const result = yield* keyed.getState("a")
      expect(result).toEqual(Option.some(99))
    }),
  )
  effectTest("getState returns None for unknown key", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string, number>()
      const result = yield* keyed.getState("missing")
      expect(result).toEqual(Option.none())
    }),
  )
  // State written during one run must be observable by getState and by a
  // subsequent run on the same key.
  effectTest("stateful run with function receives state ref", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string, string>()
      yield* keyed.run("a", (state) => Ref.set(state, Option.some("hello")))
      const result = yield* keyed.getState("a")
      expect(result).toEqual(Option.some("hello"))
      const read = yield* keyed.run("a", (state) =>
        Ref.get(state).pipe(Effect.map((s) => Option.isSome(s) ? s.value : "")),
      )
      expect(read).toBe("hello")
    }),
  )
  // The bare-Effect overload remains usable when a state type is declared.
  effectTest("stateless run still works with stateful actor map", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string, number>()
      const result = yield* keyed.run("a", Effect.succeed(42))
      expect(result).toBe(42)
    }),
  )
  // A failing load hook must not fail the run — it degrades to None.
  effectTest("load error falls back to None", () =>
    Effect.gen(function* () {
      const keyed = yield* ActorMap.make<string, number>({
        load: () => Effect.fail(LoadTestError.make({ message: "db down" })),
      })
      const result = yield* keyed.run("a", (state) =>
        Ref.get(state).pipe(Effect.map(Option.isNone)),
      )
      expect(result).toBe(true)
    }),
  )
})

View File

@@ -0,0 +1,217 @@
import { Deferred, Effect, FiberMap, Option, Queue, Ref, SynchronizedRef, type Duration } from "effect"
import type * as Scope from "effect/Scope"
/**
 * A unit of work on a key's serial queue. `run` executes the submitted effect
 * and resolves its Deferred with the exit; `cancel` interrupts the Deferred so
 * callers blocked on a pending job are released when the actor is removed.
 */
type Job = {
  run: Effect.Effect<void, never>
  cancel: Effect.Effect<void, never>
}
/**
 * A keyed actor map — a concurrent map of serial work queues.
 *
 * Each key gets its own fiber-backed queue. Effects submitted to the same key
 * are executed sequentially (preserving order), while different keys run
 * concurrently. Optionally supports idle timeouts per key and per-key state
 * with persistence hooks.
 */
export declare namespace ActorMap {
  /**
   * Configuration for idle-timeout behavior and optional per-key state.
   *
   * When both `idleTimeout` and `onIdle` are provided, each key starts a timer
   * after activity. If no further activity (or explicit `touch`) occurs before
   * the timer expires, `onIdle` is called with that key.
   *
   * When `load` and/or `save` are provided, the actor map manages per-key
   * state of type `S`. `load` is called when an actor is first created to
   * hydrate state from storage. `save` is called after `run` completes when
   * the state has been modified.
   */
  export interface Options<K, S = void> {
    /** How long a key must be idle before `onIdle` fires. */
    idleTimeout?: Duration.DurationInput
    /** Callback invoked when a key's idle timer expires. Errors are swallowed. */
    onIdle?: (key: K) => Effect.Effect<void, unknown, never>
    /** Load persisted state when an actor is first activated.
     * Errors are swallowed and treated as "no saved state" (None). */
    load?: (key: K) => Effect.Effect<Option.Option<S>, unknown, never>
    /** Save state after it has been modified during `run`. Errors are swallowed. */
    save?: (key: K, state: S) => Effect.Effect<void, unknown, never>
  }
  export interface ActorMap<K, S = void> {
    /** Enqueue an effect onto a key's serial queue. Creates the actor if it
     * doesn't exist yet. By default resets the key's idle timer (`touch: true`).
     * The second overload passes the actor's state Ref to the callback. */
    run: {
      <A, E>(
        key: K,
        effect: Effect.Effect<A, E>,
        options?: { touch?: boolean },
      ): Effect.Effect<A, E>
      <A, E>(
        key: K,
        f: (state: Ref.Ref<Option.Option<S>>) => Effect.Effect<A, E>,
        options?: { touch?: boolean },
      ): Effect.Effect<A, E>
    }
    /** Reset the idle timer for a key without enqueuing work. No-op if the key
     * doesn't exist or no idle timeout is configured. */
    touch: (key: K) => Effect.Effect<void>
    /** Cancel the pending idle timer for a key without removing the actor. */
    cancelIdle: (key: K) => Effect.Effect<void>
    /** Tear down an actor: cancel its idle timer, interrupt its worker fiber,
     * and shut down its queue. In-flight `run` calls are interrupted.
     * The key can be re-created by a subsequent `run`. */
    remove: (key: K) => Effect.Effect<void>
    /** Remove all actors and cancel all idle timers. */
    stop: Effect.Effect<void>
    /** The number of currently active actor keys. */
    size: Effect.Effect<number>
    /** Read the current state for a key without running an effect.
     * Returns None if the actor doesn't exist or has no state. */
    getState: (key: K) => Effect.Effect<Option.Option<S>>
  }
}
/** Internal per-key record: the serial job queue plus the actor's state cell. */
interface Entry<S> {
  queue: Queue.Queue<Job>
  state: Ref.Ref<Option.Option<S>>
}
export const ActorMap = {
  /**
   * Create an ActorMap inside the current Scope. Worker fibers and idle-timer
   * fibers live in FiberMaps tied to that Scope, so closing the scope tears
   * everything down.
   */
  make: <K, S = void>(options?: ActorMap.Options<K, S>): Effect.Effect<ActorMap.ActorMap<K, S>, never, Scope.Scope> =>
    Effect.gen(function* () {
      // One long-lived worker fiber per key — drains that key's job queue.
      const workers = yield* FiberMap.make<K>()
      // One pending idle-timer fiber per key; re-arming a key replaces it.
      const timers = yield* FiberMap.make<K>()
      // Registry of live actors. SynchronizedRef serializes create/remove.
      const state = yield* SynchronizedRef.make(new Map<K, Entry<S>>())
      const has = (key: K) =>
        Effect.map(SynchronizedRef.get(state), (map) => map.has(key))
      // Get-or-create the entry for a key. Runs inside modifyEffect so two
      // concurrent callers cannot both spawn a worker for the same key.
      const ensure = (key: K): Effect.Effect<Entry<S>> =>
        SynchronizedRef.modifyEffect(state, (map) => {
          const current = map.get(key)
          if (current) {
            return Effect.succeed([current, map] as const)
          }
          return Effect.gen(function* () {
            const q = yield* Queue.unbounded<Job>()
            // Load initial state from persistence hook if provided.
            // Load failures degrade silently to "no saved state".
            const initial: Option.Option<S> = options?.load
              ? yield* options.load(key).pipe(Effect.catchAll(() => Effect.succeed(Option.none<S>())))
              : Option.none<S>()
            const stateRef = yield* Ref.make(initial)
            // Worker loop: take jobs one at a time, forever — this is what
            // serializes all work submitted under the same key.
            yield* FiberMap.run(
              workers,
              key,
              Effect.forever(
                q.take.pipe(Effect.flatMap((job) => job.run)),
              ),
            ).pipe(Effect.asVoid)
            const entry: Entry<S> = { queue: q, state: stateRef }
            const next = new Map(map)
            next.set(key, entry)
            return [entry, next] as const
          })
        })
      // Removing the timer fiber from the FiberMap interrupts it.
      const cancelIdle = (key: K) =>
        FiberMap.remove(timers, key)
      const remove = (key: K) =>
        Effect.gen(function* () {
          // Atomically detach the entry from the registry (if present).
          const entry = yield* SynchronizedRef.modify(state, (map) => {
            const current = map.get(key)
            if (!current) return [null as Entry<S> | null, map] as const
            const next = new Map(map)
            next.delete(key)
            return [current, next] as const
          })
          if (!entry) {
            // Unknown key: still clear any stray timer, then no-op.
            yield* cancelIdle(key)
            return
          }
          yield* cancelIdle(key)
          // Interrupt the worker fiber — this interrupts any in-flight job.
          yield* FiberMap.remove(workers, key)
          // Release callers blocked on jobs that never started.
          yield* entry.queue.takeAll.pipe(
            Effect.flatMap(
              Effect.forEach((job) => job.cancel, { discard: true }),
            ),
          )
          yield* entry.queue.shutdown
        })
      const touch = (key: K) =>
        Effect.gen(function* () {
          // Idle tracking is opt-in; both settings must be present.
          if (!options?.idleTimeout || !options.onIdle) return
          if (!(yield* has(key))) return
          // FiberMap.run replaces any prior timer for this key, which is
          // exactly the "reset the deadline" semantics we want.
          yield* FiberMap.run(
            timers,
            key,
            options.onIdle(key).pipe(
              Effect.catchAll(() => Effect.void),
              Effect.delay(options.idleTimeout),
            ),
          ).pipe(Effect.asVoid)
        })
      const run = <A, E>(
        key: K,
        effectOrFn: Effect.Effect<A, E> | ((state: Ref.Ref<Option.Option<S>>) => Effect.Effect<A, E>),
        runOptions?: { touch?: boolean },
      ): Effect.Effect<A, E> =>
        Effect.gen(function* () {
          const entry = yield* ensure(key)
          const done = yield* Deferred.make<A, E>()
          // Snapshot state before running so we can detect changes
          // (reference comparison on the Option held in the Ref).
          const stateBefore = yield* Ref.get(entry.state)
          // Overload detection: bare Effect vs state-accessing function.
          const effect: Effect.Effect<A, E> = Effect.isEffect(effectOrFn)
            ? effectOrFn
            : (effectOrFn as (state: Ref.Ref<Option.Option<S>>) => Effect.Effect<A, E>)(entry.state)
          // The job body is uninterruptible except for the user effect itself,
          // guaranteeing the Deferred is always completed with the exit.
          yield* entry.queue.offer({
            run: Effect.uninterruptibleMask((restore) =>
              restore(effect).pipe(
                Effect.exit,
                Effect.flatMap((exit) => Deferred.done(done, exit)),
                Effect.asVoid,
              )),
            cancel: Deferred.interrupt(done).pipe(Effect.asVoid),
          }).pipe(Effect.asVoid)
          if (runOptions?.touch ?? true) {
            yield* touch(key)
          }
          const result = yield* Deferred.await(done)
          // Persist state if it changed and a save hook is configured.
          // Save errors are deliberately swallowed (best-effort persistence).
          if (options?.save) {
            const stateAfter = yield* Ref.get(entry.state)
            if (stateBefore !== stateAfter && Option.isSome(stateAfter)) {
              yield* options.save(key, stateAfter.value).pipe(
                Effect.catchAll(() => Effect.void),
              )
            }
          }
          return result
        })
      // Tear down every live actor concurrently.
      const stop = Effect.gen(function* () {
        const keys = [...(yield* SynchronizedRef.get(state)).keys()]
        yield* Effect.forEach(keys, (key) => remove(key), { discard: true, concurrency: "unbounded" })
      })
      const size = Effect.map(SynchronizedRef.get(state), (map) => map.size)
      const getState = (key: K): Effect.Effect<Option.Option<S>> =>
        Effect.gen(function* () {
          const map = yield* SynchronizedRef.get(state)
          const entry = map.get(key)
          if (!entry) return Option.none<S>()
          return yield* Ref.get(entry.state)
        })
      return { run, touch, cancelIdle, remove, stop, size, getState } satisfies ActorMap.ActorMap<K, S>
    }),
}

View File

@@ -0,0 +1,23 @@
export declare namespace StatefulActor {
  export interface Stateful<S> {
    readonly get: () => S
    readonly set: (state: S) => void
    readonly update: (f: (state: S) => S) => S
  }
}

/**
 * A minimal synchronous mutable cell behind a get/set/update interface.
 * The value lives in a closure; `update` applies a function and returns
 * the new value.
 */
export const StatefulActor = {
  make: <S>(initial: S): StatefulActor.Stateful<S> => {
    // Current value, captured in the closure — never exposed directly.
    let current = initial
    const get = (): S => current
    const set = (next: S): void => {
      current = next
    }
    const update = (f: (state: S) => S): S => {
      current = f(current)
      return current
    }
    return { get, set, update }
  },
} as const

View File

@@ -0,0 +1,9 @@
import { Effect } from "effect"
/** Swallow errors with a warning log. Use for best-effort bookkeeping writes. */
export const logIgnore = <A>(effect: Effect.Effect<A, unknown>, context: string) => {
  // On failure, emit a warning annotated with the stringified error and
  // otherwise succeed — the caller never sees the failure.
  const warn = (err: unknown) =>
    Effect.logWarning(`${context} failed (ignored)`).pipe(
      Effect.annotateLogs({ error: String(err) }),
    )
  return effect.pipe(Effect.catchAll(warn))
}

4
packages/discord/src/md.d.ts vendored Normal file
View File

@@ -0,0 +1,4 @@
// Ambient module declaration so `.md` files can be imported as strings.
// NOTE(review): presumably the bundler's text loader supplies the file
// contents as the default export — confirm against the build config.
declare module "*.md" {
  const text: string
  export default text
}

View File

@@ -0,0 +1,6 @@
import { Layer, Logger, LogLevel } from "effect"
// Swap the default pretty logger for structured JSON output.
const jsonOutput = Logger.replace(Logger.defaultLogger, Logger.jsonLogger)

// Lower the minimum log level to Debug.
const verbosity = Logger.minimumLogLevel(LogLevel.Debug)

/** Application logging layer: JSON format plus Debug-level minimum. */
export const LoggerLive = Layer.merge(jsonOutput, verbosity)

View File

@@ -0,0 +1,77 @@
# Sandbox Module
Manages Daytona sandbox lifecycle and the OpenCode server running inside each sandbox.
## Three-Layer Architecture
1. **DaytonaService** (`daytona.ts`) — thin wrapper around `@daytonaio/sdk`. Creates/starts/stops/destroys sandboxes, executes commands, gets preview links. All methods return `Effect` with typed errors.
2. **SandboxProvisioner** (`provisioner.ts`) — orchestrates sandbox + OpenCode session lifecycle. Handles provision, resume, health checks, send-failure recovery.
3. **ThreadAgentPool** (`pool.ts`) — per-thread concurrency layer. Wraps provisioner with `ActorMap<ThreadId>` for serialized access per thread. Manages idle timeouts and cleanup loops.
## Sandbox Creation Flow
`provision()` uses `Effect.acquireUseRelease`:
- **acquire**: `daytonaService.create()` — creates sandbox with `Image.base("node:22-bookworm-slim")` + custom setup
- **use**: clones opencode repo, writes auth/config JSON via env vars, starts `opencode serve`, waits for health, creates session
- **release on failure**: destroys the sandbox (cleanup), marks session as errored
The `discordBotImage` in `daytona.ts` uses Daytona's `Image.base().runCommands().workdir()` builder — NOT a Dockerfile. It installs git, curl, gh CLI, opencode-ai, and bun globally.
## OpenCode Server Communication
`OpenCodeClient` (`opencode-client.ts`) uses `@effect/platform`'s `HttpClient`:
- Each request uses `scopedClient(preview)` which prepends the sandbox preview URL and adds `x-daytona-preview-token` header
- `HttpClient.filterStatusOk` auto-rejects non-2xx responses as `ResponseError`
- `mapErrors` helper converts `HttpClientError` + `ParseResult.ParseError` → `OpenCodeClientError`
- Health polling: `waitForHealthy` retries every 2s up to `maxWaitMs / 2000` attempts
## `PreviewAccess` — The Connectivity Token
`PreviewAccess` (defined in `types.ts`) carries `previewUrl` + `previewToken`. It's extracted from Daytona's `getPreviewLink(4096)` response (port 4096 is OpenCode's serve port). The token may also be embedded in the URL as `?tkn=` — `parsePreview` normalizes this.
`PreviewAccess.from(source)` factory works with any object having those two fields — used with `SandboxHandle`, `SessionInfo`.
## Resume Flow (Non-Obvious)
`provisioner.resume()` does NOT just restart. It:
1. Calls `daytonaService.start()` (re-starts the stopped Daytona sandbox)
2. Runs `restartOpenCodeServe` — a shell command that pkills old opencode processes and re-launches
3. Waits for health (120s default)
4. Calls `findOrCreateSessionId` — tries to find existing session by title (`Discord thread <threadId>`), creates new if not found
5. Returns `Resumed` or `ResumeFailed { allowRecreate }` — `allowRecreate: false` means "don't try recreating, something is fundamentally wrong"
## Send Failure Classification
`classifySendError` in provisioner maps HTTP status codes to recovery strategies:
- 404 → `session-missing` (session deleted, mark error)
- 0 or 5xx → `sandbox-down` (pause sandbox for later resume)
- body contains "sandbox not found" / "is the sandbox started" → `sandbox-down`
- anything else → `non-recoverable` (no automatic recovery)
## ThreadAgentPool — The ActorMap Bridge
`ThreadAgentPool` creates `ActorMap<ThreadId, SessionInfo>` with:
- `idleTimeout`: from config `sandboxTimeout` (default 30min)
- `onIdle`: pauses the sandbox and removes the actor
- `load`: reads from `SessionStore` on first access
- `save`: writes to `SessionStore` after state changes
`runtime(threadId, stateRef)` creates a `Runtime` object with `current/ensure/send/pause/destroy` methods. `runRuntime` submits work to the actor queue via `actors.run(threadId, (state) => ...)`.
## Background Cleanup Loop
Forked with `Effect.forkScoped` on `Schedule.spaced(config.cleanupInterval)`:
- Pauses stale-active sessions (no activity for `sandboxTimeout + graceMinutes`)
- Destroys expired-paused sessions (paused longer than `pausedTtlMinutes`)
## Files That Must Change Together
- Adding a new Daytona operation → `daytona.ts` + add error type in `errors.ts` if needed
- Changing sandbox setup (image, commands) → `daytona.ts` image builder + `provisioner.ts` exec commands
- Adding a new pool operation → `pool.ts` interface + wire into `conversation/services/conversation.ts`

View File

@@ -0,0 +1,182 @@
import { Daytona as DaytonaSDK, Image } from "@daytonaio/sdk"
import { Context, Effect, Layer, Redacted, Schema } from "effect"
import { AppConfig } from "../../config"
import { SandboxCreateError, SandboxExecError, SandboxNotFoundError, SandboxStartError } from "../../errors"
import { GuildId, SandboxId, ThreadId, PreviewAccess } from "../../types"
/** Identity + connectivity for a created/started sandbox. `previewToken`
 * is null when the SDK's preview-link response carries no token. */
export class SandboxHandle extends Schema.Class<SandboxHandle>("SandboxHandle")({
  id: SandboxId,
  previewUrl: Schema.String,
  previewToken: Schema.Union(Schema.Null, Schema.String),
}) {}
/** Result of a command execution that exited with code 0 (output trimmed). */
export class ExecResult extends Schema.Class<ExecResult>("ExecResult")({
  exitCode: Schema.Number,
  output: Schema.String,
}) {}
export declare namespace DaytonaService {
  /** Thin, effect-typed wrapper over the Daytona SDK's sandbox lifecycle. */
  export interface Service {
    /** Create a new sandbox labeled for this thread/guild. */
    readonly create: (opts: {
      threadId: ThreadId
      guildId: GuildId
      timeout: number
    }) => Effect.Effect<SandboxHandle, SandboxCreateError>
    /** Run a shell command inside the sandbox; non-zero exit is an error. */
    readonly exec: (
      sandboxId: SandboxId,
      label: string,
      command: string,
      opts?: { cwd?: string; env?: Record<string, string> },
    ) => Effect.Effect<ExecResult, SandboxExecError | SandboxNotFoundError>
    /** Start a stopped sandbox and return a fresh handle. */
    readonly start: (
      sandboxId: SandboxId,
      timeout: number,
    ) => Effect.Effect<SandboxHandle, SandboxStartError | SandboxNotFoundError>
    /** Stop a running sandbox (it can be started again later). */
    readonly stop: (sandboxId: SandboxId) => Effect.Effect<void, SandboxNotFoundError>
    /** Delete a sandbox. Best-effort: never fails. */
    readonly destroy: (sandboxId: SandboxId) => Effect.Effect<void>
    /** Fetch the sandbox's preview URL/token. */
    readonly getPreview: (sandboxId: SandboxId) => Effect.Effect<PreviewAccess, SandboxNotFoundError>
  }
}
/**
 * Declarative Daytona image for agent sandboxes, built with the SDK's
 * `Image.base().runCommands().workdir()` builder (not a Dockerfile):
 * node base + git/curl, GitHub CLI apt repo, then opencode-ai and bun.
 */
export const discordBotImage = Image.base("node:22-bookworm-slim")
  .runCommands(
    "apt-get update && apt-get install -y git curl && rm -rf /var/lib/apt/lists/*",
    "curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg -o /usr/share/keyrings/githubcli-archive-keyring.gpg && echo \"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main\" > /etc/apt/sources.list.d/github-cli.list && apt-get update && apt-get install -y gh && rm -rf /var/lib/apt/lists/*",
    "npm install -g opencode-ai@latest bun",
  )
  .workdir("/home/daytona")
export class DaytonaService extends Context.Tag("@discord/DaytonaService")<DaytonaService, DaytonaService.Service>() {
  static readonly layer = Layer.effect(
    DaytonaService,
    Effect.gen(function* () {
      const config = yield* AppConfig
      const sdk = new DaytonaSDK({
        apiKey: Redacted.value(config.daytonaApiKey),
        _experimental: {},
      })
      // Any SDK lookup failure is collapsed to SandboxNotFoundError; the
      // underlying cause is deliberately dropped.
      const getSandbox = (sandboxId: SandboxId) =>
        Effect.tryPromise({
          try: () => sdk.get(sandboxId),
          catch: () => new SandboxNotFoundError({ sandboxId }),
        })
      // Resolve the preview link and package it as a SandboxHandle.
      // The trailing slash is stripped so callers can append paths.
      // NOTE(review): 4096 is OpenCode's serve port per the module docs,
      // but the inline type names the parameter `timeout` — confirm which
      // the SDK actually expects.
      const toHandle = <E>(
        sandboxId: SandboxId,
        sandbox: { getPreviewLink: (timeout: number) => Promise<{ url: string; token?: string | null }> },
        error: (cause: unknown) => E,
      ) =>
        Effect.tryPromise({
          try: () => sandbox.getPreviewLink(4096),
          catch: error,
        }).pipe(
          Effect.map((preview) =>
            SandboxHandle.make({
              id: sandboxId,
              previewUrl: preview.url.replace(/\/$/, ""),
              previewToken: preview.token ?? null,
            }),
          ),
        )
      const create = Effect.fn("DaytonaService.create")(
        function* (opts: { threadId: ThreadId; guildId: GuildId; timeout: number }) {
          // Labels let us find/clean sandboxes by app + thread later.
          // NOTE(review): auto*Interval 0 presumably disables Daytona's
          // auto-stop/auto-archive — confirm against SDK docs.
          const base = {
            labels: { app: "opencord", threadId: opts.threadId, guildId: opts.guildId },
            autoStopInterval: 0,
            autoArchiveInterval: 0,
          }
          // Prefer a prebuilt snapshot when configured; otherwise build
          // from the declarative image.
          const snapshot = config.daytonaSnapshot.trim()
          const sandbox = yield* Effect.tryPromise({
            try: () =>
              snapshot.length > 0
                ? sdk.create(
                  {
                    ...base,
                    snapshot,
                  },
                  { timeout: opts.timeout },
                )
                : sdk.create(
                  {
                    ...base,
                    image: discordBotImage,
                  },
                  { timeout: opts.timeout },
                ),
            catch: (cause) => new SandboxCreateError({ cause }),
          })
          const sandboxId = SandboxId.make(sandbox.id)
          // A failure fetching the preview link still counts as create failure.
          return yield* toHandle(
            sandboxId,
            sandbox,
            (cause) => new SandboxCreateError({ sandboxId, cause }),
          )
        },
      )
      const exec = Effect.fn("DaytonaService.exec")(
        function* (
          sandboxId: SandboxId,
          label: string,
          command: string,
          opts?: { cwd?: string; env?: Record<string, string> },
        ) {
          const sandbox = yield* getSandbox(sandboxId)
          const result = yield* Effect.tryPromise({
            // Transport-level failure: surfaced with a sentinel exit code -1.
            try: () => sandbox.process.executeCommand(command, opts?.cwd, opts?.env),
            catch: () => new SandboxExecError({ sandboxId, label, exitCode: -1, output: "exec failed" }),
          })
          // Non-zero exit becomes a typed error; output truncated to 500 chars.
          if (result.exitCode !== 0) {
            return yield* new SandboxExecError({
              sandboxId,
              label,
              exitCode: result.exitCode,
              output: result.result.slice(0, 500),
            })
          }
          return ExecResult.make({
            exitCode: result.exitCode,
            output: result.result.trim(),
          })
        },
      )
      const start = Effect.fn("DaytonaService.start")(function* (sandboxId: SandboxId, timeout: number) {
        const sandbox = yield* getSandbox(sandboxId)
        yield* Effect.tryPromise({
          try: () => sdk.start(sandbox, timeout),
          catch: (cause) => new SandboxStartError({ sandboxId, cause }),
        })
        // Re-resolve the preview link: it may differ after a restart.
        return yield* toHandle(sandboxId, sandbox, (cause) => new SandboxStartError({ sandboxId, cause }))
      })
      const stop = Effect.fn("DaytonaService.stop")(function* (sandboxId: SandboxId) {
        const sandbox = yield* getSandbox(sandboxId)
        // Any stop failure is reported as "not found" — the only error type
        // the interface declares for stop.
        yield* Effect.tryPromise({
          try: () => sdk.stop(sandbox),
          catch: () => new SandboxNotFoundError({ sandboxId }),
        })
      })
      // Best-effort teardown: every failure (lookup or delete) is ignored.
      const destroy = Effect.fn("DaytonaService.destroy")(function* (sandboxId: SandboxId) {
        yield* Effect.tryPromise({
          try: async () => {
            const sandbox = await sdk.get(sandboxId)
            await sdk.delete(sandbox)
          },
          catch: () => undefined,
        }).pipe(Effect.ignore)
      })
      const getPreview = Effect.fn("DaytonaService.getPreview")(function* (sandboxId: SandboxId) {
        const sandbox = yield* getSandbox(sandboxId)
        const handle = yield* toHandle(sandboxId, sandbox, () => new SandboxNotFoundError({ sandboxId }))
        return PreviewAccess.from(handle)
      })
      return DaytonaService.of({ create, exec, start, stop, destroy, getPreview })
    }),
  )
}

View File

@@ -0,0 +1,44 @@
import { Daytona as DaytonaSDK } from "@daytonaio/sdk"
import { discordBotImage } from "./service"
// CLI entry point: builds the Discord-bot sandbox image as a Daytona snapshot
// and activates it. An optional first argument overrides the snapshot name.

// Datestamp (YYYYMMDD) used in the default snapshot name.
const datestamp = () => new Date().toISOString().slice(0, 10).replaceAll("-", "")

const apiKey = process.env["DAYTONA_API_KEY"]?.trim() ?? ""
if (apiKey.length === 0) {
  console.error("DAYTONA_API_KEY is required")
  process.exit(1)
}

// Snapshot name: explicit CLI argument, or a dated default.
const name = Bun.argv[2]?.trim() || `opencode-discord-${datestamp()}`
const regionId = process.env["DAYTONA_REGION_ID"]?.trim() ?? ""

const run = async (): Promise<void> => {
  const sdk = new DaytonaSDK({
    apiKey,
    _experimental: {},
  })
  // Only include regionId in the request when one was supplied.
  const params = { name, image: discordBotImage }
  const snapshot = await sdk.snapshot.create(
    regionId.length > 0 ? { ...params, regionId } : params,
    {
      // Stream build logs straight through to the terminal.
      onLogs: (chunk) => process.stdout.write(chunk),
    },
  )
  const active = await sdk.snapshot.activate(snapshot)
  console.log(`snapshot ready: ${active.name}`)
  console.log(`set DAYTONA_SNAPSHOT=${active.name}`)
}

void run().catch((error) => {
  const message = error instanceof Error ? error.message : String(error)
  console.error(`snapshot creation failed: ${message}`)
  process.exit(1)
})

View File

@@ -0,0 +1,250 @@
import { Context, Effect, Layer, ParseResult, Schema, Schedule } from "effect"
import { HttpBody, HttpClient, HttpClientError, HttpClientRequest, HttpClientResponse } from "@effect/platform"
import { classifyOpenCodeFailure, HealthCheckError, OpenCodeClientError } from "../../errors"
import { PreviewAccess, SessionId } from "../../types"
// Wire schemas for the OpenCode HTTP API. Fields that aren't strictly needed
// are optional, so minor server-side shape changes don't break decoding.
// GET /global/health
const HealthResponse = Schema.Struct({
  healthy: Schema.Boolean,
})
// POST /session — returns the freshly created session's id.
const CreateSessionResponse = Schema.Struct({
  id: SessionId,
})
// GET /session — `time.updated` is a numeric update timestamp
// (exact unit not visible here — confirm upstream).
const ListSessionsResponse = Schema.Array(
  Schema.Struct({
    id: SessionId,
    title: Schema.optional(Schema.String),
    time: Schema.optional(Schema.Struct({ updated: Schema.optional(Schema.Number) })),
  }),
)
// POST /session/:id/message — the agent's reply arrives as typed parts;
// only parts with `type: "text"` are extracted by sendPrompt.
const SendPromptResponse = Schema.Struct({
  parts: Schema.optional(
    Schema.Array(Schema.Struct({
      type: Schema.String,
      text: Schema.optional(Schema.String),
      content: Schema.optional(Schema.String),
    })),
  ),
})
/** Decoded, app-facing view of one entry from the session list. */
export class OpenCodeSessionSummary extends Schema.Class<OpenCodeSessionSummary>("OpenCodeSessionSummary")({
  id: SessionId,
  title: Schema.String,
  updatedAt: Schema.optional(Schema.Number),
}) {}
/**
 * Normalize a PreviewAccess into a clean base URL plus auth token.
 * The token may live either in `previewToken` or embedded in the URL as
 * `?tkn=`; an explicit `previewToken` wins. The `tkn` query parameter and
 * any trailing slash are stripped from the returned base.
 */
const parsePreview = (input: PreviewAccess): { base: string; token: string | null } => {
  const parsed = new URL(input.previewUrl)
  // Read the embedded token before stripping it from the URL.
  const embedded = parsed.searchParams.get("tkn")
  parsed.searchParams.delete("tkn")
  const base = parsed.toString().replace(/\/$/, "")
  return { base, token: input.previewToken ?? embedded }
}
/**
 * HTTP client for an OpenCode server running inside a Daytona sandbox.
 *
 * Each method takes a {@link PreviewAccess} to locate the sandbox's preview
 * tunnel. Typical lifecycle:
 *
 * 1. `waitForHealthy` — poll until the server is ready after creation/resume
 * 2. `createSession` — start a new chat session
 * 3. `sendPrompt` — send user messages, returns the agent's text response
 * 4. `abortSession` — cancel an in-flight generation
 */
export declare namespace OpenCodeClient {
  export interface Service {
    /** Poll the health endpoint until the server responds healthy, or timeout.
     * Resolves `false` on timeout rather than failing — the implementation
     * swallows the `HealthCheckError` declared here. */
    readonly waitForHealthy: (
      preview: PreviewAccess,
      maxWaitMs?: number,
    ) => Effect.Effect<boolean, HealthCheckError>
    /** Create a new chat session with the given title. Returns the session ID. */
    readonly createSession: (
      preview: PreviewAccess,
      title: string,
    ) => Effect.Effect<SessionId, OpenCodeClientError>
    /** Check whether a session still exists on the server (404 → false). */
    readonly sessionExists: (
      preview: PreviewAccess,
      sessionId: SessionId,
    ) => Effect.Effect<boolean, OpenCodeClientError>
    /** List recent sessions, ordered by update time. */
    readonly listSessions: (
      preview: PreviewAccess,
      limit?: number,
    ) => Effect.Effect<ReadonlyArray<OpenCodeSessionSummary>, OpenCodeClientError>
    /** Send a user prompt and return the agent's text response. */
    readonly sendPrompt: (
      preview: PreviewAccess,
      sessionId: SessionId,
      text: string,
    ) => Effect.Effect<string, OpenCodeClientError>
    /** Cancel an in-flight generation. Best-effort, errors are swallowed. */
    readonly abortSession: (
      preview: PreviewAccess,
      sessionId: SessionId,
    ) => Effect.Effect<void>
  }
}
/**
 * Effect service wrapping the OpenCode HTTP API exposed by a sandbox preview URL.
 *
 * All requests go through a per-preview scoped client that attaches the
 * Daytona preview token header (when present), prefixes the preview base URL,
 * and rejects non-2xx responses via `filterStatusOk`.
 */
export class OpenCodeClient extends Context.Tag("@discord/OpenCodeClient")<OpenCodeClient, OpenCodeClient.Service>() {
  static readonly layer = Layer.effect(
    OpenCodeClient,
    Effect.gen(function* () {
      const baseClient = yield* HttpClient.HttpClient
      /** Build a scoped client for a specific preview, with auth header and 2xx filtering. */
      const scopedClient = (preview: PreviewAccess) => {
        const { base, token } = parsePreview(preview)
        return baseClient.pipe(
          HttpClient.mapRequest((req) =>
            token ? HttpClientRequest.setHeader(req, "x-daytona-preview-token", token) : req
          ),
          HttpClient.mapRequest(HttpClientRequest.prependUrl(base)),
          HttpClient.filterStatusOk,
        )
      }
      /** Map HttpClientError + ParseError to OpenCodeClientError for a given operation. */
      // statusCode 0 marks failures that never produced an HTTP response
      // (transport errors, decode errors).
      const openCodeError = (operation: string, statusCode: number, body: string) =>
        OpenCodeClientError.make({
          operation,
          statusCode,
          body,
          kind: classifyOpenCodeFailure(statusCode, body),
        })
      // Normalizes the three failure channels of an HTTP+decode pipeline into
      // the single OpenCodeClientError type callers handle.
      const mapErrors = <A, R>(
        operation: string,
        effect: Effect.Effect<A, HttpClientError.HttpClientError | ParseResult.ParseError, R>,
      ) =>
        effect.pipe(
          Effect.catchTags({
            ResponseError: (err) =>
              openCodeError(operation, err.response.status, err.message),
            RequestError: (err) =>
              openCodeError(operation, 0, err.message),
            ParseError: (err) =>
              openCodeError(operation, 0, `Decode: ${err.message}`),
          }),
        )
      // Polls /global/health every 2s until it reports healthy or the time
      // budget is exhausted; resolves to a boolean instead of failing.
      const waitForHealthy = Effect.fn("OpenCodeClient.waitForHealthy")(
        function* (preview: PreviewAccess, maxWaitMs = 120_000) {
          const maxAttempts = Math.max(1, Math.ceil(maxWaitMs / 2000))
          const api = scopedClient(preview)
          const poll = api.get("/global/health").pipe(
            Effect.flatMap(HttpClientResponse.schemaBodyJson(HealthResponse)),
            Effect.scoped,
            Effect.flatMap((body) =>
              body.healthy
                ? Effect.succeed(true)
                : new HealthCheckError({ lastStatus: `200 but healthy=${body.healthy}` }),
            ),
            // Any failure (HTTP, decode, unhealthy) becomes a retryable HealthCheckError.
            Effect.catchAll((cause) => new HealthCheckError({ lastStatus: String(cause) })),
          )
          return yield* poll.pipe(
            Effect.retry(
              Schedule.intersect(
                Schedule.spaced("2 seconds"),
                Schedule.recurs(maxAttempts - 1),
              ),
            ),
            // Exhausted retries → report unhealthy rather than failing the effect.
            Effect.catchAll(() => Effect.succeed(false)),
          )
        },
      )
      /** Creates a new OpenCode session with the given title and returns its id. */
      const createSession = (preview: PreviewAccess, title: string) =>
        mapErrors(
          "createSession",
          scopedClient(preview)
            .post("/session", { body: HttpBody.unsafeJson({ title }) })
            .pipe(
              Effect.flatMap(HttpClientResponse.schemaBodyJson(CreateSessionResponse)),
              Effect.scoped,
              Effect.map((body) => body.id),
            ),
        )
      /** Checks whether a session id still exists; a 404 maps to `false` rather than an error. */
      const sessionExists = (preview: PreviewAccess, sessionId: SessionId) =>
        scopedClient(preview)
          .get(`/session/${sessionId}`)
          .pipe(
            Effect.scoped,
            Effect.as(true),
            Effect.catchTag("ResponseError", (err) =>
              err.response.status === 404
                ? Effect.succeed(false)
                : openCodeError("sessionExists", err.response.status, err.message),
            ),
            Effect.catchTag("RequestError", (err) =>
              openCodeError("sessionExists", 0, err.message),
            ),
          )
      /** Lists sessions (optionally limited) as OpenCodeSessionSummary values. */
      const listSessions = (preview: PreviewAccess, limit = 50) =>
        mapErrors(
          "listSessions",
          scopedClient(preview)
            .get(`/session${limit > 0 ? `?limit=${limit}` : ""}`)
            .pipe(
              Effect.flatMap(HttpClientResponse.schemaBodyJson(ListSessionsResponse)),
              Effect.scoped,
              Effect.map((sessions) =>
                sessions.map((s) =>
                  OpenCodeSessionSummary.make({
                    id: s.id,
                    title: s.title ?? "",
                    // Only set updatedAt when the API supplied a timestamp.
                    ...(s.time?.updated != null ? { updatedAt: s.time.updated } : {}),
                  }),
                ),
              ),
            ),
        )
      /** Sends a text prompt to a session and joins the text parts of the reply. */
      const sendPrompt = (preview: PreviewAccess, sessionId: SessionId, text: string) =>
        mapErrors(
          "sendPrompt",
          scopedClient(preview)
            .post(`/session/${sessionId}/message`, {
              body: HttpBody.unsafeJson({ parts: [{ type: "text", text }] }),
            })
            .pipe(
              Effect.flatMap(HttpClientResponse.schemaBodyJson(SendPromptResponse)),
              Effect.scoped,
              Effect.map((result) => {
                const parts = result.parts ?? []
                const textContent = parts
                  .filter((p) => p.type === "text")
                  .map((p) => p.text || p.content || "")
                  .filter(Boolean)
                return textContent.join("\n\n") || "(No response from agent)"
              }),
            ),
        )
      /** Best-effort abort of a running session; all failures are swallowed. */
      const abortSession = (preview: PreviewAccess, sessionId: SessionId) =>
        scopedClient(preview)
          .post(`/session/${sessionId}/abort`)
          .pipe(
            Effect.scoped,
            Effect.asVoid,
            Effect.catchAll(() => Effect.void),
          )
      return OpenCodeClient.of({
        waitForHealthy,
        createSession,
        sessionExists,
        listSessions,
        sendPrompt,
        abortSession,
      })
    }),
  )
}

View File

@@ -0,0 +1,470 @@
import agentPrompt from "../agent-prompt.md" with { type: "text" }
import { Context, Effect, Exit, Layer, Option, Redacted, Schema } from "effect"
import { AppConfig } from "../config"
import {
ConfigEncodeError,
DatabaseError,
type HealthCheckError,
type OpenCodeClientError,
SandboxCreateError,
SandboxDeadError,
type SandboxExecError,
type SandboxNotFoundError,
type SandboxStartError,
} from "../errors"
import { SessionStore } from "../session/store"
import { ChannelId, GuildId, PreviewAccess, SandboxId, SessionInfo, ThreadId } from "../types"
import { DaytonaService, type SandboxHandle } from "./daytona/service"
import { OpenCodeClient, OpenCodeSessionSummary } from "./opencode/client"
import { logIgnore } from "../lib/log"
// JSON codec for the auth.json blob written into each sandbox
// (~/.local/share/opencode/auth.json): an API-key credential for OpenCode.
const OpenCodeAuth = Schema.parseJson(
  Schema.Struct({
    opencode: Schema.Struct({
      type: Schema.Literal("api"),
      key: Schema.String,
    }),
  }),
)
// JSON codec for the opencode.json config written into each sandbox: model
// selection plus a single primary "build" agent carrying the Discord prompt.
const OpenCodeConfig = Schema.parseJson(
  Schema.Struct({
    model: Schema.String,
    share: Schema.String,
    permission: Schema.String,
    agent: Schema.Struct({
      build: Schema.Struct({
        mode: Schema.Literal("primary"),
        prompt: Schema.String,
      }),
    }),
  }),
)
/** Successful resume: carries the refreshed, active SessionInfo. */
export class Resumed extends Schema.Class<Resumed>("Resumed")({
  session: SessionInfo,
}) {}
/** Failed resume; `allowRecreate` says whether the caller may destroy + recreate. */
export class ResumeFailed extends Schema.Class<ResumeFailed>("ResumeFailed")({
  allowRecreate: Schema.Boolean,
}) {}
/** Outcome of SandboxProvisioner.resume — discriminate with `instanceof Resumed`. */
export type ResumeResult = Resumed | ResumeFailed
/** Service contract for sandbox lifecycle management (create / resume / pause / destroy). */
export declare namespace SandboxProvisioner {
  export interface Service {
    /** Creates a brand new sandbox + OpenCode session. */
    readonly provision: (
      threadId: ThreadId,
      channelId: ChannelId,
      guildId: GuildId,
    ) => Effect.Effect<
      SessionInfo,
      | SandboxCreateError
      | SandboxExecError
      | SandboxNotFoundError
      | SandboxStartError
      | HealthCheckError
      | OpenCodeClientError
      | ConfigEncodeError
      | DatabaseError
    >
    /** Attempts to resume an existing sandbox/session. Returns Resumed or Failed. */
    // Note: never fails — internal errors are converted into ResumeFailed.
    readonly resume: (
      session: SessionInfo,
    ) => Effect.Effect<ResumeResult>
    /** Ensures a thread has an active healthy session, resuming or recreating if needed. */
    readonly ensureActive: (input: {
      threadId: ThreadId
      channelId: ChannelId
      guildId: GuildId
      current: Option.Option<SessionInfo>
    }) => Effect.Effect<
      SessionInfo,
      | SandboxCreateError
      | SandboxExecError
      | SandboxNotFoundError
      | SandboxStartError
      | HealthCheckError
      | OpenCodeClientError
      | ConfigEncodeError
      | SandboxDeadError
      | DatabaseError
    >
    /** Verifies active session health and attachment before reusing it. */
    readonly ensureHealthy: (
      session: SessionInfo,
      maxWaitMs: number,
    ) => Effect.Effect<boolean, HealthCheckError>
    /** Applies session-state recovery policy after a send failure. */
    readonly recoverSendFailure: (
      threadId: ThreadId,
      session: SessionInfo,
      error: OpenCodeClientError,
    ) => Effect.Effect<SessionInfo, DatabaseError>
    /** Pauses a session by stopping its sandbox. */
    readonly pause: (
      threadId: ThreadId,
      session: SessionInfo,
      reason: string,
    ) => Effect.Effect<SessionInfo, DatabaseError>
    /** Destroys a session by destroying its sandbox. */
    readonly destroy: (
      threadId: ThreadId,
      session: SessionInfo,
      reason?: string,
    ) => Effect.Effect<SessionInfo, DatabaseError>
  }
}
/**
 * Orchestrates the sandbox lifecycle for Discord threads: creating Daytona
 * sandboxes, installing + starting the OpenCode server inside them, resuming
 * stopped sandboxes, and recording every status transition in SessionStore.
 */
export class SandboxProvisioner extends Context.Tag("@discord/SandboxProvisioner")<
  SandboxProvisioner,
  SandboxProvisioner.Service
>() {
  static readonly layer = Layer.effect(
    SandboxProvisioner,
    Effect.gen(function* () {
      const config = yield* AppConfig
      const daytonaService = yield* DaytonaService
      const oc = yield* OpenCodeClient
      const store = yield* SessionStore
      /** Best-effort read of the OpenCode startup log from inside a sandbox. */
      const readStartupLog = (sandboxId: SandboxId, lines = 100) =>
        daytonaService.exec(sandboxId, "read-opencode-log", `cat /tmp/opencode.log 2>/dev/null | tail -${lines}`).pipe(
          Effect.map((r) => r.output),
          Effect.catchAll(() => Effect.succeed("(unable to read log)")),
        )
      /** Locate the existing OpenCode session or create a fresh one for a thread. */
      // Fallback chain: exact session id → newest session with the thread's
      // title → brand-new session. Lookup failures degrade to the next step.
      const findOrCreateSessionId = Effect.fnUntraced(function* (preview: PreviewAccess, session: SessionInfo) {
        const exists = yield* oc
          .sessionExists(preview, session.sessionId)
          .pipe(Effect.catchAll(() => Effect.succeed(false)))
        if (exists) return session.sessionId
        const title = `Discord thread ${session.threadId}`
        const sessions = yield* oc
          .listSessions(preview, 50)
          .pipe(Effect.catchAll(() => Effect.succeed([] as ReadonlyArray<OpenCodeSessionSummary>)))
        const match = [...sessions]
          .filter((c) => c.title === title)
          .sort((a, b) => (b.updatedAt ?? 0) - (a.updatedAt ?? 0))[0]
        return match ? match.id : yield* oc.createSession(preview, title)
      })
      // Env passed into sandbox exec calls: GitHub token (when configured)
      // under both names tooling expects, merged with any caller-provided vars.
      const buildRuntimeEnv = (input?: Record<string, string>): Record<string, string> => {
        const runtimeEnv: Record<string, string> = {}
        const githubToken = config.githubToken.trim()
        if (githubToken.length > 0) {
          runtimeEnv.GH_TOKEN = githubToken
          runtimeEnv.GITHUB_TOKEN = githubToken
        }
        if (!input) return runtimeEnv
        return { ...runtimeEnv, ...input }
      }
      /** Best-effort: record a resume failure reason and mark the session as errored. */
      const recordFailure = (threadId: ThreadId, reason: string) =>
        Effect.all(
          [
            logIgnore(store.incrementResumeFailure(threadId, reason), "incrementResumeFailure"),
            logIgnore(store.updateStatus(threadId, "error", reason), "updateStatus"),
          ],
          { discard: true },
        )
      // Shell one-liner: kill any running OpenCode server, then relaunch it
      // from the first repo checkout directory that exists.
      const restartOpenCodeServe =
        'pkill -f \'opencode serve --port 4096\' >/dev/null 2>&1 || true; for d in "$HOME/opencode" "/home/daytona/opencode" "/root/opencode"; do if [ -d "$d" ]; then cd "$d" && setsid opencode serve --port 4096 --hostname 0.0.0.0 > /tmp/opencode.log 2>&1 & exit 0; fi; done; exit 1'
      // Full provisioning: create sandbox, install + boot OpenCode, wait for
      // health, open a session. On any failure the sandbox is destroyed.
      const provision = Effect.fn("SandboxProvisioner.provision")(function* (
        threadId: ThreadId,
        channelId: ChannelId,
        guildId: GuildId,
      ) {
        yield* logIgnore(store.updateStatus(threadId, "creating"), "updateStatus")
        return yield* Effect.acquireUseRelease(
          daytonaService.create({
            threadId,
            guildId,
            timeout: config.sandboxCreationTimeout,
          }),
          (handle) =>
            Effect.gen(function* () {
              const sandboxId = handle.id
              yield* Effect.logInfo("Created sandbox").pipe(
                Effect.annotateLogs({ event: "sandbox.create.started", threadId, channelId, guildId, sandboxId }),
              )
              const opencodeConfig: string = yield* Schema.encode(OpenCodeConfig)({
                model: config.openCodeModel,
                share: "disabled",
                permission: "allow",
                agent: { build: { mode: "primary", prompt: agentPrompt } },
              }).pipe(Effect.mapError((cause) => new ConfigEncodeError({ config: "OpenCodeConfig", cause })))
              const authJson: string = yield* Schema.encode(OpenCodeAuth)({
                opencode: { type: "api", key: Redacted.value(config.openCodeZenApiKey) },
              }).pipe(Effect.mapError((cause) => new ConfigEncodeError({ config: "OpenCodeAuth", cause })))
              // Secrets are delivered via env vars, not interpolated into the script.
              yield* daytonaService.exec(
                sandboxId,
                "setup-opencode",
                [
                  `set -e`,
                  `git clone --depth=1 https://github.com/anomalyco/opencode.git $HOME/opencode`,
                  `mkdir -p $HOME/.local/share/opencode`,
                  `printf '%s' "$OPENCODE_AUTH_JSON" > $HOME/.local/share/opencode/auth.json`,
                  `printf '%s' "$OPENCODE_CONFIG_JSON" > $HOME/opencode/opencode.json`,
                  `cd $HOME/opencode`,
                  `setsid opencode serve --port 4096 --hostname 0.0.0.0 > /tmp/opencode.log 2>&1 &`,
                ].join("\n"),
                {
                  env: buildRuntimeEnv({
                    OPENCODE_AUTH_JSON: authJson,
                    OPENCODE_CONFIG_JSON: opencodeConfig,
                  }),
                },
              )
              const healthy = yield* oc.waitForHealthy(
                PreviewAccess.from(handle),
                config.startupHealthTimeoutMs,
              )
              if (!healthy) {
                // Surface a slice of the startup log in the failure for debugging.
                const startupLog = yield* readStartupLog(sandboxId)
                return yield* new SandboxCreateError({
                  sandboxId: handle.id,
                  cause: new Error(`OpenCode server did not become healthy: ${startupLog.slice(0, 400)}`),
                })
              }
              const sessionId = yield* oc.createSession(
                PreviewAccess.from(handle),
                `Discord thread ${threadId}`,
              )
              const session: SessionInfo = SessionInfo.make({
                threadId,
                channelId,
                guildId,
                sandboxId: handle.id,
                sessionId,
                previewUrl: handle.previewUrl,
                previewToken: handle.previewToken,
                status: "active",
                lastError: null,
                resumeFailCount: 0,
              })
              yield* store.markHealthOk(threadId)
              yield* Effect.logInfo("Session is ready").pipe(
                Effect.annotateLogs({ event: "sandbox.create.ready", threadId, sandboxId, sessionId }),
              )
              return session
            }),
          // Release: on failure, mark the thread errored and tear the sandbox down.
          (handle, exit) =>
            Exit.isFailure(exit)
              ? Effect.gen(function* () {
                  yield* Effect.logError("Failed to create session").pipe(
                    Effect.annotateLogs({ event: "sandbox.create.failed", threadId, sandboxId: handle.id }),
                  )
                  yield* logIgnore(store.updateStatus(threadId, "error", "creation-failed"), "updateStatus")
                  yield* daytonaService.destroy(handle.id)
                })
              : Effect.void,
        )
      })
      const failed = (allowRecreate: boolean) => ResumeFailed.make({ allowRecreate })
      // Resume a stopped sandbox: start it, restart OpenCode, wait for health,
      // reattach (or recreate) the session. Never fails — errors become ResumeFailed.
      const resume = (session: SessionInfo): Effect.Effect<ResumeResult> =>
        Effect.gen(function* () {
          // Only these statuses are resumable; anything else tells the caller to recreate.
          if (!["paused", "destroyed", "error", "pausing", "resuming"].includes(session.status)) {
            return failed(true)
          }
          yield* store.updateStatus(session.threadId, "resuming")
          const startResult = yield* daytonaService.start(session.sandboxId, config.sandboxCreationTimeout).pipe(
            Effect.map(Option.some),
            // Sandbox gone entirely → mark destroyed and allow recreation.
            Effect.catchTag("SandboxNotFoundError", (err) =>
              Effect.gen(function* () {
                yield* logIgnore(store.incrementResumeFailure(session.threadId, err.message), "incrementResumeFailure")
                yield* logIgnore(store.updateStatus(session.threadId, "destroyed", err.message), "updateStatus")
                return Option.none<SandboxHandle>()
              }),
            ),
            Effect.catchTag("SandboxStartError", (err) =>
              recordFailure(session.threadId, String(err.cause)).pipe(Effect.as(Option.none<SandboxHandle>())),
            ),
          )
          if (Option.isNone(startResult)) return failed(true)
          const handle = startResult.value
          yield* logIgnore(
            daytonaService.exec(session.sandboxId, "restart-opencode-serve", restartOpenCodeServe, {
              env: buildRuntimeEnv(),
            }),
            "restart-opencode-serve",
          )
          const preview = PreviewAccess.from(handle)
          const healthy = yield* oc.waitForHealthy(preview, config.resumeHealthTimeoutMs)
          if (!healthy) {
            const startupLog = yield* readStartupLog(session.sandboxId, 120)
            yield* recordFailure(
              session.threadId,
              `OpenCode health check failed after resume. Log: ${startupLog.slice(0, 500)}`,
            )
            // allowRecreate=false: the sandbox started but OpenCode is unhealthy —
            // recreating immediately would likely fail the same way.
            return failed(false)
          }
          const sessionId = yield* findOrCreateSessionId(preview, session)
          const resumed = SessionInfo.make({
            ...session,
            sessionId,
            previewUrl: handle.previewUrl,
            previewToken: handle.previewToken,
            status: "active",
          })
          yield* store.markHealthOk(session.threadId)
          yield* Effect.logInfo("Resumed existing sandbox").pipe(
            Effect.annotateLogs({ event: "sandbox.resumed", threadId: session.threadId, sandboxId: session.sandboxId }),
          )
          return Resumed.make({ session: resumed })
        }).pipe(
          // Any uncaught error (including store failures) is recorded and
          // collapsed into a non-recreatable failure.
          Effect.catchAll((err) =>
            recordFailure(session.threadId, String(err)).pipe(Effect.as(failed(false))),
          ),
        )
      // Health gate for reusing an already-active session: server healthy AND
      // the recorded session id still exists on it.
      const ensureHealthy = Effect.fn("SandboxProvisioner.ensureHealthy")(function* (
        session: SessionInfo,
        maxWaitMs: number,
      ) {
        const healthy = yield* oc.waitForHealthy(PreviewAccess.from(session), maxWaitMs)
        if (!healthy) {
          yield* recordFailure(session.threadId, "active-session-healthcheck-failed")
          return false
        }
        const attached = yield* oc
          .sessionExists(PreviewAccess.from(session), session.sessionId)
          .pipe(Effect.catchAll(() => Effect.succeed(false)))
        if (!attached) {
          yield* recordFailure(session.threadId, "active-session-missing")
          return false
        }
        yield* logIgnore(store.markHealthOk(session.threadId), "markHealthOk")
        return true
      })
      // Stop the sandbox; if stopping fails, treat the sandbox as gone.
      const pause = Effect.fn("SandboxProvisioner.pause")(function* (
        threadId: ThreadId,
        session: SessionInfo,
        reason: string,
      ) {
        if (session.status === "paused") return session
        yield* store.updateStatus(threadId, "pausing", reason)
        const stopped = Exit.isSuccess(yield* Effect.exit(daytonaService.stop(session.sandboxId)))
        if (stopped) {
          yield* store.updateStatus(threadId, "paused", null)
          return session.withStatus("paused")
        }
        yield* store.updateStatus(threadId, "destroyed", "sandbox-unavailable-during-pause")
        return session.withStatus("destroyed")
      })
      // Destroy the sandbox and record the destroying → destroyed transition.
      const destroy = Effect.fn("SandboxProvisioner.destroy")(function* (
        threadId: ThreadId,
        session: SessionInfo,
        reason?: string,
      ) {
        if (session.status === "destroyed") return session
        yield* store.updateStatus(threadId, "destroying", reason ?? null)
        yield* daytonaService.destroy(session.sandboxId)
        yield* store.updateStatus(threadId, "destroyed", reason ?? null)
        return session.withStatus("destroyed")
      })
      // Decision tree: reuse healthy active session → resume (if policy allows)
      // → destroy + recreate. Fails with SandboxDeadError when resume failed
      // in a way that forbids recreation.
      const ensureActive = Effect.fn("SandboxProvisioner.ensureActive")(function* (input: {
        threadId: ThreadId
        channelId: ChannelId
        guildId: GuildId
        current: Option.Option<SessionInfo>
      }) {
        if (Option.isNone(input.current)) {
          return yield* provision(input.threadId, input.channelId, input.guildId)
        }
        let candidate = input.current.value
        if (candidate.status === "active") {
          const healthy = yield* ensureHealthy(candidate, config.activeHealthCheckTimeoutMs)
          if (healthy) return candidate
          // Re-read the row: ensureHealthy's recordFailure may have changed status.
          const refreshed = yield* store.getByThread(input.threadId)
          candidate = Option.isSome(refreshed) ? refreshed.value : candidate.withStatus("error")
        }
        if (config.sandboxReusePolicy === "resume_preferred") {
          const resumed = yield* resume(candidate)
          if (resumed instanceof Resumed) return resumed.session
          if (!resumed.allowRecreate) {
            return yield* new SandboxDeadError({
              threadId: input.threadId,
              reason: "Unable to reattach to existing sandbox session. Try again shortly.",
            })
          }
        }
        yield* destroy(input.threadId, candidate, "recreate-after-resume-failure")
        return yield* provision(input.threadId, input.channelId, input.guildId)
      })
      // Policy after a failed prompt send, keyed on the error classification:
      // non-recoverable → leave as-is; session-missing → error status;
      // otherwise pause so the next message triggers a resume.
      const recoverSendFailure = Effect.fn("SandboxProvisioner.recoverSendFailure")(function* (
        threadId: ThreadId,
        session: SessionInfo,
        error: OpenCodeClientError,
      ) {
        const kind = error.kind
        if (kind === "non-recoverable") return session
        yield* store.incrementResumeFailure(threadId, String(error))
        if (kind === "session-missing") {
          yield* store.updateStatus(threadId, "error", "opencode-session-missing")
          return session.withStatus("error")
        }
        return yield* pause(threadId, session, "recoverable send failure")
      })
      return SandboxProvisioner.of({
        provision,
        resume,
        ensureActive,
        ensureHealthy,
        recoverSendFailure,
        pause,
        destroy,
      })
    }),
  )
}

View File

@@ -0,0 +1,199 @@
import type * as Client from "@effect/sql/SqlClient"
import { describe, expect } from "bun:test"
import { Duration, Effect, Layer, Option, Redacted } from "effect"
import { AppConfig } from "../config"
import { SqliteDb } from "../db/client"
import { effectTest, withTempSqliteFile } from "../test/effect"
import { ChannelId, GuildId, SandboxId, SessionId, SessionInfo, ThreadId } from "../types"
import { SessionStore } from "./store"
// Minimal AppConfig fixture for SessionStore tests: only `databasePath`
// varies; everything else is a harmless placeholder. The `as AppConfig.…`
// casts satisfy the config's branded numeric types.
const makeConfig = (databasePath: string) =>
  AppConfig.of({
    discordToken: Redacted.make("token"),
    allowedChannelIds: [],
    discordCategoryId: "",
    discordRoleId: "",
    discordRequiredRoleId: "",
    discordCommandGuildId: "",
    databasePath,
    daytonaApiKey: Redacted.make("daytona"),
    daytonaSnapshot: "",
    openCodeZenApiKey: Redacted.make("zen"),
    githubToken: "",
    logLevel: "info",
    healthHost: "127.0.0.1",
    healthPort: 8787,
    turnRoutingMode: "off",
    turnRoutingModel: "claude-haiku-4-5",
    sandboxReusePolicy: "resume_preferred",
    sandboxTimeout: Duration.minutes(30),
    cleanupInterval: Duration.minutes(5),
    staleActiveGraceMinutes: 5 as AppConfig.Service["staleActiveGraceMinutes"],
    pausedTtlMinutes: 180 as AppConfig.Service["pausedTtlMinutes"],
    activeHealthCheckTimeoutMs: 15000 as AppConfig.Service["activeHealthCheckTimeoutMs"],
    startupHealthTimeoutMs: 120000 as AppConfig.Service["startupHealthTimeoutMs"],
    resumeHealthTimeoutMs: 120000 as AppConfig.Service["resumeHealthTimeoutMs"],
    sandboxCreationTimeout: 180 as AppConfig.Service["sandboxCreationTimeout"],
    openCodeModel: "opencode/claude-sonnet-4-5",
  })
/** Builds a SessionInfo fixture for a thread id, defaulting to "active" status. */
const makeSession = (threadId: string, status: "creating" | "active" | "paused" = "active") => {
  const thread = ThreadId.make(threadId)
  return SessionInfo.make({
    threadId: thread,
    channelId: ChannelId.make("c1"),
    guildId: GuildId.make("g1"),
    sandboxId: SandboxId.make("sb1"),
    sessionId: SessionId.make(`s-${threadId}`),
    previewUrl: `https://preview/${threadId}`,
    previewToken: null,
    status,
    lastError: null,
    resumeFailCount: 0,
  })
}
// Runs a test body against a SessionStore backed by a temp sqlite file.
// The same `sqlite` layer value is used both under SessionStore and in the
// merge, so the store and the raw SqlClient share one database connection.
const withStore = <A, E, R>(run: (store: SessionStore.Service, sql: Client.SqlClient) => Effect.Effect<A, E, R>) =>
  withTempSqliteFile((databasePath) =>
    Effect.gen(function* () {
      const config = Layer.succeed(AppConfig, makeConfig(databasePath))
      const sqlite = SqliteDb.layer.pipe(
        Layer.provide(config),
      )
      const live = Layer.merge(
        SessionStore.layer.pipe(
          Layer.provide(sqlite),
        ),
        sqlite,
      )
      const program = Effect.all([SessionStore, SqliteDb]).pipe(
        Effect.flatMap(([store, sql]) => run(store, sql)),
      )
      return yield* program.pipe(Effect.provide(live))
    }),
    "discord-store-",
  )
/** Reads the lifecycle transition timestamp columns for a single thread's row (null if absent). */
const getTransitions = (sql: Client.SqlClient, threadId: ThreadId) =>
  Effect.map(
    sql<{
      pause_requested_at: string | null
      paused_at: string | null
      resume_attempted_at: string | null
      resumed_at: string | null
      destroyed_at: string | null
    }>`SELECT pause_requested_at, paused_at, resume_attempted_at, resumed_at, destroyed_at
FROM discord_sessions
WHERE thread_id = ${threadId}
LIMIT 1`,
    (rows) => rows[0] ?? null,
  )
describe("SessionStore", () => {
effectTest("runs typed CRUD flow against sqlite", () =>
withStore((store, sql) =>
Effect.gen(function* () {
const t1 = ThreadId.make("t1")
const t2 = ThreadId.make("t2")
yield* store.upsert(makeSession("t1", "active"))
expect(yield* store.hasTrackedThread(t1)).toBe(true)
expect(Option.map(yield* store.getByThread(t1), (s) => s.threadId)).toEqual(Option.some(t1))
expect(Option.map(yield* store.getActive(t1), (s) => s.status)).toEqual(Option.some("active"))
yield* store.updateStatus(t1, "paused", "pause")
yield* store.incrementResumeFailure(t1, "resume-fail")
expect(Option.isNone(yield* store.getActive(t1))).toBe(true)
expect(Option.map(yield* store.getByThread(t1), (s) => s.resumeFailCount)).toEqual(Option.some(1))
expect(Option.map(yield* store.getByThread(t1), (s) => s.lastError)).toEqual(Option.some("resume-fail"))
yield* store.updateStatus(t1, "active")
yield* sql`UPDATE discord_sessions
SET last_activity = datetime('now', '-40 minutes')
WHERE thread_id = ${t1}`
expect((yield* store.listStaleActive(30)).map((row) => row.threadId)).toEqual([t1])
expect((yield* store.listStaleActive(120)).map((row) => row.threadId)).toEqual([])
yield* store.upsert(makeSession("t2", "creating"))
yield* store.updateStatus(t2, "paused")
yield* sql`UPDATE discord_sessions
SET paused_at = datetime('now', '-40 minutes')
WHERE thread_id = ${t2}`
expect((yield* store.listExpiredPaused(30)).map((row) => row.threadId)).toEqual([t2])
expect((yield* store.listExpiredPaused(120)).map((row) => row.threadId)).toEqual([])
expect(new Set(yield* store.listTrackedThreads())).toEqual(new Set([t1, t2]))
yield* store.updateStatus(t2, "destroyed")
expect(new Set(yield* store.listTrackedThreads())).toEqual(new Set([t1]))
const next = SessionInfo.make({
...makeSession("t1", "active"),
sessionId: SessionId.make("s-t1-new"),
previewToken: "ptok",
})
yield* store.upsert(next)
expect(Option.map(yield* store.getByThread(t1), (s) => s.sessionId)).toEqual(Option.some(SessionId.make("s-t1-new")))
expect(Option.map(yield* store.getByThread(t1), (s) => s.previewToken)).toEqual(Option.some("ptok"))
expect((yield* store.listActive()).map((row) => row.threadId)).toContain(t1)
}),
),
)
effectTest("tracks lifecycle transition timestamps by status", () =>
withStore((store, sql) =>
Effect.gen(function* () {
const t = ThreadId.make("tx")
yield* store.upsert(makeSession("tx", "creating"))
yield* store.updateStatus(t, "pausing", "queued")
const pausing = yield* getTransitions(sql, t)
expect(pausing).not.toBeNull()
if (pausing === null) return
expect(pausing.pause_requested_at).not.toBeNull()
expect(pausing.paused_at).toBeNull()
expect(pausing.resume_attempted_at).toBeNull()
expect(pausing.resumed_at).toBeNull()
expect(pausing.destroyed_at).toBeNull()
yield* store.updateStatus(t, "paused")
const paused = yield* getTransitions(sql, t)
expect(paused).not.toBeNull()
if (paused === null) return
expect(paused.pause_requested_at).not.toBeNull()
expect(paused.paused_at).not.toBeNull()
expect(paused.resume_attempted_at).toBeNull()
expect(paused.resumed_at).toBeNull()
expect(paused.destroyed_at).toBeNull()
yield* store.updateStatus(t, "resuming")
const resuming = yield* getTransitions(sql, t)
expect(resuming).not.toBeNull()
if (resuming === null) return
expect(resuming.pause_requested_at).not.toBeNull()
expect(resuming.paused_at).not.toBeNull()
expect(resuming.resume_attempted_at).not.toBeNull()
expect(resuming.resumed_at).toBeNull()
expect(resuming.destroyed_at).toBeNull()
yield* store.updateStatus(t, "active")
const active = yield* getTransitions(sql, t)
expect(active).not.toBeNull()
if (active === null) return
expect(active.pause_requested_at).not.toBeNull()
expect(active.paused_at).not.toBeNull()
expect(active.resume_attempted_at).not.toBeNull()
expect(active.resumed_at).not.toBeNull()
expect(active.destroyed_at).toBeNull()
yield* store.updateStatus(t, "destroyed")
const destroyed = yield* getTransitions(sql, t)
expect(destroyed).not.toBeNull()
if (destroyed === null) return
expect(destroyed.pause_requested_at).not.toBeNull()
expect(destroyed.paused_at).not.toBeNull()
expect(destroyed.resume_attempted_at).not.toBeNull()
expect(destroyed.resumed_at).not.toBeNull()
expect(destroyed.destroyed_at).not.toBeNull()
}),
),
)
})

View File

@@ -0,0 +1,268 @@
import * as Client from "@effect/sql/SqlClient"
import * as SqlSchema from "@effect/sql/SqlSchema"
import { Context, Effect, Layer, Option, Schema } from "effect"
import { SqliteDb } from "../db/client"
import { initializeSchema } from "../db/init"
import { DatabaseError } from "../errors"
import { ChannelId, GuildId, SandboxId, SessionId, SessionInfo, SessionStatus, ThreadId } from "../types"
// Shared SELECT column list aliasing snake_case columns to SessionInfo field names.
const ROW = `thread_id AS threadId, channel_id AS channelId, guild_id AS guildId, sandbox_id AS sandboxId, session_id AS sessionId,
preview_url AS previewUrl, preview_token AS previewToken, status, last_error AS lastError, resume_fail_count AS resumeFailCount`
/** Row shape (snake_case) written by upserts; see toWrite for the SessionInfo mapping. */
class Write extends Schema.Class<Write>("Write")({
  thread_id: ThreadId,
  channel_id: ChannelId,
  guild_id: GuildId,
  sandbox_id: SandboxId,
  session_id: SessionId,
  preview_url: Schema.String,
  preview_token: Schema.Union(Schema.Null, Schema.String),
  status: SessionStatus,
  last_error: Schema.Union(Schema.Null, Schema.String),
}) {}
// Request schemas for the SqlSchema-typed queries below.
const Thread = Schema.Struct({ thread_id: ThreadId })
const Status = Schema.Struct({
  thread_id: ThreadId,
  status: SessionStatus,
  last_error: Schema.Union(Schema.Null, Schema.String),
})
const Resume = Schema.Struct({ thread_id: ThreadId, last_error: Schema.String })
const Minutes = Schema.Struct({ minutes: Schema.Number })
// Status → timestamp column stamped when a row transitions into that status
// (consumed by statusSet in the layer below).
const STATUS_COLUMNS = [
  ["pausing", "pause_requested_at"],
  ["paused", "paused_at"],
  ["resuming", "resume_attempted_at"],
  ["active", "resumed_at"],
  ["destroyed", "destroyed_at"],
] as const
const toWrite = (session: SessionInfo) =>
Write.make({
thread_id: session.threadId,
channel_id: session.channelId,
guild_id: session.guildId,
sandbox_id: session.sandboxId,
session_id: session.sessionId,
preview_url: session.previewUrl,
preview_token: session.previewToken,
status: session.status,
last_error: session.lastError,
})
/** Persistence contract for Discord thread sessions backed by sqlite. */
export declare namespace SessionStore {
  export interface Service {
    /** Inserts a session row or updates the existing row for its thread id. */
    readonly upsert: (session: SessionInfo) => Effect.Effect<void, DatabaseError>
    /** Fetches a session by thread id regardless of status. */
    readonly getByThread: (threadId: ThreadId) => Effect.Effect<Option.Option<SessionInfo>, DatabaseError>
    /** True when any row exists for the thread (any status). */
    readonly hasTrackedThread: (threadId: ThreadId) => Effect.Effect<boolean, DatabaseError>
    /** Fetches the session only when its status is 'active'. */
    readonly getActive: (threadId: ThreadId) => Effect.Effect<Option.Option<SessionInfo>, DatabaseError>
    /** Touches last_activity (and updated_at). */
    readonly markActivity: (threadId: ThreadId) => Effect.Effect<void, DatabaseError>
    /** Touches last_health_ok_at (and updated_at). */
    readonly markHealthOk: (threadId: ThreadId) => Effect.Effect<void, DatabaseError>
    /** Sets status/last_error and stamps the matching lifecycle timestamp column. */
    readonly updateStatus: (threadId: ThreadId, status: SessionStatus, lastError?: string | null) => Effect.Effect<void, DatabaseError>
    /** Bumps resume_fail_count and records the error text. */
    readonly incrementResumeFailure: (threadId: ThreadId, lastError: string) => Effect.Effect<void, DatabaseError>
    /** All 'active' sessions, most recently active first. */
    readonly listActive: () => Effect.Effect<ReadonlyArray<SessionInfo>, DatabaseError>
    /** Thread ids of all non-destroyed rows. */
    readonly listTrackedThreads: () => Effect.Effect<ReadonlyArray<ThreadId>, DatabaseError>
    /** Active sessions whose last_activity is older than the cutoff. */
    readonly listStaleActive: (cutoffMinutes: number) => Effect.Effect<ReadonlyArray<SessionInfo>, DatabaseError>
    /** Paused sessions whose paused_at is older than the TTL. */
    readonly listExpiredPaused: (pausedTtlMinutes: number) => Effect.Effect<ReadonlyArray<SessionInfo>, DatabaseError>
  }
}
export class SessionStore extends Context.Tag("@discord/SessionStore")<SessionStore, SessionStore.Service>() {
static readonly layer = Layer.effect(
SessionStore,
Effect.gen(function* () {
const sql = yield* SqliteDb
const db = <A, E, R>(effect: Effect.Effect<A, E, R>) =>
effect.pipe(Effect.mapError((cause) => new DatabaseError({ cause })))
yield* db(initializeSchema.pipe(Effect.provideService(Client.SqlClient, sql)))
const statusSet = (status: SessionStatus) =>
sql.join(",\n", false)(STATUS_COLUMNS.map(([value, column]) =>
sql`${sql(column)} = CASE WHEN ${status} = ${value} THEN CURRENT_TIMESTAMP ELSE ${sql(column)} END`
))
const touch = (column: "last_activity" | "last_health_ok_at") =>
SqlSchema.void({
Request: Thread,
execute: ({ thread_id }) =>
sql`UPDATE discord_sessions
SET ${sql(column)} = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP
WHERE thread_id = ${thread_id}`,
})
const upsertQ = SqlSchema.void({
Request: Write,
execute: (session) =>
sql`INSERT INTO discord_sessions (
thread_id, channel_id, guild_id, sandbox_id, session_id,
preview_url, preview_token, status, last_error,
last_activity, resumed_at, created_at, updated_at
) VALUES (
${session.thread_id}, ${session.channel_id}, ${session.guild_id}, ${session.sandbox_id}, ${session.session_id},
${session.preview_url}, ${session.preview_token}, ${session.status}, ${session.last_error},
CURRENT_TIMESTAMP,
CASE WHEN ${session.status} = 'active' THEN CURRENT_TIMESTAMP ELSE NULL END,
CURRENT_TIMESTAMP, CURRENT_TIMESTAMP
)
ON CONFLICT(thread_id) DO UPDATE SET
channel_id = excluded.channel_id,
guild_id = excluded.guild_id,
sandbox_id = excluded.sandbox_id,
session_id = excluded.session_id,
preview_url = excluded.preview_url,
preview_token = excluded.preview_token,
status = excluded.status,
last_error = excluded.last_error,
last_activity = CURRENT_TIMESTAMP,
resumed_at = CASE WHEN excluded.status = 'active' THEN CURRENT_TIMESTAMP ELSE discord_sessions.resumed_at END,
updated_at = CURRENT_TIMESTAMP`,
})
const byThreadQ = SqlSchema.findOne({
Request: Thread,
Result: SessionInfo,
execute: ({ thread_id }) => sql`SELECT ${sql.literal(ROW)} FROM discord_sessions WHERE thread_id = ${thread_id} LIMIT 1`,
})
const activeQ = SqlSchema.findOne({
Request: Thread,
Result: SessionInfo,
execute: ({ thread_id }) =>
sql`SELECT ${sql.literal(ROW)} FROM discord_sessions WHERE thread_id = ${thread_id} AND status = 'active' LIMIT 1`,
})
const markActivityQ = touch("last_activity")
const markHealthOkQ = touch("last_health_ok_at")
const updateStatusQ = SqlSchema.void({
Request: Status,
execute: ({ thread_id, status, last_error }) =>
sql`UPDATE discord_sessions SET
status = ${status}, last_error = ${last_error},
${statusSet(status)},
updated_at = CURRENT_TIMESTAMP
WHERE thread_id = ${thread_id}`,
})
const incrementResumeFailureQ = SqlSchema.void({
Request: Resume,
execute: ({ thread_id, last_error }) =>
sql`UPDATE discord_sessions SET
resume_fail_count = resume_fail_count + 1, last_error = ${last_error}, updated_at = CURRENT_TIMESTAMP
WHERE thread_id = ${thread_id}`,
})
const listActiveQ = SqlSchema.findAll({
Request: Schema.Void,
Result: SessionInfo,
execute: () =>
sql`SELECT ${sql.literal(ROW)}
FROM discord_sessions
WHERE status = 'active'
ORDER BY last_activity DESC`,
})
const listTrackedThreadsQ = SqlSchema.findAll({
Request: Schema.Void,
Result: Schema.Struct({ threadId: ThreadId }),
execute: () =>
sql`SELECT thread_id AS threadId
FROM discord_sessions
WHERE status != 'destroyed'
ORDER BY updated_at DESC`,
})
const listStaleActiveQ = SqlSchema.findAll({
Request: Minutes,
Result: SessionInfo,
execute: ({ minutes }) =>
sql`SELECT ${sql.literal(ROW)}
FROM discord_sessions
WHERE status = 'active' AND last_activity < datetime('now', '-' || ${minutes} || ' minutes')
ORDER BY last_activity ASC`,
})
const listExpiredPausedQ = SqlSchema.findAll({
Request: Minutes,
Result: SessionInfo,
execute: ({ minutes }) =>
sql`SELECT ${sql.literal(ROW)}
FROM discord_sessions
WHERE status = 'paused' AND paused_at IS NOT NULL
AND paused_at < datetime('now', '-' || ${minutes} || ' minutes')
ORDER BY paused_at ASC`,
})
// --- discord_sessions accessors -------------------------------------------
// Each operation is wrapped in Effect.fn so the call carries a span name.

/** Inserts or replaces the persisted row for this session. */
const upsert = Effect.fn("SessionStore.upsert")(function* (info: SessionInfo) {
  const row = toWrite(info)
  yield* db(upsertQ(row))
})
/** Fetches the session row for a thread (Option — may be absent). */
const getByThread = Effect.fn("SessionStore.getByThread")(function* (threadId: ThreadId) {
  const found = yield* db(byThreadQ({ thread_id: threadId }))
  return found
})
/** True when a row exists for the thread, regardless of its status. */
const hasTrackedThread = Effect.fn("SessionStore.hasTrackedThread")(function* (threadId: ThreadId) {
  return Option.isSome(yield* db(byThreadQ({ thread_id: threadId })))
})
/** Fetches the session row only when it is in the 'active' state. */
const getActive = Effect.fn("SessionStore.getActive")(function* (threadId: ThreadId) {
  const found = yield* db(activeQ({ thread_id: threadId }))
  return found
})
/** Touches the thread's activity timestamp. */
const markActivity = Effect.fn("SessionStore.markActivity")(function* (threadId: ThreadId) {
  yield* db(markActivityQ({ thread_id: threadId }))
})
/** Records a successful health check for the thread. */
const markHealthOk = Effect.fn("SessionStore.markHealthOk")(function* (threadId: ThreadId) {
  yield* db(markHealthOkQ({ thread_id: threadId }))
})
/** Writes a new status, optionally recording an error message (null clears it). */
const updateStatus = Effect.fn("SessionStore.updateStatus")(function* (threadId: ThreadId, status: SessionStatus, lastError?: string | null) {
  const errorText = lastError ?? null
  yield* db(updateStatusQ({ thread_id: threadId, status, last_error: errorText }))
})
/** Bumps the resume-failure counter and stores the triggering error. */
const incrementResumeFailure = Effect.fn("SessionStore.incrementResumeFailure")(function* (threadId: ThreadId, lastError: string) {
  yield* db(incrementResumeFailureQ({ thread_id: threadId, last_error: lastError }))
})
/** All sessions currently in the 'active' state. */
const listActive = Effect.fn("SessionStore.listActive")(function* () {
  const rows = yield* db(listActiveQ(undefined))
  return rows
})
/** Thread ids of every tracked session, in any state. */
const listTrackedThreads = Effect.fn("SessionStore.listTrackedThreads")(function* () {
  const rows = yield* db(listTrackedThreadsQ(undefined))
  return rows.map((r) => r.threadId)
})
/** Active sessions idle for longer than `cutoffMinutes`. */
const listStaleActive = Effect.fn("SessionStore.listStaleActive")(function* (cutoffMinutes: number) {
  const rows = yield* db(listStaleActiveQ({ minutes: cutoffMinutes }))
  return rows
})
/** Paused sessions whose pause age exceeds `pausedTtlMinutes`. */
const listExpiredPaused = Effect.fn("SessionStore.listExpiredPaused")(function* (pausedTtlMinutes: number) {
  const rows = yield* db(listExpiredPausedQ({ minutes: pausedTtlMinutes }))
  return rows
})
return SessionStore.of({
upsert,
getByThread,
hasTrackedThread,
getActive,
markActivity,
markHealthOk,
updateStatus,
incrementResumeFailure,
listActive,
listTrackedThreads,
listStaleActive,
listExpiredPaused,
})
}),
)
/** SessionStore layer with its SQLite dependency pre-wired — presumably the production wiring; confirm against the app's composition root. */
static readonly defaultLayer = SessionStore.layer.pipe(
Layer.provide(SqliteDb.layer),
)
}

View File

@@ -0,0 +1,66 @@
import { Reactivity } from "@effect/experimental"
import * as FileSystem from "@effect/platform/FileSystem"
import { BunFileSystem } from "@effect/platform-bun"
import { SqliteClient } from "@effect/sql-sqlite-bun"
import type * as Client from "@effect/sql/SqlClient"
import { test } from "bun:test"
import { Duration, Effect, Layer, Redacted } from "effect"
import type { Scope } from "effect/Scope"
import type { TestOptions } from "bun:test"
import { AppConfig, Milliseconds, Minutes, Seconds } from "../config"
/**
 * Registers a bun test case backed by an Effect program.
 *
 * The effect returned by `run` may require a `Scope`; it is satisfied here via
 * `Effect.scoped` before the program is executed with `Effect.runPromise`, so
 * any scoped resources are released when the test finishes.
 *
 * @param name - Test name passed straight through to bun's `test`.
 * @param run - Factory producing the effect to execute for this case.
 * @param options - bun test options or a numeric timeout, forwarded as-is.
 */
export const effectTest = (
  name: string,
  // `Scope | never` is just `Scope` — `never` is the identity of a union.
  run: () => Effect.Effect<unknown, unknown, Scope>,
  options?: number | TestOptions,
) =>
  test(name, () => Effect.runPromise(run().pipe(Effect.scoped)), options)
/**
 * Runs `run` against a freshly created SQLite client for `filename`.
 * The client's scope is closed when the returned effect completes.
 */
export const withSqlite = <A, E, R>(filename: string, run: (db: Client.SqlClient) => Effect.Effect<A, E, R>) =>
  Effect.scoped(
    Effect.flatMap(
      Effect.provide(SqliteClient.make({ filename }), Reactivity.layer),
      run,
    ),
  )
/**
 * Creates a scoped temp file with a `.sqlite` suffix and hands its path to
 * `run`. The file's lifetime is tied to the surrounding scope; the Bun
 * filesystem layer is provided internally.
 */
export const withTempSqliteFile = <A, E, R>(
  run: (filename: string) => Effect.Effect<A, E, R>,
  prefix = "discord-test-",
) =>
  Effect.provide(
    Effect.gen(function* () {
      const fs = yield* FileSystem.FileSystem
      const tempPath = yield* fs.makeTempFileScoped({ prefix, suffix: ".sqlite" })
      return yield* run(tempPath)
    }),
    BunFileSystem.layer,
  )
// Inert AppConfig values for tests: dummy secrets, empty ids, in-memory DB.
const testAppConfig = AppConfig.of({
  discordToken: Redacted.make("test"),
  allowedChannelIds: [],
  discordCategoryId: "",
  discordRoleId: "",
  discordRequiredRoleId: "",
  discordCommandGuildId: "",
  databasePath: ":memory:",
  daytonaApiKey: Redacted.make("test"),
  daytonaSnapshot: "",
  openCodeZenApiKey: Redacted.make("test"),
  githubToken: "",
  logLevel: "info" as const,
  healthHost: "0.0.0.0",
  healthPort: 8787,
  turnRoutingMode: "off" as const,
  turnRoutingModel: "test",
  sandboxReusePolicy: "resume_preferred" as const,
  sandboxTimeout: Duration.minutes(30),
  cleanupInterval: Duration.minutes(5),
  staleActiveGraceMinutes: Minutes.make(5),
  pausedTtlMinutes: Minutes.make(180),
  activeHealthCheckTimeoutMs: Milliseconds.make(15000),
  startupHealthTimeoutMs: Milliseconds.make(120000),
  resumeHealthTimeoutMs: Milliseconds.make(120000),
  sandboxCreationTimeout: Seconds.make(180),
  openCodeModel: "opencode/claude-sonnet-4-5",
})
/** Layer that supplies the inert test AppConfig to effects under test. */
export const testConfigLayer = Layer.succeed(AppConfig, testAppConfig)

View File

@@ -0,0 +1,63 @@
import { Schema } from "effect"
// Branded id strings: structurally all `string`, but the brands prevent
// accidentally passing one kind of id where another is expected.

/** Discord thread snowflake id. */
export const ThreadId = Schema.String.pipe(Schema.brand("ThreadId"))
export type ThreadId = typeof ThreadId.Type
/** Discord channel snowflake id. */
export const ChannelId = Schema.String.pipe(Schema.brand("ChannelId"))
export type ChannelId = typeof ChannelId.Type
/** Discord guild (server) snowflake id. */
export const GuildId = Schema.String.pipe(Schema.brand("GuildId"))
export type GuildId = typeof GuildId.Type
/** Daytona sandbox identifier. */
export const SandboxId = Schema.String.pipe(Schema.brand("SandboxId"))
export type SandboxId = typeof SandboxId.Type
/** Session identifier — presumably the OpenCode session inside the sandbox; confirm against the session-creation call site. */
export const SessionId = Schema.String.pipe(Schema.brand("SessionId"))
export type SessionId = typeof SessionId.Type
/** Lifecycle states a sandbox-backed session can be in. */
export const SessionStatus = Schema.Literal(
"creating",
"active",
"pausing",
"paused",
"resuming",
"destroying",
"destroyed",
"error",
)
export type SessionStatus = typeof SessionStatus.Type
/**
 * Access coordinates for a Daytona preview link: the URL + token returned by
 * `sandbox.getPreviewLink()`.
 *
 * This is Daytona's canonical way to reach a port inside a sandbox over HTTP.
 * Used by {@link OpenCodeClient} to talk to the OpenCode server on port 4096.
 */
export class PreviewAccess extends Schema.Class<PreviewAccess>("PreviewAccess")({
  /** Daytona preview URL (HTTP tunnel into the sandbox). */
  previewUrl: Schema.String,
  /** Auth token for the preview link. May be embedded in the URL as `?tkn=`. */
  previewToken: Schema.Union(Schema.Null, Schema.String),
}) {
  /** Builds a PreviewAccess from any value carrying `previewUrl` + `previewToken` (e.g. SandboxHandle, SessionInfo). */
  static from(source: { previewUrl: string; previewToken: string | null }) {
    const { previewUrl, previewToken } = source
    return PreviewAccess.make({ previewUrl, previewToken })
  }
}
/** Per-thread session record — the shape read from and written to the `discord_sessions` table. */
export class SessionInfo extends Schema.Class<SessionInfo>("SessionInfo")({
  threadId: ThreadId,
  channelId: ChannelId,
  guildId: GuildId,
  sandboxId: SandboxId,
  sessionId: SessionId,
  /** Daytona preview URL for the sandbox. */
  previewUrl: Schema.String,
  /** Preview auth token; null when absent. */
  previewToken: Schema.Union(Schema.Null, Schema.String),
  status: SessionStatus,
  /** Most recent error text, or null when none is recorded. */
  lastError: Schema.Union(Schema.Null, Schema.String),
  /** Resume-failure counter (bumped by SessionStore.incrementResumeFailure). */
  resumeFailCount: Schema.Number,
}) {
  /** Returns a copy of this record with only `status` replaced. */
  withStatus(status: SessionStatus) {
    const next = { ...this, status }
    return SessionInfo.make(next)
  }
}

View File

@@ -0,0 +1,22 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"moduleResolution": "bundler",
"moduleDetection": "force",
"strict": true,
"exactOptionalPropertyTypes": true,
"noUnusedLocals": true,
"noImplicitOverride": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"sourceMap": true,
"outDir": "./dist",
"types": ["bun"],
"plugins": [{ "name": "@effect/language-service" }]
},
"include": ["src/**/*.ts", "src/**/*.d.ts"],
"exclude": ["node_modules", "dist"]
}