Compare commits

...

2 Commits

Author SHA1 Message Date
Shoubhit Dash
2fef5e6ce6 refactor(search): route callers through search service 2026-04-14 20:24:45 +05:30
Shoubhit Dash
8eb913ba12 feat(search): add fff-backed search service 2026-04-14 20:24:36 +05:30
21 changed files with 768 additions and 101 deletions

View File

@@ -342,6 +342,7 @@
"@aws-sdk/credential-providers": "3.993.0",
"@clack/prompts": "1.0.0-alpha.1",
"@effect/platform-node": "catalog:",
"@ff-labs/fff-bun": "0.5.2",
"@gitlab/opencode-gitlab-auth": "1.3.3",
"@hono/node-server": "1.19.11",
"@hono/node-ws": "1.3.0",
@@ -1152,6 +1153,24 @@
"@fastify/rate-limit": ["@fastify/rate-limit@10.3.0", "", { "dependencies": { "@lukeed/ms": "^2.0.2", "fastify-plugin": "^5.0.0", "toad-cache": "^3.7.0" } }, "sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q=="],
"@ff-labs/fff-bin-darwin-arm64": ["@ff-labs/fff-bin-darwin-arm64@0.5.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-6hkXiB5R5n0eDxibVTDFFXKca1PVpVysWOwgFLyXYr6b0BLI5UcuDSz4vZ1fj4eoH+9rgqIqd65YMZ7e7s3J5g=="],
"@ff-labs/fff-bin-darwin-x64": ["@ff-labs/fff-bin-darwin-x64@0.5.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-I7q1T9Iw/qnCzGT5dY7nxEdqS18vhTbXwWR2LKNPgxUpoGGv7sMe57QhEGAXT8eKvPh8hgZneU8VLt49wmFYRw=="],
"@ff-labs/fff-bin-linux-arm64-gnu": ["@ff-labs/fff-bin-linux-arm64-gnu@0.5.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-iputHhH4bpOegz+j14JcypuelNPdUwwx3oJavln02jNY1oouPNhC2tNMJKSXgkUgAbuGRn26iXSlUPNwIl0ftQ=="],
"@ff-labs/fff-bin-linux-arm64-musl": ["@ff-labs/fff-bin-linux-arm64-musl@0.5.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5ySavoc0Q5Cr1qKIBj2s8roQEFhCgJM8ndCM6AcPv5tuFirBHJe9iT3OG0x0/u9Mm70MosIYbd3dnbC3QwIWzw=="],
"@ff-labs/fff-bin-linux-x64-gnu": ["@ff-labs/fff-bin-linux-x64-gnu@0.5.2", "", { "os": "linux", "cpu": "x64" }, "sha512-lRMcoeNlGsqN1jVup95TfWfz74Funn8nuzVVxZUzmZbQOWEDkhfULUB8jpaz9Q7sDq5hqhMa5+zJf29sKuw0hw=="],
"@ff-labs/fff-bin-linux-x64-musl": ["@ff-labs/fff-bin-linux-x64-musl@0.5.2", "", { "os": "linux", "cpu": "x64" }, "sha512-QvNTGvZNKj8h/ZuCY/g+/WMQagK6E8U0Zv3vCbZgUsegvMtGpVNW42w8jiUC21136DnmfF8rG0mz8SsR/99qHw=="],
"@ff-labs/fff-bin-win32-arm64": ["@ff-labs/fff-bin-win32-arm64@0.5.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-mT0A0FsZRgVPDsg4czksGYUuMqXodlrg97ss6jEs60upvHWGTWpgkoiRfm80u2ixVzx0z8yqhbquol1C/mvKGA=="],
"@ff-labs/fff-bin-win32-x64": ["@ff-labs/fff-bin-win32-x64@0.5.2", "", { "os": "win32", "cpu": "x64" }, "sha512-aca29wsv0XcTTwKd0GOpDBZPgMrvs0osqGSqp2cYrznH7/wQZySHYDlXni7b4SY6oqxmuQQYXXwQzAn8CoqLsQ=="],
"@ff-labs/fff-bun": ["@ff-labs/fff-bun@0.5.2", "", { "optionalDependencies": { "@ff-labs/fff-bin-darwin-arm64": "0.5.2", "@ff-labs/fff-bin-darwin-x64": "0.5.2", "@ff-labs/fff-bin-linux-arm64-gnu": "0.5.2", "@ff-labs/fff-bin-linux-arm64-musl": "0.5.2", "@ff-labs/fff-bin-linux-x64-gnu": "0.5.2", "@ff-labs/fff-bin-linux-x64-musl": "0.5.2", "@ff-labs/fff-bin-win32-arm64": "0.5.2", "@ff-labs/fff-bin-win32-x64": "0.5.2" }, "peerDependencies": { "bun": ">=1.0.0" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "x64", "arm64", ], "bin": { "fff-demo": "examples/search.ts", "fff-grep": "examples/grep.ts" } }, "sha512-puwgVLi7RfjzqB4biVB5ZCvtLbTf34yEorJ3PhBpwaM0pB8hFM+9VRXdNhGF+I3F0LU6twuNsS43AomgyrdDvQ=="],
"@floating-ui/core": ["@floating-ui/core@1.7.5", "", { "dependencies": { "@floating-ui/utils": "^0.2.11" } }, "sha512-1Ih4WTWyw0+lKyFMcBHGbb5U5FtuHJuujoyyr5zTaWS5EYMeT6Jb2AuDeftsCsEuchO+mM2ij5+q9crhydzLhQ=="],
"@floating-ui/dom": ["@floating-ui/dom@1.7.6", "", { "dependencies": { "@floating-ui/core": "^1.7.5", "@floating-ui/utils": "^0.2.11" } }, "sha512-9gZSAI5XM36880PPMm//9dfiEngYoC6Am2izES1FF406YFsjvyBMmeJ2g4SAju3xWwtuynNRFL2s9hgxpLI5SQ=="],
@@ -1572,6 +1591,30 @@
"@oslojs/jwt": ["@oslojs/jwt@0.2.0", "", { "dependencies": { "@oslojs/encoding": "0.4.1" } }, "sha512-bLE7BtHrURedCn4Mco3ma9L4Y1GR2SMBuIvjWr7rmQ4/W/4Jy70TIAgZ+0nIlk0xHz1vNP8x8DCns45Sb2XRbg=="],
"@oven/bun-darwin-aarch64": ["@oven/bun-darwin-aarch64@1.3.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-b6CQgT28Jx7uDwMTcGo7WFqUd1+wWTdp8XyPi/4LRcL/R4deKT7cLx/Q2ZCWAiK6ZU7yexoCaIaKun6azjRLVA=="],
"@oven/bun-darwin-x64": ["@oven/bun-darwin-x64@1.3.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-//6W21c+GinAMMmxD2hFrFmJH+ZlEwJYbLzAGqp0mLFTli9y74RMtDgI2n9pCupXSpU1Kr1sSylVW9yNbAG9Xg=="],
"@oven/bun-darwin-x64-baseline": ["@oven/bun-darwin-x64-baseline@1.3.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-9jKJNOc9ID3BxPBPR4r1Mp1Wqde89Twi5zo2LoEMLMKbqpvEM/WUGdJ0Vv7OX1QPEqVblFO6NMky5yY7rjDI2w=="],
"@oven/bun-linux-aarch64": ["@oven/bun-linux-aarch64@1.3.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-eTru6tk3K4Ya3SSkUqq/LbdEjwPqLlfINmIhRORrCExBdB1tQbk+WYYflaymO61fkrjnMAjmLTGqk/K37RMIGA=="],
"@oven/bun-linux-aarch64-musl": ["@oven/bun-linux-aarch64-musl@1.3.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-HWIwFzm5fALd9Lli0CgaKb6xOGqODYyHpUTgkn/IHHuS/f3XDCu71+GgkyvfgCYbPoBSgBOfp5TzhRehPcgxow=="],
"@oven/bun-linux-x64": ["@oven/bun-linux-x64@1.3.12", "", { "os": "linux", "cpu": "x64" }, "sha512-H75bcEn46lMDxd+P+R6Q/jlIKl/YO0ZXaalSyWhQHr7qNmFhQt3rOHurFoCxuwQeqFoToh0JpWVyMVzByZqgBQ=="],
"@oven/bun-linux-x64-baseline": ["@oven/bun-linux-x64-baseline@1.3.12", "", { "os": "linux", "cpu": "x64" }, "sha512-0y+lUiQsPvSGsyM/10KtxhVAQ20p6/D+vj01l6vo9gHpYUpyc1L9pSgaPa7SC9TuaiGASlM3Cb62bmSKW0E/3Q=="],
"@oven/bun-linux-x64-musl": ["@oven/bun-linux-x64-musl@1.3.12", "", { "os": "linux", "cpu": "x64" }, "sha512-Zb7T3JxWlArSe44ATO5mtjLCBCt7kenWPl9CYD+zeqq9kHswMv8Cd3h/9uzdv2PA4Flrq57J5XBSuRdStTCXCw=="],
"@oven/bun-linux-x64-musl-baseline": ["@oven/bun-linux-x64-musl-baseline@1.3.12", "", { "os": "linux", "cpu": "x64" }, "sha512-jdsnuFD3H0l4AHtf1nInRHYWIMTWqok0aW8WysjzN5Isn6rBTBGK/ZWX6XjdTgDgcuVbVOYHiLUHHrvT9N6psA=="],
"@oven/bun-windows-aarch64": ["@oven/bun-windows-aarch64@1.3.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-Oq0FIcCgL3JWf/4qRuxI5fxsOGyWJ1j904PDx/1TxxSCWWAu0Hh2o8ck4TcaPVv/3BMc1k6UxqQQKBrdP7a+qQ=="],
"@oven/bun-windows-x64": ["@oven/bun-windows-x64@1.3.12", "", { "os": "win32", "cpu": "x64" }, "sha512-veSntY7pDLDh4XmxZMwTqxfoEVp0BDdeqCBoWL46/TigtniPtDFSTIWBxa6l/RcGzklUA/uqLqmsK/9cBZAm8Q=="],
"@oven/bun-windows-x64-baseline": ["@oven/bun-windows-x64-baseline@1.3.12", "", { "os": "win32", "cpu": "x64" }, "sha512-rV21md7QWnu3r/shev7IFMh6hX8BJHwofxESAofUT4yH866oCIbcNbzp6+fxrj4oGD8uisP6WoaTCboijv9yYg=="],
"@oxc-minify/binding-android-arm64": ["@oxc-minify/binding-android-arm64@0.96.0", "", { "os": "android", "cpu": "arm64" }, "sha512-lzeIEMu/v6Y+La5JSesq4hvyKtKBq84cgQpKYTYM/yGuNk2tfd5Ha31hnC+mTh48lp/5vZH+WBfjVUjjINCfug=="],
"@oxc-minify/binding-darwin-arm64": ["@oxc-minify/binding-darwin-arm64@0.96.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-i0LkJAUXb4BeBFrJQbMKQPoxf8+cFEffDyLSb7NEzzKuPcH8qrVsnEItoOzeAdYam8Sr6qCHVwmBNEQzl7PWpw=="],
@@ -2584,6 +2627,8 @@
"builder-util-runtime": ["builder-util-runtime@9.5.1", "", { "dependencies": { "debug": "^4.3.4", "sax": "^1.2.4" } }, "sha512-qt41tMfgHTllhResqM5DcnHyDIWNgzHvuY2jDcYP9iaGpkWxTUzV6GQjDeLnlR1/DtdlcsWQbA7sByMpmJFTLQ=="],
"bun": ["bun@1.3.12", "", { "optionalDependencies": { "@oven/bun-darwin-aarch64": "1.3.12", "@oven/bun-darwin-x64": "1.3.12", "@oven/bun-darwin-x64-baseline": "1.3.12", "@oven/bun-linux-aarch64": "1.3.12", "@oven/bun-linux-aarch64-musl": "1.3.12", "@oven/bun-linux-x64": "1.3.12", "@oven/bun-linux-x64-baseline": "1.3.12", "@oven/bun-linux-x64-musl": "1.3.12", "@oven/bun-linux-x64-musl-baseline": "1.3.12", "@oven/bun-windows-aarch64": "1.3.12", "@oven/bun-windows-x64": "1.3.12", "@oven/bun-windows-x64-baseline": "1.3.12" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "x64", "arm64", ], "bin": { "bun": "bin/bun.exe", "bunx": "bin/bunx.exe" } }, "sha512-KLwDUqs5WIny/94F4xZ4QfaAE6YWyjR+s79pt/ItQhk2CG+PJQ5xL6VuOWhiyN2eP3fryZK95vog9CTLCaYV2Q=="],
"bun-ffi-structs": ["bun-ffi-structs@0.1.2", "", { "peerDependencies": { "typescript": "^5" } }, "sha512-Lh1oQAYHDcnesJauieA4UNkWGXY9hYck7OA5IaRwE3Bp6K2F2pJSNYqq+hIy7P3uOvo3km3oxS8304g5gDMl/w=="],
"bun-pty": ["bun-pty@0.4.8", "", {}, "sha512-rO70Mrbr13+jxHHHu2YBkk2pNqrJE5cJn29WE++PUr+GFA0hq/VgtQPZANJ8dJo6d7XImvBk37Innt8GM7O28w=="],

View File

@@ -28,6 +28,11 @@
"node": "./src/storage/db.node.ts",
"default": "./src/storage/db.bun.ts"
},
"#fff": {
"bun": "./src/file/fff.bun.ts",
"node": "./src/file/fff.node.ts",
"default": "./src/file/fff.bun.ts"
},
"#pty": {
"bun": "./src/pty/pty.bun.ts",
"node": "./src/pty/pty.node.ts",
@@ -99,6 +104,7 @@
"@aws-sdk/credential-providers": "3.993.0",
"@clack/prompts": "1.0.0-alpha.1",
"@effect/platform-node": "catalog:",
"@ff-labs/fff-bun": "0.5.2",
"@gitlab/opencode-gitlab-auth": "1.3.3",
"@hono/node-server": "1.19.11",
"@hono/node-ws": "1.3.0",

View File

@@ -1,10 +1,10 @@
import { EOL } from "os"
import { Effect } from "effect"
import { AppRuntime } from "@/effect/app-runtime"
import { Search } from "@/file/search"
import { File } from "../../../file"
import { bootstrap } from "../../bootstrap"
import { cmd } from "../cmd"
import { Ripgrep } from "@/file/ripgrep"
const FileSearchCommand = cmd({
command: "search <query>",
@@ -95,7 +95,7 @@ const FileTreeCommand = cmd({
default: process.cwd(),
}),
async handler(args) {
const files = await Ripgrep.tree({ cwd: args.dir, limit: 200 })
const files = await Search.tree({ cwd: args.dir, limit: 200 })
console.log(JSON.stringify(files, null, 2))
},
})

View File

@@ -1,6 +1,6 @@
import { EOL } from "os"
import { AppRuntime } from "../../../effect/app-runtime"
import { Ripgrep } from "../../../file/ripgrep"
import { Search } from "../../../file/search"
import { Instance } from "../../../project/instance"
import { bootstrap } from "../../bootstrap"
import { cmd } from "../cmd"
@@ -21,7 +21,7 @@ const TreeCommand = cmd({
}),
async handler(args) {
await bootstrap(process.cwd(), async () => {
process.stdout.write((await Ripgrep.tree({ cwd: Instance.directory, limit: args.limit })) + EOL)
process.stdout.write((await Search.tree({ cwd: Instance.directory, limit: args.limit })) + EOL)
})
},
})
@@ -46,7 +46,7 @@ const FilesCommand = cmd({
async handler(args) {
await bootstrap(process.cwd(), async () => {
const files: string[] = []
for await (const file of await Ripgrep.files({
for await (const file of await Search.files({
cwd: Instance.directory,
glob: args.glob ? [args.glob] : undefined,
})) {
@@ -79,7 +79,7 @@ const SearchCommand = cmd({
async handler(args) {
await bootstrap(process.cwd(), async () => {
const results = await AppRuntime.runPromise(
Ripgrep.Service.use((svc) =>
Search.Service.use((svc) =>
svc.search({
cwd: Instance.directory,
pattern: args.pattern,

View File

@@ -8,7 +8,7 @@ import { Auth } from "@/auth"
import { Account } from "@/account"
import { Config } from "@/config/config"
import { Git } from "@/git"
import { Ripgrep } from "@/file/ripgrep"
import { Search } from "@/file/search"
import { FileTime } from "@/file/time"
import { File } from "@/file"
import { FileWatcher } from "@/file/watcher"
@@ -56,7 +56,7 @@ export const AppLayer = Layer.mergeAll(
Account.defaultLayer,
Config.defaultLayer,
Git.defaultLayer,
Ripgrep.defaultLayer,
Search.defaultLayer,
FileTime.defaultLayer,
File.defaultLayer,
FileWatcher.defaultLayer,

View File

@@ -0,0 +1,89 @@
import {
FileFinder,
type FileItem,
type GrepCursor,
type GrepMatch,
type GrepResult,
type SearchResult,
} from "@ff-labs/fff-bun"
export namespace Fff {
  /** Success/failure envelope shared by every native fff call. */
  export type Result<T> = { ok: true; value: T } | { ok: false; error: string }

  /** Options forwarded verbatim to FileFinder.create. */
  export interface Init {
    basePath: string
    frecencyDbPath?: string
    historyDbPath?: string
    aiMode?: boolean
  }

  /** One page of fuzzy file-name results plus match totals. */
  export interface Search {
    items: FileItem[]
    scores: SearchResult["scores"]
    totalMatched: number
    totalFiles: number
  }

  export type File = FileItem
  export type Cursor = GrepCursor | null
  export type Hit = GrepMatch

  /** One page of grep results plus pagination cursor and totals. */
  export interface Grep {
    items: GrepResult["items"]
    totalMatched: number
    totalFilesSearched: number
    totalFiles: number
    filteredFileCount: number
    nextCursor: Cursor
    regexFallbackError?: string
  }

  /** Narrowed surface of the native finder that the rest of the app depends on. */
  export interface Picker {
    destroy(): void
    waitForScan(timeout?: number): Result<boolean>
    refreshGitStatus(): Result<number>
    fileSearch(
      query: string,
      opts?: {
        currentFile?: string
        pageIndex?: number
        pageSize?: number
      },
    ): Result<Search>
    grep(
      query: string,
      opts?: {
        mode?: "plain" | "regex" | "fuzzy"
        maxMatchesPerFile?: number
        timeBudgetMs?: number
        beforeContext?: number
        afterContext?: number
        cursor?: Cursor
      },
    ): Result<Grep>
    trackQuery(query: string, file: string): Result<boolean>
    getHistoricalQuery(offset: number): Result<string | null>
  }

  /** Whether the native fff binding can be loaded on this platform. */
  export function available() {
    return FileFinder.isAvailable()
  }

  /**
   * Build a Picker rooted at opts.basePath. Every method simply delegates to
   * the underlying native finder; the wrapper exists so callers only see the
   * narrowed Picker interface above.
   */
  export function create(opts: Init): Result<Picker> {
    const created = FileFinder.create(opts)
    if (!created.ok) return created
    const finder = created.value
    const picker: Picker = {
      destroy() {
        finder.destroy()
      },
      waitForScan(timeout) {
        return finder.waitForScan(timeout)
      },
      refreshGitStatus() {
        return finder.refreshGitStatus()
      },
      fileSearch(query, options) {
        return finder.fileSearch(query, options)
      },
      grep(query, options) {
        return finder.grep(query, options)
      },
      trackQuery(query, file) {
        return finder.trackQuery(query, file)
      },
      getHistoricalQuery(offset) {
        return finder.getHistoricalQuery(offset)
      },
    }
    return { ok: true, value: picker }
  }
}

View File

@@ -0,0 +1,82 @@
export namespace Fff {
  /** Success/failure envelope matching the Bun implementation's API. */
  export type Result<T> = { ok: true; value: T } | { ok: false; error: string }

  /** Options accepted for API parity with the Bun build; never used here. */
  export interface Init {
    basePath: string
    frecencyDbPath?: string
    historyDbPath?: string
    aiMode?: boolean
  }

  /** Minimal file record mirroring the native FileItem shape. */
  export interface File {
    path: string
    relativePath: string
    fileName: string
  }

  /** One page of fuzzy file-name results plus match totals. */
  export interface Search {
    items: File[]
    scores: unknown[]
    totalMatched: number
    totalFiles: number
  }

  export type Cursor = null

  /** One grep match mirroring the native GrepMatch shape. */
  export interface Hit {
    path: string
    relativePath: string
    fileName: string
    lineNumber: number
    byteOffset: number
    lineContent: string
    matchRanges: [number, number][]
    contextBefore?: string[]
    contextAfter?: string[]
  }

  /** One page of grep results plus pagination cursor and totals. */
  export interface Grep {
    items: Hit[]
    totalMatched: number
    totalFilesSearched: number
    totalFiles: number
    filteredFileCount: number
    nextCursor: Cursor
    regexFallbackError?: string
  }

  /** Same surface as the Bun Picker; never instantiated in the Node build. */
  export interface Picker {
    destroy(): void
    waitForScan(timeout?: number): Result<boolean>
    refreshGitStatus(): Result<number>
    fileSearch(
      query: string,
      opts?: {
        currentFile?: string
        pageIndex?: number
        pageSize?: number
      },
    ): Result<Search>
    grep(
      query: string,
      opts?: {
        mode?: "plain" | "regex" | "fuzzy"
        maxMatchesPerFile?: number
        timeBudgetMs?: number
        beforeContext?: number
        afterContext?: number
        cursor?: Cursor
      },
    ): Result<Grep>
    trackQuery(query: string, file: string): Result<boolean>
    getHistoricalQuery(offset: number): Result<string | null>
  }

  /** The Node build ships no native binding, so fff is never available. */
  export function available() {
    return false
  }

  /** Always fails; callers are expected to fall back to ripgrep. */
  export function create(): Result<Picker> {
    return { ok: false, error: "fff unavailable" }
  }
}

View File

@@ -14,7 +14,7 @@ import { Global } from "../global"
import { Instance } from "../project/instance"
import { Log } from "../util/log"
import { Protected } from "./protected"
import { Ripgrep } from "./ripgrep"
import { Search } from "./search"
export namespace File {
export const Info = z
@@ -344,7 +344,7 @@ export namespace File {
Service,
Effect.gen(function* () {
const appFs = yield* AppFileSystem.Service
const rg = yield* Ripgrep.Service
const searchSvc = yield* Search.Service
const git = yield* Git.Service
const state = yield* InstanceState.make<State>(
@@ -384,7 +384,7 @@ export namespace File {
next.dirs = Array.from(dirs).toSorted()
} else {
const files = yield* rg.files({ cwd: Instance.directory }).pipe(
const files = yield* searchSvc.files({ cwd: Instance.directory }).pipe(
Stream.runCollect,
Effect.map((chunk) => [...chunk]),
)
@@ -512,6 +512,8 @@ export namespace File {
if (!Instance.containsPath(full)) throw new Error("Access denied: path escapes project directory")
yield* searchSvc.open({ cwd: Instance.directory, file }).pipe(Effect.ignore)
if (isImageByExtension(file)) {
const exists = yield* appFs.existsSafe(full)
if (exists) {
@@ -617,14 +619,26 @@ export namespace File {
dirs?: boolean
type?: "file" | "directory"
}) {
yield* ensure()
const { cache } = yield* InstanceState.get(state)
const query = input.query.trim()
const limit = input.limit ?? 100
const kind = input.type ?? (input.dirs === false ? "file" : "all")
log.info("search", { query, kind })
if (query && kind === "file") {
const files = yield* searchSvc.file({
cwd: Instance.directory,
query,
limit,
})
if (files.length) {
log.info("search", { query, kind, results: files.length, mode: "fff" })
return files
}
}
yield* ensure()
const { cache } = yield* InstanceState.get(state)
const preferHidden = query.startsWith(".") || query.includes("/.")
if (!query) {
@@ -649,7 +663,7 @@ export namespace File {
)
export const defaultLayer = layer.pipe(
Layer.provide(Ripgrep.defaultLayer),
Layer.provide(Search.defaultLayer),
Layer.provide(AppFileSystem.defaultLayer),
Layer.provide(Git.defaultLayer),
)

View File

@@ -0,0 +1,465 @@
import path from "path"
import z from "zod"
import { Context, Deferred, Effect, Layer, Option } from "effect"
import * as Stream from "effect/Stream"
import { Fff } from "#fff"
import { InstanceState } from "@/effect/instance-state"
import { makeRuntime } from "@/effect/run-service"
import { AppFileSystem } from "@/filesystem"
import { Global } from "@/global"
import { Glob } from "@/util/glob"
import { Log } from "@/util/log"
import { Ripgrep } from "./ripgrep"
export namespace Search {
  const log = Log.create({ service: "file.search" })
  // Cache directory holding the per-project fff frecency/history databases.
  const root = path.join(Global.Path.cache, "fff")

  // One grep match in ripgrep's JSON wire shape, so consumers that previously
  // read Ripgrep match data keep working unchanged.
  export const Match = z.object({
    path: z.object({
      text: z.string(),
    }),
    lines: z.object({
      text: z.string(),
    }),
    line_number: z.number(),
    absolute_offset: z.number(),
    submatches: z.array(
      z.object({
        match: z.object({
          text: z.string(),
        }),
        start: z.number(),
        end: z.number(),
      }),
    ),
  })
  export type Item = z.infer<typeof Match>

  // Search outcome, tagged with whichever engine actually produced it.
  export interface Result {
    readonly items: Item[]
    readonly partial: boolean
    readonly engine: "fff" | "ripgrep"
    readonly regexFallbackError?: string
  }

  // Fuzzy file-name search request. `current` is forwarded to fff as the
  // currently focused file — presumably used to bias ranking; TODO confirm.
  export interface FileInput {
    readonly cwd: string
    readonly query: string
    readonly limit?: number
    readonly current?: string
  }

  // Glob-style file listing request.
  export interface GlobInput {
    readonly cwd: string
    readonly pattern: string
    readonly limit?: number
    readonly signal?: AbortSignal
  }

  // A remembered (directory, query) -> result-files association; open() uses
  // it to credit the query in fff's frecency tracking when a file is opened.
  interface Query {
    readonly dir: string
    readonly text: string
    readonly files: string[]
  }

  // Per-instance mutable state: live pickers keyed by resolved directory,
  // in-flight picker creations (gated by Deferred), and recent queries
  // ordered newest first.
  interface State {
    readonly pick: Map<string, Fff.Picker>
    readonly wait: Map<string, Deferred.Deferred<Fff.Picker, Error>>
    readonly recent: Query[]
  }

  export interface Interface {
    readonly files: (input: Ripgrep.FilesInput) => Stream.Stream<string, Error>
    readonly tree: (input: Ripgrep.TreeInput) => Effect.Effect<string, Error>
    readonly search: (input: Ripgrep.SearchInput) => Effect.Effect<Result, Error>
    readonly file: (input: FileInput) => Effect.Effect<string[]>
    readonly glob: (input: GlobInput) => Effect.Effect<{ files: string[]; truncated: boolean }, Error>
    readonly open: (input: { cwd?: string; file: string }) => Effect.Effect<void>
  }

  export class Service extends Context.Service<Service, Interface>()("@opencode/Search") {}

  // Stable filesystem-safe identifier for a project directory (used to name
  // its fff database files).
  function key(dir: string) {
    return Buffer.from(dir).toString("base64url")
  }

  // Normalize Windows separators so globs and dedupe keys compare equal.
  function norm(text: string) {
    return text.replaceAll("\\", "/")
  }

  // True when the path contains a ".git" segment anywhere.
  function blocked(rel: string) {
    return norm(rel).split("/").includes(".git")
  }

  // Final path segment (basename) of a normalized path.
  function base(file: string) {
    return norm(file).split("/").at(-1) ?? file
  }

  // Glob filter: "!"-prefixed entries exclude, the rest include. A path
  // passes when it (or its basename) matches at least one include — if any
  // includes exist — and matches no exclude.
  function allow(glob: string[] | undefined, rel: string, file: string) {
    if (!glob?.length) return true
    const yes = glob.filter((item) => !item.startsWith("!"))
    const no = glob.filter((item) => item.startsWith("!")).map((item) => item.slice(1))
    if (yes.length > 0 && !yes.some((item) => Glob.match(item, rel) || Glob.match(item, file))) return false
    if (no.some((item) => Glob.match(item, rel) || Glob.match(item, file))) return false
    return true
  }

  // Collapse a glob pattern into the "dir glob" query syntax fff's fileSearch
  // understands. Globstars are flattened away, so callers re-filter results
  // with Glob.match against the original pattern afterwards.
  function include(pattern: string) {
    const val = pattern.trim().replaceAll("\\", "/")
    if (!val) return "*"
    const flat = val.replaceAll("**/", "").replaceAll("/**", "/")
    const idx = flat.lastIndexOf("/")
    if (idx < 0) return flat
    const dir = flat.slice(0, idx + 1)
    const glob = flat.slice(idx + 1)
    if (!glob) return dir
    return `${dir} ${glob}`
  }

  // Record (dir, query) -> result files so a later open() of one of those
  // files can call trackQuery. Deduped queries, at most 64 files each,
  // newest first, capped at 32 entries.
  function remember(st: State, dir: string, text: string, files: string[]) {
    if (!files.length) return
    const next = Array.from(new Set(files.map(AppFileSystem.resolve))).slice(0, 64)
    if (!next.length) return
    const old = st.recent.findIndex((item) => item.dir === dir && item.text === text)
    if (old >= 0) st.recent.splice(old, 1)
    st.recent.unshift({ dir, text, files: next })
    if (st.recent.length > 32) st.recent.length = 32
  }

  // Convert an fff grep hit into the ripgrep-compatible Match shape.
  // Match ranges that slice to an empty string are dropped.
  function item(hit: Fff.Hit): Item {
    return {
      path: { text: norm(hit.relativePath) },
      lines: { text: hit.lineContent },
      line_number: hit.lineNumber,
      absolute_offset: hit.byteOffset,
      submatches: hit.matchRanges
        .map(([start, end]) => {
          const text = hit.lineContent.slice(start, end)
          if (!text) return undefined
          return {
            match: { text },
            start,
            end,
          }
        })
        .filter((row): row is Item["submatches"][number] => Boolean(row)),
    }
  }

  export const layer = Layer.effect(
    Service,
    Effect.gen(function* () {
      const fs = yield* AppFileSystem.Service
      const rg = yield* Ripgrep.Service
      // Instance-scoped state; the finalizer tears down every native picker
      // when the instance is disposed.
      const state = yield* InstanceState.make<State>(
        Effect.fn("Search.state")(() =>
          Effect.gen(function* () {
            const next: State = {
              pick: new Map(),
              wait: new Map(),
              recent: [],
            }
            yield* Effect.addFinalizer(() =>
              Effect.sync(() => {
                for (const pick of next.pick.values()) pick.destroy()
              }),
            )
            return next
          }),
        ),
      )
      // Ripgrep fallback, tagged with its engine so callers can tell which
      // backend produced the results.
      const rip = Effect.fn("Search.rip")(function* (input: Ripgrep.SearchInput) {
        const out = yield* rg.search(input)
        return {
          items: out.items,
          partial: out.partial,
          engine: "ripgrep" as const,
        }
      })
      // Get-or-create the fff picker for cwd. Concurrent callers share one
      // in-flight creation via the Deferred in state.wait. Returns undefined
      // when the native binding is unavailable; fails when creation or the
      // initial scan fails (and destroys the half-built picker).
      // NOTE(review): waitForScan(5_000) runs inside Effect.sync, so it can
      // block synchronously for up to 5s on first use — confirm acceptable.
      const picker = Effect.fn("Search.picker")(function* (cwd: string) {
        if (!Fff.available()) return undefined
        const dir = AppFileSystem.resolve(cwd)
        const st = yield* InstanceState.get(state)
        const old = st.pick.get(dir)
        if (old) return old
        const wait = st.wait.get(dir)
        if (wait) return yield* Deferred.await(wait)
        const gate = yield* Deferred.make<Fff.Picker, Error>()
        st.wait.set(dir, gate)
        try {
          yield* fs.ensureDir(root)
          const id = key(dir)
          const made = yield* Effect.sync(() =>
            Fff.create({
              basePath: dir,
              frecencyDbPath: path.join(root, `${id}.frecency.mdb`),
              historyDbPath: path.join(root, `${id}.history.mdb`),
              aiMode: true,
            }),
          )
          if (!made.ok) {
            const err = new Error(made.error)
            yield* Deferred.fail(gate, err)
            return yield* Effect.fail(err)
          }
          const pick = made.value
          const done = yield* Effect.sync(() => pick.waitForScan(5_000))
          if (!done.ok) {
            pick.destroy()
            const err = new Error(done.error)
            yield* Deferred.fail(gate, err)
            return yield* Effect.fail(err)
          }
          if (!done.value) {
            pick.destroy()
            const err = new Error("fff scan timed out")
            yield* Deferred.fail(gate, err)
            return yield* Effect.fail(err)
          }
          // Git-status refresh is best-effort; a failure only logs.
          const git = yield* Effect.sync(() => pick.refreshGitStatus())
          if (!git.ok) {
            log.warn("git refresh failed", { dir, error: git.error })
          }
          st.pick.set(dir, pick)
          yield* Deferred.succeed(gate, pick)
          return pick
        } finally {
          // Only clear the gate if a newer attempt has not replaced it.
          if (st.wait.get(dir) === gate) st.wait.delete(dir)
        }
      })
      // Plain file listing and tree rendering still delegate to ripgrep.
      const files: Interface["files"] = (input) => rg.files(input)
      const tree: Interface["tree"] = (input) => rg.tree(input)
      // Fuzzy file-name search via fff. Returns [] (never fails) when the
      // query is blank, the picker is unavailable, or the search errors.
      const file: Interface["file"] = Effect.fn("Search.file")(function* (input) {
        const query = input.query.trim()
        if (!query) return []
        const pick = yield* picker(input.cwd).pipe(Effect.catch(() => Effect.succeed(undefined)))
        if (!pick) return []
        const dir = AppFileSystem.resolve(input.cwd)
        const out = yield* Effect.sync(() =>
          pick.fileSearch(query, {
            currentFile: input.current
              ? path.isAbsolute(input.current)
                ? input.current
                : path.join(dir, input.current)
              : undefined,
            pageIndex: 0,
            pageSize: Math.max(input.limit ?? 100, 100),
          }),
        )
        if (!out.ok) {
          log.warn("fff file search failed", { dir, query, error: out.error })
          return []
        }
        // Minimum acceptable score scales with query length — a heuristic
        // to discard weak fuzzy matches; TODO confirm the factor of 10.
        const min = query.length * 10
        const rows = Array.from(
          new Set(
            out.value.items.flatMap((item, idx) => {
              const score = out.value.scores[idx]
              if (!score || score.total < min) return []
              return [norm(item.relativePath)]
            }),
          ),
        )
        remember(
          yield* InstanceState.get(state),
          dir,
          query,
          rows.map((row) => path.join(dir, row)),
        )
        return rows.slice(0, input.limit ?? 100)
      })
      // Content search: fff regex grep with cursor pagination, falling back
      // to ripgrep when fff is unavailable, errors, a file filter is given,
      // or the glob filter eliminated every hit.
      const search: Interface["search"] = Effect.fn("Search.search")(function* (input) {
        input.signal?.throwIfAborted()
        // Explicit file lists are only supported by ripgrep.
        if (input.file?.length) return yield* rip(input)
        const pick = yield* picker(input.cwd).pipe(Effect.catch(() => Effect.succeed(undefined)))
        if (!pick) return yield* rip(input)
        const dir = AppFileSystem.resolve(input.cwd)
        const limit = input.limit ?? 100
        const rows: Item[] = []
        const seen = new Set<string>()
        let cur: Fff.Cursor = null
        let err: string | undefined
        // Page through fff results until the limit is reached or the cursor
        // is exhausted; hits are deduped by (path, line, byte offset).
        while (rows.length < limit) {
          input.signal?.throwIfAborted()
          const out = yield* Effect.sync(() =>
            pick.grep(input.pattern, {
              mode: "regex",
              cursor: cur,
              maxMatchesPerFile: limit,
              timeBudgetMs: 1_500,
            }),
          )
          if (!out.ok) {
            log.warn("fff grep failed", { dir, pattern: input.pattern, error: out.error })
            return yield* rip(input)
          }
          // Keep the first fallback error fff reported, if any.
          err = err ?? out.value.regexFallbackError
          for (const hit of out.value.items) {
            const rel = norm(hit.relativePath)
            if (!allow(input.glob, rel, norm(hit.fileName))) continue
            const id = `${rel}:${hit.lineNumber}:${hit.byteOffset}`
            if (seen.has(id)) continue
            seen.add(id)
            rows.push(item(hit))
            if (rows.length >= limit) break
          }
          if (!out.value.nextCursor) break
          cur = out.value.nextCursor
        }
        // A glob that filtered out everything may mean fff's view and the
        // glob semantics disagree; let ripgrep have the final word.
        if (!rows.length && input.glob?.length) return yield* rip(input)
        remember(
          yield* InstanceState.get(state),
          dir,
          input.pattern,
          Array.from(new Set(rows.map((row) => path.join(dir, row.path.text)))),
        )
        return {
          items: rows,
          partial: false,
          engine: "fff" as const,
          regexFallbackError: err,
        }
      })
      // Glob listing: try fff's fileSearch with the flattened pattern and
      // re-filter with Glob.match; fall back to ripgrep (sorted by mtime,
      // newest first) when fff yields nothing usable.
      const glob: Interface["glob"] = Effect.fn("Search.glob")(function* (input) {
        input.signal?.throwIfAborted()
        const dir = AppFileSystem.resolve(input.cwd)
        const limit = input.limit ?? 100
        const pick = yield* picker(dir).pipe(Effect.catch(() => Effect.succeed(undefined)))
        if (pick) {
          const out = yield* Effect.sync(() =>
            pick.fileSearch(include(input.pattern), {
              currentFile: path.join(dir, ".opencode"),
              pageIndex: 0,
              // Over-fetch so the post-filter still has enough candidates.
              pageSize: Math.max(limit * 4, 200),
            }),
          )
          if (out.ok) {
            const rows = Array.from(
              new Set(
                out.value.items
                  .map((item) => norm(item.relativePath))
                  .filter((item) => !blocked(item))
                  .filter((item) => Glob.match(input.pattern, item) || Glob.match(input.pattern, base(item))),
              ),
            )
            if (rows.length > 0) {
              remember(
                yield* InstanceState.get(state),
                dir,
                input.pattern,
                rows.map((row) => path.join(dir, row)),
              )
              return {
                files: rows.slice(0, limit).map((row) => path.join(dir, row)),
                truncated: rows.length > limit,
              }
            }
          } else {
            log.warn("fff glob search failed", { dir, pattern: input.pattern, error: out.error })
          }
        }
        // Ripgrep fallback: take limit+1 to detect truncation, then stat and
        // sort by modification time, newest first.
        const rows = yield* rg.files({ cwd: dir, glob: [input.pattern], signal: input.signal }).pipe(
          Stream.take(limit + 1),
          Stream.runCollect,
          Effect.map((chunk) => [...chunk]),
        )
        const cut = rows.length > limit
        if (cut) rows.length = limit
        const out = yield* Effect.forEach(
          rows,
          Effect.fnUntraced(function* (file) {
            const full = path.join(dir, file)
            const info = yield* fs.stat(full).pipe(Effect.catch(() => Effect.succeed(undefined)))
            const time =
              info?.mtime.pipe(
                Option.map((item) => item.getTime()),
                Option.getOrElse(() => 0),
              ) ?? 0
            return { file: full, time }
          }),
          { concurrency: 16 },
        )
        out.sort((a, b) => b.time - a.time)
        return {
          files: out.map((item) => item.file),
          truncated: cut,
        }
      })
      // Notify fff that a file from a recent result set was opened, so the
      // originating query earns frecency credit. Best-effort; never fails.
      const open: Interface["open"] = Effect.fn("Search.open")(function* (input) {
        const st = yield* InstanceState.get(state)
        const file = input.cwd
          ? AppFileSystem.resolve(path.isAbsolute(input.file) ? input.file : path.join(input.cwd, input.file))
          : AppFileSystem.resolve(input.file)
        const idx = st.recent.findIndex((item) => item.files.includes(file))
        if (idx < 0) return
        const row = st.recent[idx]
        // Consume the entry so one open credits at most one query.
        st.recent.splice(idx, 1)
        const pick = st.pick.get(row.dir)
        if (!pick) return
        const out = yield* Effect.sync(() => pick.trackQuery(row.text, file))
        if (!out.ok) {
          log.warn("fff track query failed", { dir: row.dir, query: row.text, file, error: out.error })
        }
      })
      return Service.of({ files, tree, search, file, glob, open })
    }),
  )
  export const defaultLayer = layer.pipe(Layer.provide(Ripgrep.defaultLayer), Layer.provide(AppFileSystem.defaultLayer))

  // Promise-based convenience wrappers over a private runtime, for callers
  // outside the Effect world (CLI handlers, HTTP routes).
  const { runPromise } = makeRuntime(Service, defaultLayer)
  export function files(input: Ripgrep.FilesInput) {
    return runPromise((svc) => Stream.toAsyncIterableEffect(svc.files(input)))
  }
  export function tree(input: Ripgrep.TreeInput) {
    return runPromise((svc) => svc.tree(input))
  }
  export function search(input: Ripgrep.SearchInput) {
    return runPromise((svc) => svc.search(input))
  }
  export function file(input: FileInput) {
    return runPromise((svc) => svc.file(input))
  }
  export function glob(input: GlobInput) {
    return runPromise((svc) => svc.glob(input))
  }
  export function open(input: { cwd?: string; file: string }) {
    return runPromise((svc) => svc.open(input))
  }
}

View File

@@ -4,7 +4,7 @@ import { Effect } from "effect"
import z from "zod"
import { AppRuntime } from "../../effect/app-runtime"
import { File } from "../../file"
import { Ripgrep } from "../../file/ripgrep"
import { Search } from "../../file/search"
import { LSP } from "../../lsp"
import { Instance } from "../../project/instance"
import { lazy } from "../../util/lazy"
@@ -15,14 +15,14 @@ export const FileRoutes = lazy(() =>
"/find",
describeRoute({
summary: "Find text",
description: "Search for text patterns across files in the project using ripgrep.",
description: "Search for text patterns across files in the project.",
operationId: "find.text",
responses: {
200: {
description: "Matches",
content: {
"application/json": {
schema: resolver(Ripgrep.Match.shape.data.array()),
schema: resolver(Search.Match.array()),
},
},
},
@@ -37,7 +37,7 @@ export const FileRoutes = lazy(() =>
async (c) => {
const pattern = c.req.valid("query").pattern
const result = await AppRuntime.runPromise(
Ripgrep.Service.use((svc) => svc.search({ cwd: Instance.directory, pattern, limit: 10 })),
Search.Service.use((svc) => svc.search({ cwd: Instance.directory, pattern, limit: 10 })),
)
return c.json(result.items)
},

View File

@@ -1,10 +1,9 @@
import path from "path"
import z from "zod"
import { Effect, Option } from "effect"
import * as Stream from "effect/Stream"
import { Effect } from "effect"
import { InstanceState } from "@/effect/instance-state"
import { AppFileSystem } from "../filesystem"
import { Ripgrep } from "../file/ripgrep"
import { Search } from "../file/search"
import { assertExternalDirectoryEffect } from "./external-directory"
import DESCRIPTION from "./glob.txt"
import { Tool } from "./tool"
@@ -12,8 +11,8 @@ import { Tool } from "./tool"
export const GlobTool = Tool.define(
"glob",
Effect.gen(function* () {
const rg = yield* Ripgrep.Service
const fs = yield* AppFileSystem.Service
const searchSvc = yield* Search.Service
return {
description: DESCRIPTION,
@@ -48,36 +47,18 @@ export const GlobTool = Tool.define(
yield* assertExternalDirectoryEffect(ctx, search, { kind: "directory" })
const limit = 100
let truncated = false
const files = yield* rg.files({ cwd: search, glob: [params.pattern], signal: ctx.abort }).pipe(
Stream.mapEffect((file) =>
Effect.gen(function* () {
const full = path.resolve(search, file)
const info = yield* fs.stat(full).pipe(Effect.catch(() => Effect.succeed(undefined)))
const mtime =
info?.mtime.pipe(
Option.map((date) => date.getTime()),
Option.getOrElse(() => 0),
) ?? 0
return { path: full, mtime }
}),
),
Stream.take(limit + 1),
Stream.runCollect,
Effect.map((chunk) => [...chunk]),
)
if (files.length > limit) {
truncated = true
files.length = limit
}
files.sort((a, b) => b.mtime - a.mtime)
const files = yield* searchSvc.glob({
cwd: search,
pattern: params.pattern,
limit,
signal: ctx.abort,
})
const output = []
if (files.length === 0) output.push("No files found")
if (files.length > 0) {
output.push(...files.map((file) => file.path))
if (truncated) {
if (files.files.length === 0) output.push("No files found")
if (files.files.length > 0) {
output.push(...files.files)
if (files.truncated) {
output.push("")
output.push(
`(Results are truncated: showing first ${limit} results. Consider using a more specific path or pattern.)`,
@@ -88,8 +69,8 @@ export const GlobTool = Tool.define(
return {
title: path.relative(ins.worktree, search),
metadata: {
count: files.length,
truncated,
count: files.files.length,
truncated: files.truncated,
},
output: output.join("\n"),
}

View File

@@ -1,9 +1,9 @@
import path from "path"
import z from "zod"
import { Effect, Option } from "effect"
import { Effect } from "effect"
import { InstanceState } from "@/effect/instance-state"
import { AppFileSystem } from "../filesystem"
import { Ripgrep } from "../file/ripgrep"
import { Search } from "../file/search"
import { assertExternalDirectoryEffect } from "./external-directory"
import DESCRIPTION from "./grep.txt"
import { Tool } from "./tool"
@@ -14,7 +14,7 @@ export const GrepTool = Tool.define(
"grep",
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const rg = yield* Ripgrep.Service
const searchSvc = yield* Search.Service
return {
description: DESCRIPTION,
@@ -58,7 +58,7 @@ export const GrepTool = Tool.define(
kind: info?.type === "Directory" ? "directory" : "file",
})
const result = yield* rg.search({
const result = yield* searchSvc.search({
cwd,
pattern: params.pattern,
glob: params.include ? [params.include] : undefined,
@@ -74,37 +74,13 @@ export const GrepTool = Tool.define(
line: item.line_number,
text: item.lines.text,
}))
const times = new Map(
(yield* Effect.forEach(
[...new Set(rows.map((row) => row.path))],
Effect.fnUntraced(function* (file) {
const info = yield* fs.stat(file).pipe(Effect.catch(() => Effect.succeed(undefined)))
if (!info || info.type === "Directory") return undefined
return [
file,
info.mtime.pipe(
Option.map((time) => time.getTime()),
Option.getOrElse(() => 0),
) ?? 0,
] as const
}),
{ concurrency: 16 },
)).filter((entry): entry is readonly [string, number] => Boolean(entry)),
)
const matches = rows.flatMap((row) => {
const mtime = times.get(row.path)
if (mtime === undefined) return []
return [{ ...row, mtime }]
})
matches.sort((a, b) => b.mtime - a.mtime)
const limit = 100
const truncated = matches.length > limit
const final = truncated ? matches.slice(0, limit) : matches
const truncated = rows.length > limit
const final = truncated ? rows.slice(0, limit) : rows
if (final.length === 0) return empty
const total = matches.length
const total = rows.length
const output = [`Found ${total} matches${truncated ? ` (showing first ${limit})` : ""}`]
let current = ""
@@ -130,6 +106,10 @@ export const GrepTool = Tool.define(
output.push("")
output.push("(Some paths were inaccessible and skipped)")
}
if (result.regexFallbackError) {
output.push("")
output.push(`(Regex fallback: ${result.regexFallbackError})`)
}
return {
title: params.pattern,

View File

@@ -3,7 +3,7 @@ import z from "zod"
import { Effect } from "effect"
import * as Stream from "effect/Stream"
import { InstanceState } from "@/effect/instance-state"
import { Ripgrep } from "../file/ripgrep"
import { Search } from "../file/search"
import { assertExternalDirectoryEffect } from "./external-directory"
import DESCRIPTION from "./ls.txt"
import { Tool } from "./tool"
@@ -40,7 +40,7 @@ const LIMIT = 100
export const ListTool = Tool.define(
"list",
Effect.gen(function* () {
const rg = yield* Ripgrep.Service
const searchSvc = yield* Search.Service
return {
description: DESCRIPTION,
@@ -67,7 +67,7 @@ export const ListTool = Tool.define(
})
const glob = IGNORE_PATTERNS.map((item) => `!${item}*`).concat(params.ignore?.map((item) => `!${item}`) || [])
const files = yield* rg.files({ cwd: search, glob, signal: ctx.abort }).pipe(
const files = yield* searchSvc.files({ cwd: search, glob, signal: ctx.abort }).pipe(
Stream.take(LIMIT + 1),
Stream.runCollect,
Effect.map((chunk) => [...chunk]),

View File

@@ -8,6 +8,7 @@ import { Tool } from "./tool"
import { AppFileSystem } from "../filesystem"
import { LSP } from "../lsp"
import { FileTime } from "../file/time"
import { Search } from "../file/search"
import DESCRIPTION from "./read.txt"
import { Instance } from "../project/instance"
import { assertExternalDirectoryEffect } from "./external-directory"
@@ -31,6 +32,7 @@ export const ReadTool = Tool.define(
const fs = yield* AppFileSystem.Service
const instruction = yield* Instruction.Service
const lsp = yield* LSP.Service
const search = yield* Search.Service
const time = yield* FileTime.Service
const scope = yield* Scope.Scope
@@ -76,6 +78,7 @@ export const ReadTool = Tool.define(
})
const warm = Effect.fn("ReadTool.warm")(function* (filepath: string, sessionID: Tool.Context["sessionID"]) {
yield* search.open({ file: filepath }).pipe(Effect.ignore)
yield* lsp.touchFile(filepath, false).pipe(Effect.ignore, Effect.forkIn(scope))
yield* time.read(sessionID, filepath)
})

View File

@@ -33,7 +33,7 @@ import { Effect, Layer, Context } from "effect"
import { FetchHttpClient, HttpClient } from "effect/unstable/http"
import { ChildProcessSpawner } from "effect/unstable/process/ChildProcessSpawner"
import * as CrossSpawnSpawner from "@/effect/cross-spawn-spawner"
import { Ripgrep } from "../file/ripgrep"
import { Search } from "../file/search"
import { Format } from "../format"
import { InstanceState } from "@/effect/instance-state"
import { Env } from "../env"
@@ -93,7 +93,7 @@ export namespace ToolRegistry {
| Bus.Service
| HttpClient.HttpClient
| ChildProcessSpawner
| Ripgrep.Service
| Search.Service
| Format.Service
| Truncate.Service
> = Layer.effect(
@@ -344,7 +344,7 @@ export namespace ToolRegistry {
Layer.provide(FetchHttpClient.layer),
Layer.provide(Format.defaultLayer),
Layer.provide(CrossSpawnSpawner.defaultLayer),
Layer.provide(Ripgrep.defaultLayer),
Layer.provide(Search.defaultLayer),
Layer.provide(Truncate.defaultLayer),
),
)

View File

@@ -4,7 +4,7 @@ import z from "zod"
import { Effect } from "effect"
import * as Stream from "effect/Stream"
import { EffectLogger } from "@/effect/logger"
import { Ripgrep } from "../file/ripgrep"
import { Search } from "../file/search"
import { Skill } from "../skill"
import { Tool } from "./tool"
@@ -16,7 +16,7 @@ export const SkillTool = Tool.define(
"skill",
Effect.gen(function* () {
const skill = yield* Skill.Service
const rg = yield* Ripgrep.Service
const searchSvc = yield* Search.Service
return () =>
Effect.gen(function* () {
@@ -62,7 +62,7 @@ export const SkillTool = Tool.define(
const dir = path.dirname(info.location)
const base = pathToFileURL(dir).href
const limit = 10
const files = yield* rg.files({ cwd: dir, follow: false, hidden: true, signal: ctx.abort }).pipe(
const files = yield* searchSvc.files({ cwd: dir, follow: false, hidden: true, signal: ctx.abort }).pipe(
Stream.filter((file) => !file.includes("SKILL.md")),
Stream.map((file) => path.resolve(dir, file)),
Stream.take(limit),

View File

@@ -40,7 +40,7 @@ import { ToolRegistry } from "../../src/tool/registry"
import { Truncate } from "../../src/tool/truncate"
import { Log } from "../../src/util/log"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { Ripgrep } from "../../src/file/ripgrep"
import { Search } from "../../src/file/search"
import { Format } from "../../src/format"
import { provideTmpdirInstance, provideTmpdirServer } from "../fixture/fixture"
import { testEffect } from "../lib/effect"
@@ -187,7 +187,7 @@ function makeHttp() {
Layer.provide(Skill.defaultLayer),
Layer.provide(FetchHttpClient.layer),
Layer.provide(CrossSpawnSpawner.defaultLayer),
Layer.provide(Ripgrep.defaultLayer),
Layer.provide(Search.defaultLayer),
Layer.provide(Format.defaultLayer),
Layer.provideMerge(todo),
Layer.provideMerge(question),

View File

@@ -55,7 +55,7 @@ import { ToolRegistry } from "../../src/tool/registry"
import { Truncate } from "../../src/tool/truncate"
import { AppFileSystem } from "../../src/filesystem"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { Ripgrep } from "../../src/file/ripgrep"
import { Search } from "../../src/file/search"
import { Format } from "../../src/format"
Log.init({ print: false })
@@ -141,7 +141,7 @@ function makeHttp() {
Layer.provide(Skill.defaultLayer),
Layer.provide(FetchHttpClient.layer),
Layer.provide(CrossSpawnSpawner.defaultLayer),
Layer.provide(Ripgrep.defaultLayer),
Layer.provide(Search.defaultLayer),
Layer.provide(Format.defaultLayer),
Layer.provideMerge(todo),
Layer.provideMerge(question),

View File

@@ -4,7 +4,7 @@ import { Cause, Effect, Exit, Layer } from "effect"
import { GlobTool } from "../../src/tool/glob"
import { SessionID, MessageID } from "../../src/session/schema"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { Ripgrep } from "../../src/file/ripgrep"
import { Search } from "../../src/file/search"
import { AppFileSystem } from "../../src/filesystem"
import { Truncate } from "../../src/tool/truncate"
import { Agent } from "../../src/agent/agent"
@@ -15,7 +15,7 @@ const it = testEffect(
Layer.mergeAll(
CrossSpawnSpawner.defaultLayer,
AppFileSystem.defaultLayer,
Ripgrep.defaultLayer,
Search.defaultLayer,
Truncate.defaultLayer,
Agent.defaultLayer,
),

View File

@@ -7,7 +7,7 @@ import { SessionID, MessageID } from "../../src/session/schema"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { Truncate } from "../../src/tool/truncate"
import { Agent } from "../../src/agent/agent"
import { Ripgrep } from "../../src/file/ripgrep"
import { Search } from "../../src/file/search"
import { AppFileSystem } from "../../src/filesystem"
import { testEffect } from "../lib/effect"
@@ -15,7 +15,7 @@ const it = testEffect(
Layer.mergeAll(
CrossSpawnSpawner.defaultLayer,
AppFileSystem.defaultLayer,
Ripgrep.defaultLayer,
Search.defaultLayer,
Truncate.defaultLayer,
Agent.defaultLayer,
),

View File

@@ -4,6 +4,7 @@ import path from "path"
import { Agent } from "../../src/agent/agent"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
import { AppFileSystem } from "../../src/filesystem"
import { Search } from "../../src/file/search"
import { FileTime } from "../../src/file/time"
import { LSP } from "../../src/lsp"
import { Permission } from "../../src/permission"
@@ -42,6 +43,7 @@ const it = testEffect(
FileTime.defaultLayer,
Instruction.defaultLayer,
LSP.defaultLayer,
Search.defaultLayer,
Truncate.defaultLayer,
),
)