diff --git a/bun.lock b/bun.lock index 01d63a24ab89..d0ceba948119 100644 --- a/bun.lock +++ b/bun.lock @@ -6,6 +6,7 @@ "name": "opencode", "dependencies": { "@aws-sdk/client-s3": "3.933.0", + "@ff-labs/fff-bun": "0.6.4", "@opencode-ai/plugin": "workspace:*", "@opencode-ai/script": "workspace:*", "@opencode-ai/sdk": "workspace:*", @@ -380,6 +381,7 @@ "@clack/prompts": "1.0.0-alpha.1", "@effect/opentelemetry": "catalog:", "@effect/platform-node": "catalog:", + "@ff-labs/fff-bun": "0.6.4", "@gitlab/opencode-gitlab-auth": "1.3.3", "@hono/node-server": "1.19.11", "@hono/node-ws": "1.3.0", @@ -1193,6 +1195,24 @@ "@fastify/rate-limit": ["@fastify/rate-limit@10.3.0", "", { "dependencies": { "@lukeed/ms": "^2.0.2", "fastify-plugin": "^5.0.0", "toad-cache": "^3.7.0" } }, "sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q=="], + "@ff-labs/fff-bin-darwin-arm64": ["@ff-labs/fff-bin-darwin-arm64@0.6.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-IiLrUeVL+6oeHlzyBGp6ACrb4j7vl66/GXtkoSASVtXJGt5J3IH9m9OvQTwwBxrQtPSgb0tI7TvdYPX0RGH9Qg=="], + + "@ff-labs/fff-bin-darwin-x64": ["@ff-labs/fff-bin-darwin-x64@0.6.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-COd27jhKVKIsLT3krQ2DrOkgMYJ5jPSnRLrbarwtYe5jVbayKR6N4wUhtGF3VXWnNoYTNstq2uBYcHphOp5AkQ=="], + + "@ff-labs/fff-bin-linux-arm64-gnu": ["@ff-labs/fff-bin-linux-arm64-gnu@0.6.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-E2UWCgpBwCPWaupHnn7RAmHjyCyEmKwruaMyigTjI2tg6gXYvoAIjNR+TGsKkTqGl0B1BmP0sD8Uh65K5koxTQ=="], + + "@ff-labs/fff-bin-linux-arm64-musl": ["@ff-labs/fff-bin-linux-arm64-musl@0.6.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-t9K3NzYkBxK2UYp6Xy7Lac/RAL+cWa+78HcRNI693k0U9c6G65J2/cVn5SVOiJ1EcnEnC3MCuPsCEmj/9d3Pqw=="], + + "@ff-labs/fff-bin-linux-x64-gnu": ["@ff-labs/fff-bin-linux-x64-gnu@0.6.4", "", { "os": "linux", "cpu": "x64" }, "sha512-tNSYpZok9HpXZoAlTEWbJF0ZQtZKZNmpyU2WfVRa3WhsPZ/ej+YUuPEPI83OYRqxy4F0OVtGYCqj7dos/AVSLg=="], + + "@ff-labs/fff-bin-linux-x64-musl": 
["@ff-labs/fff-bin-linux-x64-musl@0.6.4", "", { "os": "linux", "cpu": "x64" }, "sha512-GHJhJ3P7cGth4F0VTyoe3maFQT0cW+RbMm7R5XKkNZZW2rNtR+4jo+neAU4gchYH6jv7ajCB5HQdNBcou8zxKA=="], + + "@ff-labs/fff-bin-win32-arm64": ["@ff-labs/fff-bin-win32-arm64@0.6.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-+wmwbeoAvkce2ZduY1FSjzMnmE4LNJDpXa1q++mTz+/LsUvjPBl1dSchvXxuyLzKAqhigHpJ1k9Xull0iIDUjQ=="], + + "@ff-labs/fff-bin-win32-x64": ["@ff-labs/fff-bin-win32-x64@0.6.4", "", { "os": "win32", "cpu": "x64" }, "sha512-OOEXudgFtkXS5iF+6JoawCMpK76xg0NodUhsc2bhTKXJP8DjfQhU/ARpxoPFiX7uwAy1/xaPjbaT5dfP16TaCg=="], + + "@ff-labs/fff-bun": ["@ff-labs/fff-bun@0.6.4", "", { "optionalDependencies": { "@ff-labs/fff-bin-darwin-arm64": "0.6.4", "@ff-labs/fff-bin-darwin-x64": "0.6.4", "@ff-labs/fff-bin-linux-arm64-gnu": "0.6.4", "@ff-labs/fff-bin-linux-arm64-musl": "0.6.4", "@ff-labs/fff-bin-linux-x64-gnu": "0.6.4", "@ff-labs/fff-bin-linux-x64-musl": "0.6.4", "@ff-labs/fff-bin-win32-arm64": "0.6.4", "@ff-labs/fff-bin-win32-x64": "0.6.4" }, "peerDependencies": { "bun": ">=1.0.0" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "x64", "arm64", ], "bin": { "fff-demo": "examples/search.ts", "fff-grep": "examples/grep.ts" } }, "sha512-f9k0W1zTbYX0kA+duWobeXjvnllPr9B1DyleXq/ZSz/9Hvz8Gf85V/Kad7Fi2WXenuscacdq8Asivuy4wKbFlQ=="], + "@floating-ui/core": ["@floating-ui/core@1.7.5", "", { "dependencies": { "@floating-ui/utils": "^0.2.11" } }, "sha512-1Ih4WTWyw0+lKyFMcBHGbb5U5FtuHJuujoyyr5zTaWS5EYMeT6Jb2AuDeftsCsEuchO+mM2ij5+q9crhydzLhQ=="], "@floating-ui/dom": ["@floating-ui/dom@1.7.6", "", { "dependencies": { "@floating-ui/core": "^1.7.5", "@floating-ui/utils": "^0.2.11" } }, "sha512-9gZSAI5XM36880PPMm//9dfiEngYoC6Am2izES1FF406YFsjvyBMmeJ2g4SAju3xWwtuynNRFL2s9hgxpLI5SQ=="], @@ -1639,6 +1659,30 @@ "@oslojs/jwt": ["@oslojs/jwt@0.2.0", "", { "dependencies": { "@oslojs/encoding": "0.4.1" } }, "sha512-bLE7BtHrURedCn4Mco3ma9L4Y1GR2SMBuIvjWr7rmQ4/W/4Jy70TIAgZ+0nIlk0xHz1vNP8x8DCns45Sb2XRbg=="], + 
"@oven/bun-darwin-aarch64": ["@oven/bun-darwin-aarch64@1.3.13", "", { "os": "darwin", "cpu": "arm64" }, "sha512-qAS6Hg8Q14ckfBuqJ2Zh7gBQSVSUHeibSq4OFqBTv6DzyJuxYlr0sdYQzmYmnbPxbqobekqUDTa/4XEaqRi7vg=="], + + "@oven/bun-darwin-x64": ["@oven/bun-darwin-x64@1.3.13", "", { "os": "darwin", "cpu": "x64" }, "sha512-kGePeDD4IN4imo+H4uLjQGZLmvyYQg+nKr2P0nt4ksXXrWA4HE+mb0/TUPHfRI127DocXQpew+fvrHuHR5mpJQ=="], + + "@oven/bun-darwin-x64-baseline": ["@oven/bun-darwin-x64-baseline@1.3.13", "", { "os": "darwin", "cpu": "x64" }, "sha512-gMEQayUpmCPYaE9zkNBj9TiQqHupnhjOYcuSzxFjzIjHJBUO4VjNnrpbKVeXNs+rKHFothORDd2QKquu5paSPQ=="], + + "@oven/bun-linux-aarch64": ["@oven/bun-linux-aarch64@1.3.13", "", { "os": "linux", "cpu": "arm64" }, "sha512-NbLOJdr+RBFO1vFZ2YUFg4oVJ+2ua6zrwo4ZWRs0jKKcGJWtbY2wY5uz+i0PkwH6b9HYaYDgVTzE4ev06ncYZw=="], + + "@oven/bun-linux-aarch64-musl": ["@oven/bun-linux-aarch64-musl@1.3.13", "", { "os": "linux", "cpu": "arm64" }, "sha512-UV9EE18VE5aRhWtV2L6MTAGGn3slhJJ2OW/m+FJM15maHm0qf1V7TaZY0FovxhdQRvnklSiQ7Ntv0H5TUX4w0g=="], + + "@oven/bun-linux-x64": ["@oven/bun-linux-x64@1.3.13", "", { "os": "linux", "cpu": "x64" }, "sha512-UwttIUXoe9fS+40OcjoaRHgZw+HCPFqBVWEXkXqAJ3W7wA0XPZrWsoMAD9sGh3TaLqrwdiMo5xPogwpXhOtVXA=="], + + "@oven/bun-linux-x64-baseline": ["@oven/bun-linux-x64-baseline@1.3.13", "", { "os": "linux", "cpu": "x64" }, "sha512-fOi4ziKzgJG4UrrNd4AicBs6Fu9GY5xOqg+9tC76nuZNDAdSh6++kzab6TNi1Ck0Yzq6zIBIdGit6/0uSbBn8A=="], + + "@oven/bun-linux-x64-musl": ["@oven/bun-linux-x64-musl@1.3.13", "", { "os": "linux", "cpu": "x64" }, "sha512-+VHhE44kEjCXcTFHyc81zfTxL9+vzh9RqIh7gM1iWNhxpctD9kzntbUkP3UTFTwwNjoou1o8VRyxQafvc4OepA=="], + + "@oven/bun-linux-x64-musl-baseline": ["@oven/bun-linux-x64-musl-baseline@1.3.13", "", { "os": "linux", "cpu": "x64" }, "sha512-fqBKuiiWLEu2dVkowZaXgKS98xfrvBqivdoxRtRP3eINcpI1dcelGbsOz+Xphn7tbGAuBiE1/0AelvvvdqS9rg=="], + + "@oven/bun-windows-aarch64": ["@oven/bun-windows-aarch64@1.3.13", "", { "os": "win32", "cpu": "arm64" }, 
"sha512-+EvdRWRCRg95Xea4M2lqSJFTjzQBTJDQTMlbG8bmwFkVTN16MdmSH7xhfxVQWUOyZBLEpIwuNFIlBBxVCwSUyQ=="], + + "@oven/bun-windows-x64": ["@oven/bun-windows-x64@1.3.13", "", { "os": "win32", "cpu": "x64" }, "sha512-vqDEFX63ZZQF3YstPSpPD+RxNm5AILPdUuuKpNwsj7ld4NjhdHUYkAmLXDtKNWt9JMRL10bop//W8faY/LV+RQ=="], + + "@oven/bun-windows-x64-baseline": ["@oven/bun-windows-x64-baseline@1.3.13", "", { "os": "win32", "cpu": "x64" }, "sha512-6gy4hhQSjq/T/S9hC9m3NxY0RY+9Ww+XNlB+8koIMTsMSYEjk7Ho+hFHQz1Bn4W61Ub7Vykufg+jgDgPfa2GFA=="], + "@oxc-minify/binding-android-arm64": ["@oxc-minify/binding-android-arm64@0.96.0", "", { "os": "android", "cpu": "arm64" }, "sha512-lzeIEMu/v6Y+La5JSesq4hvyKtKBq84cgQpKYTYM/yGuNk2tfd5Ha31hnC+mTh48lp/5vZH+WBfjVUjjINCfug=="], "@oxc-minify/binding-darwin-arm64": ["@oxc-minify/binding-darwin-arm64@0.96.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-i0LkJAUXb4BeBFrJQbMKQPoxf8+cFEffDyLSb7NEzzKuPcH8qrVsnEItoOzeAdYam8Sr6qCHVwmBNEQzl7PWpw=="], @@ -2725,6 +2769,8 @@ "builder-util-runtime": ["builder-util-runtime@9.5.1", "", { "dependencies": { "debug": "^4.3.4", "sax": "^1.2.4" } }, "sha512-qt41tMfgHTllhResqM5DcnHyDIWNgzHvuY2jDcYP9iaGpkWxTUzV6GQjDeLnlR1/DtdlcsWQbA7sByMpmJFTLQ=="], + "bun": ["bun@1.3.13", "", { "optionalDependencies": { "@oven/bun-darwin-aarch64": "1.3.13", "@oven/bun-darwin-x64": "1.3.13", "@oven/bun-darwin-x64-baseline": "1.3.13", "@oven/bun-linux-aarch64": "1.3.13", "@oven/bun-linux-aarch64-musl": "1.3.13", "@oven/bun-linux-x64": "1.3.13", "@oven/bun-linux-x64-baseline": "1.3.13", "@oven/bun-linux-x64-musl": "1.3.13", "@oven/bun-linux-x64-musl-baseline": "1.3.13", "@oven/bun-windows-aarch64": "1.3.13", "@oven/bun-windows-x64": "1.3.13", "@oven/bun-windows-x64-baseline": "1.3.13" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "x64", "arm64", ], "bin": { "bun": "bin/bun.exe", "bunx": "bin/bunx.exe" } }, "sha512-b9T4xZ8KqCHs4+TkHJv540LG1B8OD7noKu0Qaizusx3jFtMDHY6osNqgbaOlwW2B8RB2AKzz+sjzlGKIGxIjZw=="], + "bun-ffi-structs": 
["bun-ffi-structs@0.1.2", "", { "peerDependencies": { "typescript": "^5" } }, "sha512-Lh1oQAYHDcnesJauieA4UNkWGXY9hYck7OA5IaRwE3Bp6K2F2pJSNYqq+hIy7P3uOvo3km3oxS8304g5gDMl/w=="], "bun-pty": ["bun-pty@0.4.8", "", {}, "sha512-rO70Mrbr13+jxHHHu2YBkk2pNqrJE5cJn29WE++PUr+GFA0hq/VgtQPZANJ8dJo6d7XImvBk37Innt8GM7O28w=="], diff --git a/package.json b/package.json index 2e53fab9cc5f..6b66a838ccc8 100644 --- a/package.json +++ b/package.json @@ -98,6 +98,7 @@ }, "dependencies": { "@aws-sdk/client-s3": "3.933.0", + "@ff-labs/fff-bun": "0.6.4", "@opencode-ai/plugin": "workspace:*", "@opencode-ai/script": "workspace:*", "@opencode-ai/sdk": "workspace:*", diff --git a/packages/app/src/components/dialog-select-directory.tsx b/packages/app/src/components/dialog-select-directory.tsx index 005d28709161..1bd21de3c04a 100644 --- a/packages/app/src/components/dialog-select-directory.tsx +++ b/packages/app/src/components/dialog-select-directory.tsx @@ -200,7 +200,7 @@ function useDirectorySearch(args: { if (!isPath) { const results = await find() if (!active()) return [] - return results.map((rel) => joinPath(scopedInput.directory, rel)).slice(0, 50) + return results.map((item) => joinPath(scopedInput.directory, item.path)).slice(0, 50) } const segments = query.replace(/^\/+/, "").split("/") diff --git a/packages/app/src/components/prompt-input.tsx b/packages/app/src/components/prompt-input.tsx index 0a18096164f0..79e21cd3dfea 100644 --- a/packages/app/src/components/prompt-input.tsx +++ b/packages/app/src/components/prompt-input.tsx @@ -591,14 +591,19 @@ export const PromptInput: Component = (props) => { const seen = new Set(open) const pinned: AtOption[] = open.map((path) => ({ type: "file", path, display: path, recent: true })) if (!query.trim()) return [...agents, ...pinned] - const paths = await files.searchFilesAndDirectories(query) + const pathy = /[./\\]/.test(query) + const seek = query.replaceAll("\\", "/") + const paths = await files.searchFiles(seek) const fileOptions: 
AtOption[] = paths .filter((path) => !seen.has(path)) .map((path) => ({ type: "file", path, display: path })) + if (pathy) return fileOptions return [...agents, ...pinned, ...fileOptions] }, key: atKey, filterKeys: ["display"], + stale: false, + fuzzy: (query) => !/[./\\]/.test(query), groupBy: (item) => { if (item.type === "agent") return "agent" if (item.recent) return "recent" diff --git a/packages/app/src/context/file.tsx b/packages/app/src/context/file.tsx index 0298e3416afd..b39b104558cc 100644 --- a/packages/app/src/context/file.tsx +++ b/packages/app/src/context/file.tsx @@ -196,7 +196,7 @@ export const { use: useFile, provider: FileProvider } = createSimpleContext({ const search = (query: string, dirs: "true" | "false") => sdk.client.find.files({ query, dirs }).then( - (x) => (x.data ?? []).map(path.normalize), + (x) => (x.data ?? []).map((item) => path.normalize(item.path)), () => [], ) diff --git a/packages/core/src/util/log.ts b/packages/core/src/util/log.ts index a61c15f7a7a4..8d618cbcd5c8 100644 --- a/packages/core/src/util/log.ts +++ b/packages/core/src/util/log.ts @@ -18,6 +18,11 @@ const keep = 10 let level: Level = "INFO" +/** Exposes internal log level at runtime */ +export function currentLevel() { + return level +} + function shouldLog(input: Level): boolean { return levelPriority[input] >= levelPriority[level] } diff --git a/packages/opencode/bench-fff.ts b/packages/opencode/bench-fff.ts new file mode 100644 index 000000000000..d0541e792cd1 --- /dev/null +++ b/packages/opencode/bench-fff.ts @@ -0,0 +1,40 @@ +import { Fff } from "./src/file/fff" +import { Instance } from "./src/project/instance" + +const dir = process.cwd() + +await Instance.provide({ + directory: dir, + fn: async () => { + const t0 = performance.now() + const picker = await Fff.picker(dir) + console.log(`picker create: ${(performance.now() - t0).toFixed(1)}ms`) + + // wait for scan to complete so results are populated + const tw = performance.now() + picker.waitForScan(10000) 
+ console.log(`wait for scan: ${(performance.now() - tw).toFixed(1)}ms`) + + const t1 = performance.now() + const files = await Fff.files({ cwd: dir, query: "fff" }) + console.log(`file search "fff": ${(performance.now() - t1).toFixed(1)}ms (${files.items.length} results)`) + + const t2 = performance.now() + const files2 = await Fff.files({ cwd: dir, query: "package.json" }) + console.log(`file search "package.json": ${(performance.now() - t2).toFixed(1)}ms (${files2.items.length} results)`) + + const t3 = performance.now() + const grep = await Fff.grep({ cwd: dir, query: "FileFinder", mode: "plain" }) + console.log(`grep "FileFinder": ${(performance.now() - t3).toFixed(1)}ms (${grep.items.length} matches)`) + + const t4 = performance.now() + const grep2 = await Fff.grep({ cwd: dir, query: "import", mode: "plain" }) + console.log(`grep "import": ${(performance.now() - t4).toFixed(1)}ms (${grep2.items.length} matches)`) + + const t5 = performance.now() + const search = await Fff.search({ cwd: dir, pattern: "FileFinder" }) + console.log(`search "FileFinder": ${(performance.now() - t5).toFixed(1)}ms (${search.length} results)`) + + await Instance.dispose() + }, +}) diff --git a/packages/opencode/package.json b/packages/opencode/package.json index 425ddea77acb..ceca0c364d90 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -102,6 +102,7 @@ "@clack/prompts": "1.0.0-alpha.1", "@effect/opentelemetry": "catalog:", "@effect/platform-node": "catalog:", + "@ff-labs/fff-bun": "0.6.4", "@gitlab/opencode-gitlab-auth": "1.3.3", "@hono/node-server": "1.19.11", "@hono/node-ws": "1.3.0", diff --git a/packages/opencode/src/cli/cmd/debug/file.ts b/packages/opencode/src/cli/cmd/debug/file.ts index 8e4eaa4e4d66..6a0ab14b940e 100644 --- a/packages/opencode/src/cli/cmd/debug/file.ts +++ b/packages/opencode/src/cli/cmd/debug/file.ts @@ -1,7 +1,7 @@ import { EOL } from "os" import { AppRuntime } from "@/effect/app-runtime" import { File } from 
"../../../file" -import { Ripgrep } from "@/file/ripgrep" +import { Fff } from "@/file/fff" import { bootstrap } from "../../bootstrap" import { cmd } from "../cmd" @@ -79,7 +79,7 @@ const FileTreeCommand = cmd({ }), async handler(args) { await bootstrap(process.cwd(), async () => { - const tree = await AppRuntime.runPromise(Ripgrep.Service.use((svc) => svc.tree({ cwd: args.dir, limit: 200 }))) + const tree = await Fff.tree({ cwd: args.dir, limit: 200 }) console.log(JSON.stringify(tree, null, 2)) }) }, diff --git a/packages/opencode/src/cli/cmd/debug/index.ts b/packages/opencode/src/cli/cmd/debug/index.ts index 194e66b1f202..b8fa4d85fa64 100644 --- a/packages/opencode/src/cli/cmd/debug/index.ts +++ b/packages/opencode/src/cli/cmd/debug/index.ts @@ -4,7 +4,7 @@ import { cmd } from "../cmd" import { ConfigCommand } from "./config" import { FileCommand } from "./file" import { LSPCommand } from "./lsp" -import { RipgrepCommand } from "./ripgrep" +import { SearchCommand } from "./search" import { ScrapCommand } from "./scrap" import { SkillCommand } from "./skill" import { SnapshotCommand } from "./snapshot" @@ -18,7 +18,7 @@ export const DebugCommand = cmd({ yargs .command(ConfigCommand) .command(LSPCommand) - .command(RipgrepCommand) + .command(SearchCommand) .command(FileCommand) .command(ScrapCommand) .command(SkillCommand) diff --git a/packages/opencode/src/cli/cmd/debug/ripgrep.ts b/packages/opencode/src/cli/cmd/debug/ripgrep.ts deleted file mode 100644 index 9b7e82691568..000000000000 --- a/packages/opencode/src/cli/cmd/debug/ripgrep.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { EOL } from "os" -import { Effect, Stream } from "effect" -import { AppRuntime } from "../../../effect/app-runtime" -import { Ripgrep } from "../../../file/ripgrep" -import { Instance } from "../../../project/instance" -import { bootstrap } from "../../bootstrap" -import { cmd } from "../cmd" - -export const RipgrepCommand = cmd({ - command: "rg", - describe: "ripgrep debugging utilities", 
- builder: (yargs) => yargs.command(TreeCommand).command(FilesCommand).command(SearchCommand).demandCommand(), - async handler() {}, -}) - -const TreeCommand = cmd({ - command: "tree", - describe: "show file tree using ripgrep", - builder: (yargs) => - yargs.option("limit", { - type: "number", - }), - async handler(args) { - await bootstrap(process.cwd(), async () => { - const tree = await AppRuntime.runPromise( - Ripgrep.Service.use((svc) => svc.tree({ cwd: Instance.directory, limit: args.limit })), - ) - process.stdout.write(tree + EOL) - }) - }, -}) - -const FilesCommand = cmd({ - command: "files", - describe: "list files using ripgrep", - builder: (yargs) => - yargs - .option("query", { - type: "string", - description: "Filter files by query", - }) - .option("glob", { - type: "string", - description: "Glob pattern to match files", - }) - .option("limit", { - type: "number", - description: "Limit number of results", - }), - async handler(args) { - await bootstrap(process.cwd(), async () => { - const files = await AppRuntime.runPromise( - Effect.gen(function* () { - const rg = yield* Ripgrep.Service - return yield* rg - .files({ - cwd: Instance.directory, - glob: args.glob ? [args.glob] : undefined, - }) - .pipe( - Stream.take(args.limit ?? 
Infinity), - Stream.runCollect, - Effect.map((c) => [...c]), - ) - }), - ) - process.stdout.write(files.join(EOL) + EOL) - }) - }, -}) - -const SearchCommand = cmd({ - command: "search ", - describe: "search file contents using ripgrep", - builder: (yargs) => - yargs - .positional("pattern", { - type: "string", - demandOption: true, - description: "Search pattern", - }) - .option("glob", { - type: "array", - description: "File glob patterns", - }) - .option("limit", { - type: "number", - description: "Limit number of results", - }), - async handler(args) { - await bootstrap(process.cwd(), async () => { - const results = await AppRuntime.runPromise( - Ripgrep.Service.use((svc) => - svc.search({ - cwd: Instance.directory, - pattern: args.pattern, - glob: args.glob as string[] | undefined, - limit: args.limit, - }), - ), - ) - process.stdout.write(JSON.stringify(results.items, null, 2) + EOL) - }) - }, -}) diff --git a/packages/opencode/src/cli/cmd/debug/search.ts b/packages/opencode/src/cli/cmd/debug/search.ts new file mode 100644 index 000000000000..9fca410dd3ba --- /dev/null +++ b/packages/opencode/src/cli/cmd/debug/search.ts @@ -0,0 +1,94 @@ +import { EOL } from "os" +import { Fff } from "../../../file/fff" +import { Instance } from "../../../project/instance" +import { bootstrap } from "../../bootstrap" +import { cmd } from "../cmd" +import { Glob } from "@opencode-ai/core/util/glob" + +export const SearchCommand = cmd({ + command: "search", + describe: "fff search debugging utilities", + builder: (yargs) => yargs.command(TreeCommand).command(FilesCommand).command(ContentCommand).demandCommand(), + async handler() {}, +}) + +const TreeCommand = cmd({ + command: "tree", + describe: "show file tree using fff", + builder: (yargs) => + yargs.option("limit", { + type: "number", + }), + async handler(args) { + await bootstrap(process.cwd(), async () => { + process.stdout.write((await Fff.tree({ cwd: Instance.directory, limit: args.limit })) + EOL) + }) + }, +}) + 
+const FilesCommand = cmd({ + command: "files", + describe: "list files using fff", + builder: (yargs) => + yargs + .option("query", { + type: "string", + description: "Filter files by query", + }) + .option("glob", { + type: "string", + description: "Glob pattern to match files", + }) + .option("limit", { + type: "number", + description: "Limit number of results", + }), + async handler(args) { + await bootstrap(process.cwd(), async () => { + const limit = args.limit ?? 100 + const files = ( + await Glob.scan("**/*", { + cwd: Instance.directory, + include: "file", + dot: true, + }) + ) + .map((x) => x.replaceAll("\\", "/")) + .filter((x) => Fff.allowed({ rel: x, hidden: true, glob: args.glob ? [args.glob] : undefined })) + .filter((x) => !args.query || x.includes(args.query)) + .slice(0, limit) + process.stdout.write(files.join(EOL) + EOL) + }) + }, +}) + +const ContentCommand = cmd({ + command: "content ", + describe: "search file contents using fff", + builder: (yargs) => + yargs + .positional("pattern", { + type: "string", + demandOption: true, + description: "Search pattern", + }) + .option("glob", { + type: "array", + description: "File glob patterns", + }) + .option("limit", { + type: "number", + description: "Limit number of results", + }), + async handler(args) { + await bootstrap(process.cwd(), async () => { + const rows = await Fff.search({ + cwd: Instance.directory, + pattern: args.pattern, + glob: args.glob as string[] | undefined, + limit: args.limit, + }) + process.stdout.write(JSON.stringify(rows, null, 2) + EOL) + }) + }, +}) diff --git a/packages/opencode/src/cli/cmd/tui/component/dialog-tag.tsx b/packages/opencode/src/cli/cmd/tui/component/dialog-tag.tsx index 6d6c62450ea3..6f5e42a253db 100644 --- a/packages/opencode/src/cli/cmd/tui/component/dialog-tag.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/dialog-tag.tsx @@ -26,8 +26,8 @@ export function DialogTag(props: { onSelect?: (value: string) => void }) { const options = createMemo(() => 
(files() ?? []).map((file) => ({ - value: file, - title: file, + value: file.path, + title: file.path, })), ) diff --git a/packages/opencode/src/cli/cmd/tui/component/prompt/autocomplete.tsx b/packages/opencode/src/cli/cmd/tui/component/prompt/autocomplete.tsx index 47bb162cb4bc..90f86dc50094 100644 --- a/packages/opencode/src/cli/cmd/tui/component/prompt/autocomplete.tsx +++ b/packages/opencode/src/cli/cmd/tui/component/prompt/autocomplete.tsx @@ -63,6 +63,7 @@ export type AutocompleteOption = { disabled?: boolean description?: string isDirectory?: boolean + gitStatus?: string onSelect?: () => void path?: string } @@ -295,36 +296,24 @@ export function Autocomplete(props: { const { lineRange, baseQuery } = extractLineRange(query ?? "") - // Get files from SDK const result = await sdk.client.find.files({ query: baseQuery, }) const options: AutocompleteOption[] = [] - // Add file options if (!result.error && result.data) { - const sortedFiles = result.data.sort((a, b) => { - const aScore = frecency.getFrecency(a) - const bScore = frecency.getFrecency(b) - if (aScore !== bScore) return bScore - aScore - const aDepth = a.split("/").length - const bDepth = b.split("/").length - if (aDepth !== bDepth) return aDepth - bDepth - return a.localeCompare(b) - }) - const width = props.anchor().width - 4 options.push( - ...sortedFiles.map((item): AutocompleteOption => { - const { filename, url, part } = createFilePart(item, lineRange) + ...result.data.map((item): AutocompleteOption => { + const { filename, part } = createFilePart(item.path, lineRange) - const isDir = item.endsWith("/") return { display: Locale.truncateMiddle(filename, width), value: filename, - isDirectory: isDir, - path: item, + isDirectory: item.isDirectory, + gitStatus: item.gitStatus, + path: item.path, onSelect: () => { insertPart(filename, part) }, @@ -445,6 +434,12 @@ export function Autocomplete(props: { return prev } + // fff already returns frecency-ranked fuzzy results, so for file search + // just 
pass them through directly instead of re-ranking with fuzzysort + if (store.visible === "@" && filesValue && filesValue.length > 0) { + return filesValue + } + const result = fuzzysort.go(removeLineRange(searchValue), mixed, { keys: [ (obj) => removeLineRange((obj.value ?? obj.display).trimEnd()), @@ -688,7 +683,6 @@ export function Autocomplete(props: { > {(option, index) => ( select()} > + + {option().gitStatus ? "▎" : " "} + {option().display} diff --git a/packages/opencode/src/file/fff.ts b/packages/opencode/src/file/fff.ts new file mode 100644 index 000000000000..c93826007056 --- /dev/null +++ b/packages/opencode/src/file/fff.ts @@ -0,0 +1,301 @@ +import path from "path" +import { setTimeout as sleep } from "node:timers/promises" +import { + FileFinder, + type FileItem, + type GrepCursor, + type GrepMatch, + type GrepMode, + type MixedItem, + type MixedSearchResult, + type SearchResult, +} from "@ff-labs/fff-bun" +import z from "zod" +import { Global } from "@opencode-ai/core/global" +import { Glob } from "@opencode-ai/core/util/glob" +import { Filesystem } from "@/util/filesystem" +import * as Log from "@opencode-ai/core/util/log" +import { registerDisposer } from "@/effect/instance-registry" + +export namespace Fff { + export const Match = z.object({ + path: z.object({ + text: z.string(), + }), + lines: z.object({ + text: z.string(), + }), + line_number: z.number(), + absolute_offset: z.number(), + submatches: z.array( + z.object({ + match: z.object({ + text: z.string(), + }), + start: z.number(), + end: z.number(), + }), + ), + }) + + const state = { + map: new Map(), + // keep the state of the already indexed fff pickers + // to avoid asking if it is finished scanned every time + ready: new Set(), + } + + registerDisposer(async (directory) => { + const dir = Filesystem.resolve(directory) + const pick = state.map.get(dir) + if (!pick) return + state.map.delete(dir) + state.ready.delete(dir) + + try { + pick.destroy() + } catch {} + }) + + const root = 
path.join(Global.Path.cache, "fff") + + function key(dir: string) { + return Buffer.from(dir).toString("base64url") + } + + function dbs(dir: string) { + const id = key(dir) + return { + frecency: path.join(root, `${id}.frecency.mdb`), + history: path.join(root, `${id}.history.mdb`), + } + } + + export function picker(cwd: string) { + const dir = Filesystem.resolve(cwd) + const cached = state.map.get(dir) + if (cached) return cached + + const files = dbs(dir) + const base = Log.file() + const logfile = path.join(Global.Path.log, base ? "fff-" + path.basename(base) : "fff.log") + const result = FileFinder.create({ + aiMode: true, + basePath: dir, + frecencyDbPath: files.frecency, + historyDbPath: files.history, + logFilePath: logfile, + // fff uses the same log level + logLevel: Log.currentLevel().toLowerCase() as "debug" | "info" | "warn" | "error", + // if there is a second project opened within the same session - disable + // virtual memory mapping, the memory mapping address space is finite, so we + // don't want to blow up the user's computer (the limit depends on repo size) + cacheBudgetMaxFiles: state.map.size > 0 ? 
0 : undefined, + }) + + if (!result.ok) throw new Error(result.error) + const pick = result.value + state.map.set(dir, pick) + return pick + } + + const FFF_WAIT_INTERVAL = 25 + async function waitForScan(picker: FileFinder, timeoutMs: number) { + const start = Date.now() + + // because fff is a native library it doesn't touch the event loop, so + // poll for the picker to be ready to return the data if it is still scanning + while (picker.isScanning()) { + if (Date.now() - start >= timeoutMs) throw new Error("fff scan timeout") + await sleep(FFF_WAIT_INTERVAL) + } + } + + async function open(cwd: string) { + const dir = Filesystem.resolve(cwd) + const pick = picker(cwd) + + if (!state.ready.has(dir)) { + await waitForScan(pick, 5000) + state.ready.add(dir) + } + + return pick + } + + export async function files(input: { cwd: string; query: string; page?: number; size?: number; current?: string }) { + const fff = await open(input.cwd) + const out = fff.fileSearch(input.query, { + pageIndex: input.page ?? 0, + pageSize: input.size ?? 100, + currentFile: input.current, + }) + if (!out.ok) throw new Error(out.error) + return out.value + } + + export async function mixed(input: { cwd: string; query: string; page?: number; size?: number; current?: string }) { + const fff = await open(input.cwd) + const out = fff.mixedSearch(input.query, { + pageIndex: input.page ?? 0, + pageSize: input.size ?? 
100, + currentFile: input.current, + }) + if (!out.ok) throw new Error(out.error) + return out.value + } + + export async function grep(input: { + cwd: string + query: string + mode?: GrepMode + max?: number + before?: number + after?: number + budget?: number + cursor?: GrepCursor | null + }) { + const pick = await open(input.cwd) + const out = pick.grep(input.query, { + mode: input.mode, + maxMatchesPerFile: input.max, + beforeContext: input.before, + afterContext: input.after, + timeBudgetMs: input.budget, + cursor: input.cursor, + }) + if (!out.ok) throw new Error(out.error) + return out.value + } + + function norm(text: string) { + return text.replaceAll("\\", "/") + } + + function hidden(rel: string) { + return norm(rel) + .split("/") + .some((part) => part.startsWith(".")) + } + + function accept(rel: string, file: string, glob?: string[], show?: boolean) { + if (show === false && hidden(rel)) return false + if (!glob?.length) return true + const allow = glob.filter((x) => !x.startsWith("!")) + const deny = glob.filter((x) => x.startsWith("!")).map((x) => x.slice(1)) + if (allow.length > 0 && !allow.some((x) => Glob.match(x, rel) || Glob.match(x, file))) return false + if (deny.some((x) => Glob.match(x, rel) || Glob.match(x, file))) return false + return true + } + + export function allowed(input: { rel: string; file?: string; glob?: string[]; hidden?: boolean }) { + return accept(input.rel, input.file ?? input.rel.split("/").at(-1) ?? 
input.rel, input.glob, input.hidden !== false) + } + + export async function tree(input: { cwd: string; limit?: number; signal?: AbortSignal }) { + input.signal?.throwIfAborted() + const files = ( + await Glob.scan("**/*", { + cwd: input.cwd, + include: "file", + dot: true, + }) + ) + .map((row) => norm(row)) + .filter((row) => allowed({ rel: row, hidden: true })) + .toSorted((a, b) => a.localeCompare(b)) + input.signal?.throwIfAborted() + interface Node { + name: string + children: Map + } + + function dir(node: Node, name: string) { + const old = node.children.get(name) + if (old) return old + const next = { name, children: new Map() } + node.children.set(name, next) + return next + } + + const root = { name: "", children: new Map() } + for (const file of files) { + if (file.includes(".opencode")) continue + const parts = file.split("/") + if (parts.length < 2) continue + let node = root + for (const part of parts.slice(0, -1)) { + node = dir(node, part) + } + } + + function count(node: Node): number { + return Array.from(node.children.values()).reduce((sum, child) => sum + 1 + count(child), 0) + } + + const total = count(root) + const limit = input.limit ?? 
total + const lines: string[] = [] + const queue = Array.from(root.children.values()) + .toSorted((a, b) => a.name.localeCompare(b.name)) + .map((node) => ({ node, path: node.name })) + + let used = 0 + for (let i = 0; i < queue.length && used < limit; i++) { + input.signal?.throwIfAborted() + const row = queue[i] + lines.push(row.path) + used++ + queue.push( + ...Array.from(row.node.children.values()) + .toSorted((a, b) => a.name.localeCompare(b.name)) + .map((node) => ({ node, path: `${row.path}/${node.name}` })), + ) + } + if (total > used) lines.push(`[${total - used} truncated]`) + input.signal?.throwIfAborted() + return lines.join("\n") + } + + export async function search(input: { + cwd: string + pattern: string + glob?: string[] + limit?: number + follow?: boolean + }) { + // fff has default support for globs that is done at the native level + // it prefilters files before the search so it is impossible to miss the result + const constraints = input.glob?.join(" ") ?? "" + const out = await grep({ + cwd: input.cwd, + query: constraints ? 
`${constraints} ${input.pattern}` : input.pattern, + mode: "regex", + max: input.limit, + }) + + return out.items.slice(0, input.limit).map((row) => ({ + path: { text: row.relativePath }, + lines: { text: row.lineContent }, + line_number: row.lineNumber, + absolute_offset: row.byteOffset, + submatches: row.matchRanges + .map(([start, end]) => { + const text = row.lineContent.slice(start, end) + if (!text) return undefined + return { + match: { text }, + start, + end, + } + }) + .filter((row) => row !== undefined), + })) + } + + export type Search = SearchResult + export type Mixed = MixedSearchResult + export type MixedEntry = MixedItem + export type File = FileItem + export type Hit = GrepMatch +} diff --git a/packages/opencode/src/file/index.ts b/packages/opencode/src/file/index.ts index 4a474881cb9f..9e19accefa17 100644 --- a/packages/opencode/src/file/index.ts +++ b/packages/opencode/src/file/index.ts @@ -4,7 +4,6 @@ import { InstanceState } from "@/effect/instance-state" import { AppFileSystem } from "@opencode-ai/core/filesystem" import { Git } from "@/git" import { Effect, Layer, Context, Schema, Scope } from "effect" -import * as Stream from "effect/Stream" import { formatPatch, structuredPatch } from "diff" import fuzzysort from "fuzzysort" import ignore from "ignore" @@ -12,8 +11,9 @@ import path from "path" import { Global } from "@opencode-ai/core/global" import { Instance } from "../project/instance" import * as Log from "@opencode-ai/core/util/log" +import { Glob } from "@opencode-ai/core/util/glob" import { Protected } from "./protected" -import { Ripgrep } from "./ripgrep" +import { Fff } from "./fff" import { zod } from "@/util/effect-zod" import { NonNegativeInt, type DeepMutable, withStatics } from "@/util/schema" @@ -38,6 +38,15 @@ export const Node = Schema.Struct({ .pipe(withStatics((s) => ({ zod: zod(s) }))) export type Node = DeepMutable> +export const SearchItem = Schema.Struct({ + path: Schema.String, + isDirectory: Schema.Boolean, + 
gitStatus: Schema.optional(Schema.String), +}) + .annotate({ identifier: "FileSearchItem" }) + .pipe(withStatics((s) => ({ zod: zod(s) }))) +export type SearchItem = DeepMutable> + const Hunk = Schema.Struct({ oldStart: NonNegativeInt, oldLines: NonNegativeInt, @@ -328,7 +337,7 @@ export interface Interface { limit?: number dirs?: boolean type?: "file" | "directory" - }) => Effect.Effect + }) => Effect.Effect } export class Service extends Context.Service()("@opencode/File") {} @@ -337,7 +346,6 @@ export const layer = Layer.effect( Service, Effect.gen(function* () { const appFs = yield* AppFileSystem.Service - const rg = yield* Ripgrep.Service const git = yield* Git.Service const scope = yield* Scope.Scope @@ -379,10 +387,13 @@ export const layer = Layer.effect( next.dirs = Array.from(dirs).toSorted() } else { - const files = yield* rg.files({ cwd: ctx.directory }).pipe( - Stream.runCollect, - Effect.map((chunk) => [...chunk]), - ) + const files = (yield* Effect.promise(() => + Glob.scan("**/*", { + cwd: ctx.directory, + include: "file", + dot: true, + }), + )).toSorted((a, b) => a.localeCompare(b)) const seen = new Set() for (const file of files) { next.files.push(file) @@ -619,19 +630,60 @@ export const layer = Layer.effect( dirs?: boolean type?: "file" | "directory" }) { - yield* ensure() - const { cache } = yield* InstanceState.get(state) - const query = input.query.trim() const limit = input.limit ?? 100 const kind = input.type ?? (input.dirs === false ? 
"file" : "all") log.info("search", { query, kind }) + if (query && kind !== "directory") { + const ctx = yield* InstanceState.context + const fast = yield* Effect.promise(() => + Fff.mixed({ + query, + cwd: ctx.directory, + size: limit, + }) + .then((out) => { + const seen = new Set() + const items: SearchItem[] = [] + for (const entry of out.items) { + const rel = entry.item.relativePath.replaceAll("\\", "/") + if (seen.has(rel)) continue + seen.add(rel) + if (entry.type === "file") { + const gs = entry.item.gitStatus + items.push({ + path: rel, + isDirectory: false, + gitStatus: gs && gs !== "clean" ? gs : undefined, + }) + } else { + items.push({ + path: rel.endsWith("/") ? rel : rel + "/", + isDirectory: true, + }) + } + } + return items.slice(0, limit) + }) + .catch(() => [] as SearchItem[]), + ) + if (fast.length) { + log.info("search", { query, kind, results: fast.length, mode: "fff" }) + return fast + } + } + + yield* ensure() + const { cache } = yield* InstanceState.get(state) + const preferHidden = query.startsWith(".") || query.includes("/.") if (!query) { - if (kind === "file") return cache.files.slice(0, limit) - return sortHiddenLast(cache.dirs.toSorted(), preferHidden).slice(0, limit) + if (kind === "file") return cache.files.slice(0, limit).map((f) => ({ path: f, isDirectory: false })) + return sortHiddenLast(cache.dirs.toSorted(), preferHidden) + .slice(0, limit) + .map((d) => ({ path: d, isDirectory: true })) } const items = kind === "file" ? cache.files : kind === "directory" ? cache.dirs : [...cache.files, ...cache.dirs] @@ -641,7 +693,7 @@ export const layer = Layer.effect( const output = kind === "directory" ? 
sortHiddenLast(sorted, preferHidden).slice(0, limit) : sorted log.info("search", { query, kind, results: output.length }) - return output + return output.map((p) => ({ path: p, isDirectory: p.endsWith("/") })) }) log.info("init") @@ -649,10 +701,6 @@ export const layer = Layer.effect( }), ) -export const defaultLayer = layer.pipe( - Layer.provide(Ripgrep.defaultLayer), - Layer.provide(AppFileSystem.defaultLayer), - Layer.provide(Git.defaultLayer), -) +export const defaultLayer = layer.pipe(Layer.provide(AppFileSystem.defaultLayer), Layer.provide(Git.defaultLayer)) export * as File from "." diff --git a/packages/opencode/src/server/routes/instance/file.ts b/packages/opencode/src/server/routes/instance/file.ts index d0e9ee618607..8853d820947e 100644 --- a/packages/opencode/src/server/routes/instance/file.ts +++ b/packages/opencode/src/server/routes/instance/file.ts @@ -2,7 +2,7 @@ import { Hono } from "hono" import { describeRoute, validator, resolver } from "hono-openapi" import z from "zod" import { File } from "@/file" -import { Ripgrep } from "@/file/ripgrep" +import { Fff } from "@/file/fff" import { LSP } from "@/lsp/lsp" import { Instance } from "@/project/instance" import { lazy } from "@/util/lazy" @@ -14,14 +14,14 @@ export const FileRoutes = lazy(() => "/find", describeRoute({ summary: "Find text", - description: "Search for text patterns across files in the project using ripgrep.", + description: "Search for text patterns across files in the project.", operationId: "find.text", responses: { 200: { description: "Matches", content: { "application/json": { - schema: resolver(Ripgrep.SearchMatch.zod.array()), + schema: resolver(Fff.Match.array()), }, }, }, @@ -33,13 +33,15 @@ export const FileRoutes = lazy(() => pattern: z.string(), }), ), - async (c) => - jsonRequest("FileRoutes.findText", c, function* () { - const pattern = c.req.valid("query").pattern - const svc = yield* Ripgrep.Service - const result = yield* svc.search({ cwd: Instance.directory, 
pattern, limit: 10 }) - return result.items - }), + async (c) => { + const pattern = c.req.valid("query").pattern + const result = await Fff.search({ + cwd: Instance.directory, + pattern, + limit: 10, + }) + return c.json(result) + }, ) .get( "/find/file", @@ -49,10 +51,10 @@ export const FileRoutes = lazy(() => operationId: "find.files", responses: { 200: { - description: "File paths", + description: "File search results", content: { "application/json": { - schema: resolver(z.string().array()), + schema: resolver(File.SearchItem.zod.array()), }, }, }, diff --git a/packages/opencode/src/server/routes/instance/httpapi/groups/file.ts b/packages/opencode/src/server/routes/instance/httpapi/groups/file.ts index b950adb383e3..cf2f68bbbca5 100644 --- a/packages/opencode/src/server/routes/instance/httpapi/groups/file.ts +++ b/packages/opencode/src/server/routes/instance/httpapi/groups/file.ts @@ -54,7 +54,7 @@ export const FileApi = HttpApi.make("file") ), HttpApiEndpoint.get("findFile", FilePaths.findFile, { query: FindFileQuery, - success: described(Schema.Array(Schema.String), "File paths"), + success: described(Schema.Array(File.SearchItem), "File search results"), }).annotateMerge( OpenApi.annotations({ identifier: "find.files", diff --git a/packages/opencode/src/tool/glob.ts b/packages/opencode/src/tool/glob.ts index 0c97b9cdf7c5..c8131e8cb145 100644 --- a/packages/opencode/src/tool/glob.ts +++ b/packages/opencode/src/tool/glob.ts @@ -1,13 +1,93 @@ import path from "path" -import { Effect, Option, Schema } from "effect" -import * as Stream from "effect/Stream" +import { Effect, Schema } from "effect" import { InstanceState } from "@/effect/instance-state" -import { AppFileSystem } from "@opencode-ai/core/filesystem" -import { Ripgrep } from "../file/ripgrep" +import { Fff } from "../file/fff" +import { Glob } from "@opencode-ai/core/util/glob" import { assertExternalDirectoryEffect } from "./external-directory" import DESCRIPTION from "./glob.txt" import * as Tool 
from "./tool" +type Row = { + path: string + rel: string +} + +function include(pattern: string) { + const val = pattern.trim().replaceAll("\\", "/") + if (!val) return "*" + const flat = val.replaceAll("**/", "").replaceAll("/**", "/") + const idx = flat.lastIndexOf("/") + if (idx < 0) return flat + const dir = flat.slice(0, idx + 1) + const glob = flat.slice(idx + 1) + if (!glob) return dir + return `${dir} ${glob}` +} + +function words(text: string) { + return text.trim().split(/\s+/).filter(Boolean) +} + +function norm(text: string) { + return text.replaceAll("\\", "/") +} + +function hidden(rel: string) { + return norm(rel).split("/").includes(".git") +} + +function broad(pattern: string) { + const val = norm(pattern.trim()) + if (!val) return true + if (["*", "**", "**/*", "./**", "./**/*"].includes(val)) return true + return /^(\*\*\/)?\*$/.test(val) +} + +function pick(items: { relativePath: string }[], cwd: string) { + return items + .map((item) => ({ + path: path.resolve(cwd, item.relativePath), + rel: norm(item.relativePath), + })) + .filter((item) => !hidden(item.rel)) +} + +function top(rows: Row[]) { + const out = new Map() + for (const row of rows) { + const parts = row.rel.split("/") + const key = parts.length < 2 ? "." : parts.slice(0, Math.min(2, parts.length - 1)).join("/") + "/" + out.set(key, (out.get(key) ?? 0) + 1) + } + return Array.from(out.entries()) + .sort((a, b) => b[1] - a[1] || a[0].localeCompare(b[0])) + .slice(0, 12) +} + +async function scan(pattern: string, dir: string) { + const direct = await Glob.scan(pattern, { + cwd: dir, + absolute: true, + include: "file", + dot: true, + }) + const out = + direct.length > 0 + ? 
direct + : await Glob.scan(`**/${pattern}`, { + cwd: dir, + absolute: true, + include: "file", + dot: true, + }) + return out + .map((file) => ({ + path: file, + rel: norm(path.relative(dir, file)), + })) + .filter((item) => !hidden(item.rel)) +} + export const Parameters = Schema.Struct({ pattern: Schema.String.annotate({ description: "The glob pattern to match files against" }), path: Schema.optional(Schema.String).annotate({ @@ -17,81 +97,96 @@ export const Parameters = Schema.Struct({ export const GlobTool = Tool.define( "glob", - Effect.gen(function* () { - const rg = yield* Ripgrep.Service - const fs = yield* AppFileSystem.Service - - return { - description: DESCRIPTION, - parameters: Parameters, - execute: (params: { pattern: string; path?: string }, ctx: Tool.Context) => - Effect.gen(function* () { - const ins = yield* InstanceState.context - yield* ctx.ask({ - permission: "glob", - patterns: [params.pattern], - always: ["*"], - metadata: { - pattern: params.pattern, - path: params.path, - }, - }) - - let search = params.path ?? ins.directory - search = path.isAbsolute(search) ? search : path.resolve(ins.directory, search) - const info = yield* fs.stat(search).pipe(Effect.catch(() => Effect.succeed(undefined))) - if (info?.type === "File") { - throw new Error(`glob path must be a directory: ${search}`) - } - yield* assertExternalDirectoryEffect(ctx, search, { kind: "directory" }) - - const limit = 100 - let truncated = false - const files = yield* rg.files({ cwd: search, glob: [params.pattern], signal: ctx.abort }).pipe( - Stream.mapEffect((file) => - Effect.gen(function* () { - const full = path.resolve(search, file) - const info = yield* fs.stat(full).pipe(Effect.catch(() => Effect.succeed(undefined))) - const mtime = - info?.mtime.pipe( - Option.map((date) => date.getTime()), - Option.getOrElse(() => 0), - ) ?? 
0 - return { path: full, mtime } + Effect.succeed({ + description: DESCRIPTION, + parameters: Parameters, + execute: (params: Schema.Schema.Type, ctx: Tool.Context) => + Effect.gen(function* () { + const ins = yield* InstanceState.context + yield* ctx.ask({ + permission: "glob", + patterns: [params.pattern], + always: ["*"], + metadata: { + pattern: params.pattern, + path: params.path, + }, + }) + + let dir = params.path ?? ins.directory + dir = path.isAbsolute(dir) ? dir : path.resolve(ins.directory, dir) + yield* assertExternalDirectoryEffect(ctx, dir, { kind: "directory" }) + + const limit = 100 + const wide = broad(params.pattern) + const size = wide ? 400 : limit + 1 + + const first = yield* Effect.promise(() => + Fff.files({ + cwd: dir, + query: include(params.pattern), + size, + current: path.join(dir, ".opencode"), + }), + ) + + let fallback = false + let rows = pick(first.items, dir) + if (!rows.length) { + const list = words(params.pattern) + if (list.length >= 3) { + const short = list.slice(0, 2).join(" ") + const next = yield* Effect.promise(() => + Fff.files({ + cwd: dir, + query: include(short), + size, + current: path.join(dir, ".opencode"), }), - ), - Stream.take(limit + 1), - Stream.runCollect, - Effect.map((chunk) => [...chunk]), - ) - - if (files.length > limit) { - truncated = true - files.length = limit + ) + rows = pick(next.items, dir) } - files.sort((a, b) => b.mtime - a.mtime) + } + if (!rows.length) { + fallback = true + rows = yield* Effect.promise(() => scan(params.pattern, dir)) + } - const output = [] - if (files.length === 0) output.push("No files found") - if (files.length > 0) { - output.push(...files.map((file) => file.path)) - if (truncated) { + const truncated = rows.length > limit + const files = rows.slice(0, limit).map((row) => row.path) + + const output = [] + if (files.length === 0) output.push("No files found") + if (files.length > 0) { + output.push(...files) + if (wide && truncated) { + const dirs = top(rows) + if 
(dirs.length > 0) { output.push("") - output.push( - `(Results are truncated: showing first ${limit} results. Consider using a more specific path or pattern.)`, - ) + output.push("Top directories in this result set:") + output.push(...dirs.map(([dir, count]) => `${dir} (${count})`)) } } - - return { - title: path.relative(ins.worktree, search), - metadata: { - count: files.length, - truncated, - }, - output: output.join("\n"), + if (fallback) { + output.push("") + output.push("(Used filesystem glob fallback for this pattern.)") } - }).pipe(Effect.orDie), - } + if (truncated) { + output.push("") + output.push( + `(Results are truncated: showing first ${limit} results. Consider using a more specific path or pattern.)`, + ) + } + } + + return { + title: path.relative(ins.worktree, dir), + metadata: { + count: files.length, + truncated, + }, + output: output.join("\n"), + } + }).pipe(Effect.orDie), }), ) diff --git a/packages/opencode/src/tool/glob.txt b/packages/opencode/src/tool/glob.txt index 627da6cae9d7..a08461c08a8f 100644 --- a/packages/opencode/src/tool/glob.txt +++ b/packages/opencode/src/tool/glob.txt @@ -1,6 +1,6 @@ -- Fast file pattern matching tool that works with any codebase size +- Fast file pattern matching tool that uses fuzzy-first indexing and frecency ranking - Supports glob patterns like "**/*.js" or "src/**/*.ts" -- Returns matching file paths sorted by modification time +- Returns matching file paths prioritized by recent and relevant files - Use this tool when you need to find files by name patterns - When you are doing an open-ended search that may require multiple rounds of globbing and grepping, use the Task tool instead - You have the capability to call multiple tools in a single response. It is always better to speculatively perform multiple searches as a batch that are potentially useful. 
diff --git a/packages/opencode/src/tool/grep.ts b/packages/opencode/src/tool/grep.ts index fb3e70cad25d..49372b069d60 100644 --- a/packages/opencode/src/tool/grep.ts +++ b/packages/opencode/src/tool/grep.ts @@ -1,14 +1,122 @@ import path from "path" -import { Schema } from "effect" -import { Effect, Option } from "effect" +import { Effect, Schema } from "effect" import { InstanceState } from "@/effect/instance-state" -import { AppFileSystem } from "@opencode-ai/core/filesystem" -import { Ripgrep } from "../file/ripgrep" +import { Fff } from "../file/fff" +import type { GrepMode } from "@ff-labs/fff-bun" import { assertExternalDirectoryEffect } from "./external-directory" import DESCRIPTION from "./grep.txt" import * as Tool from "./tool" -const MAX_LINE_LENGTH = 2000 +const MAX_LINE = 180 +const MAX_MATCH = 100 +const MAX_DEF_FIRST = 8 +const MAX_DEF_NEXT = 5 + +function isRegex(pattern: string) { + return /[.*+?^${}()|[\]\\]/.test(pattern) +} + +function isConstraint(text: string) { + return text.startsWith("!") || text.startsWith("*") || text.endsWith("/") +} + +function clean(text: string) { + return text.replaceAll(":", "").replaceAll("-", "").replaceAll("_", "").toLowerCase().trim() +} + +function include(text?: string) { + if (!text) return undefined + const val = text.trim().replaceAll("\\", "/") + if (!val) return undefined + const flat = val.replaceAll("**/", "").replaceAll("/**", "/") + const idx = flat.lastIndexOf("/") + if (idx < 0) return flat + const dir = flat.slice(0, idx + 1) + const glob = flat.slice(idx + 1) + if (!glob) return dir + return `${dir} ${glob}` +} + +function query(pattern: string, inc?: string) { + if (!inc) return pattern + return `${inc} ${pattern}`.trim() +} + +function norm(text: string) { + return text.replaceAll("\\", "/") +} + +function def(line: string) { + const text = line.trim() + if (!text) return false + return /^(export\s+)?(default\s+)?(async\s+)?(function|class|interface|type|enum|const|let|var)\b/.test(text) +} + 
+function imp(line: string) { + return /^(import\b|export\s+\{.*\}\s+from\b|use\b|#include\b|require\()/.test(line.trim()) +} + +function line(text: string, ranges: [number, number][]) { + const trim = text.trim() + if (trim.length <= MAX_LINE) return trim + const first = ranges[0] + if (!first) return trim.slice(0, MAX_LINE - 3) + "..." + const start = Math.max(0, first[0] - Math.floor(MAX_LINE / 3)) + const end = Math.min(trim.length, start + MAX_LINE) + const body = trim.slice(start, end) + const pre = start > 0 ? "..." : "" + const post = end < trim.length ? "..." : "" + return pre + body + post +} + +function group(rows: Item[]) { + const out = new Map() + for (const row of rows) { + const list = out.get(row.hit.relativePath) + if (list) { + list.push(row) + continue + } + out.set(row.hit.relativePath, [row]) + } + return out +} + +type Item = { + hit: Fff.Hit + def: boolean + imp: boolean + idx: number +} + +async function run(input: { + cwd: string + pattern: string + inc?: string + mode: GrepMode + max: number + before: number + after: number +}) { + const first = await Fff.grep({ + cwd: input.cwd, + query: query(input.pattern, include(input.inc)), + mode: input.mode, + max: input.max, + before: input.before, + after: input.after, + }) + if (first.items.length || !input.inc) return { out: first, hits: first.items } + const raw = await Fff.grep({ + cwd: input.cwd, + query: input.pattern, + mode: input.mode, + max: input.max, + before: input.before, + after: input.after, + }) + return { out: raw, hits: raw.items } +} export const Parameters = Schema.Struct({ pattern: Schema.String.annotate({ description: "The regex pattern to search for in file contents" }), @@ -22,130 +130,193 @@ export const Parameters = Schema.Struct({ export const GrepTool = Tool.define( "grep", - Effect.gen(function* () { - const fs = yield* AppFileSystem.Service - const rg = yield* Ripgrep.Service - - return { - description: DESCRIPTION, - parameters: Parameters, - execute: (params: { 
pattern: string; path?: string; include?: string }, ctx: Tool.Context) => - Effect.gen(function* () { - const empty = { - title: params.pattern, - metadata: { matches: 0, truncated: false }, - output: "No files found", - } - if (!params.pattern) { - throw new Error("pattern is required") - } + Effect.succeed({ + description: DESCRIPTION, + parameters: Parameters, + execute: (params: Schema.Schema.Type, ctx: Tool.Context) => + Effect.gen(function* () { + if (!params.pattern) { + throw new Error("pattern is required") + } - yield* ctx.ask({ - permission: "grep", - patterns: [params.pattern], - always: ["*"], - metadata: { - pattern: params.pattern, - path: params.path, - include: params.include, - }, - }) - - const ins = yield* InstanceState.context - const search = AppFileSystem.resolve( - path.isAbsolute(params.path ?? ins.directory) - ? (params.path ?? ins.directory) - : path.join(ins.directory, params.path ?? "."), - ) - const info = yield* fs.stat(search).pipe(Effect.catch(() => Effect.succeed(undefined))) - const cwd = info?.type === "Directory" ? search : path.dirname(search) - const file = info?.type === "Directory" ? undefined : [path.relative(cwd, search)] - yield* assertExternalDirectoryEffect(ctx, search, { - kind: info?.type === "Directory" ? "directory" : "file", - }) - - const result = yield* rg.search({ - cwd, + yield* ctx.ask({ + permission: "grep", + patterns: [params.pattern], + always: ["*"], + metadata: { + pattern: params.pattern, + path: params.path, + include: params.include, + }, + }) + + const ins = yield* InstanceState.context + let dir = params.path ?? ins.directory + dir = path.isAbsolute(dir) ? dir : path.resolve(ins.directory, dir) + yield* assertExternalDirectoryEffect(ctx, dir, { kind: "directory" }) + + const mode: GrepMode = isRegex(params.pattern) ? "regex" : "plain" + const exact = yield* Effect.promise(() => + run({ + cwd: dir, pattern: params.pattern, - glob: params.include ? 
[params.include] : undefined, - file, - signal: ctx.abort, - }) - if (result.items.length === 0) return empty - - const rows = result.items.map((item) => ({ - path: AppFileSystem.resolve( - path.isAbsolute(item.path.text) ? item.path.text : path.join(cwd, item.path.text), - ), - line: item.line_number, - text: item.lines.text, - })) - const times = new Map( - (yield* Effect.forEach( - [...new Set(rows.map((row) => row.path))], - Effect.fnUntraced(function* (file) { - const info = yield* fs.stat(file).pipe(Effect.catch(() => Effect.succeed(undefined))) - if (!info || info.type === "Directory") return undefined - return [ - file, - info.mtime.pipe( - Option.map((time) => time.getTime()), - Option.getOrElse(() => 0), - ) ?? 0, - ] as const + inc: params.include, + mode, + max: 10, + before: 0, + after: 4, + }), + ) + + let phase = "exact" + let note = "" + let warn = exact.out.regexFallbackError + let hits = exact.hits + + if (!hits.length) { + const words = params.pattern.trim().split(/\s+/).filter(Boolean) + if (words.length >= 2 && !isConstraint(words[0])) { + const next = words.slice(1).join(" ") + const step = yield* Effect.promise(() => + run({ + cwd: dir, + pattern: next, + inc: params.include, + mode: isRegex(next) ? "regex" : "plain", + max: 10, + before: 0, + after: 4, }), - { concurrency: 16 }, - )).filter((entry): entry is readonly [string, number] => Boolean(entry)), - ) - const matches = rows.flatMap((row) => { - const mtime = times.get(row.path) - if (mtime === undefined) return [] - return [{ ...row, mtime }] - }) - - matches.sort((a, b) => b.mtime - a.mtime) - - const limit = 100 - const truncated = matches.length > limit - const final = truncated ? matches.slice(0, limit) : matches - if (final.length === 0) return empty - - const total = matches.length - const output = [`Found ${total} matches${truncated ? 
` (showing first ${limit})` : ""}`] - - let current = "" - for (const match of final) { - if (current !== match.path) { - if (current !== "") output.push("") - current = match.path - output.push(`${match.path}:`) + ) + warn = warn ?? step.out.regexFallbackError + if (step.hits.length > 0 && step.hits.length <= 10) { + phase = "broad" + note = `0 exact matches. Broadened query \`${next}\`:` + hits = step.hits } - const text = - match.text.length > MAX_LINE_LENGTH ? match.text.substring(0, MAX_LINE_LENGTH) + "..." : match.text - output.push(` Line ${match.line}: ${text}`) } + } - if (truncated) { - output.push("") - output.push( - `(Results truncated: showing ${limit} of ${total} matches (${total - limit} hidden). Consider using a more specific path or pattern.)`, + if (!hits.length) { + const fuzzy = clean(params.pattern) + if (fuzzy) { + const step = yield* Effect.promise(() => + run({ + cwd: dir, + pattern: fuzzy, + inc: params.include, + mode: "fuzzy", + max: 3, + before: 0, + after: 2, + }), ) + if (step.hits.length) { + phase = "fuzzy" + note = `0 exact matches. ${step.hits.length} approximate:` + hits = step.hits + } } + } - if (result.partial) { - output.push("") - output.push("(Some paths were inaccessible and skipped)") + if (!hits.length && params.pattern.includes("/")) { + const files = yield* Effect.promise(() => + Fff.files({ + cwd: dir, + query: params.pattern, + size: 1, + }), + ) + const row = files.items[0] + const score = files.scores[0] + if (row && score && score.baseScore > params.pattern.length * 10) { + return { + title: params.pattern, + metadata: { matches: 0, truncated: false }, + output: `0 content matches. 
But there is a relevant file path:\n${row.relativePath}`, + } } + } + if (!hits.length) { return { title: params.pattern, - metadata: { - matches: total, - truncated, - }, - output: output.join("\n"), + metadata: { matches: 0, truncated: false }, + output: "No files found", } - }).pipe(Effect.orDie), - } + } + + const rows: Item[] = hits.map((hit, idx) => ({ + hit, + idx, + def: def(hit.lineContent), + imp: imp(hit.lineContent), + })) + const hasDef = rows.some((row) => row.def) + const show = hasDef ? rows.filter((row) => !row.imp || row.def) : rows + show.sort((a, b) => { + const ak = a.def ? 0 : a.imp ? 2 : 1 + const bk = b.def ? 0 : b.imp ? 2 : 1 + if (ak !== bk) return ak - bk + return a.idx - b.idx + }) + + const total = show.length + const trim = show.slice(0, MAX_MATCH) + const over = total > MAX_MATCH + const files = new Set(trim.map((row) => row.hit.relativePath)).size + const budget = files <= 3 ? 5000 : files <= 8 ? 3500 : 2500 + const read = (trim.find((row) => row.def) ?? trim[0]).hit.relativePath + + const out: string[] = [] + if (phase === "exact") out.push(`Found ${total} matches${over ? ` (showing first ${MAX_MATCH})` : ""}`) + if (phase !== "exact") out.push(note) + out.push(`Read ${read}`) + if (warn) out.push(`! regex failed: ${warn}`) + + const by = group(trim) + let used = out.join("\n").length + let cut = false + let firstDef = true + let shown = 0 + for (const [file, list] of by.entries()) { + const chunk = ["", `${file}:`] + let add = 0 + for (const row of list) { + add++ + chunk.push(` Line ${row.hit.lineNumber}: ${line(row.hit.lineContent, row.hit.matchRanges)}`) + if (!row.def) continue + const max = firstDef ? MAX_DEF_FIRST : MAX_DEF_NEXT + firstDef = false + for (const extra of (row.hit.contextAfter ?? 
[]).slice(0, max)) { + chunk.push(` ${line(extra, [])}`) + } + } + const text = chunk.join("\n") + if (used + text.length > budget && shown > 0) { + cut = true + break + } + out.push(...chunk) + used += text.length + shown += add + } + + if (over || cut) { + out.push("") + out.push( + `(Results truncated: showing first ${shown} results. Consider using a more specific path or pattern.)`, + ) + } + + return { + title: params.pattern, + metadata: { + matches: total, + truncated: over || cut, + }, + output: out.join("\n"), + } + }).pipe(Effect.orDie), }), ) diff --git a/packages/opencode/src/tool/grep.txt b/packages/opencode/src/tool/grep.txt index adf583695aef..ebc38d4941a8 100644 --- a/packages/opencode/src/tool/grep.txt +++ b/packages/opencode/src/tool/grep.txt @@ -1,8 +1,8 @@ -- Fast content search tool that works with any codebase size -- Searches file contents using regular expressions -- Supports full regex syntax (eg. "log.*Error", "function\s+\w+", etc.) +- Fast content search tool that uses fuzzy-first indexing and frecency ranking +- Searches file contents with plain text, regex, and typo-tolerant fuzzy fallback +- Supports regex syntax (eg. "log.*Error", "function\s+\w+", etc.) - Filter files by pattern with the include parameter (eg. "*.js", "*.{ts,tsx}") -- Returns file paths and line numbers with at least one match sorted by modification time +- Returns file paths and line numbers, prioritizing likely definitions and high-signal results +- Includes smart retries (query broadening and path suggestions) when exact matches fail - Use this tool when you need to find files containing specific patterns -- If you need to identify/count the number of matches within files, use the Bash tool with `rg` (ripgrep) directly. Do NOT use `grep`. 
- When you are doing an open-ended search that may require multiple rounds of globbing and grepping, use the Task tool instead diff --git a/packages/opencode/src/tool/skill.ts b/packages/opencode/src/tool/skill.ts index 8c41077be5ec..1472b57c7c2c 100644 --- a/packages/opencode/src/tool/skill.ts +++ b/packages/opencode/src/tool/skill.ts @@ -1,8 +1,8 @@ import path from "path" import { pathToFileURL } from "url" import { Effect, Schema } from "effect" -import * as Stream from "effect/Stream" -import { Ripgrep } from "../file/ripgrep" +import { Fff } from "../file/fff" +import { Glob } from "@opencode-ai/core/util/glob" import { Skill } from "../skill" import * as Tool from "./tool" import DESCRIPTION from "./skill.txt" @@ -15,7 +15,6 @@ export const SkillTool = Tool.define( "skill", Effect.gen(function* () { const skill = yield* Skill.Service - const rg = yield* Ripgrep.Service return { description: DESCRIPTION, @@ -39,13 +38,22 @@ export const SkillTool = Tool.define( const dir = path.dirname(info.location) const base = pathToFileURL(dir).href const limit = 10 - const files = yield* rg.files({ cwd: dir, follow: false, hidden: true, signal: ctx.abort }).pipe( - Stream.filter((file) => !file.includes("SKILL.md")), - Stream.map((file) => path.resolve(dir, file)), - Stream.take(limit), - Stream.runCollect, - Effect.map((chunk) => [...chunk].map((file) => `${file}`).join("\n")), - ) + const files = yield* Effect.promise(async () => { + ctx.abort.throwIfAborted() + return ( + await Glob.scan("**/*", { + cwd: dir, + include: "file", + dot: true, + }) + ) + .map((file) => file.replaceAll("\\", "/")) + .filter((file) => Fff.allowed({ rel: file, hidden: true, glob: ["!node_modules/*", "!.git/*"] })) + .filter((file) => !file.includes("SKILL.md")) + .slice(0, limit) + .map((file) => `${path.resolve(dir, file)}`) + .join("\n") + }) return { title: `Loaded skill: ${info.name}`, diff --git a/packages/opencode/test/file/fff.test.ts b/packages/opencode/test/file/fff.test.ts new 
file mode 100644 index 000000000000..dd3284cb3802 --- /dev/null +++ b/packages/opencode/test/file/fff.test.ts @@ -0,0 +1,61 @@ +import { afterEach, describe, expect, test } from "bun:test" +import fs from "fs/promises" +import path from "path" +import { tmpdir } from "../fixture/fixture" +import { Instance } from "../../src/project/instance" +import { Fff } from "../../src/file/fff" + +async function write(file: string, body: string) { + await fs.mkdir(path.dirname(file), { recursive: true }) + await fs.writeFile(file, body) +} + +afterEach(async () => { + await Instance.disposeAll() +}) + +describe("file.fff", () => { + test("allowed respects hidden filter", async () => { + expect(Fff.allowed({ rel: "visible.txt", hidden: true })).toBe(true) + expect(Fff.allowed({ rel: ".opencode/thing.json", hidden: true })).toBe(true) + expect(Fff.allowed({ rel: ".opencode/thing.json", hidden: false })).toBe(false) + }) + + test("search returns empty when nothing matches", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await write(path.join(dir, "match.ts"), "const value = 'other'\n") + }, + }) + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const hits = await Fff.search({ + cwd: tmp.path, + pattern: "needle", + }) + expect(hits).toEqual([]) + }, + }) + }) + + test("tree builds and truncates", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await fs.mkdir(path.join(dir, "a", "b"), { recursive: true }) + await write(path.join(dir, "a", "b", "c.ts"), "export const x = 1\n") + await write(path.join(dir, "a", "d.ts"), "export const y = 1\n") + }, + }) + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const tree = await Fff.tree({ cwd: tmp.path, limit: 1 }) + expect(tree).toContain("a") + expect(tree).toContain("truncated") + }, + }) + }) +}) diff --git a/packages/opencode/test/file/index.test.ts b/packages/opencode/test/file/index.test.ts index 091626be8df8..ac68b78f7a66 100644 
--- a/packages/opencode/test/file/index.test.ts +++ b/packages/opencode/test/file/index.test.ts @@ -710,7 +710,7 @@ describe("file/index Filesystem patterns", () => { directory: tmp.path, fn: async () => { const result = await search({ query: "main", type: "file" }) - expect(result.some((f) => f.includes("main"))).toBe(true) + expect(result.some((f) => f.path.includes("main"))).toBe(true) }, }) }) @@ -725,9 +725,12 @@ describe("file/index Filesystem patterns", () => { const result = await search({ query: "", type: "directory" }) expect(result.length).toBeGreaterThan(0) - // Find first hidden dir index - const firstHidden = result.findIndex((d) => d.split("/").some((p) => p.startsWith(".") && p.length > 1)) - const lastVisible = result.findLastIndex((d) => !d.split("/").some((p) => p.startsWith(".") && p.length > 1)) + const firstHidden = result.findIndex((d) => + d.path.split("/").some((p: string) => p.startsWith(".") && p.length > 1), + ) + const lastVisible = result.findLastIndex( + (d) => !d.path.split("/").some((p: string) => p.startsWith(".") && p.length > 1), + ) if (firstHidden >= 0 && lastVisible >= 0) { expect(firstHidden).toBeGreaterThan(lastVisible) } @@ -744,7 +747,7 @@ describe("file/index Filesystem patterns", () => { await init() const result = await search({ query: "main", type: "file" }) - expect(result.some((f) => f.includes("main"))).toBe(true) + expect(result.some((f) => f.path.includes("main"))).toBe(true) }, }) }) @@ -760,7 +763,7 @@ describe("file/index Filesystem patterns", () => { const result = await search({ query: "", type: "file" }) // Files don't end with / for (const f of result) { - expect(f.endsWith("/")).toBe(false) + expect(f.path.endsWith("/")).toBe(false) } }, }) @@ -777,7 +780,7 @@ describe("file/index Filesystem patterns", () => { const result = await search({ query: "", type: "directory" }) // Directories end with / for (const d of result) { - expect(d.endsWith("/")).toBe(true) + expect(d.path.endsWith("/")).toBe(true) } }, 
}) @@ -807,7 +810,7 @@ describe("file/index Filesystem patterns", () => { const result = await search({ query: ".hidden", type: "directory" }) expect(result.length).toBeGreaterThan(0) - expect(result[0]).toContain(".hidden") + expect(result[0].path).toContain(".hidden") }, }) }) @@ -819,12 +822,13 @@ describe("file/index Filesystem patterns", () => { directory: tmp.path, fn: async () => { await init() - expect(await search({ query: "fresh", type: "file" })).toEqual([]) + const before = await search({ query: "fresh", type: "file" }) + expect(before.map((r) => r.path)).not.toContain("fresh.ts") await fs.writeFile(path.join(tmp.path, "fresh.ts"), "fresh", "utf-8") const result = await search({ query: "fresh", type: "file" }) - expect(result).toContain("fresh.ts") + expect(result.map((r) => r.path)).toContain("fresh.ts") }, }) }) @@ -905,9 +909,9 @@ describe("file/index Filesystem patterns", () => { fn: async () => { await init() const results = await search({ query: "a.ts", type: "file" }) - expect(results).toContain("a.ts") + expect(results.map((r) => r.path)).toContain("a.ts") const results2 = await search({ query: "b.ts", type: "file" }) - expect(results2).not.toContain("b.ts") + expect(results2.map((r) => r.path)).not.toContain("b.ts") }, }) @@ -916,9 +920,9 @@ describe("file/index Filesystem patterns", () => { fn: async () => { await init() const results = await search({ query: "b.ts", type: "file" }) - expect(results).toContain("b.ts") + expect(results.map((r) => r.path)).toContain("b.ts") const results2 = await search({ query: "a.ts", type: "file" }) - expect(results2).not.toContain("a.ts") + expect(results2.map((r) => r.path)).not.toContain("a.ts") }, }) }) @@ -932,7 +936,7 @@ describe("file/index Filesystem patterns", () => { fn: async () => { await init() const results = await search({ query: "before", type: "file" }) - expect(results).toContain("before.ts") + expect(results.map((r) => r.path)).toContain("before.ts") }, }) @@ -946,9 +950,9 @@ 
describe("file/index Filesystem patterns", () => { fn: async () => { await init() const results = await search({ query: "after", type: "file" }) - expect(results).toContain("after.ts") + expect(results.map((r) => r.path)).toContain("after.ts") const stale = await search({ query: "before", type: "file" }) - expect(stale).not.toContain("before.ts") + expect(stale.map((r) => r.path)).not.toContain("before.ts") }, }) }) diff --git a/packages/opencode/test/server/httpapi-file.test.ts b/packages/opencode/test/server/httpapi-file.test.ts index b7425007e152..65675b3fae36 100644 --- a/packages/opencode/test/server/httpapi-file.test.ts +++ b/packages/opencode/test/server/httpapi-file.test.ts @@ -69,7 +69,7 @@ describe("file HttpApi", () => { expect(await text.json()).toContainEqual(expect.objectContaining({ line_number: 1 })) expect(files.status).toBe(200) - expect(await files.json()).toContain("hello.txt") + expect(await files.json()).toContainEqual(expect.objectContaining({ path: "hello.txt" })) expect(symbols.status).toBe(200) expect(await symbols.json()).toEqual([]) diff --git a/packages/opencode/test/tool/glob.test.ts b/packages/opencode/test/tool/glob.test.ts index 028436d2953d..b7c65554e962 100644 --- a/packages/opencode/test/tool/glob.test.ts +++ b/packages/opencode/test/tool/glob.test.ts @@ -1,10 +1,10 @@ import { describe, expect } from "bun:test" +import fs from "fs/promises" import path from "path" -import { Cause, Effect, Exit, Layer } from "effect" +import { Effect, Layer } from "effect" import { GlobTool } from "../../src/tool/glob" import { SessionID, MessageID } from "../../src/session/schema" import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner" -import { Ripgrep } from "../../src/file/ripgrep" import { AppFileSystem } from "@opencode-ai/core/filesystem" import { Truncate } from "@/tool/truncate" import { Agent } from "../../src/agent/agent" @@ -12,15 +12,14 @@ import { provideTmpdirInstance } from "../fixture/fixture" import { 
testEffect } from "../lib/effect" const it = testEffect( - Layer.mergeAll( - CrossSpawnSpawner.defaultLayer, - AppFileSystem.defaultLayer, - Ripgrep.defaultLayer, - Truncate.defaultLayer, - Agent.defaultLayer, - ), + Layer.mergeAll(CrossSpawnSpawner.defaultLayer, AppFileSystem.defaultLayer, Truncate.defaultLayer, Agent.defaultLayer), ) +async function write(file: string, body: string) { + await fs.mkdir(path.dirname(file), { recursive: true }) + await fs.writeFile(file, body) +} + const ctx = { sessionID: SessionID.make("ses_test"), messageID: MessageID.make(""), @@ -36,8 +35,8 @@ describe("tool.glob", () => { it.live("matches files from a directory path", () => provideTmpdirInstance((dir) => Effect.gen(function* () { - yield* Effect.promise(() => Bun.write(path.join(dir, "a.ts"), "export const a = 1\n")) - yield* Effect.promise(() => Bun.write(path.join(dir, "b.txt"), "hello\n")) + yield* Effect.promise(() => write(path.join(dir, "a.ts"), "export const a = 1\n")) + yield* Effect.promise(() => write(path.join(dir, "b.txt"), "hello\n")) const info = yield* GlobTool const glob = yield* info.init() const result = yield* glob.execute( @@ -53,29 +52,4 @@ describe("tool.glob", () => { }), ), ) - - it.live("rejects exact file paths", () => - provideTmpdirInstance((dir) => - Effect.gen(function* () { - const file = path.join(dir, "a.ts") - yield* Effect.promise(() => Bun.write(file, "export const a = 1\n")) - const info = yield* GlobTool - const glob = yield* info.init() - const exit = yield* glob - .execute( - { - pattern: "*.ts", - path: file, - }, - ctx, - ) - .pipe(Effect.exit) - expect(Exit.isFailure(exit)).toBe(true) - if (Exit.isFailure(exit)) { - const err = Cause.squash(exit.cause) - expect(err instanceof Error ? 
err.message : String(err)).toContain("glob path must be a directory") - } - }), - ), - ) }) diff --git a/packages/opencode/test/tool/grep.test.ts b/packages/opencode/test/tool/grep.test.ts index c807d12812a9..82140cc8759a 100644 --- a/packages/opencode/test/tool/grep.test.ts +++ b/packages/opencode/test/tool/grep.test.ts @@ -1,4 +1,5 @@ import { describe, expect } from "bun:test" +import fs from "fs/promises" import path from "path" import { Effect, Layer } from "effect" import { GrepTool } from "../../src/tool/grep" @@ -7,20 +8,18 @@ import { SessionID, MessageID } from "../../src/session/schema" import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner" import { Truncate } from "@/tool/truncate" import { Agent } from "../../src/agent/agent" -import { Ripgrep } from "../../src/file/ripgrep" import { AppFileSystem } from "@opencode-ai/core/filesystem" import { testEffect } from "../lib/effect" const it = testEffect( - Layer.mergeAll( - CrossSpawnSpawner.defaultLayer, - AppFileSystem.defaultLayer, - Ripgrep.defaultLayer, - Truncate.defaultLayer, - Agent.defaultLayer, - ), + Layer.mergeAll(CrossSpawnSpawner.defaultLayer, AppFileSystem.defaultLayer, Truncate.defaultLayer, Agent.defaultLayer), ) +async function write(file: string, body: string) { + await fs.mkdir(path.dirname(file), { recursive: true }) + await fs.writeFile(file, body) +} + const ctx = { sessionID: SessionID.make("ses_test"), messageID: MessageID.make(""), @@ -57,7 +56,7 @@ describe("tool.grep", () => { it.live("no matches returns correct output", () => provideTmpdirInstance((dir) => Effect.gen(function* () { - yield* Effect.promise(() => Bun.write(path.join(dir, "test.txt"), "hello world")) + yield* Effect.promise(() => write(path.join(dir, "test.txt"), "hello world")) const info = yield* GrepTool const grep = yield* info.init() const result = yield* grep.execute( @@ -76,7 +75,7 @@ describe("tool.grep", () => { it.live("finds matches in tmp instance", () => provideTmpdirInstance((dir) 
=> Effect.gen(function* () { - yield* Effect.promise(() => Bun.write(path.join(dir, "test.txt"), "line1\nline2\nline3")) + yield* Effect.promise(() => write(path.join(dir, "test.txt"), "line1\nline2\nline3")) const info = yield* GrepTool const grep = yield* info.init() const result = yield* grep.execute( @@ -91,23 +90,40 @@ describe("tool.grep", () => { ), ) - it.live("supports exact file paths", () => + it.live("broadens multi-word query when exact has no match", () => provideTmpdirInstance((dir) => Effect.gen(function* () { - const file = path.join(dir, "test.txt") - yield* Effect.promise(() => Bun.write(file, "line1\nline2\nline3")) + yield* Effect.promise(() => write(path.join(dir, "test.txt"), "upload completed\n")) const info = yield* GrepTool const grep = yield* info.init() const result = yield* grep.execute( { - pattern: "line2", - path: file, + pattern: "prepare upload", + path: dir, }, ctx, ) - expect(result.metadata.matches).toBe(1) - expect(result.output).toContain(file) - expect(result.output).toContain("Line 2: line2") + expect(result.metadata.matches).toBeGreaterThan(0) + expect(result.output).toContain("Broadened query") + }), + ), + ) + + it.live("suggests path when content has no match", () => + provideTmpdirInstance((dir) => + Effect.gen(function* () { + yield* Effect.promise(() => write(path.join(dir, "src", "server", "auth.ts"), "export const token = 1\n")) + const info = yield* GrepTool + const grep = yield* info.init() + const result = yield* grep.execute( + { + pattern: "src/server/auth.ts", + path: dir, + }, + ctx, + ) + expect(result.metadata.matches).toBe(0) + expect(result.output).toContain("relevant file path") }), ), ) diff --git a/packages/sdk/js/src/gen/types.gen.ts b/packages/sdk/js/src/gen/types.gen.ts index 8eefe5bfe985..fa9a806b3bd1 100644 --- a/packages/sdk/js/src/gen/types.gen.ts +++ b/packages/sdk/js/src/gen/types.gen.ts @@ -1,5 +1,11 @@ // This file is auto-generated by @hey-api/openapi-ts +export type FileSearchItem = { + 
path: string + isDirectory: boolean + gitStatus?: string +} + export type EventServerInstanceDisposed = { type: "server.instance.disposed" properties: { @@ -3180,9 +3186,9 @@ export type FindFilesData = { export type FindFilesResponses = { /** - * File paths + * File search results */ - 200: Array + 200: Array } export type FindFilesResponse = FindFilesResponses[keyof FindFilesResponses] diff --git a/packages/sdk/js/src/v2/gen/sdk.gen.ts b/packages/sdk/js/src/v2/gen/sdk.gen.ts index 2da7c865d770..3c376f7105b7 100644 --- a/packages/sdk/js/src/v2/gen/sdk.gen.ts +++ b/packages/sdk/js/src/v2/gen/sdk.gen.ts @@ -3183,7 +3183,7 @@ export class Find extends HeyApiClient { /** * Find text * - * Search for text patterns across files in the project using ripgrep. + * Search for text patterns across files in the project. */ public text( parameters: { diff --git a/packages/sdk/js/src/v2/gen/types.gen.ts b/packages/sdk/js/src/v2/gen/types.gen.ts index d98d5c6fe18e..4019a170a527 100644 --- a/packages/sdk/js/src/v2/gen/types.gen.ts +++ b/packages/sdk/js/src/v2/gen/types.gen.ts @@ -2007,6 +2007,12 @@ export type ProviderAuthAuthorization = { instructions: string } +export type FileSearchItem = { + path: string + isDirectory: boolean + gitStatus?: string +} + export type Symbol = { name: string kind: number @@ -4716,9 +4722,9 @@ export type FindFilesData = { export type FindFilesResponses = { /** - * File paths + * File search results */ - 200: Array + 200: Array } export type FindFilesResponse = FindFilesResponses[keyof FindFilesResponses] diff --git a/packages/sdk/openapi.json b/packages/sdk/openapi.json index 65c1d810c580..d8691e2d3f8c 100644 --- a/packages/sdk/openapi.json +++ b/packages/sdk/openapi.json @@ -5569,7 +5569,7 @@ } ], "summary": "Find text", - "description": "Search for text patterns across files in the project using ripgrep.", + "description": "Search for text patterns across files in the project.", "responses": { "200": { "description": "Matches", @@ -5708,13 
+5708,13 @@ "description": "Search for files or directories by name or pattern in the project directory.", "responses": { "200": { - "description": "File paths", + "description": "File search results", "content": { "application/json": { "schema": { "type": "array", "items": { - "type": "string" + "$ref": "#/components/schemas/FileSearchItem" } } } @@ -13104,6 +13104,21 @@ }, "required": ["url", "method", "instructions"] }, + "FileSearchItem": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "isDirectory": { + "type": "boolean" + }, + "gitStatus": { + "type": "string" + } + }, + "required": ["path", "isDirectory"] + }, "Symbol": { "type": "object", "properties": { diff --git a/packages/ui/src/hooks/use-filtered-list.tsx b/packages/ui/src/hooks/use-filtered-list.tsx index 2d4e2bdd1aae..62e1840cb4cc 100644 --- a/packages/ui/src/hooks/use-filtered-list.tsx +++ b/packages/ui/src/hooks/use-filtered-list.tsx @@ -14,6 +14,8 @@ export interface FilteredListProps { sortGroupsBy?: (a: { category: string; items: T[] }, b: { category: string; items: T[] }) => number onSelect?: (value: T | undefined, index: number) => void noInitialSelection?: boolean + stale?: boolean + fuzzy?: boolean | ((filter: string) => boolean) } export function useFilteredList(props: FilteredListProps) { @@ -30,11 +32,12 @@ export function useFilteredList(props: FilteredListProps) { async ({ filter, items }) => { const query = filter ?? "" const needle = query.toLowerCase() + const fuzzy = typeof props.fuzzy === "function" ? props.fuzzy(query) : (props.fuzzy ?? 
true) const all = (await Promise.resolve(items)) || [] const result = pipe( all, (x) => { - if (!needle) return x + if (!needle || !fuzzy) return x if (!props.filterKeys && Array.isArray(x) && x.every((e) => typeof e === "string")) { return fuzzysort.go(needle, x).map((x) => x.target) as T[] } @@ -51,8 +54,9 @@ export function useFilteredList(props: FilteredListProps) { ) const flat = createMemo(() => { + const groups = props.stale === false && grouped.loading ? empty : grouped.latest || [] return pipe( - grouped.latest || [], + groups, flatMap((x) => x.items), ) })