Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 11 additions & 1 deletion src/basic_memory/cli/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,17 @@ def app_callback(
# Skip for 'mcp' command - it has its own lifespan that handles initialization
# Skip for API-using commands (status, sync, etc.) - they handle initialization via deps.py
# Skip for 'reset' command - it manages its own database lifecycle
skip_init_commands = {"doctor", "mcp", "status", "sync", "project", "tool", "reset", "reindex"}
skip_init_commands = {
"doctor",
"mcp",
"status",
"sync",
"project",
"tool",
"reset",
"reindex",
"watch",
}
if (
not version
and ctx.invoked_subcommand is not None
Expand Down
3 changes: 2 additions & 1 deletion src/basic_memory/cli/commands/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""CLI commands for basic-memory."""

from . import status, db, doctor, import_memory_json, mcp, import_claude_conversations
from . import import_claude_projects, import_chatgpt, tool, project, format, schema
from . import import_claude_projects, import_chatgpt, tool, project, format, schema, watch

__all__ = [
"status",
Expand All @@ -16,4 +16,5 @@
"project",
"format",
"schema",
"watch",
]
16 changes: 4 additions & 12 deletions src/basic_memory/cli/commands/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,9 +111,7 @@ def reindex(
embeddings: bool = typer.Option(
False, "--embeddings", "-e", help="Rebuild vector embeddings (requires semantic search)"
),
search: bool = typer.Option(
False, "--search", "-s", help="Rebuild full-text search index"
),
search: bool = typer.Option(False, "--search", "-s", help="Rebuild full-text search index"),
project: str = typer.Option(
None, "--project", "-p", help="Reindex a specific project (default: all)"
),
Expand Down Expand Up @@ -193,12 +191,8 @@ async def _reindex(app_config, search: bool, embeddings: bool, project: str | No
project_path = Path(proj.path)
entity_parser = EntityParser(project_path)
markdown_processor = MarkdownProcessor(entity_parser, app_config=app_config)
file_service = FileService(
project_path, markdown_processor, app_config=app_config
)
search_service = SearchService(
search_repository, entity_repository, file_service
)
file_service = FileService(project_path, markdown_processor, app_config=app_config)
search_service = SearchService(search_repository, entity_repository, file_service)

with Progress(
SpinnerColumn(),
Expand All @@ -212,9 +206,7 @@ async def _reindex(app_config, search: bool, embeddings: bool, project: str | No
def on_progress(entity_id, index, total):
progress.update(task, total=total, completed=index)

stats = await search_service.reindex_vectors(
progress_callback=on_progress
)
stats = await search_service.reindex_vectors(progress_callback=on_progress)
progress.update(task, completed=stats["total_entities"])

console.print(
Expand Down
97 changes: 97 additions & 0 deletions src/basic_memory/cli/commands/watch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
"""Watch command - run file watcher as a standalone long-running process."""

import asyncio
import os
import signal
import sys
from typing import Optional

import typer
from loguru import logger

from basic_memory import db
from basic_memory.cli.app import app
from basic_memory.cli.container import get_container
from basic_memory.config import ConfigManager
from basic_memory.services.initialization import initialize_app
from basic_memory.sync.coordinator import SyncCoordinator


async def run_watch(project: Optional[str] = None) -> None:
    """Run the file watcher as a long-running process.

    This is the async core of the watch command. It:
    1. Initializes the app (DB migrations + project reconciliation)
    2. Validates and sets project constraint if --project given
    3. Creates a SyncCoordinator with quiet=False for Rich console output
    4. Blocks until SIGINT/SIGTERM, then shuts down cleanly

    Args:
        project: Optional project name; when given, the watcher is constrained
            to that single project via the BASIC_MEMORY_MCP_PROJECT env var.

    Raises:
        typer.Exit: If ``project`` does not name a known project.
    """
    container = get_container()
    config = container.config

    sync_coordinator = None

    # Everything that acquires resources runs inside try/finally so DB
    # resources are cleaned up on all exit paths — including a failure
    # inside initialize_app itself (previously it ran *before* the try,
    # so a failed migration skipped db.shutdown_db()) and early exits
    # from invalid --project names.
    try:
        # --- Initialization (DB migrations + project reconciliation) ---
        await initialize_app(config)

        # --- Project constraint ---
        if project:
            config_manager = ConfigManager()
            project_name, _ = config_manager.get_project(project)
            if not project_name:
                typer.echo(f"No project found named: {project}", err=True)
                raise typer.Exit(1)

            # Downstream services read this env var to scope sync to one project.
            os.environ["BASIC_MEMORY_MCP_PROJECT"] = project_name
            logger.info(f"Watch constrained to project: {project_name}")

        # --- Sync coordinator ---
        # quiet=False so file change events are printed to the terminal
        sync_coordinator = SyncCoordinator(config=config, should_sync=True, quiet=False)

        # --- Signal handling ---
        shutdown_event = asyncio.Event()

        def _signal_handler() -> None:
            logger.info("Shutdown signal received")
            shutdown_event.set()

        loop = asyncio.get_running_loop()

        # Windows ProactorEventLoop does not support add_signal_handler;
        # fall back to the stdlib signal module which works cross-platform.
        # (signal.signal handlers run in the main thread between bytecodes,
        # so calling Event.set() from one is safe here.)
        try:
            for sig in (signal.SIGINT, signal.SIGTERM):
                loop.add_signal_handler(sig, _signal_handler)
        except NotImplementedError:
            for sig in (signal.SIGINT, signal.SIGTERM):
                signal.signal(sig, lambda _signum, _frame: _signal_handler())

        # --- Run: block here until a shutdown signal arrives ---
        await sync_coordinator.start()
        logger.info("Watch service running, press Ctrl+C to stop")
        await shutdown_event.wait()
    finally:
        # stop() only if the coordinator was created; shutdown_db is called
        # unconditionally — NOTE(review): assumes db.shutdown_db() tolerates
        # a partially/never-initialized DB — TODO confirm.
        if sync_coordinator is not None:
            await sync_coordinator.stop()
        await db.shutdown_db()
        logger.info("Watch service stopped")


@app.command()
def watch(
    project: Optional[str] = typer.Option(None, help="Restrict watcher to a single project"),
) -> None:
    """Run file watcher as a long-running process (no MCP server).

    Watches for file changes in project directories and syncs them to the
    database. Useful for running Basic Memory sync alongside external tools
    that don't use the MCP server.
    """
    # NOTE: this docstring is also the CLI help text typer shows users —
    # edits to it are user-visible.
    # On Windows, use SelectorEventLoop to avoid ProactorEventLoop cleanup issues
    # (Proactor also lacks loop.add_signal_handler; run_watch handles the fallback).
    if sys.platform == "win32": # pragma: no cover
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    # Blocks until SIGINT/SIGTERM; all async setup/teardown lives in run_watch.
    asyncio.run(run_watch(project=project))
3 changes: 1 addition & 2 deletions src/basic_memory/repository/openai_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,7 @@ async def _get_client(self) -> Any:
from openai import AsyncOpenAI
except ImportError as exc: # pragma: no cover - covered via monkeypatch tests
raise SemanticDependenciesMissingError(
"OpenAI dependency is missing. "
"Reinstall basic-memory: pip install basic-memory"
"OpenAI dependency is missing. Reinstall basic-memory: pip install basic-memory"
) from exc

api_key = self._api_key or os.getenv("OPENAI_API_KEY")
Expand Down
4 changes: 1 addition & 3 deletions src/basic_memory/repository/postgres_search_repository.py
Original file line number Diff line number Diff line change
Expand Up @@ -311,9 +311,7 @@ async def _ensure_vector_tables(self) -> None:
f"provider expects {self._vector_dimensions}. "
"Dropping and recreating search_vector_embeddings."
)
await session.execute(
text("DROP TABLE IF EXISTS search_vector_embeddings")
)
await session.execute(text("DROP TABLE IF EXISTS search_vector_embeddings"))

await session.execute(
text(
Expand Down
16 changes: 4 additions & 12 deletions src/basic_memory/repository/search_repository_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -904,13 +904,9 @@ async def _search_vector_only(
)
# Use (id, type) tuples to avoid collisions between different
# search_index row types that share the same auto-increment id.
allowed_keys = {
(row.id, row.type) for row in filtered_rows if row.id is not None
}
allowed_keys = {(row.id, row.type) for row in filtered_rows if row.id is not None}
search_index_rows = {
k: v
for k, v in search_index_rows.items()
if (v.id, v.type) in allowed_keys
k: v for k, v in search_index_rows.items() if (v.id, v.type) in allowed_keys
}

ranked_rows: list[SearchIndexRow] = []
Expand Down Expand Up @@ -1077,17 +1073,13 @@ async def _search_hybrid(
for rank, row in enumerate(fts_results, start=1):
if row.id is None:
continue
fused_scores[row.id] = fused_scores.get(row.id, 0.0) + (
1.0 / (RRF_K + rank)
)
fused_scores[row.id] = fused_scores.get(row.id, 0.0) + (1.0 / (RRF_K + rank))
rows_by_id[row.id] = row

for rank, row in enumerate(vector_results, start=1):
if row.id is None:
continue
fused_scores[row.id] = fused_scores.get(row.id, 0.0) + (
1.0 / (RRF_K + rank)
)
fused_scores[row.id] = fused_scores.get(row.id, 0.0) + (1.0 / (RRF_K + rank))
rows_by_id[row.id] = row

ranked = sorted(fused_scores.items(), key=lambda item: item[1], reverse=True)
Expand Down
3 changes: 1 addition & 2 deletions src/basic_memory/repository/sqlite_search_repository.py
Original file line number Diff line number Diff line change
Expand Up @@ -339,8 +339,7 @@ async def _ensure_sqlite_vec_loaded(self, session) -> None:
import sqlite_vec
except ImportError as exc:
raise SemanticDependenciesMissingError(
"sqlite-vec package is missing. "
"Reinstall basic-memory: pip install basic-memory"
"sqlite-vec package is missing. Reinstall basic-memory: pip install basic-memory"
) from exc

async with self._sqlite_vec_lock:
Expand Down
4 changes: 3 additions & 1 deletion src/basic_memory/services/initialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,11 +71,13 @@ async def reconcile_projects_with_config(app_config: BasicMemoryConfig):

async def initialize_file_sync(
app_config: BasicMemoryConfig,
quiet: bool = True,
) -> None:
"""Initialize file synchronization services. This function starts the watch service and does not return

Args:
app_config: The Basic Memory project configuration
quiet: Whether to suppress Rich console output (True for MCP, False for CLI watch)

Returns:
The watch service task that's monitoring file changes
Expand All @@ -101,7 +103,7 @@ async def initialize_file_sync(
watch_service = WatchService(
app_config=app_config,
project_repository=project_repository,
quiet=True,
quiet=quiet,
)

# Get active projects
Expand Down
3 changes: 2 additions & 1 deletion src/basic_memory/sync/coordinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ class SyncCoordinator:
config: BasicMemoryConfig
should_sync: bool = True
skip_reason: Optional[str] = None
quiet: bool = True

# Internal state (not constructor args)
_status: SyncStatus = field(default=SyncStatus.NOT_STARTED, init=False)
Expand Down Expand Up @@ -96,7 +97,7 @@ async def start(self) -> None:
async def _file_sync_runner() -> None: # pragma: no cover
"""Run the file sync service."""
try:
await initialize_file_sync(self.config)
await initialize_file_sync(self.config, quiet=self.quiet)
except asyncio.CancelledError:
logger.debug("File sync cancelled")
raise
Expand Down
Loading
Loading