Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions ChatWithTree/.markdownlint.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"default": true,
"MD007": { "indent": 4 },
"MD013": false,
"MD033": { "allowed_elements": [ "a", "p", "image" ] },
"MD051": false,
"no-hard-tabs": false,
"whitespace": false
}
29 changes: 29 additions & 0 deletions ChatWithTree/.pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
files: ^ChatWithTree/
exclude: ^ChatWithTree/ChatWithTree.gpr.py
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: trailing-whitespace
args: [--markdown-linebreak-ext=md]
- id: end-of-file-fixer
- id: check-yaml
args: [--unsafe]
- id: check-added-large-files
- id: check-json
- id: pretty-format-json
args: [--autofix]
- id: check-merge-conflict
- id: check-symlinks
- id: detect-private-key
- id: mixed-line-ending
args: [--fix=lf]
- repo: https://github.com/pycqa/isort
rev: 5.13.2
hooks:
- id: isort
- repo: https://github.com/pycqa/flake8
rev: 7.0.0
hooks:
- id: flake8
        args: [--max-line-length=89]
101 changes: 101 additions & 0 deletions ChatWithTree/AsyncChatService.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
import logging
import queue
from concurrent.futures import ThreadPoolExecutor
from typing import Iterator, Optional, Tuple

from chatwithllm import YieldType
from ChatWithTreeBot import ChatBot

logger = logging.getLogger("AsyncChatService")

# Alias for the yielded items from the ChatBot generator
ReplyItem = Tuple[YieldType, str]


class AsyncChatService:
    """
    Manages a single-worker ThreadPoolExecutor for thread-local database access.

    All database work (opening the database and running queries) is funneled
    through one dedicated worker thread so thread-local DB handles are always
    touched from the same thread. Results are handed back to the GTK (main)
    thread through a thread-safe queue that is polled without blocking.
    """

    def __init__(self, database_name: str) -> None:
        """Create the service and open the database on the worker thread.

        Args:
            database_name: Name of the database the ChatBot should open.
        """
        self.chat_logic = ChatBot(database_name)

        # Exactly ONE worker guarantees every DB call runs on the same
        # thread, which is required for thread-local database access.
        self.executor: ThreadPoolExecutor = ThreadPoolExecutor(
            max_workers=1,
            thread_name_prefix="DBWorker"
        )

        # Thread-safe queue carrying (YieldType, str) items; None is the
        # sentinel that signals job completion.
        self.result_queue: queue.Queue[Optional[ReplyItem]] = queue.Queue()

        # True while a query is running on the worker thread.
        self._is_processing = False

        # Submit the open_database call as the first task to the single thread.
        self._initialize_database()

    def _initialize_database(self) -> None:
        """Runs the blocking open_database() call on the worker thread."""

        def init_task() -> None:
            logger.debug("Running open_database on the dedicated worker thread.")
            self.chat_logic.open_database_for_chat()

        # Blocking wait for the database to open on the worker thread.
        future = self.executor.submit(init_task)
        future.result()

    def is_processing(self) -> bool:
        """Called by the GTK thread to check if the job is running."""
        return self._is_processing

    def get_next_result_from_queue(self) -> Optional[ReplyItem]:
        """Called by the GTK thread to pull a result without blocking.

        Returns:
            The next (YieldType, str) item, or None. NOTE: None is returned
            both when the queue is empty and as the end-of-job sentinel;
            callers distinguish via is_processing().
        """
        try:
            return self.result_queue.get_nowait()
        except queue.Empty:
            return None

    def start_query(self, query: str) -> None:
        """
        Called by the GTK thread to submit the job to the worker.

        Ignores the request (with a warning) if a query is already running.
        """
        if self._is_processing:
            # Use the module logger, not the root logger, for consistency
            # with the rest of this module.
            logger.warning("Query already running. Ignoring new query.")
            return

        self._is_processing = True

        # Submit the synchronous work function to the dedicated executor.
        # This will block the single worker thread until the job is done.
        self.executor.submit(self._run_and_pipe_results, query)

    def _run_and_pipe_results(self, query: str) -> None:
        """
        Worker function: Runs synchronously on the dedicated executor thread.
        Pipes the synchronous generator output to the queue.
        """
        try:
            # Get the synchronous generator from the ChatBot
            reply_iterator: Iterator[ReplyItem] = self.chat_logic.get_reply(query)

            for reply in reply_iterator:
                self.result_queue.put(reply)

        except Exception as e:
            logger.exception("Error while processing query.")
            # BUG FIX: Queue.put() takes a single item; the pair must be
            # wrapped in a tuple. Passing two positional arguments made the
            # message string act as the (truthy) 'block' flag, so consumers
            # received a bare YieldType instead of a (YieldType, str) item.
            self.result_queue.put(
                (YieldType.FINAL,
                 f"ERROR: {type(e).__name__} during processing."))

        finally:
            # Always put the sentinel and set status to finished
            self.result_queue.put(None)  # Sentinel: None signals job completion
            self._is_processing = False

    def stop_worker(self) -> None:
        """Shuts down the executor pool."""
        # Optional: Submit close_database to ensure it runs on the worker thread,
        # but needs careful handling as shutdown might be concurrent.

        # We rely on the executor's shutdown mechanism for cleanup.
        self.executor.shutdown(wait=True)
25 changes: 25 additions & 0 deletions ChatWithTree/ChatWithTree.gpr.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# ------------------------------------------------------------------------
#
# Register the Gramplet ChatWithTree
#
# ------------------------------------------------------------------------
# Register the Gramplet with Gramps. All keyword arguments use the
# consistent PEP 8 form (no spaces around '=' in keyword arguments) and
# double-quoted strings, matching the dominant style of this call.
register(
    GRAMPLET,
    id="ChatWithTree",  # Unique ID for your addon
    name=_("Chat With Tree Interactive Addon"),  # Display name in Gramps, translatable
    description=_("Chat With Tree with the help of AI Large Language Model, needs litellm module"),
    version="0.0.24",
    gramps_target_version="6.0",  # Specify the Gramps version you are targeting
    status=EXPERIMENTAL,
    audience=DEVELOPER,
    fname="ChatWithTree.py",  # The main Python file for your Gramplet
    # The 'gramplet' argument points to the class name in your main file
    gramplet="ChatWithTreeClass",
    gramplet_title=_("Chat With Tree"),
    authors=["Melle Koning"],
    authors_email=["mellekoning@gmail.com"],
    height=18,
    # addon needs litellm python module
    requires_mod=["litellm"],
    navtypes=["Dashboard"],
)
Loading