Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions GUI/Dockerfile.dev
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
FROM node:22.0.0-alpine AS image
WORKDIR /app
COPY ./package.json .
COPY ./package.json ./package-lock.json ./

FROM image AS build
RUN npm install --legacy-peer-deps --mode=development
RUN npm ci --legacy-peer-deps
COPY . .
RUN ./node_modules/.bin/vite build --mode=development

Expand Down
3 changes: 2 additions & 1 deletion GUI/src/pages/PromptConfigurations/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -74,9 +74,10 @@ const PromptConfigurations: FC = () => {
<FormTextarea
label={t('promptConfigurations.promptLabel')}
name="promptText"
maxLength={10000}
value={promptText}
onChange={(e) => setPromptText(e.target.value)}
minRows={10}
maxRows={15}
/>

<div className="form-actions">
Expand Down
1 change: 1 addition & 0 deletions GUI/src/pages/TestModel/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ const TestLLM: FC = () => {
label=""
name=""
maxLength={1000}
maxRows={15}
onChange={(e) => handleChange('text', e.target.value)}
showMaxLength={true}
/>
Expand Down
59 changes: 46 additions & 13 deletions src/vector_indexer/document_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
import json
from pathlib import Path
from typing import List
from urllib.parse import urlparse

from loguru import logger

from vector_indexer.config.config_loader import VectorIndexerConfig
Expand All @@ -20,10 +22,19 @@ class DocumentLoadError(Exception):
class DocumentLoader:
"""Handles document discovery and loading from datasets folder."""

def __init__(self, config: VectorIndexerConfig):
def __init__(self, config: VectorIndexerConfig) -> None:
    """Initialize the loader with indexer settings.

    Args:
        config: Vector-indexer configuration; ``dataset_base_path`` is
            read here as the root folder for document discovery.
    """
    self.config = config
    # Root directory under which dataset collections are discovered.
    self.datasets_path = Path(config.dataset_base_path)

@staticmethod
def _is_valid_url(url: str) -> bool:
"""Validate that a URL has a proper scheme and network location."""
try:
parsed = urlparse(url)
return parsed.scheme in ("http", "https") and bool(parsed.netloc)
except Exception:
return False

def discover_all_documents(self) -> List[DocumentInfo]:
"""
Optimized document discovery using pathlib.glob for better performance.
Expand Down Expand Up @@ -88,22 +99,44 @@ def discover_all_documents(self) -> List[DocumentInfo]:

# Check metadata file exists
metadata_file = hash_dir / self.config.metadata_file
if metadata_file.exists():
documents.append(
DocumentInfo(
document_hash=content_hash, # Use content hash consistently
cleaned_txt_path=str(cleaned_file),
source_meta_path=str(metadata_file),
dataset_collection=collection_name,
)
if not metadata_file.exists():
logger.warning(
f"Skipping document in {hash_dir.name}: "
f"missing {self.config.metadata_file}"
)
logger.debug(
f"Found document: {content_hash[:12]}... in collection: {collection_name}"
continue

# Validate source_url before accepting the document
try:
with open(metadata_file, "r", encoding="utf-8") as mf:
meta = json.load(mf)
source_url = meta.get("source_url") or ""
except Exception as e:
logger.warning(
f"Skipping document in {hash_dir.name}: "
f"failed to read metadata: {e}"
)
else:
continue

if not self._is_valid_url(source_url):
logger.warning(
f"Skipping document in {hash_dir.name}: missing {self.config.metadata_file}"
f"Skipping document in {hash_dir.name}: "
f"invalid source_url '{source_url}'"
)
continue

documents.append(
DocumentInfo(
document_hash=content_hash, # Use content hash consistently
cleaned_txt_path=str(cleaned_file),
source_meta_path=str(metadata_file),
dataset_collection=collection_name,
)
)
logger.debug(
f"Found document: {content_hash[:12]}... "
f"in collection: {collection_name}"
)

logger.info(f"Discovered {len(documents)} documents for processing")
return documents
Expand Down
Loading