This repository was archived by the owner on Jan 13, 2025. It is now read-only.
Merged
316 changes: 160 additions & 156 deletions docs/src/app/aiSearch/page.tsx
@@ -21,44 +21,44 @@ import { generateEmbedding } from '@/lib/aisearch/embeddings'
import { getDbWorker } from '@/lib/aisearch/dbWorker'

function cosineSimilarity(a: number[], b: number[]): number {
  if (a.some(isNaN) || b.some(isNaN)) {
    console.error("NaN values detected in vectors:");
    console.error("Vector A NaN indices:", a.map((val, i) => isNaN(val) ? i : null).filter(x => x !== null));
    console.error("Vector B NaN indices:", b.map((val, i) => isNaN(val) ? i : null).filter(x => x !== null));
    throw new Error("Invalid vectors containing NaN values");
  }

  const dotProduct = a.reduce((sum, val, i) => sum + val * b[i], 0);
  const magnitudeA = Math.sqrt(a.reduce((sum, val) => sum + val * val, 0));
  const magnitudeB = Math.sqrt(b.reduce((sum, val) => sum + val * val, 0));
  return dotProduct / (magnitudeA * magnitudeB);
}
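As a quick sanity check of the helper above (illustrative values, not part of the diff): parallel vectors should score roughly 1 and orthogonal vectors 0. Note the function does not guard against zero-magnitude vectors, which would divide by zero and yield NaN.

  // Illustrative only: exercising cosineSimilarity with toy vectors.
  console.log(cosineSimilarity([1, 2, 3], [2, 4, 6])); // ≈ 1 (same direction)
  console.log(cosineSimilarity([1, 0], [0, 1]));       // 0 (orthogonal)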

export default function Home() {
  const [query, setQuery] = useState('')
  const [aiResponse, setAiResponse] = useState('')
  const [loaderText, setLoaderText] = useState('Loading database ...')
  const [isLoading, setIsLoading] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const recommendedArticles = getRecommendedArticles()
  const searchConfig = getSearchConfig()
  const headerConfig = getHeaderConfig()
  const config = getAkiradocsConfig()
  const [sources, setSources] = useState<Source[]>([])

  const handleGenerateEmbedding = useCallback(async (text: string) => {
    try {
      setIsLoading(true);
      // console.log("Loading model for embedding");
      const embedding = await generateEmbedding(text, (progress) => {});
      return embedding;
    } catch (error) {
      console.error('Error generating embedding:', error);
      throw error;
    } finally {
      setLoaderText('Searching database for relevant information ...')
    }
  }, []);
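The generateEmbedding helper is imported from '@/lib/aisearch/embeddings' and is not part of this diff. A minimal sketch of what a browser-side implementation could look like, assuming it wraps the @xenova/transformers feature-extraction pipeline (the model name and progress wiring below are assumptions, not taken from the repository):

  // Hypothetical sketch; the real @/lib/aisearch/embeddings may differ.
  import { pipeline } from '@xenova/transformers';

  let extractor: any = null;

  export async function generateEmbedding(
    text: string,
    onProgress: (progress: number) => void
  ): Promise<number[]> {
    if (!extractor) {
      // Lazily load a small sentence-embedding model; progress_callback reports download progress.
      extractor = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2', {
        progress_callback: (p: any) => onProgress(p?.progress ?? 0),
      });
    }
    // Mean-pool and normalize to get a fixed-length vector, then copy it into a plain array.
    const output = await extractor(text, { pooling: 'mean', normalize: true });
    return Array.from(output.data as Float32Array);
  }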

  // If AI Search is disabled, show the disabled message
  if (!config.navigation.header.items.find((item: any) => item.href === '/aiSearch')?.show) {
@@ -104,96 +104,98 @@ export default function Home()
    setIsLoading(true)
    setError(null)
    setSources([])

    const startTime = performance.now()

    try {
      // Generate embedding for the query
      const queryEmbedding = await handleGenerateEmbedding(query);
      // console.log("Query embedding:", queryEmbedding)

      // Get database worker
      const worker = await getDbWorker();

      // Get all documents
      const allDocs = await worker.db.query(`
        SELECT path, content, embedding
        FROM documents
        WHERE embedding IS NOT NULL
      `);

      // Calculate similarity scores and filter results
      const similarityThreshold = 0.5;
      const scoredDocs = allDocs
        .map((doc: any) => {
          // Clean the embedding string and parse it
          const cleanEmbeddingStr = doc.embedding.replace(/[\[\]]/g, ''); // Remove square brackets
          const embeddingArray = cleanEmbeddingStr
            .split(',')
            .map((val: string) => {
              const parsed = parseFloat(val.trim());
              if (isNaN(parsed)) {
                console.error(`Invalid embedding value found: "${val}"`);
              }
              return parsed;
            });

          return {
            ...doc,
            similarity_score: cosineSimilarity(queryEmbedding, embeddingArray)
          };
        })
        .filter((doc: any) => doc.similarity_score > similarityThreshold)
        .sort((a: any, b: any) => b.similarity_score - a.similarity_score)
        .slice(0, 5);
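The cleanup above implies the embedding column is stored as bracketed, comma-separated text. If the column actually holds a valid JSON array (an assumption this PR does not confirm), JSON.parse would be a simpler and stricter alternative to the regex-and-split parsing:

  // Hypothetical helper, assuming doc.embedding looks like "[0.12, -0.03, ...]".
  function parseEmbedding(raw: string): number[] {
    const parsed = JSON.parse(raw);
    if (!Array.isArray(parsed) || parsed.some((v) => typeof v !== 'number' || Number.isNaN(v))) {
      throw new Error('Embedding column does not contain a numeric array');
    }
    return parsed as number[];
  }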

console.log("RAG top 5 results:", scoredDocs);

// If no relevant documents found, return early
if (scoredDocs.length === 0) {
setAiResponse("I cannot answer this question from the given documentation. The available content doesn't seem relevant enough to provide a accurate answer.");
setIsLoading(false);
return;
}

setLoaderText('Loading the AI model ...')

// Combine relevant documents into context
const docsContext = scoredDocs
.map((doc: any) => `
Source: ${doc.path}
--- Content ---
${doc.content}
--- End of Content ---
`)
.join('\n');
.join('\n');

      const engine = await CreateMLCEngine(
        "Llama-3.2-1B-Instruct-q4f16_1-MLC",
        {
          initProgressCallback: (progress: any) => {
            console.log(progress)
            setLoaderText(`Loading the AI model ${Math.round(progress.progress * 100)}% ...`)
          }
        },
        {
          context_window_size: 20000,
        }
      );
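CreateMLCEngine comes from WebLLM, which runs the model in the browser and requires WebGPU. A guard like the following (illustrative, not part of the diff) would fail fast with a clearer message on unsupported browsers instead of surfacing a low-level engine error:

  // Illustrative guard; navigator.gpu is only defined in WebGPU-capable browsers.
  if (typeof navigator === 'undefined' || !('gpu' in navigator)) {
    throw new Error('AI search requires a WebGPU-enabled browser such as a recent Chrome or Edge.');
  }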



      const engineLoadTime = performance.now() // Track engine load time
      console.log(`Time taken for engine initialization: ${(engineLoadTime - startTime) / 1000}s`)
      setLoaderText('Processing information and generating AI response ...')
      const messages = [
        {
          role: "system",
          content: `You are a technical documentation assistant for AkiraDocs. Your purpose is to:
1. Provide accurate, helpful answers using ONLY the provided documentation
2. Stay positive and factual based on the documentation provided.
3. Make sure the markdown answer is pretty, clean and easy to read.`
        },
        {
          role: "user",
          content: `
Please provide a helpful answer which is short and concise to the following question using only the provided documentation.

Question: ${query}
@@ -221,41 +223,43 @@ export default function Home()
        }
      ];

      // console.log("Messages:", messages)

      const chunks = await engine.chat.completions.create({
        messages: messages as ChatCompletionMessageParam[],
        stream: true,
        stream_options: { include_usage: false },
        max_tokens: 300,
        temperature: 0.5,
        top_p: 0.8,
        frequency_penalty: 0.3,
        presence_penalty: 0.3,
      });

      let aiContent = "";
      for await (const chunk of chunks) {
        const newContent = chunk.choices[0]?.delta.content || "";
        aiContent += newContent;

        // Process partial content for streaming
        const { cleanResponse } = extractSources(aiContent);
        setAiResponse(cleanResponse);
        setIsLoading(false)
      }

      // Only extract and set sources after streaming is complete
      const { sources } = extractSources(aiContent);
      setSources(sources);

      const endTime = performance.now() // Track total time
      console.log(`Total time taken for AI search: ${(endTime - startTime) / 1000}s`)

    } catch (error) {
      console.error('Search error:', error);
      setError(error instanceof Error ? error.message : 'An error occurred');
    } finally {
      setIsLoading(false);
    }
  }
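extractSources is also defined outside this diff; the way it is called here suggests it splits a sources section out of the streamed markdown and returns the remaining answer plus the referenced paths. A plausible sketch, assuming the model is prompted to end its answer with a "Sources:" list of documentation paths (the marker format and Source shape are assumptions):

  // Hypothetical implementation; the real helper in the repository may differ.
  interface Source { path: string }

  function extractSources(markdown: string): { cleanResponse: string; sources: Source[] } {
    const [answer, sourceBlock] = markdown.split(/\n+Sources:\s*\n/i);
    const sources = (sourceBlock ?? '')
      .split('\n')
      .map((line) => line.replace(/^[-*\d.\s]+/, '').trim()) // strip list bullets and numbering
      .filter(Boolean)
      .map((path) => ({ path }));
    return { cleanResponse: (answer ?? markdown).trim(), sources };
  }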

@@ -265,49 +265,49 @@ export default function Home()

  return (
    <div className="flex flex-col min-h-screen">
      <Header {...headerConfig} currentLocale={`en`} currentType={`aiSearch`} />
      <div className="min-h-screen py-12 px-4 sm:px-6 lg:px-8">

        <div className="max-w-4xl mx-auto">
          <SearchHeader
            logo={searchConfig.logo}
            title={searchConfig.title}
            description={searchConfig.description}
          />
          <div className="flex flex-col sm:flex-row space-y-4 sm:space-y-0 sm:space-x-4 justify-center items-center mb-12">
            <SearchBar
              query={query}
              onQueryChange={setQuery}
              onSubmit={handleSearch}
            />
            <LegacyDocsToggle />
          </div>

          <AnimatePresence>
            {isLoading ? (
              <div className="flex flex-col justify-center items-center space-y-4 py-12">
                <AILoader />
                <p className="text-muted-foreground text-sm animate-pulse">
                  {loaderText}
                </p>
              </div>
            ) : error ? (
              <div className="text-center p-4 rounded-lg bg-red-50 text-red-800">
                <p className="text-lg font-medium mb-2">Error</p>
                <p>{error}</p>
              </div>
            ) : aiResponse ? (
              <AIResponse
                response={aiResponse}
                sources={sources}
                onBack={handleBack}
              />
            ) : recommendedArticles && (
              <RecommendedArticles articles={recommendedArticles} />
            )}
          </AnimatePresence>
        </div>
      </div>
    </div>
  )
}