diff --git a/blazor-toc.html b/blazor-toc.html
index d5f1cfb51b..05dac09e99 100644
--- a/blazor-toc.html
+++ b/blazor-toc.html
@@ -497,6 +497,8 @@
Toolbar items
@@ -1489,6 +1491,16 @@
Messages
+ Chat Bot Integrations
+
+
Time break
Timestamp
Typing indicator
diff --git a/blazor/ai-assistview/ai-integrations/gemini-integration.md b/blazor/ai-assistview/ai-integrations/gemini-integration.md
index e144fa6af0..d66a3e8deb 100644
--- a/blazor/ai-assistview/ai-integrations/gemini-integration.md
+++ b/blazor/ai-assistview/ai-integrations/gemini-integration.md
@@ -29,7 +29,7 @@ Follow the Syncfusion AI AssistView [Getting Started](../getting-started) guide
Install the required packages:
-1. Install the `Gemini AI` nuget package in the application.
+* Install the `Gemini AI` nuget package in the application.
```bash
@@ -37,7 +37,7 @@ Nuget\Install-Package Mscc.GenerativeAI
```
-2. Install the `Markdig` nuget packages in the application.
+* Install the `Markdig` nuget packages in the application.
```bash
@@ -59,7 +59,7 @@ Nuget\Install-Package Markdig
## Gemini AI with AI AssistView
-Modify the razor file to integrate the Gemini AI with the AI AssistView component.
+Modify the Razor file to integrate the Gemini AI with the AI AssistView component.
+* Update your Gemini API key securely in the configuration:
diff --git a/blazor/ai-assistview/ai-integrations/ollama-llm-integration.md b/blazor/ai-assistview/ai-integrations/ollama-llm-integration.md
new file mode 100644
index 0000000000..ff048615a5
--- /dev/null
+++ b/blazor/ai-assistview/ai-integrations/ollama-llm-integration.md
@@ -0,0 +1,282 @@
+---
+layout: post
+title: LLM Model with Blazor AI AssistView Component | Syncfusion
+description: Checkout and learn about Integration of LLM Model with Blazor AI AssistView component in Blazor WebAssembly Application.
+platform: Blazor
+control: AI AssistView
+documentation: ug
+---
+
+# Integrate LLM via Ollama with Blazor AI AssistView Component
+
+The AI AssistView component integrates with [LLM via Ollama](https://ollama.com) to enable advanced conversational AI features in your Blazor application. The component acts as a user interface where user prompts are sent to the selected LLM model via API calls, providing natural language understanding and context-aware responses.
+
+## Prerequisites
+
+Before starting, ensure you have the following:
+
+* [Ollama](https://ollama.com) installed to run and manage LLM models locally.
+
+* **Syncfusion AI AssistView**: The [Syncfusion.Blazor.InteractiveChat](https://www.nuget.org/packages/Syncfusion.Blazor.InteractiveChat) package installed.
+
+* [Markdig](https://www.nuget.org/packages/Markdig) package: For parsing Markdown responses.
+
+## Set Up the AI AssistView Component
+
+Follow the Syncfusion AI AssistView [Getting Started](../getting-started) guide to configure and render the AI AssistView component in the application, and ensure that the prerequisites are met.
+
+## Install Dependency
+
+To install the Markdig package, run `NuGet\Install-Package Markdig` in the Package Manager Console.
+
+## Configuring Ollama
+
+Install Ollama for your operating system:
+
+{% tabs %}
+{% highlight ts tabtitle="Windows" %}
+
+1. Visit [Windows](https://ollama.com/download)
+2. Click `Download for Windows` to get the `.exe installer`.
+3. Run `OllamaSetup.exe` and follow the wizard to install.
+
+{% endhighlight %}
+
+{% highlight ts tabtitle="macOS" %}
+
+1. Visit [macOS](https://ollama.com/download/mac)
+2. Click `Download for macOS` to get `.dmg file`
+3. Install it by following the wizard.
+
+{% endhighlight %}
+
+{% highlight ts tabtitle="Linux" %}
+
+1. Visit [Linux](https://ollama.com/download/linux)
+2. Run the following command to install Ollama on your system:
+
+ curl -fsSL https://ollama.com/install.sh | sh
+
+{% endhighlight %}
+{% endtabs %}
+
+## Download and run an Ollama model
+
+* Download and run a model using the following command. Replace `deepseek-r1` with your preferred model (e.g., `llama3`, `phi4`). See the [Ollama model](https://ollama.com/search) library for available models.
+
+```bash
+
+ollama run deepseek-r1
+
+```
+
+* After the model download completes, start the Ollama server to make the model accessible:
+
+```bash
+
+ollama serve
+
+```
+
+## Configure AI AssistView with Ollama
+
+To integrate Ollama with the Syncfusion Blazor AI AssistView component in your Blazor application:
+
+* Configure the AI services in the `Program.cs` file to register the Ollama client and Syncfusion Blazor services.
+
+{% tabs %}
+{% highlight cs tabtitle="Program.cs" %}
+
+
+using Blazor_AssistView_Ollama.Components;
+using Microsoft.Extensions.Caching.Memory;
+using Microsoft.Extensions.AI;
+using OllamaSharp;
+using Syncfusion.Blazor;
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Add services to the container.
+builder.Services.AddRazorComponents()
+ .AddInteractiveServerComponents();
+builder.Services.AddSyncfusionBlazor();
+
+builder.Services.AddHttpClient();
+
+builder.Services.AddDistributedMemoryCache();
+
+// Ollama configuration
+builder.Services.AddChatClient(new OllamaApiClient(new Uri("http://localhost:11434/"), "llama3.2"))
+ .UseDistributedCache()
+ .UseLogging();
+
+var app = builder.Build();
+
+// Configure the HTTP request pipeline.
+if (!app.Environment.IsDevelopment())
+{
+ app.UseExceptionHandler("/Error", createScopeForErrors: true);
+ // The default HSTS value is 30 days. You may want to change this for production scenarios, see https://aka.ms/aspnetcore-hsts.
+ app.UseHsts();
+}
+
+app.UseHttpsRedirection();
+
+
+app.UseAntiforgery();
+
+app.MapStaticAssets();
+app.MapRazorComponents()
+ .AddInteractiveServerRenderMode();
+
+app.Run();
+
+{% endhighlight %}
+{% endtabs %}
+
+* Modify the `Index.razor` file (or a dedicated component) to host the integration logic and handle prompt requests.
+
+{% tabs %}
+{% highlight razor %}
+
+@rendermode InteractiveServer
+@using Markdig
+@using Microsoft.Extensions.AI
+@using Syncfusion.Blazor.Navigations
+
+
+
+
+
+
+
+
+
+
AI Assistance
+
Live responses streamed from your local Ollama model.
+
+
+
+
+
+
+
+
+
+
+
+
+
+@code {
+ private SfAIAssistView AIAssist = new();
+ private bool responseStopped = false;
+ private bool isStreaming = false;
+
+ // Suggestion list
+ private List suggestions = new()
+ {
+ "What are the best tools for organizing my tasks?",
+ "How can I maintain work-life balance effectively?"
+ };
+
+ [Inject] private IChatClient ChatClient { get; set; } = default!;
+
+ private async Task PromptRequest(AssistViewPromptRequestedEventArgs args)
+ {
+ responseStopped = false;
+ isStreaming = true; // turn on Stop button
+
+ try
+ {
+ var pipeline = new MarkdownPipelineBuilder()
+ .UseAdvancedExtensions()
+ .UsePipeTables()
+ .UseTaskLists()
+ .Build();
+
+ var messages = new List
+ {
+ new(ChatRole.System, "You are a helpful AI assistant. Respond with clear, concise explanations. Use Markdown when helpful."),
+ new(ChatRole.User, args.Prompt)
+ };
+
+ var buffer = new System.Text.StringBuilder();
+ const int updateRateChars = 5;
+ int lastLenPushed = 0;
+
+ await foreach (var update in ChatClient.GetStreamingResponseAsync(messages))
+ {
+ if (responseStopped) break;
+ if (string.IsNullOrEmpty(update?.Text)) continue;
+
+ buffer.Append(update.Text);
+
+ if (buffer.Length - lastLenPushed >= updateRateChars)
+ {
+ string html = Markdown.ToHtml(buffer.ToString(), pipeline);
+ await AIAssist.UpdateResponseAsync(html);
+ await AIAssist.ScrollToBottomAsync();
+ lastLenPushed = buffer.Length;
+ }
+ }
+
+ if (!responseStopped)
+ {
+ string finalHtml = Markdown.ToHtml(buffer.ToString(), pipeline);
+ await AIAssist.UpdateResponseAsync(finalHtml);
+ await AIAssist.ScrollToBottomAsync();
+ }
+
+ args.PromptSuggestions = suggestions;
+ }
+ catch (Exception ex)
+ {
+ await AIAssist.UpdateResponseAsync($"Error generating response: {ex.Message}");
+ await AIAssist.ScrollToBottomAsync();
+ }
+ finally
+ {
+ responseStopped = false;
+ isStreaming = false; // turn off Stop button once streaming ends
+ StateHasChanged();
+ }
+ }
+
+ private void ToolbarItemClicked(AssistViewToolbarItemClickedEventArgs args)
+ {
+ // Handle Refresh
+ if (args.Item.IconCss == "e-icons e-refresh")
+ {
+ AIAssist.Prompts.Clear();
+
+ AIAssist.PromptSuggestions = suggestions;
+ }
+ }
+
+ private void HandleStopResponse(ResponseStoppedEventArgs args)
+ {
+ responseStopped = true;
+ }
+}
+
+
+
+{% endhighlight %}
+{% endtabs %}
+
+
\ No newline at end of file
diff --git a/blazor/ai-assistview/ai-integrations/openai-integration.md b/blazor/ai-assistview/ai-integrations/openai-integration.md
index 661836d8a6..608dbfdb6d 100644
--- a/blazor/ai-assistview/ai-integrations/openai-integration.md
+++ b/blazor/ai-assistview/ai-integrations/openai-integration.md
@@ -29,7 +29,7 @@ Follow the Syncfusion AI AssistView [Getting Started](../getting-started) guide
Install the required packages:
-1. Install the `OpenAI` and `Azure` nuget packages in the application.
+* Install the `OpenAI` and `Azure` nuget packages in the application.
```bash
@@ -39,7 +39,7 @@ NuGet\Install-Package Azure.Core
```
-2. Install the `Markdig` nuget packages in the application.
+* Install the `Markdig` nuget packages in the application.
```bash
diff --git a/blazor/ai-assistview/images/llm-integration.png b/blazor/ai-assistview/images/llm-integration.png
new file mode 100644
index 0000000000..c72e1f85c7
Binary files /dev/null and b/blazor/ai-assistview/images/llm-integration.png differ
diff --git a/blazor/chat-ui/bot-integrations/integration-with-bot-dialogflow.md b/blazor/chat-ui/bot-integrations/integration-with-bot-dialogflow.md
new file mode 100644
index 0000000000..497e0d4106
--- /dev/null
+++ b/blazor/chat-ui/bot-integrations/integration-with-bot-dialogflow.md
@@ -0,0 +1,213 @@
+---
+layout: post
+title: Google Dialogflow with Blazor Chat UI Component | Syncfusion
+description: Checkout and learn here all about integrating Google Dialogflow with Syncfusion Blazor Chat UI component in Blazor Server App and Blazor WebAssembly App.
+platform: Blazor
+control: Chat UI
+documentation: ug
+---
+
+# Integrate Google Dialogflow with Blazor Chat UI component
+
+The Chat UI component integrates with [Google Dialogflow](https://cloud.google.com/dialogflow/docs) to enable advanced conversational AI features in your Blazor applications. The component acts as a user interface for a support bot, where user prompts are sent to the Dialogflow service via API calls, providing natural language understanding and context-aware responses.
+
+## Prerequisites
+
+Before starting, ensure you have the following:
+
+* **Google Account**: To access [Google Dialogflow](https://cloud.google.com/dialogflow/docs) and [Google Cloud Console](https://console.cloud.google.com/).
+
+* **Syncfusion Chat UI**: The [Syncfusion.Blazor.InteractiveChat](https://www.nuget.org/packages/Syncfusion.Blazor.InteractiveChat) package installed.
+
+* Dialogflow Service Account with the `Dialogflow API Client` role and its JSON key file.
+
+## Set Up the Chat UI component
+
+Follow the Syncfusion Chat UI Getting Started guide to configure and render the Chat UI component in the application, and ensure that the prerequisites are met.
+
+## Install Dependencies
+
+* Install backend dependencies for Dialogflow and server setup using NuGet:
+
+```bash
+
+dotnet add package Google.Cloud.Dialogflow.V2
+dotnet add package Newtonsoft.Json
+
+```
+
+## Set Up the Dialogflow Agent
+
+1. In the Dialogflow Console, create an agent, set a name (e.g., MyChatBot), and configure the default language (e.g., English - en).
+
+2. Add intents with training phrases and responses (e.g., greetings, FAQs). Test using the Dialogflow simulator.
+
+3. In the Google Cloud Console, go to APIs & Services > Credentials, create a service account with the Dialogflow API Client role, and download the JSON key file.
+
+> `Security Note`: Never commit the JSON key file to version control. Use environment variables or a secret manager (e.g., Google Cloud Secret Manager) for production.
+
+## Configure Backend API
+
+In a Blazor application, create a minimal API endpoint to handle Dialogflow requests. Add the following to `Program.cs`:
+
+{% tabs %}
+{% highlight cs tabtitle="Program.cs" %}
+
+using Google.Cloud.Dialogflow.V2;
+using Google.Apis.Auth.OAuth2;
+using Syncfusion.Blazor;
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Add services...
+builder.Services.AddRazorPages();
+builder.Services.AddServerSideBlazor();
+
+var app = builder.Build();
+
+// Configure the HTTP request pipeline...
+app.MapRazorPages();
+app.MapBlazorHub();
+app.MapFallbackToPage("/_Host");
+
+// Minimal API for Dialogflow requests
+app.MapPost("/api/chat/message", async ([FromBody] MessageRequest request) =>
+{
+ var credential = GoogleCredential.FromFile("service-acct.json");
+ var sessionsClient = SessionsClient.Create(credential.ToChannelCredentials());
+ var projectId = builder.Configuration["DialogflowProjectId"]; // Add to appsettings.json or extract from JSON
+
+ var sessionId = request.SessionId ?? "default-session";
+ var session = SessionName.FromProjectSession(projectId, sessionId);
+ var queryInput = new QueryInput
+ {
+ Text = new TextInput
+ {
+ Text = request.Text,
+ LanguageCode = "en-US"
+ }
+ };
+
+ try
+ {
+ var response = await sessionsClient.DetectIntentAsync(new DetectIntentRequest { Session = session.ToString(), QueryInput = queryInput });
+ var reply = response.QueryResult.FulfillmentText;
+ return Results.Ok(new { reply });
+ }
+ catch (Exception ex)
+ {
+ return Results.Problem("Error connecting to Dialogflow.", ex, statusCode: 500);
+ }
+});
+
+app.Run();
+
+public class MessageRequest
+{
+ public string Text { get; set; } = string.Empty;
+ public string SessionId { get; set; } = string.Empty;
+}
+
+{% endhighlight %}
+{% endtabs %}
+
+Add the project ID to `appsettings.json` (extract from the JSON key file if needed):
+
+```bash
+
+{
+ "DialogflowProjectId": "your-dialogflow-project-id"
+}
+
+```
+
+> Use a unique `sessionId` (e.g., Guid) for each user to maintain conversation context.
+
+## Configure Message Send
+
+Use the `MessageSend` event of the `SfChatUI` component to handle message exchanges. Each time a user sends a message, this event will be invoked with details of the sent message.
+
+### Forward Message to Backend
+
+In the `MessageSend` event handler, send a POST request to your backend API endpoint (`/api/chat/message`). The backend forwards the message to Dialogflow and returns the response.
+
+### Displaying Bot Response
+
+* Use the `AddMessageAsync` method (via a reference to the component) to programmatically add the bot's reply to the Chat UI.
+
+* Create or modify a Razor component (`e.g., Pages/Chat.razor`) to integrate the Syncfusion Chat UI with the Dialogflow backend:
+
+{% tabs %}
+{% highlight razor %}
+
+@using Syncfusion.Blazor.InteractiveChat
+@using System.Net.Http.Json
+
+
+
+
+
+
+
+
+@code {
+ private SfChatUI ChatUI { get; set; } = default!;
+ private UserModel CurrentUserModel = new() { ID = "user1", User = "Albert" };
+ private UserModel BotUserModel = new() { ID = "user2", User = "Bot", AvatarUrl = "https://ej2.syncfusion.com/demos/src/chat-ui/images/bot.png" };
+ private string currentUserId = "user1";
+ private readonly HttpClient httpClient = new();
+
+ private async Task OnMessageSend(MessageSendEventArgs args)
+ {
+ // The user message will be added automatically after this event
+ // Send to backend
+ var request = new { text = args.Message.Text, sessionId = currentUserId };
+ try
+ {
+ var response = await httpClient.PostAsJsonAsync("/api/chat/message", request);
+ if (response.IsSuccessStatusCode)
+ {
+ var data = await response.Content.ReadFromJsonAsync();
+ // Add bot's reply
+ await ChatUI.AddMessageAsync(new ChatMessage { Text = data.reply.ToString(), Author = BotUserModel });
+ }
+ else
+ {
+ await ChatUI.AddMessageAsync(new ChatMessage { Text = "Sorry, I couldn't contact the server.", Author = BotUserModel });
+ }
+ }
+ catch (Exception)
+ {
+ await ChatUI.AddMessageAsync(new ChatMessage { Text = "Sorry, I couldn't contact the server.", Author = BotUserModel });
+ }
+ args.Cancel = true; // Prevent default send if needed, but user message is already added
+ }
+}
+
+{% endhighlight %}
+{% endtabs %}
+
+> Ensure Syncfusion scripts and styles are included in `_Host.cshtml` (Blazor Server) or `index.html` (Blazor WebAssembly) as per the getting started guide.
+
+## Run and Test
+
+### Start the Application
+
+* Run the project using `dotnet run` or Visual Studio.
+* Open your app in the browser (e.g., `http://localhost:port`) to chat with your Dialogflow-powered bot.
+
+
+
+## Troubleshooting
+
+* `Permission Denied`: Ensure the service account has the `Dialogflow API Client` role in the Google Cloud Console.
+* `CORS Error`: If using separate origins, configure CORS in the backend (e.g., via `AddCors` in `Program.cs`) to allow requests from the frontend origin.
+* `No Response`: Test intents in the Dialogflow Console simulator to ensure they are configured correctly.
+* `Quota Exceeded`: Check Dialogflow API quotas in the Google Cloud Console.
+* `Network Issues`: Confirm the application is running and the frontend is pointing to the correct API URL.
+* `Invalid Credentials`: Verify the service account JSON or configuration settings are correctly set up.
\ No newline at end of file
diff --git a/blazor/chat-ui/bot-integrations/integration-with-bot-framework.md b/blazor/chat-ui/bot-integrations/integration-with-bot-framework.md
new file mode 100644
index 0000000000..5e966901d7
--- /dev/null
+++ b/blazor/chat-ui/bot-integrations/integration-with-bot-framework.md
@@ -0,0 +1,281 @@
+---
+layout: post
+title: Bot Framework with Blazor Chat UI Component | Syncfusion
+description: Checkout and learn here all about Integrate Microsoft Bot Framework with Syncfusion Blazor Chat UI component in Blazor Server App and Blazor WebAssembly App.
+platform: Blazor
+control: Chat UI
+documentation: ug
+---
+
+# Integrate Microsoft Bot Framework with Blazor Chat UI component
+
+The Chat UI component integrates with a [Microsoft Bot Framework](https://learn.microsoft.com/en-us/azure/bot-service/bot-builder-basics?view=azure-bot-service-4.0) bot hosted on Azure, enabling a custom chat interface for seamless user interaction. The process involves setting up a secure backend token server, configuring Direct Line in Azure, and integrating the Chat UI in the Blazor application.
+
+## Prerequisites
+
+Before starting, ensure you have the following:
+
+* [Microsoft Azure Account](https://portal.azure.com/#home): Required to create and host the bot.
+
+* **Syncfusion Chat UI**: The [Syncfusion.Blazor.InteractiveChat](https://www.nuget.org/packages/Syncfusion.Blazor.InteractiveChat) package installed.
+
+* **Deployed Azure Bot**: A bot should be created and published using the [Microsoft Bot Framework](https://learn.microsoft.com/en-us/azure/bot-service/bot-builder-basics?view=azure-bot-service-4.0), which is accessible via an Azure App Service. Refer to Microsoft's Bot Creation Guide.
+
+## Set Up the Chat UI component
+
+Follow the Syncfusion Chat UI [Getting Started](../getting-started) guide to configure and render the Chat UI component in the application, and ensure that the prerequisites are met.
+
+## Install Dependencies
+
+* Install backend dependencies for bot communication using NuGet:
+
+``` bash
+
+dotnet add package Microsoft.Bot.Connector.DirectLine
+dotnet add package Newtonsoft.Json
+
+```
+
+Note: While the integration uses a JavaScript-based Direct Line client for client-side handling, the .NET packages support token generation on the server side.
+
+## Configure the Azure Bot
+
+1. In the [Azure Portal](https://portal.azure.com/auth/login/), navigate to your bot resource.
+
+2. Enable the direct line channel:
+ * Go to Channels > Direct Line > Default-Site.
+ * Copy one of the displayed secret keys.
+
+3. Verify the messaging endpoint in the configuration section (e.g., https://your-bot-service.azurewebsites.net/api/messages).
+
+> `Security Note`: Never expose the Direct Line secret key in frontend code. Use a backend token server to handle it securely.
+
+## Set Up Token Server
+
+In a Blazor Server application, create a minimal API endpoint or controller to handle direct line token generation. For simplicity, add a minimal API in `Program.cs` (or use a dedicated controller in a Pages or Areas folder).
+
+Add the following to `Program.cs`:
+
+{% tabs %}
+{% highlight cs tabtitle="Program.cs" %}
+
+using Syncfusion.Blazor;
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Add services...
+builder.Services.AddRazorPages();
+builder.Services.AddServerSideBlazor();
+
+var app = builder.Build();
+
+// Configure the HTTP request pipeline...
+app.MapRazorPages();
+app.MapBlazorHub();
+app.MapFallbackToPage("/_Host");
+
+// Minimal API for token generation
+app.MapPost("/api/token/directline/token", async () =>
+{
+ var directLineSecret = builder.Configuration["DirectLineSecret"];
+ if (string.IsNullOrEmpty(directLineSecret))
+ {
+ return Results.BadRequest("Direct Line secret is not configured.");
+ }
+
+ using var httpClient = new HttpClient();
+ try
+ {
+ var request = new HttpRequestMessage(HttpMethod.Post, "https://directline.botframework.com/v3/directline/tokens/generate");
+ request.Headers.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", directLineSecret);
+ var response = await httpClient.SendAsync(request);
+ response.EnsureSuccessStatusCode();
+ var responseContent = await response.Content.ReadAsStringAsync();
+ dynamic tokenResponse = Newtonsoft.Json.JsonConvert.DeserializeObject(responseContent);
+ return Results.Ok(new { token = tokenResponse.token });
+ }
+ catch (HttpRequestException ex)
+ {
+ return Results.Problem("Failed to generate Direct Line token.", ex, statusCode: 500);
+ }
+});
+
+app.Run();
+
+{% endhighlight %}
+{% endtabs %}
+
+Add the Direct Line secret to `appsettings.json` (it is read via `builder.Configuration["DirectLineSecret"]`):
+
+{% tabs %}
+{% highlight js tabtitle=".env" %}
+
+
+
+
+{% endhighlight %}
+{% endtabs %}
+
+>`Security Note`: Store the Direct Line secret in a secure configuration, such as Azure Key Vault, for production environments.
+
+## Configure Chat UI
+
+Use the `MessageSend` event of the `SfChatUI` component to handle message exchanges. This event is triggered before a message is sent, allowing you to forward it to the bot via the Direct Line API. Use the `AddMessageAsync` method (via a reference to the component) to programmatically add the bot's reply to the Chat UI.
+
+Create or modify a Razor component (e.g., `Pages/Chat.razor`) to integrate the Syncfusion Chat UI with the Direct Line API:
+
+{% tabs %}
+{% highlight razor %}
+
+@using Syncfusion.Blazor.InteractiveChat
+@using Syncfusion.Blazor
+
+
+
+
+
+
+
+
+
+
+@code {
+ private SfChatUI ChatUI { get; set; }
+ private UserModel CurrentUserModel = new() { ID = "user1", User = "You" };
+ private UserModel BotUserModel = new() { ID = "bot", User = "Bot" };
+ private string currentUserId = "user1";
+ private bool isConnected = false;
+
+ private async Task OnMessageSend(MessageSendEventArgs args)
+ {
+ // Initialize Direct Line connection on first message if not already connected
+ if (!isConnected)
+ {
+ try
+ {
+ // Fetch Direct Line token via HttpClient (Blazor Server can call backend directly)
+ using var httpClient = new HttpClient();
+ var response = await httpClient.PostAsync("/api/token/directline/token", null);
+ var data = await response.Content.ReadFromJsonAsync();
+ if (data?.error != null)
+ {
+ await ChatUI.AddMessageAsync(new ChatMessage { Text = "Failed to connect to bot.", Author = BotUserModel });
+ return;
+ }
+
+ // Use JSInterop to initialize Direct Line (client-side for real-time updates)
+ var directLine = await JS.InvokeAsync("eval",
+ $"new BotFramework.DirectLine.DirectLine({{ token: '{data.token}' }})");
+
+ isConnected = true;
+
+ // Subscribe to bot messages via JS callback
+ await JS.InvokeVoidAsync("setupBotSubscription", directLine, DotNetObjectReference.Create(this), currentUserId);
+ }
+ catch (Exception ex)
+ {
+ await ChatUI.AddMessageAsync(new ChatMessage { Text = "Sorry, I couldn’t connect to the bot.", Author = BotUserModel });
+ Console.WriteLine($"Connection error: {ex.Message}");
+ return;
+ }
+ }
+
+ // Send message to bot via JSInterop
+ await JS.InvokeVoidAsync("sendToBot", /* directLine ref or global */, args.Message.Text, currentUserId);
+ args.Cancel = true; // Prevent default send
+ }
+
+ private Task OnCreated()
+ {
+ // Additional initialization if needed
+ return Task.CompletedTask;
+ }
+
+ [JSInvokable]
+ public async Task ReceiveBotMessage(string messageText)
+ {
+ await ChatUI.AddMessageAsync(new ChatMessage { Text = messageText, Author = BotUserModel });
+ }
+}
+
+{% endhighlight %}
+{% endtabs %}
+
+Add the following JavaScript to a global script file (e.g., `wwwroot/js/bot-integration.js`) and reference it in `_Host.cshtml` or `index.html`:
+
+{% tabs %}
+{% highlight js tabtitle="_Host.cshtml" %}
+
+window.setupBotSubscription = (directLine, dotNetHelper, currentUserId) => {
+ directLine.activity$
+ .filter(activity => activity.type === 'message' && activity.from.id !== currentUserId)
+ .subscribe(async (message) => {
+ await dotNetHelper.invokeMethodAsync('ReceiveBotMessage', message.text);
+ });
+};
+
+window.sendToBot = async (directLine, text, currentUserId) => {
+ directLine.postActivity({
+ from: { id: currentUserId, name: 'You' },
+ type: 'message',
+ text: text
+ }).subscribe(
+ id => console.log('Sent message with ID: ', id),
+ error => {
+ console.error('Error sending message: ', error);
+ // Handle error via JS callback if needed
+ }
+ );
+};
+
+{% endhighlight %}
+{% endtabs %}
+
+> Ensure Syncfusion scripts and styles are included in _Host.cshtml (Blazor Server) or index.html (Blazor WebAssembly) as per the getting started guide. Also, include the Bot Framework Web Chat script for Direct Line functionality. Replace 'YourAssemblyName' with your actual assembly name in the JS invoke calls.
+
+## Configure CORS (if needed for Blazor WebAssembly)
+
+For Blazor WebAssembly (client-side), configure CORS in `Program.cs` to allow API requests to your backend:
+
+{% tabs %}
+{% highlight cs tabtitle="Program.cs" %}
+
+builder.Services.AddCors(options =>
+{
+ options.AddDefaultPolicy(policy =>
+ {
+ policy.WithOrigins("https://localhost:5001") // Your Blazor WASM URL
+ .AllowAnyHeader()
+ .AllowAnyMethod();
+ });
+});
+
+// In app...
+app.UseCors();
+
+{% endhighlight %}
+{% endtabs %}
+
+## Run and Test
+
+### Start the Application
+
+* Run the project in Visual Studio or use IIS Express.
+* Open your app in the browser (e.g., `http://localhost:port`) to interact with your Microsoft Bot Framework chatbot.
+
+## Troubleshooting
+
+* `Token Server Error (500)`: Ensure the `DirectLineSecret` in `appsettings.json` is correct and the token endpoint is accessible.
+* `CORS Error`: Verify the CORS configuration in `Program.cs` allows requests from your frontend URL.
+* `Bot is Not Responding`:
+ - Test the bot in the Azure Portal using the `Test in Web Chat` feature to ensure it’s running correctly.
+ - Check the bot’s `Messaging endpoint` in the Configuration section and ensure it is correct and accessible.
+* `Connection Fails on Load`: Verify the token controller is running and accessible. Check the browser console for network errors.
+* `Token Expiration`: Direct Line tokens are short-lived. The Direct Line client typically handles token refresh, but if issues persist, restart the Direct Line connection.
\ No newline at end of file
diff --git a/blazor/chat-ui/images/dialogflow.png b/blazor/chat-ui/images/dialogflow.png
new file mode 100644
index 0000000000..8b96f608e9
Binary files /dev/null and b/blazor/chat-ui/images/dialogflow.png differ