How can I help you?
Integrate LLM via Ollama with ASP.NET Core Inline AI Assist control
31 Mar 2026 · 14 minutes to read
The Inline AI Assist control integrates with LLM via Ollama to enable advanced conversational AI features in your ASP.NET Core application. The control acts as a user interface where user prompts are sent to the selected LLM model via API calls, providing natural language understanding and context-aware responses.
Prerequisites
Before starting, ensure you have the following:
- Ollama installed to run and manage LLM models locally.
- Syncfusion Inline AI Assist: the Syncfusion.EJ2.AspNet.Core package installed.
- Markdig package: for parsing Markdown responses.
Set Up the Inline AI Assist control
Follow the Getting Started guide to configure and render the Inline AI Assist control in the application, and ensure that the prerequisites are met.
Install Dependency
To install the Markdig package, run NuGet\Install-Package Markdig in the Package Manager Console.
Configuring Ollama
Install the LLM Model package in the application using Package Manager Console.
NuGet\Install-Package Microsoft.Extensions.AI
NuGet\Install-Package Microsoft.Extensions.AI.Ollama

Configure Inline AI Assist with Ollama in ASP.NET Core
Modify the index.cshtml file to integrate Ollama with the Inline AI Assist control.
Add services in Program.cs file
using Microsoft.Extensions.AI;

builder.Services.AddControllersWithViews();

// Allow cross-origin requests from any origin/method/header.
// NOTE: this is a development convenience — tighten the policy for production.
builder.Services.AddCors(options =>
{
    options.AddPolicy("AllowAll",
        builder => builder.AllowAnyOrigin().AllowAnyMethod().AllowAnyHeader());
});

// Register the Ollama chat client (default local endpoint, "deepseek-r1" model)
// as the IChatClient that HomeController receives via dependency injection.
builder.Services.AddChatClient(new OllamaChatClient(new Uri("http://localhost:11434/"), "deepseek-r1"))
    .UseDistributedCache()
    .UseLogging();

@using Syncfusion.EJ2.InteractiveChat
@model InlineAIAssistDemo.Controllers.IndexViewModel
<style>
#editableText {
width: 100%;
min-height: 120px;
max-height: 300px;
overflow-y: auto;
font-size: 16px;
padding: 12px;
border-radius: 4px;
border: 1px solid;
}
</style>
@{
ViewData["Title"] = "AI Assistance";
}
<div class="container" style="height: 350px; width: 650px;">
<button id="summarizeBtn" class="e-btn e-primary" style="margin-bottom: 10px;" onclick="onSummarizeClick()">Content Summarize</button>
<div id="editableText" contenteditable="true">
<p>Inline AI Assist component provides intelligent text processing capabilities that enhance user productivity. It leverages advanced natural language processing to understand context and deliver precise suggestions. Users can seamlessly integrate AI-powered features into their applications.</p>
<p>With real-time response streaming and customizable prompts, developers can create interactive experiences. The component supports multiple response modes including inline editing and popup-based interactions.</p>
</div>
<ejs-inlineaiassist id="inlineAIAssist" relateTo="#summarizeBtn" created="onCreated" promptRequest="onPromptRequest">
<e-inlineaiassist-responsesettings itemSelect="onItemSelect"></e-inlineaiassist-responsesettings>
</ejs-inlineaiassist>
</div>
<script src="https://cdn.jsdelivr.net/npm/marked@latest/marked.min.js"></script>
<script>
var assistObj = null;
// Caches the Inline AI Assist instance once the control is created, so the
// other handlers can call its API (showPopup, hidePopup, addResponse, prompts).
function onCreated() {
assistObj = this;
}
// Handles the Accept/Discard response toolbar commands.
// Accept: replaces the editable region with the latest AI response, then closes
// the popup. Discard: simply closes the popup.
function onItemSelect(args) {
    var label = args.command.label;
    if (label === 'Accept') {
        var target = document.getElementById('editableText');
        if (target) {
            var latest = assistObj.prompts[assistObj.prompts.length - 1];
            target.innerHTML = '<p>' + latest.response + '</p>';
        }
        assistObj.hidePopup();
    } else if (label === 'Discard') {
        assistObj.hidePopup();
    }
}
// Opens the Inline AI Assist popup when the Summarize button is clicked.
// No-op until the control has been created (assistObj set in onCreated).
function onSummarizeClick() {
    if (!assistObj) {
        return;
    }
    assistObj.showPopup();
}
// Sends the user's prompt to the server-side GetAIResponse endpoint and
// streams the Markdown-rendered reply into the control one character at a
// time to simulate typing.
//
// Fix: the original interval callback tested an undeclared `stopStreaming`
// variable, which throws a ReferenceError on the first tick — and, because it
// is thrown inside the setInterval callback, it is NOT caught by the
// surrounding try/catch. The dead flag is removed.
function onPromptRequest(args) {
    setTimeout(async () => {
        try {
            const response = await fetch('/Home/GetAIResponse', {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({ prompt: args.prompt })
            });
            if (!response.ok) {
                throw new Error(`HTTP ${response.status}: ${response.statusText}`);
            }
            const data = await response.json();
            const responseText = data; // Adjust to data.response if the JSON is { response: "text" }

            // Type out the response; a final addResponse(..., true) marks completion.
            let current = '';
            let i = 0;
            const typingSpeed = 15; // ms per character; adjust as needed
            const interval = setInterval(() => {
                if (i < responseText.length) {
                    current += responseText.charAt(i);
                    assistObj.addResponse(marked.parse(current), false);
                    i++;
                } else {
                    assistObj.addResponse(marked.parse(current), true);
                    clearInterval(interval);
                }
            }, typingSpeed);
        } catch (error) {
            // Covers network failures, non-2xx responses, and JSON parse errors.
            assistObj.addResponse('⚠️ Something went wrong while connecting to the AI service. Please try again later.', true);
        }
    }, 2000); // Match the 2000ms delay from the reference sample
}
</script>

using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.AI;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace InlineAIAssistDemo.Controllers
{
/// <summary>
/// Serves the Inline AI Assist demo page and proxies prompt requests to the
/// locally hosted Ollama model through <see cref="IChatClient"/>.
/// </summary>
public class HomeController : Controller
{
    private readonly IChatClient _chatClient;
    private readonly ILogger<HomeController> _logger;

    public HomeController(IChatClient chatClient, ILogger<HomeController> logger)
    {
        _chatClient = chatClient;
        _logger = logger;
    }

    /// <summary>Renders the view hosting the Inline AI Assist control.</summary>
    public IActionResult Index()
    {
        var viewModel = new IndexViewModel();
        return View(viewModel);
    }

    /// <summary>
    /// Forwards the posted prompt to the Ollama chat client and returns the
    /// model's text as a JSON string.
    /// </summary>
    /// <param name="request">JSON body carrying the user's prompt.</param>
    /// <returns>200 with the response text, or 400 on missing prompt / failure.</returns>
    [HttpPost]
    public async Task<IActionResult> GetAIResponse([FromBody] PromptRequest request)
    {
        try
        {
            _logger.LogInformation("Received request with prompt: {Prompt}", request?.Prompt);
            if (string.IsNullOrEmpty(request?.Prompt))
            {
                _logger.LogWarning("Prompt is null or empty.");
                return BadRequest("Prompt cannot be empty.");
            }

            // Use Ollama via IChatClient (registered in Program.cs).
            var chatCompletion = await _chatClient.CompleteAsync(request.Prompt);
            var responseText = chatCompletion.Message.Contents.FirstOrDefault()?.ToString();
            if (string.IsNullOrEmpty(responseText))
            {
                _logger.LogError("Ollama API returned no text.");
                return BadRequest("No response from Ollama.");
            }

            _logger.LogInformation("Ollama response received: {Response}", responseText);
            return Json(responseText);
        }
        catch (Exception ex)
        {
            // Fix: pass the exception object itself so the full stack trace is
            // logged, instead of flattening it to ex.Message.
            _logger.LogError(ex, "Exception in Ollama call");
            return BadRequest($"Error generating response: {ex.Message}");
        }
    }

    public IActionResult Error()
    {
        return View();
    }
}
/// <summary>View model for the Index page; carries optional toolbar items.</summary>
public class IndexViewModel
{
    // Initialized to an empty list so the view never sees a null collection.
    public List<ToolbarItemModel> Items { get; set; } = new();
}
// Request payload for GetAIResponse, bound from the JSON body of the POST.
public class PromptRequest
{
// Maps the lowercase "prompt" property sent by the client-side fetch call.
[JsonPropertyName("prompt")]
public string Prompt { get; set; }
}
// Describes a single toolbar item (alignment and icon CSS class).
// NOTE(review): lowercase property names violate C# PascalCase conventions but
// may be relied on by client-side binding/serialization — confirm before renaming.
public class ToolbarItemModel
{
public string align { get; set; }
public string iconCss { get; set; }
}
}