From 384758f4bd52f22b9eb0ebfac2ec85b694b446ea Mon Sep 17 00:00:00 2001 From: Magdalena Majta Date: Tue, 20 Jan 2026 10:53:01 +0100 Subject: [PATCH 1/5] Ollama text chat & infra page Introduces support for the Ollama backend by updating configuration, service factory, and constants. Adds OllamaService implementing OpenAI-compatible API calls, updates the example project to include a ChatExampleOllama, and provides utility setup for Ollama API key. Also updates InferPage to allow selection and configuration of Ollama as a backend. --- Examples/Examples/Chat/ChatExampleOllama.cs | 18 +++++ Examples/Examples/Program.cs | 2 + Examples/Examples/Utils/OllamaExample.cs | 16 ++++ src/MaIN.Domain/Configuration/MaINSettings.cs | 2 + src/MaIN.InferPage/Program.cs | 26 +++++++ src/MaIN.InferPage/Utils.cs | 2 + .../Constants/ServiceConstants.cs | 5 ++ .../LLMService/Factory/LLMServiceFactory.cs | 7 ++ .../Services/LLMService/OllamaService.cs | 78 +++++++++++++++++++ 9 files changed, 156 insertions(+) create mode 100644 Examples/Examples/Chat/ChatExampleOllama.cs create mode 100644 Examples/Examples/Utils/OllamaExample.cs create mode 100644 src/MaIN.Services/Services/LLMService/OllamaService.cs diff --git a/Examples/Examples/Chat/ChatExampleOllama.cs b/Examples/Examples/Chat/ChatExampleOllama.cs new file mode 100644 index 00000000..0c625f20 --- /dev/null +++ b/Examples/Examples/Chat/ChatExampleOllama.cs @@ -0,0 +1,18 @@ +using Examples.Utils; +using MaIN.Core.Hub; + +namespace Examples.Chat; + +public class ChatExampleOllama : IExample +{ + public async Task Start() + { + OllamaExample.Setup(); //We need to provide Ollama API key + Console.WriteLine("(Ollama) ChatExample is running!"); + + await AIHub.Chat() + .WithModel("qwen3-next:80b") + .WithMessage("Write a short poem about the color green.") + .CompleteAsync(interactive: true); + } +} \ No newline at end of file diff --git a/Examples/Examples/Program.cs b/Examples/Examples/Program.cs index 80cc3d62..4e0d6bcb 100644 --- 
a/Examples/Examples/Program.cs +++ b/Examples/Examples/Program.cs @@ -79,6 +79,7 @@ static void RegisterExamples(IServiceCollection services) services.AddTransient(); services.AddTransient(); services.AddTransient(); + services.AddTransient(); } async Task RunSelectedExample(IServiceProvider serviceProvider) @@ -174,6 +175,7 @@ public class ExampleRegistry(IServiceProvider serviceProvider) ("\u25a0 GroqCloud Chat", serviceProvider.GetRequiredService()), ("\u25a0 Anthropic Chat", serviceProvider.GetRequiredService()), ("\u25a0 xAI Chat", serviceProvider.GetRequiredService()), + ("\u25a0 Ollama Chat", serviceProvider.GetRequiredService()), ("\u25a0 McpClient example", serviceProvider.GetRequiredService()), ("\u25a0 McpAgent example", serviceProvider.GetRequiredService()), ("\u25a0 Chat with TTS example", serviceProvider.GetRequiredService()), diff --git a/Examples/Examples/Utils/OllamaExample.cs b/Examples/Examples/Utils/OllamaExample.cs new file mode 100644 index 00000000..adf72624 --- /dev/null +++ b/Examples/Examples/Utils/OllamaExample.cs @@ -0,0 +1,16 @@ +using MaIN.Core; +using MaIN.Domain.Configuration; + +namespace Examples.Utils; + +public class OllamaExample +{ + public static void Setup() + { + MaINBootstrapper.Initialize(configureSettings: (options) => + { + options.BackendType = BackendType.Ollama; + options.OllamaKey = ""; + }); + } +} \ No newline at end of file diff --git a/src/MaIN.Domain/Configuration/MaINSettings.cs b/src/MaIN.Domain/Configuration/MaINSettings.cs index 17f7e062..67d38fec 100644 --- a/src/MaIN.Domain/Configuration/MaINSettings.cs +++ b/src/MaIN.Domain/Configuration/MaINSettings.cs @@ -13,6 +13,7 @@ public class MaINSettings public string? DeepSeekKey { get; set; } public string? AnthropicKey { get; set; } public string? GroqCloudKey { get; set; } + public string? OllamaKey { get; set; } public string? XaiKey { get; set; } public MongoDbSettings? MongoDbSettings { get; set; } public FileSystemSettings? 
FileSystemSettings { get; set; } @@ -30,4 +31,5 @@ public enum BackendType GroqCloud = 4, Anthropic = 5, Xai = 6, + Ollama = 7, } \ No newline at end of file diff --git a/src/MaIN.InferPage/Program.cs b/src/MaIN.InferPage/Program.cs index 292101cd..cebe0267 100644 --- a/src/MaIN.InferPage/Program.cs +++ b/src/MaIN.InferPage/Program.cs @@ -76,6 +76,18 @@ apiKeyVariable = "ANTHROPIC_API_KEY"; apiName = "Anthropic"; break; + + case "xai": + Utils.Xai = true; + apiKeyVariable = "XAI_API_KEY"; + apiName = "Xai"; + break; + + case "ollama": + Utils.Ollama = true; + apiKeyVariable = "OLLAMA_API_KEY"; + apiName = "Ollama"; + break; } var key = Environment.GetEnvironmentVariable(apiKeyVariable); @@ -128,6 +140,20 @@ settings.BackendType = BackendType.Anthropic; }); } +else if (Utils.Xai) +{ + builder.Services.AddMaIN(builder.Configuration, settings => + { + settings.BackendType = BackendType.Xai; + }); +} +else if (Utils.Ollama) +{ + builder.Services.AddMaIN(builder.Configuration, settings => + { + settings.BackendType = BackendType.Ollama; + }); +} else { if (Utils.Path == null && !KnownModels.IsModelSupported(Utils.Model!)) diff --git a/src/MaIN.InferPage/Utils.cs b/src/MaIN.InferPage/Utils.cs index 99678568..c00c6f59 100644 --- a/src/MaIN.InferPage/Utils.cs +++ b/src/MaIN.InferPage/Utils.cs @@ -12,6 +12,8 @@ public static class Utils public static bool DeepSeek { get; set; } public static bool GroqCloud { get; set; } public static bool Anthropic { get; set; } + public static bool Xai { get; set; } + public static bool Ollama { get; set; } public static string? 
Path { get; set; } public static bool Reason { get; set; } } diff --git a/src/MaIN.Services/Constants/ServiceConstants.cs b/src/MaIN.Services/Constants/ServiceConstants.cs index 33aa25e0..e00bac07 100644 --- a/src/MaIN.Services/Constants/ServiceConstants.cs +++ b/src/MaIN.Services/Constants/ServiceConstants.cs @@ -11,6 +11,7 @@ public static class HttpClients public const string GroqCloudClient = "GroqCloudClient"; public const string AnthropicClient = "AnthropicClient"; public const string XaiClient = "XaiClient"; + public const string OllamaClient = "OllamaClient"; public const string ImageDownloadClient = "ImageDownloadClient"; public const string ModelContextDownloadClient = "ModelContextDownloadClient"; } @@ -41,6 +42,10 @@ public static class ApiUrls public const string XaiImageGenerations = "https://api.x.ai/v1/images/generations"; public const string XaiOpenAiChatCompletions = "https://api.x.ai/v1/chat/completions"; public const string XaiModels = "https://api.x.ai/v1/models"; + + public const string OllamaImageGenerations = "https://ollama.com/v1/images/generations"; + public const string OllamaOpenAiChatCompletions = "https://ollama.com/v1/chat/completions"; + public const string OllamaModels = "https://ollama.com/v1/models"; } public static class Messages diff --git a/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs b/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs index bff97bbb..d404c89d 100644 --- a/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs +++ b/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs @@ -46,6 +46,13 @@ public ILLMService CreateService(BackendType backendType) serviceProvider.GetRequiredService(), serviceProvider.GetRequiredService()), + BackendType.Ollama => new OllamaService( + serviceProvider.GetRequiredService(), + serviceProvider.GetRequiredService(), + serviceProvider.GetRequiredService(), + serviceProvider.GetRequiredService(), + 
serviceProvider.GetRequiredService()), + BackendType.Anthropic => new AnthropicService( serviceProvider.GetRequiredService(), serviceProvider.GetRequiredService(), diff --git a/src/MaIN.Services/Services/LLMService/OllamaService.cs b/src/MaIN.Services/Services/LLMService/OllamaService.cs new file mode 100644 index 00000000..5ad18bea --- /dev/null +++ b/src/MaIN.Services/Services/LLMService/OllamaService.cs @@ -0,0 +1,78 @@ +using System.Text; +using MaIN.Domain.Configuration; +using MaIN.Domain.Entities; +using MaIN.Services.Constants; +using MaIN.Services.Services.Abstract; +using MaIN.Services.Services.LLMService.Memory; +using MaIN.Services.Services.Models; +using Microsoft.Extensions.Logging; + +namespace MaIN.Services.Services.LLMService; + +public sealed class OllamaService( + MaINSettings settings, + INotificationService notificationService, + IHttpClientFactory httpClientFactory, + IMemoryFactory memoryFactory, + IMemoryService memoryService, + ILogger? logger = null) + : OpenAiCompatibleService(notificationService, httpClientFactory, memoryFactory, memoryService, logger) +{ + private readonly MaINSettings _settings = settings ?? throw new ArgumentNullException(nameof(settings)); + + protected override string HttpClientName => ServiceConstants.HttpClients.OllamaClient; + protected override string ChatCompletionsUrl => ServiceConstants.ApiUrls.OllamaOpenAiChatCompletions; + protected override string ModelsUrl => ServiceConstants.ApiUrls.OllamaModels; + + protected override string GetApiKey() + { + return _settings.OllamaKey ?? Environment.GetEnvironmentVariable("OLLAMA_API_KEY") ?? 
+ throw new InvalidOperationException("Ollama Key not configured"); + } + + protected override void ValidateApiKey() + { + if (string.IsNullOrEmpty(_settings.OllamaKey) && string.IsNullOrEmpty(Environment.GetEnvironmentVariable("OLLAMA_API_KEY"))) + { + throw new InvalidOperationException("Ollama Key not configured"); + } + } + + public override async Task AskMemory( + Chat chat, + ChatMemoryOptions memoryOptions, + ChatRequestOptions requestOptions, + CancellationToken cancellationToken = default) + { + var lastMsg = chat.Messages.Last(); + var filePaths = await DocumentProcessor.ConvertToFilesContent(memoryOptions); + var message = new Message() + { + Role = ServiceConstants.Roles.User, + Content = ComposeMessage(lastMsg, filePaths), + Type = MessageType.CloudLLM + }; + + chat.Messages.Last().Content = message.Content; + chat.Messages.Last().Files = []; + var result = await Send(chat, new ChatRequestOptions(), cancellationToken); + chat.Messages.Last().Content = lastMsg.Content; + return result; + } + + private string ComposeMessage(Message lastMsg, string[] filePaths) + { + var stringBuilder = new StringBuilder(); + stringBuilder.AppendLine($"== FILES IN MEMORY"); + foreach (var path in filePaths) + { + var doc = DocumentProcessor.ProcessDocument(path); + stringBuilder.Append(doc); + stringBuilder.AppendLine(); + } + stringBuilder.AppendLine($"== END OF FILES"); + stringBuilder.AppendLine(); + stringBuilder.Append(lastMsg.Content); + return stringBuilder.ToString(); + } +} \ No newline at end of file From d3d45a3def9e2ad91dced0ff2b3d6d138b76bd6e Mon Sep 17 00:00:00 2001 From: Magdalena Majta Date: Tue, 20 Jan 2026 11:37:06 +0100 Subject: [PATCH 2/5] Ollama doesn't support image generation Deleted the OllamaImageGenerations constant from ServiceConstants as it is no longer used. Also, set the Ollama backend to null in ImageGenServiceFactory and cleaned up unused usings in ImageGenService.cs. 
--- src/MaIN.Services/Constants/ServiceConstants.cs | 1 - .../Services/ImageGenServices/ImageGenService.cs | 4 ---- .../Services/LLMService/Factory/ImageGenServiceFactory.cs | 1 + 3 files changed, 1 insertion(+), 5 deletions(-) diff --git a/src/MaIN.Services/Constants/ServiceConstants.cs b/src/MaIN.Services/Constants/ServiceConstants.cs index e00bac07..660f92b1 100644 --- a/src/MaIN.Services/Constants/ServiceConstants.cs +++ b/src/MaIN.Services/Constants/ServiceConstants.cs @@ -43,7 +43,6 @@ public static class ApiUrls public const string XaiOpenAiChatCompletions = "https://api.x.ai/v1/chat/completions"; public const string XaiModels = "https://api.x.ai/v1/models"; - public const string OllamaImageGenerations = "https://ollama.com/v1/images/generations"; public const string OllamaOpenAiChatCompletions = "https://ollama.com/v1/chat/completions"; public const string OllamaModels = "https://ollama.com/v1/models"; } diff --git a/src/MaIN.Services/Services/ImageGenServices/ImageGenService.cs b/src/MaIN.Services/Services/ImageGenServices/ImageGenService.cs index d060a4f6..56e67055 100644 --- a/src/MaIN.Services/Services/ImageGenServices/ImageGenService.cs +++ b/src/MaIN.Services/Services/ImageGenServices/ImageGenService.cs @@ -1,7 +1,3 @@ -using System; -using System.Linq; -using System.Net.Http; -using System.Threading.Tasks; using MaIN.Domain.Configuration; using MaIN.Domain.Entities; using MaIN.Services.Constants; diff --git a/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs b/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs index 1bedeebc..0854b665 100644 --- a/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs +++ b/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs @@ -20,6 +20,7 @@ public class ImageGenServiceFactory(IServiceProvider serviceProvider) : IImageGe BackendType.Anthropic => null, BackendType.Xai => new XaiImageGenService(serviceProvider.GetRequiredService(), 
serviceProvider.GetRequiredService()), + BackendType.Ollama => null, BackendType.Self => new ImageGenService(serviceProvider.GetRequiredService(), serviceProvider.GetRequiredService()), From 041738ef5b6d4a161a78d38f038bf19bfffaac84 Mon Sep 17 00:00:00 2001 From: Magdalena Majta Date: Tue, 20 Jan 2026 11:51:02 +0100 Subject: [PATCH 3/5] Ollama doesn't support MCP Throws a NotSupportedException when BackendType.Ollama is used with MCP integration, clarifying that Ollama models are not supported. This improves error handling and provides clearer feedback to developers. --- src/MaIN.Services/Services/McpService.cs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/MaIN.Services/Services/McpService.cs b/src/MaIN.Services/Services/McpService.cs index 06de96ba..edc3a29a 100644 --- a/src/MaIN.Services/Services/McpService.cs +++ b/src/MaIN.Services/Services/McpService.cs @@ -117,6 +117,9 @@ private PromptExecutionSettings InitializeChatCompletions(IKernelBuilder kernelB FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new() { RetainArgumentTypes = true }) }; + case BackendType.Ollama: + throw new NotSupportedException("Ollama models do not support MCP integration."); + case BackendType.Self: throw new NotSupportedException("Self backend (local models) does not support MCP integration."); From bc422f8fa6fc4dd8c535af69ff86cef6e9aed987 Mon Sep 17 00:00:00 2001 From: Magdalena Majta Date: Tue, 20 Jan 2026 12:48:47 +0100 Subject: [PATCH 4/5] Remove unused and redundant package references Cleaned up .csproj files by removing unnecessary and redundant NuGet package references across multiple projects. This reduces build complexity and potential dependency conflicts. 
--- .../MaIN.Core.IntegrationTests.csproj | 4 +--- src/MaIN.Core/MaIN.Core.csproj | 13 ++----------- src/MaIN.InferPage/MaIN.InferPage.csproj | 4 ---- src/MaIN.Infrastructure/MaIN.Infrastructure.csproj | 2 -- src/MaIN.Services/MaIN.Services.csproj | 5 ----- 5 files changed, 3 insertions(+), 25 deletions(-) diff --git a/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj b/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj index 02d992bc..03e9f2ff 100644 --- a/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj +++ b/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj @@ -8,9 +8,7 @@ - - all @@ -19,7 +17,7 @@ - + diff --git a/src/MaIN.Core/MaIN.Core.csproj b/src/MaIN.Core/MaIN.Core.csproj index b01cc99d..30f7526b 100644 --- a/src/MaIN.Core/MaIN.Core.csproj +++ b/src/MaIN.Core/MaIN.Core.csproj @@ -7,16 +7,7 @@ - - - - - - - - - @@ -33,7 +24,7 @@ - - + + \ No newline at end of file diff --git a/src/MaIN.InferPage/MaIN.InferPage.csproj b/src/MaIN.InferPage/MaIN.InferPage.csproj index edf4893a..ba1ff18a 100644 --- a/src/MaIN.InferPage/MaIN.InferPage.csproj +++ b/src/MaIN.InferPage/MaIN.InferPage.csproj @@ -8,11 +8,7 @@ - - all - - diff --git a/src/MaIN.Infrastructure/MaIN.Infrastructure.csproj b/src/MaIN.Infrastructure/MaIN.Infrastructure.csproj index 5fbcd3c1..87cb1492 100644 --- a/src/MaIN.Infrastructure/MaIN.Infrastructure.csproj +++ b/src/MaIN.Infrastructure/MaIN.Infrastructure.csproj @@ -13,8 +13,6 @@ - - diff --git a/src/MaIN.Services/MaIN.Services.csproj b/src/MaIN.Services/MaIN.Services.csproj index c78149be..9e0ad478 100644 --- a/src/MaIN.Services/MaIN.Services.csproj +++ b/src/MaIN.Services/MaIN.Services.csproj @@ -15,17 +15,12 @@ - - - - - From 3ed5cb0b2e6d1b5db7af2531bd400bacf11accdc Mon Sep 17 00:00:00 2001 From: Magdalena Majta Date: Tue, 20 Jan 2026 13:03:55 +0100 Subject: [PATCH 5/5] versioning --- Releases/0.8.1.md | 3 +++ src/MaIN.Core/.nuspec | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 
Releases/0.8.1.md diff --git a/Releases/0.8.1.md b/Releases/0.8.1.md new file mode 100644 index 00000000..64871067 --- /dev/null +++ b/Releases/0.8.1.md @@ -0,0 +1,3 @@ +# 0.8.1 release + +- Add Ollama integration \ No newline at end of file diff --git a/src/MaIN.Core/.nuspec b/src/MaIN.Core/.nuspec index 0ab9801e..51350429 100644 --- a/src/MaIN.Core/.nuspec +++ b/src/MaIN.Core/.nuspec @@ -2,7 +2,7 @@ MaIN.NET - 0.8.0 + 0.8.1 Wisedev Wisedev favicon.png