diff --git a/Examples/Examples/Chat/ChatExampleOllama.cs b/Examples/Examples/Chat/ChatExampleOllama.cs
new file mode 100644
index 0000000..0c625f2
--- /dev/null
+++ b/Examples/Examples/Chat/ChatExampleOllama.cs
@@ -0,0 +1,18 @@
+using Examples.Utils;
+using MaIN.Core.Hub;
+
+namespace Examples.Chat;
+
+public class ChatExampleOllama : IExample
+{
+    public async Task Start()
+    {
+        OllamaExample.Setup(); //We need to provide Ollama API key
+        Console.WriteLine("(Ollama) ChatExample is running!");
+
+        await AIHub.Chat()
+            .WithModel("qwen3-next:80b")
+            .WithMessage("Write a short poem about the color green.")
+            .CompleteAsync(interactive: true);
+    }
+}
\ No newline at end of file
diff --git a/Examples/Examples/Program.cs b/Examples/Examples/Program.cs
index 80cc3d6..4e0d6bc 100644
--- a/Examples/Examples/Program.cs
+++ b/Examples/Examples/Program.cs
@@ -79,6 +79,7 @@ static void RegisterExamples(IServiceCollection services)
     services.AddTransient();
     services.AddTransient();
     services.AddTransient();
+    services.AddTransient<ChatExampleOllama>();
 }
 
 async Task RunSelectedExample(IServiceProvider serviceProvider)
@@ -174,6 +175,7 @@ public class ExampleRegistry(IServiceProvider serviceProvider)
         ("\u25a0 GroqCloud Chat", serviceProvider.GetRequiredService()),
         ("\u25a0 Anthropic Chat", serviceProvider.GetRequiredService()),
         ("\u25a0 xAI Chat", serviceProvider.GetRequiredService()),
+        ("\u25a0 Ollama Chat", serviceProvider.GetRequiredService<ChatExampleOllama>()),
         ("\u25a0 McpClient example", serviceProvider.GetRequiredService()),
         ("\u25a0 McpAgent example", serviceProvider.GetRequiredService()),
         ("\u25a0 Chat with TTS example", serviceProvider.GetRequiredService()),
diff --git a/Examples/Examples/Utils/OllamaExample.cs b/Examples/Examples/Utils/OllamaExample.cs
new file mode 100644
index 0000000..adf7262
--- /dev/null
+++ b/Examples/Examples/Utils/OllamaExample.cs
@@ -0,0 +1,16 @@
+using MaIN.Core;
+using MaIN.Domain.Configuration;
+
+namespace Examples.Utils;
+
+public class OllamaExample
+{
+    public static void Setup()
+    {
+        MaINBootstrapper.Initialize(configureSettings: (options) =>
+        {
+            options.BackendType = BackendType.Ollama;
+            options.OllamaKey = "";
+        });
+    }
+}
\ No newline at end of file
diff --git a/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj b/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj
index 02d992b..03e9f2f 100644
--- a/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj
+++ b/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj
@@ -8,9 +8,7 @@
-
-
      all
@@ -19,7 +17,7 @@
-
+
diff --git a/Releases/0.8.1.md b/Releases/0.8.1.md
new file mode 100644
index 0000000..6487106
--- /dev/null
+++ b/Releases/0.8.1.md
@@ -0,0 +1,3 @@
+# 0.8.1 release
+
+- Add Ollama integration
\ No newline at end of file
diff --git a/src/MaIN.Core/.nuspec b/src/MaIN.Core/.nuspec
index 0ab9801..5135042 100644
--- a/src/MaIN.Core/.nuspec
+++ b/src/MaIN.Core/.nuspec
@@ -2,7 +2,7 @@
     MaIN.NET
-    0.8.0
+    0.8.1
     Wisedev
     Wisedev
     favicon.png
diff --git a/src/MaIN.Core/MaIN.Core.csproj b/src/MaIN.Core/MaIN.Core.csproj
index b01cc99..30f7526 100644
--- a/src/MaIN.Core/MaIN.Core.csproj
+++ b/src/MaIN.Core/MaIN.Core.csproj
@@ -7,16 +7,7 @@
-
-
-
-
-
-
-
-
-
@@ -33,7 +24,7 @@
-
-
+
+
\ No newline at end of file
diff --git a/src/MaIN.Domain/Configuration/MaINSettings.cs b/src/MaIN.Domain/Configuration/MaINSettings.cs
index 17f7e06..67d38fe 100644
--- a/src/MaIN.Domain/Configuration/MaINSettings.cs
+++ b/src/MaIN.Domain/Configuration/MaINSettings.cs
@@ -13,6 +13,7 @@ public class MaINSettings
     public string? DeepSeekKey { get; set; }
     public string? AnthropicKey { get; set; }
     public string? GroqCloudKey { get; set; }
+    public string? OllamaKey { get; set; }
     public string? XaiKey { get; set; }
     public MongoDbSettings? MongoDbSettings { get; set; }
     public FileSystemSettings? FileSystemSettings { get; set; }
@@ -30,4 +31,5 @@ public enum BackendType
     GroqCloud = 4,
     Anthropic = 5,
     Xai = 6,
+    Ollama = 7,
 }
\ No newline at end of file
diff --git a/src/MaIN.InferPage/MaIN.InferPage.csproj b/src/MaIN.InferPage/MaIN.InferPage.csproj
index edf4893..ba1ff18 100644
--- a/src/MaIN.InferPage/MaIN.InferPage.csproj
+++ b/src/MaIN.InferPage/MaIN.InferPage.csproj
@@ -8,11 +8,7 @@
-
-
      all
-
-
diff --git a/src/MaIN.InferPage/Program.cs b/src/MaIN.InferPage/Program.cs
index 292101c..cebe026 100644
--- a/src/MaIN.InferPage/Program.cs
+++ b/src/MaIN.InferPage/Program.cs
@@ -76,6 +76,18 @@
        apiKeyVariable = "ANTHROPIC_API_KEY";
        apiName = "Anthropic";
        break;
+
+    case "xai":
+        Utils.Xai = true;
+        apiKeyVariable = "XAI_API_KEY";
+        apiName = "Xai";
+        break;
+
+    case "ollama":
+        Utils.Ollama = true;
+        apiKeyVariable = "OLLAMA_API_KEY";
+        apiName = "Ollama";
+        break;
 }
 
 var key = Environment.GetEnvironmentVariable(apiKeyVariable);
@@ -128,6 +140,20 @@
         settings.BackendType = BackendType.Anthropic;
     });
 }
+else if (Utils.Xai)
+{
+    builder.Services.AddMaIN(builder.Configuration, settings =>
+    {
+        settings.BackendType = BackendType.Xai;
+    });
+}
+else if (Utils.Ollama)
+{
+    builder.Services.AddMaIN(builder.Configuration, settings =>
+    {
+        settings.BackendType = BackendType.Ollama;
+    });
+}
 else
 {
     if (Utils.Path == null && !KnownModels.IsModelSupported(Utils.Model!))
diff --git a/src/MaIN.InferPage/Utils.cs b/src/MaIN.InferPage/Utils.cs
index 9967856..c00c6f5 100644
--- a/src/MaIN.InferPage/Utils.cs
+++ b/src/MaIN.InferPage/Utils.cs
@@ -12,6 +12,8 @@ public static class Utils
     public static bool DeepSeek { get; set; }
     public static bool GroqCloud { get; set; }
     public static bool Anthropic { get; set; }
+    public static bool Xai { get; set; }
+    public static bool Ollama { get; set; }
     public static string? Path { get; set; }
     public static bool Reason { get; set; }
 }
diff --git a/src/MaIN.Infrastructure/MaIN.Infrastructure.csproj b/src/MaIN.Infrastructure/MaIN.Infrastructure.csproj
index 5fbcd3c..87cb149 100644
--- a/src/MaIN.Infrastructure/MaIN.Infrastructure.csproj
+++ b/src/MaIN.Infrastructure/MaIN.Infrastructure.csproj
@@ -13,8 +13,6 @@
-
-
diff --git a/src/MaIN.Services/Constants/ServiceConstants.cs b/src/MaIN.Services/Constants/ServiceConstants.cs
index 33aa25e..660f92b 100644
--- a/src/MaIN.Services/Constants/ServiceConstants.cs
+++ b/src/MaIN.Services/Constants/ServiceConstants.cs
@@ -11,6 +11,7 @@ public static class HttpClients
     public const string GroqCloudClient = "GroqCloudClient";
     public const string AnthropicClient = "AnthropicClient";
     public const string XaiClient = "XaiClient";
+    public const string OllamaClient = "OllamaClient";
     public const string ImageDownloadClient = "ImageDownloadClient";
     public const string ModelContextDownloadClient = "ModelContextDownloadClient";
 }
@@ -41,6 +42,9 @@ public static class ApiUrls
     public const string XaiImageGenerations = "https://api.x.ai/v1/images/generations";
     public const string XaiOpenAiChatCompletions = "https://api.x.ai/v1/chat/completions";
     public const string XaiModels = "https://api.x.ai/v1/models";
+
+    public const string OllamaOpenAiChatCompletions = "https://ollama.com/v1/chat/completions";
+    public const string OllamaModels = "https://ollama.com/v1/models";
 }
 
 public static class Messages
diff --git a/src/MaIN.Services/MaIN.Services.csproj b/src/MaIN.Services/MaIN.Services.csproj
index c78149b..9e0ad47 100644
--- a/src/MaIN.Services/MaIN.Services.csproj
+++ b/src/MaIN.Services/MaIN.Services.csproj
@@ -15,17 +15,12 @@
-
-
-
-
-
diff --git a/src/MaIN.Services/Services/ImageGenServices/ImageGenService.cs b/src/MaIN.Services/Services/ImageGenServices/ImageGenService.cs
index d060a4f..56e6705 100644
--- a/src/MaIN.Services/Services/ImageGenServices/ImageGenService.cs
+++ b/src/MaIN.Services/Services/ImageGenServices/ImageGenService.cs
@@ -1,7 +1,3 @@
-using System;
-using System.Linq;
-using System.Net.Http;
-using System.Threading.Tasks;
 using MaIN.Domain.Configuration;
 using MaIN.Domain.Entities;
 using MaIN.Services.Constants;
diff --git a/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs b/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs
index 1bedeeb..0854b66 100644
--- a/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs
+++ b/src/MaIN.Services/Services/LLMService/Factory/ImageGenServiceFactory.cs
@@ -20,6 +20,7 @@ public class ImageGenServiceFactory(IServiceProvider serviceProvider) : IImageGe
         BackendType.Anthropic => null,
         BackendType.Xai => new XaiImageGenService(serviceProvider.GetRequiredService(),
             serviceProvider.GetRequiredService()),
+        BackendType.Ollama => null,
         BackendType.Self => new ImageGenService(serviceProvider.GetRequiredService(),
             serviceProvider.GetRequiredService()),
diff --git a/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs b/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs
index bff97bb..d404c89 100644
--- a/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs
+++ b/src/MaIN.Services/Services/LLMService/Factory/LLMServiceFactory.cs
@@ -46,6 +46,13 @@ public ILLMService CreateService(BackendType backendType)
             serviceProvider.GetRequiredService(),
             serviceProvider.GetRequiredService()),
 
+        BackendType.Ollama => new OllamaService(
+            serviceProvider.GetRequiredService<MaINSettings>(),
+            serviceProvider.GetRequiredService<INotificationService>(),
+            serviceProvider.GetRequiredService<IHttpClientFactory>(),
+            serviceProvider.GetRequiredService<IMemoryFactory>(),
+            serviceProvider.GetRequiredService<IMemoryService>()),
+
         BackendType.Anthropic => new AnthropicService(
             serviceProvider.GetRequiredService(),
             serviceProvider.GetRequiredService(),
diff --git a/src/MaIN.Services/Services/LLMService/OllamaService.cs b/src/MaIN.Services/Services/LLMService/OllamaService.cs
new file mode 100644
index 0000000..5ad18be
--- /dev/null
+++ b/src/MaIN.Services/Services/LLMService/OllamaService.cs
@@ -0,0 +1,78 @@
+using System.Text;
+using MaIN.Domain.Configuration;
+using MaIN.Domain.Entities;
+using MaIN.Services.Constants;
+using MaIN.Services.Services.Abstract;
+using MaIN.Services.Services.LLMService.Memory;
+using MaIN.Services.Services.Models;
+using Microsoft.Extensions.Logging;
+
+namespace MaIN.Services.Services.LLMService;
+
+public sealed class OllamaService(
+    MaINSettings settings,
+    INotificationService notificationService,
+    IHttpClientFactory httpClientFactory,
+    IMemoryFactory memoryFactory,
+    IMemoryService memoryService,
+    ILogger? logger = null)
+    : OpenAiCompatibleService(notificationService, httpClientFactory, memoryFactory, memoryService, logger)
+{
+    private readonly MaINSettings _settings = settings ?? throw new ArgumentNullException(nameof(settings));
+
+    protected override string HttpClientName => ServiceConstants.HttpClients.OllamaClient;
+    protected override string ChatCompletionsUrl => ServiceConstants.ApiUrls.OllamaOpenAiChatCompletions;
+    protected override string ModelsUrl => ServiceConstants.ApiUrls.OllamaModels;
+
+    protected override string GetApiKey()
+    {
+        return _settings.OllamaKey ?? Environment.GetEnvironmentVariable("OLLAMA_API_KEY") ??
+               throw new InvalidOperationException("Ollama Key not configured");
+    }
+
+    protected override void ValidateApiKey()
+    {
+        if (string.IsNullOrEmpty(_settings.OllamaKey) && string.IsNullOrEmpty(Environment.GetEnvironmentVariable("OLLAMA_API_KEY")))
+        {
+            throw new InvalidOperationException("Ollama Key not configured");
+        }
+    }
+
+    public override async Task AskMemory(
+        Chat chat,
+        ChatMemoryOptions memoryOptions,
+        ChatRequestOptions requestOptions,
+        CancellationToken cancellationToken = default)
+    {
+        var lastMsg = chat.Messages.Last();
+        var filePaths = await DocumentProcessor.ConvertToFilesContent(memoryOptions);
+        var message = new Message()
+        {
+            Role = ServiceConstants.Roles.User,
+            Content = ComposeMessage(lastMsg, filePaths),
+            Type = MessageType.CloudLLM
+        };
+
+        chat.Messages.Last().Content = message.Content;
+        chat.Messages.Last().Files = [];
+        var result = await Send(chat, new ChatRequestOptions(), cancellationToken);
+        chat.Messages.Last().Content = lastMsg.Content;
+        return result;
+    }
+
+    private string ComposeMessage(Message lastMsg, string[] filePaths)
+    {
+        var stringBuilder = new StringBuilder();
+        stringBuilder.AppendLine($"== FILES IN MEMORY");
+        foreach (var path in filePaths)
+        {
+            var doc = DocumentProcessor.ProcessDocument(path);
+            stringBuilder.Append(doc);
+            stringBuilder.AppendLine();
+        }
+        stringBuilder.AppendLine($"== END OF FILES");
+        stringBuilder.AppendLine();
+        stringBuilder.Append(lastMsg.Content);
+        return stringBuilder.ToString();
+    }
+}
\ No newline at end of file
diff --git a/src/MaIN.Services/Services/McpService.cs b/src/MaIN.Services/Services/McpService.cs
index 06de96b..edc3a29 100644
--- a/src/MaIN.Services/Services/McpService.cs
+++ b/src/MaIN.Services/Services/McpService.cs
@@ -117,6 +117,9 @@ private PromptExecutionSettings InitializeChatCompletions(IKernelBuilder kernelB
                 FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new() { RetainArgumentTypes = true })
             };
 
+            case BackendType.Ollama:
+                throw new NotSupportedException("Ollama models do not support MCP integration.");
+
             case BackendType.Self:
                 throw new NotSupportedException("Self backend (local models) does not support MCP integration.");
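
For quick reference, the pieces added above compose as follows. This is a minimal usage sketch assembled from `OllamaExample.Setup()` and `ChatExampleOllama` in this diff; the `qwen3-next:80b` model name is simply what the example uses, and reading the key from `OLLAMA_API_KEY` mirrors the environment-variable fallback in `OllamaService.GetApiKey()`.

```csharp
using MaIN.Core;
using MaIN.Core.Hub;
using MaIN.Domain.Configuration;

// Select the new Ollama backend. OllamaService also falls back to the
// OLLAMA_API_KEY environment variable when OllamaKey is left empty.
MaINBootstrapper.Initialize(configureSettings: (options) =>
{
    options.BackendType = BackendType.Ollama;
    options.OllamaKey = Environment.GetEnvironmentVariable("OLLAMA_API_KEY") ?? "";
});

// Same fluent chat call as ChatExampleOllama.
await AIHub.Chat()
    .WithModel("qwen3-next:80b")
    .WithMessage("Write a short poem about the color green.")
    .CompleteAsync(interactive: true);
```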