Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions TalentManagementAPI.Application/Interfaces/IAiChatService.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
namespace TalentManagementAPI.Application.Interfaces
{
    /// <summary>
    /// Abstraction over an AI chat backend. Implemented in Infrastructure.Shared by
    /// <c>OllamaAiService</c> and consumed by the WebApi's <c>AiController</c>.
    /// </summary>
    public interface IAiChatService
    {
        /// <summary>
        /// Sends a single user message (optionally preceded by a system prompt) to the AI
        /// backend and returns the assistant's complete reply.
        /// </summary>
        /// <param name="message">The user's chat message.</param>
        /// <param name="systemPrompt">Optional system instruction that steers the model's behavior.</param>
        /// <param name="cancellationToken">Token used to cancel the in-flight request.</param>
        /// <returns>The assistant's reply text; implementations may return an empty string when the model produces no output.</returns>
        Task<string> ChatAsync(string message, string? systemPrompt = null, CancellationToken cancellationToken = default);
    }
}
3 changes: 3 additions & 0 deletions TalentManagementAPI.Infrastructure.Shared/GlobalUsings.cs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
global using System;
global using System.Collections.Generic;
global using System.Linq;
global using System.Threading;
global using System.Threading.Tasks;
global using AutoBogus;
global using Bogus;
Expand All @@ -12,6 +13,8 @@
global using Microsoft.Extensions.Logging;
global using Microsoft.Extensions.Options;
global using MimeKit;
global using OllamaSharp;
global using OllamaSharp.Models.Chat;
global using TalentManagementAPI.Application.DTOs.Email;
global using TalentManagementAPI.Application.Exceptions;
global using TalentManagementAPI.Application.Interfaces;
Expand Down
16 changes: 13 additions & 3 deletions TalentManagementAPI.Infrastructure.Shared/ServiceRegistration.cs
Original file line number Diff line number Diff line change
@@ -1,13 +1,23 @@
using TalentManagementAPI.Application.Interfaces;
using TalentManagementAPI.Infrastructure.Shared.Services;

namespace TalentManagementAPI.Infrastructure.Shared
{
    /// <summary>
    /// DI registration for shared infrastructure services: mail settings, date/time,
    /// e-mail, mock data, and the Ollama-backed AI chat service.
    /// </summary>
    public static class ServiceRegistration
    {
        /// <summary>
        /// Registers shared infrastructure services into <paramref name="services"/>.
        /// </summary>
        /// <param name="services">The service collection to register into.</param>
        /// <param name="config">Application configuration ("MailSettings" and optional "Ollama" sections are read).</param>
        public static void AddSharedInfrastructure(this IServiceCollection services, IConfiguration config)
        {
            services.Configure<MailSettings>(config.GetSection("MailSettings"));
            services.AddTransient<IDateTimeService, DateTimeService>();
            services.AddTransient<IEmailService, EmailService>();
            services.AddTransient<IMockService, MockService>();

            // Single shared Ollama client. Base URL and model fall back to local-dev
            // defaults when the "Ollama" configuration section is absent.
            services.AddSingleton<IOllamaApiClient>(_ =>
            {
                var baseUrl = config["Ollama:BaseUrl"] ?? "http://localhost:11434";
                var model = config["Ollama:Model"] ?? "llama3.2";
                return new OllamaApiClient(new Uri(baseUrl), model);
            });
            services.AddTransient<IAiChatService, OllamaAiService>();
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
using TalentManagementAPI.Application.Interfaces;

namespace TalentManagementAPI.Infrastructure.Shared.Services
{
    /// <summary>
    /// <see cref="IAiChatService"/> implementation backed by an Ollama server via OllamaSharp.
    /// Streams the model's response and returns the fully accumulated reply.
    /// </summary>
    public class OllamaAiService : IAiChatService
    {
        private readonly IOllamaApiClient _ollamaApiClient;

        public OllamaAiService(IOllamaApiClient ollamaApiClient)
        {
            _ollamaApiClient = ollamaApiClient;
        }

        /// <summary>
        /// Sends <paramref name="message"/> (optionally preceded by a system prompt) to the
        /// client's selected model and concatenates the streamed chunks into one reply.
        /// </summary>
        /// <param name="message">The user's chat message.</param>
        /// <param name="systemPrompt">Optional system instruction prepended to the conversation.</param>
        /// <param name="cancellationToken">Token used to cancel the streaming request.</param>
        /// <returns>The combined reply text, or an empty string when the stream yields no chunks.</returns>
        public async Task<string> ChatAsync(string message, string? systemPrompt = null, CancellationToken cancellationToken = default)
        {
            var messages = new List<Message>();

            if (!string.IsNullOrWhiteSpace(systemPrompt))
            {
                messages.Add(new Message(new ChatRole("system"), systemPrompt));
            }

            messages.Add(new Message(new ChatRole("user"), message));

            var request = new ChatRequest
            {
                Model = _ollamaApiClient.SelectedModel,
                Messages = messages,
                Stream = true
            };

            var responseBuilder = new MessageBuilder();

            // FIX: the cancellation token already flows through ChatAsync's second argument;
            // the extra .WithCancellation(cancellationToken) wrap was redundant and removed.
            await foreach (var response in _ollamaApiClient.ChatAsync(request, cancellationToken))
            {
                if (response?.Message is not null)
                {
                    responseBuilder.Append(response);
                }
            }

            return responseBuilder.HasValue ? responseBuilder.ToMessage().Content ?? string.Empty : string.Empty;
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,11 @@
<ItemGroup>
<PackageReference Include="AutoBogus" Version="2.13.1" />
<PackageReference Include="Bogus" Version="35.6.5" />
<PackageReference Include="MailKit" Version="4.14.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.2" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.1" />
<PackageReference Include="MimeKit" Version="4.14.0" />
<PackageReference Include="MailKit" Version="4.15.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.5" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.5" />
<PackageReference Include="MimeKit" Version="4.15.1" />
<PackageReference Include="OllamaSharp" Version="5.4.23" />
</ItemGroup>

<ItemGroup>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
using OllamaSharp.Models.Chat;

namespace TalentManagementAPI.Infrastructure.Tests.Services
{
    /// <summary>
    /// Unit tests for <see cref="OllamaAiService"/> against a mocked <see cref="IOllamaApiClient"/>.
    /// </summary>
    public class OllamaAiServiceTests
    {
        [Fact]
        public async Task ChatAsync_WithSystemPrompt_SendsPromptAndReturnsCombinedReply()
        {
            // Arrange: a mock client that captures the outgoing request and streams two chunks.
            var ollamaClient = new Mock<IOllamaApiClient>();
            ollamaClient.SetupProperty(x => x.SelectedModel, "llama3.2");

            ChatRequest? sentRequest = null;

            ollamaClient
                .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>(), It.IsAny<CancellationToken>()))
                .Returns((ChatRequest request, CancellationToken _) =>
                {
                    sentRequest = request;
                    var firstChunk = new ChatResponseStream
                    {
                        Message = new Message(new ChatRole("assistant"), "Hello")
                    };
                    var secondChunk = new ChatResponseStream
                    {
                        Message = new Message(new ChatRole("assistant"), " world")
                    };
                    return YieldChunks(firstChunk, secondChunk);
                });

            var sut = new OllamaAiService(ollamaClient.Object);

            // Act
            var reply = await sut.ChatAsync("Say hi", "You are concise.");

            // Assert: chunks are concatenated, and the request carries system + user messages in order.
            reply.Should().Be("Hello world");
            sentRequest.Should().NotBeNull();
            sentRequest!.Model.Should().Be("llama3.2");
            sentRequest.Messages.Should().HaveCount(2);
            sentRequest.Messages[0].Role.Should().Be(new ChatRole("system"));
            sentRequest.Messages[0].Content.Should().Be("You are concise.");
            sentRequest.Messages[1].Role.Should().Be(new ChatRole("user"));
            sentRequest.Messages[1].Content.Should().Be("Say hi");
        }

        [Fact]
        public async Task ChatAsync_WithoutChunks_ReturnsEmptyString()
        {
            // Arrange: the stream completes without yielding any chunks.
            var ollamaClient = new Mock<IOllamaApiClient>();
            ollamaClient.SetupProperty(x => x.SelectedModel, "llama3.2");
            ollamaClient
                .Setup(x => x.ChatAsync(It.IsAny<ChatRequest>(), It.IsAny<CancellationToken>()))
                .Returns(YieldChunks());

            var sut = new OllamaAiService(ollamaClient.Object);

            // Act
            var reply = await sut.ChatAsync("Say hi");

            // Assert
            reply.Should().BeEmpty();
        }

        // Adapts a fixed set of chunks into the async stream shape the real client returns.
        private static async IAsyncEnumerable<ChatResponseStream?> YieldChunks(params ChatResponseStream[] chunks)
        {
            foreach (var chunk in chunks)
            {
                yield return chunk;
                await Task.Yield();
            }
        }
    }
}
51 changes: 51 additions & 0 deletions TalentManagementAPI.WebApi.Tests/Controllers/AiControllerTests.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
namespace TalentManagementAPI.WebApi.Tests.Controllers
{
    /// <summary>
    /// Unit tests for <see cref="AiController"/>: the feature-flag gate and the happy path.
    /// </summary>
    public class AiControllerTests
    {
        private readonly Mock<IAiChatService> _chatService = new();
        private readonly Mock<IFeatureManagerSnapshot> _featureManager = new();
        private readonly AiController _sut;

        public AiControllerTests()
        {
            _sut = new AiController(_chatService.Object, _featureManager.Object);
        }

        [Fact]
        public async Task Chat_AiDisabled_ReturnsServiceUnavailableProblemDetails()
        {
            // Arrange: the feature flag is off.
            _featureManager.Setup(m => m.IsEnabledAsync("AiEnabled")).ReturnsAsync(false);

            // Act
            var result = await _sut.Chat(new AiChatRequest("hello"), CancellationToken.None);

            // Assert: a 503 ProblemDetails comes back and the AI service is never invoked.
            var objectResult = result.Should().BeOfType<ObjectResult>().Subject;
            objectResult.StatusCode.Should().Be(StatusCodes.Status503ServiceUnavailable);

            var problem = objectResult.Value.Should().BeOfType<ProblemDetails>().Subject;
            problem.Title.Should().Be("AI chat is disabled");
            problem.Detail.Should().Be("AI chat is disabled. Enable FeatureManagement:AiEnabled to use this endpoint.");

            _chatService.Verify(
                m => m.ChatAsync(It.IsAny<string>(), It.IsAny<string?>(), It.IsAny<CancellationToken>()),
                Times.Never);
        }

        [Fact]
        public async Task Chat_AiEnabled_ReturnsOkWithReply()
        {
            // Arrange: the feature flag is on and the service returns a fixed reply.
            _featureManager.Setup(m => m.IsEnabledAsync("AiEnabled")).ReturnsAsync(true);
            _chatService.Setup(m => m.ChatAsync("hello", null, It.IsAny<CancellationToken>())).ReturnsAsync("hi");

            // Act
            var result = await _sut.Chat(new AiChatRequest("hello"), CancellationToken.None);

            // Assert: 200 OK wrapping the reply DTO.
            var okResult = result.Should().BeOfType<OkObjectResult>().Subject;
            okResult.Value.Should().BeEquivalentTo(new AiChatResponse("hi"));
        }
    }
}
46 changes: 46 additions & 0 deletions TalentManagementAPI.WebApi/Controllers/v1/AiController.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
using Asp.Versioning;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using TalentManagementAPI.Application.Interfaces;

namespace TalentManagementAPI.WebApi.Controllers.v1
{
    /// <summary>
    /// AI chat endpoint backed by <see cref="IAiChatService"/>, gated by the
    /// FeatureManagement "AiEnabled" flag.
    /// </summary>
    [ApiVersion("1.0")]
    // NOTE(review): intentionally anonymous? An unauthenticated endpoint that invokes a
    // model is an abuse/cost vector — confirm this should not require authorization.
    [AllowAnonymous]
    [Route("api/v{version:apiVersion}/ai")]
    public sealed class AiController : BaseApiController
    {
        // Single source of truth for the feature flag name (was a repeated magic string).
        private const string AiFeatureFlag = "AiEnabled";

        private readonly IAiChatService _aiChatService;
        private readonly IFeatureManagerSnapshot _featureManager;

        public AiController(IAiChatService aiChatService, IFeatureManagerSnapshot featureManager)
        {
            _aiChatService = aiChatService;
            _featureManager = featureManager;
        }

        /// <summary>
        /// Send a message to the AI assistant and receive a reply.
        /// </summary>
        /// <param name="request">The chat message and optional system prompt.</param>
        /// <param name="cancellationToken">Cancellation token.</param>
        /// <returns>
        /// 200 with the AI-generated reply; 400 when the message is missing or blank;
        /// 503 when the "AiEnabled" feature flag is off.
        /// </returns>
        [HttpPost("chat")]
        public async Task<IActionResult> Chat([FromBody] AiChatRequest request, CancellationToken cancellationToken)
        {
            if (!await _featureManager.IsEnabledAsync(AiFeatureFlag))
            {
                return Problem(
                    detail: "AI chat is disabled. Enable FeatureManagement:AiEnabled to use this endpoint.",
                    title: "AI chat is disabled",
                    statusCode: StatusCodes.Status503ServiceUnavailable);
            }

            // Reject null/blank messages up front instead of forwarding them to the model.
            if (request is null || string.IsNullOrWhiteSpace(request.Message))
            {
                return Problem(
                    detail: "The message must not be empty.",
                    title: "Invalid chat request",
                    statusCode: StatusCodes.Status400BadRequest);
            }

            var reply = await _aiChatService.ChatAsync(request.Message, request.SystemPrompt, cancellationToken);
            return Ok(new AiChatResponse(reply));
        }
    }

    /// <summary>Request payload: the user message plus an optional system prompt.</summary>
    public record AiChatRequest(string Message, string? SystemPrompt = null);

    /// <summary>Response payload carrying the assistant's reply.</summary>
    public record AiChatResponse(string Reply);
}
2 changes: 2 additions & 0 deletions TalentManagementAPI.WebApi/Program.cs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
builder.Services.AddApplicationLayer();
builder.Services.AddPersistenceInfrastructure(builder.Configuration);
builder.Services.AddSharedInfrastructure(builder.Configuration);
// AddSharedInfrastructure (above) registers the Ollama client (IOllamaApiClient) used by OllamaAiService.
// AiController checks the "AiEnabled" flag via IFeatureManagerSnapshot.IsEnabledAsync, so no AI calls are made when it is disabled.
builder.Services.AddEasyCachingInfrastructure(builder.Configuration);
builder.Services.AddHttpContextAccessor();
builder.Services.AddScoped<ICacheDiagnosticsPublisher, HttpCacheDiagnosticsPublisher>();
Expand Down
8 changes: 8 additions & 0 deletions TalentManagementAPI.WebApi/TalentManagementAPI.WebApi.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions TalentManagementAPI.WebApi/appsettings.Development.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
"ExecutionTimingIncludeHeader": true,
"ExecutionTimingIncludePayload": true,
"ExecutionTimingLogTimings": true,
"AiEnabled": true,
"UseInMemoryDatabase": false
},
"MailSettings": {
Expand Down
7 changes: 6 additions & 1 deletion TalentManagementAPI.WebApi/appsettings.json
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,12 @@
"ExecutionTimingIncludeHeader": true,
"ExecutionTimingIncludePayload": true,
"ExecutionTimingLogTimings": false,
"UseInMemoryDatabase": false
"UseInMemoryDatabase": false,
"AiEnabled": false
},
"Ollama": {
"BaseUrl": "http://localhost:11434",
"Model": "llama3.2"
},
"ApiRoles": {
"EmployeeRole": "Employee",
Expand Down