From ea1305e1e928e4926441e40890208e574fd0d5d4 Mon Sep 17 00:00:00 2001 From: Benjamin Michaelis Date: Sat, 25 Apr 2026 00:10:03 -0700 Subject: [PATCH 01/12] feat: add local AI (Ollama) support for web chat feature - Extract IAIChatService interface from AIChatService (slim match, Azure-specific params preserved) - Add LocalAIChatService backed by IChatClient (CommunityToolkit.Aspire.OllamaSharp) - ConcurrentDictionary for thread-safe in-memory conversation history - Ignores ResponseTool/RAG with LogWarning (Phase 1: no vector search) - Add AddAIServices(IHostApplicationBuilder) dispatcher with 3-branch logic: - UseLocalAI=true -> AddLocalAIServices (Ollama via IChatClient) - Endpoint set -> AddAzureOpenAIServices (existing Azure path) - Dev + no config -> graceful skip (no AI registered) - Prod + no config -> throw InvalidOperationException - Fix AIChatService double-registration: AddSingleton<AIChatService>() + forwarding AddSingleton<IAIChatService>(sp => sp.GetRequiredService<AIChatService>()) so CLI and web share the same singleton - ChatController injects IAIChatService instead of AIChatService - Program.cs uses builder.AddAIServices(configuration) instead of IsDevelopment guard - Add CommunityToolkit.Aspire.OllamaSharp 13.1.1 package reference - Pin OpenTelemetry.Api 1.15.3 to resolve GHSA-g94r-2vxg-569j vulnerability (OllamaSharp -> OpenTelemetry.Api 1.12.0 is vulnerable) Live tested end-to-end: LocalAIChatService confirmed via structured logs, streaming chat works with markdown/code rendering, conversation continuity maintained via responseId, /health and /alive endpoints healthy. 
--- Directory.Packages.props | 3 + .../EssentialCSharp.Chat.Common.csproj | 3 + .../Extensions/ServiceCollectionExtensions.cs | 62 +++++++- .../Models/AIOptions.cs | 6 + .../Services/AIChatService.cs | 2 +- .../Services/IAIChatService.cs | 31 ++++ .../Services/LocalAIChatService.cs | 133 ++++++++++++++++++ .../Controllers/ChatController.cs | 4 +- EssentialCSharp.Web/Program.cs | 8 +- 9 files changed, 243 insertions(+), 9 deletions(-) create mode 100644 EssentialCSharp.Chat.Shared/Services/IAIChatService.cs create mode 100644 EssentialCSharp.Chat.Shared/Services/LocalAIChatService.cs diff --git a/Directory.Packages.props b/Directory.Packages.props index 604ddc38..091cb58d 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -63,6 +63,9 @@ + + + diff --git a/EssentialCSharp.Chat.Shared/EssentialCSharp.Chat.Common.csproj b/EssentialCSharp.Chat.Shared/EssentialCSharp.Chat.Common.csproj index 20ffba5d..44a36eb7 100644 --- a/EssentialCSharp.Chat.Shared/EssentialCSharp.Chat.Common.csproj +++ b/EssentialCSharp.Chat.Shared/EssentialCSharp.Chat.Common.csproj @@ -6,6 +6,9 @@ + + + diff --git a/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs b/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs index 817a48ae..f300297e 100644 --- a/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs +++ b/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs @@ -5,6 +5,7 @@ using Microsoft.Extensions.AI; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; using Microsoft.SemanticKernel; using Npgsql; @@ -14,6 +15,62 @@ public static class ServiceCollectionExtensions { private static readonly string[] _PostgresScopes = ["https://ossrdbms-aad.database.windows.net/.default"]; + /// + /// Dispatches to or + /// based on AIOptions:UseLocalAI. 
Replaces the if (!IsDevelopment()) guard in + /// Program.cs so that AI services are always registered regardless of environment. + /// + public static IHostApplicationBuilder AddAIServices( + this IHostApplicationBuilder builder, + IConfiguration configuration) + { + var aiOptions = configuration.GetSection("AIOptions").Get() ?? new AIOptions(); + + if (aiOptions.UseLocalAI) + { + builder.AddLocalAIServices(configuration); + } + else if (!string.IsNullOrEmpty(aiOptions.Endpoint)) + { + builder.Services.AddAzureOpenAIServices(configuration); + } + else if (!builder.Environment.IsDevelopment()) + { + // Non-development without an endpoint is a misconfiguration — fail loudly. + throw new InvalidOperationException( + "AIOptions:Endpoint is required when UseLocalAI=false in non-development environments. " + + "Set the endpoint or enable local AI mode with aspire secret set Parameters:UseLocalAI true"); + } + // else: development + no config — graceful degradation, chat endpoints unavailable. + + return builder; + } + + /// + /// Registers the Ollama-backed local AI services. Uses IChatClient from + /// CommunityToolkit.Aspire.OllamaSharp. Vector search (RAG) is disabled in Phase 1 + /// due to the embedding dimension mismatch (Ollama nomic-embed-text = 768 dims, + /// pgvector schema expects 1536). + /// + public static IHostApplicationBuilder AddLocalAIServices( + this IHostApplicationBuilder builder, + IConfiguration configuration) + { + builder.Services.Configure(configuration.GetSection("AIOptions")); + + // Registers IChatClient backed by the Ollama "ollama-chat" resource. + // Connection string injected by Aspire: Endpoint=http://...:11434;Model=qwen2.5-coder:7b + builder.AddOllamaApiClient("ollama-chat") + .AddChatClient(); + + // NOTE: ollama-embed (nomic-embed-text, 768 dims) not registered in Phase 1. + // The pgvector schema hardcodes 1536 dims — incompatible without schema migration. 
+ // Phase 2: register IEmbeddingGenerator + configure VectorStoreCollectionDefinition. + + builder.Services.AddSingleton(); + return builder; + } + /// /// Adds Azure OpenAI and related AI services to the service collection using Managed Identity /// @@ -65,10 +122,13 @@ public static IServiceCollection AddAzureOpenAIServices( .UseOpenTelemetry(); #pragma warning restore SKEXP0010 - // Register shared AI services + // Register shared AI services — forward IAIChatService to the concrete instance + // so the CLI tool (GetRequiredService()) and the web app + // (GetRequiredService()) share the same singleton. services.AddSingleton(); services.AddSingleton(); services.AddSingleton(); + services.AddSingleton(sp => sp.GetRequiredService()); services.AddSingleton(); return services; diff --git a/EssentialCSharp.Chat.Shared/Models/AIOptions.cs b/EssentialCSharp.Chat.Shared/Models/AIOptions.cs index 3d0123c6..4cee8e22 100644 --- a/EssentialCSharp.Chat.Shared/Models/AIOptions.cs +++ b/EssentialCSharp.Chat.Shared/Models/AIOptions.cs @@ -22,4 +22,10 @@ public class AIOptions /// public string Endpoint { get; set; } = string.Empty; + /// + /// When true, uses a local Ollama backend via IChatClient instead of Azure OpenAI. + /// Set by Aspire via the AIOptions__UseLocalAI environment variable. 
+ /// + public bool UseLocalAI { get; set; } + } diff --git a/EssentialCSharp.Chat.Shared/Services/AIChatService.cs b/EssentialCSharp.Chat.Shared/Services/AIChatService.cs index 8dfab8c4..7bf35e8d 100644 --- a/EssentialCSharp.Chat.Shared/Services/AIChatService.cs +++ b/EssentialCSharp.Chat.Shared/Services/AIChatService.cs @@ -9,7 +9,7 @@ namespace EssentialCSharp.Chat.Common.Services; /// /// Service for handling AI chat completions using the OpenAI Responses API /// -public class AIChatService +public class AIChatService : IAIChatService { private readonly AIOptions _Options; private readonly AzureOpenAIClient _AzureClient; diff --git a/EssentialCSharp.Chat.Shared/Services/IAIChatService.cs b/EssentialCSharp.Chat.Shared/Services/IAIChatService.cs new file mode 100644 index 00000000..4e223078 --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Services/IAIChatService.cs @@ -0,0 +1,31 @@ +using ModelContextProtocol.Client; +using OpenAI.Responses; + +namespace EssentialCSharp.Chat.Common.Services; + +public interface IAIChatService +{ + Task<(string response, string responseId)> GetChatCompletion( + string prompt, + string? systemPrompt = null, + string? previousResponseId = null, + IMcpClient? mcpClient = null, +#pragma warning disable OPENAI001 + IEnumerable? tools = null, + ResponseReasoningEffortLevel? reasoningEffortLevel = null, +#pragma warning restore OPENAI001 + bool enableContextualSearch = false, + CancellationToken cancellationToken = default); + + IAsyncEnumerable<(string text, string? responseId)> GetChatCompletionStream( + string prompt, + string? systemPrompt = null, + string? previousResponseId = null, + IMcpClient? mcpClient = null, +#pragma warning disable OPENAI001 + IEnumerable? tools = null, + ResponseReasoningEffortLevel? 
reasoningEffortLevel = null, +#pragma warning restore OPENAI001 + bool enableContextualSearch = false, + CancellationToken cancellationToken = default); +} diff --git a/EssentialCSharp.Chat.Shared/Services/LocalAIChatService.cs b/EssentialCSharp.Chat.Shared/Services/LocalAIChatService.cs new file mode 100644 index 00000000..2e7df548 --- /dev/null +++ b/EssentialCSharp.Chat.Shared/Services/LocalAIChatService.cs @@ -0,0 +1,133 @@ +using System.Collections.Concurrent; +using System.Runtime.CompilerServices; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using ModelContextProtocol.Client; +using OpenAI.Responses; + +namespace EssentialCSharp.Chat.Common.Services; + +/// +/// Local AI chat service using IChatClient (e.g. Ollama via CommunityToolkit.Aspire.OllamaSharp). +/// Compared to the Azure path: conversation history is in-memory only (lost on restart), +/// ResponseTool/ReasoningEffortLevel params are silently ignored, and vector search (RAG) +/// is disabled. Intended for local development without Azure credentials. +/// +public class LocalAIChatService : IAIChatService +{ + private readonly IChatClient _chatClient; + private readonly AIOptions _options; + private readonly ILogger _logger; + + // Synthetic conversation history keyed by GUID responseId. + // In-memory only — not shared across instances and lost on restart. + // ConcurrentDictionary prevents crashes from parallel requests (e.g., two chat tabs). + private readonly ConcurrentDictionary> _conversations = new(); + + public LocalAIChatService( + IOptions options, + IChatClient chatClient, + ILogger logger) + { + _options = options.Value; + _chatClient = chatClient; + _logger = logger; + } + + public async Task<(string response, string responseId)> GetChatCompletion( + string prompt, + string? systemPrompt = null, + string? previousResponseId = null, + IMcpClient? mcpClient = null, +#pragma warning disable OPENAI001 + IEnumerable? 
tools = null, + ResponseReasoningEffortLevel? reasoningEffortLevel = null, +#pragma warning restore OPENAI001 + bool enableContextualSearch = false, + CancellationToken cancellationToken = default) + { + WarnUnsupportedFeatures(tools, reasoningEffortLevel, enableContextualSearch); + + var messages = BuildMessages(prompt, systemPrompt, previousResponseId); + var response = await _chatClient.GetResponseAsync(messages, cancellationToken: cancellationToken); + var responseText = response.Text ?? string.Empty; + var responseId = SaveConversation(messages, responseText, previousResponseId); + return (responseText, responseId); + } + + public async IAsyncEnumerable<(string text, string? responseId)> GetChatCompletionStream( + string prompt, + string? systemPrompt = null, + string? previousResponseId = null, + IMcpClient? mcpClient = null, +#pragma warning disable OPENAI001 + IEnumerable? tools = null, + ResponseReasoningEffortLevel? reasoningEffortLevel = null, +#pragma warning restore OPENAI001 + bool enableContextualSearch = false, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + WarnUnsupportedFeatures(tools, reasoningEffortLevel, enableContextualSearch); + + var messages = BuildMessages(prompt, systemPrompt, previousResponseId); + var fullResponse = new System.Text.StringBuilder(); + + await foreach (var update in _chatClient.GetStreamingResponseAsync(messages, cancellationToken: cancellationToken)) + { + if (!string.IsNullOrEmpty(update.Text)) + { + fullResponse.Append(update.Text); + yield return (update.Text, null); + } + } + + var responseId = SaveConversation(messages, fullResponse.ToString(), previousResponseId); + yield return (string.Empty, responseId); + } + +#pragma warning disable OPENAI001 + private void WarnUnsupportedFeatures( + IEnumerable? tools, + ResponseReasoningEffortLevel? 
reasoningEffortLevel, + bool enableContextualSearch) +#pragma warning restore OPENAI001 + { + if (tools is not null || reasoningEffortLevel is not null) + _logger.LogWarning("LocalAIChatService: ResponseTool and ReasoningEffortLevel are Azure-specific and are ignored in local mode."); + + if (enableContextualSearch) + _logger.LogWarning("LocalAIChatService: Vector search (RAG) is disabled in local mode (Phase 1). Run in Azure mode to enable contextual search."); + } + + private List BuildMessages(string prompt, string? systemPrompt, string? previousResponseId) + { + var messages = new List(); + + var sys = string.IsNullOrWhiteSpace(systemPrompt) ? _options.SystemPrompt : systemPrompt; + if (!string.IsNullOrWhiteSpace(sys)) + messages.Add(new ChatMessage(ChatRole.System, sys)); + + if (previousResponseId is not null && _conversations.TryGetValue(previousResponseId, out var history)) + messages.AddRange(history); + + messages.Add(new ChatMessage(ChatRole.User, prompt)); + return messages; + } + + private string SaveConversation(List messages, string assistantResponse, string? previousResponseId) + { + var history = messages.Where(m => m.Role != ChatRole.System).ToList(); + history.Add(new ChatMessage(ChatRole.Assistant, assistantResponse)); + + var newId = Guid.NewGuid().ToString("N"); + _conversations[newId] = history; + + // Remove previous entry to avoid unbounded memory growth. + // TryRemove is safe on ConcurrentDictionary. 
+ if (previousResponseId is not null) + _conversations.TryRemove(previousResponseId, out _); + + return newId; + } +} diff --git a/EssentialCSharp.Web/Controllers/ChatController.cs b/EssentialCSharp.Web/Controllers/ChatController.cs index fbba17c0..059deb94 100644 --- a/EssentialCSharp.Web/Controllers/ChatController.cs +++ b/EssentialCSharp.Web/Controllers/ChatController.cs @@ -12,10 +12,10 @@ namespace EssentialCSharp.Web.Controllers; [EnableRateLimiting("ChatEndpoint")] public class ChatController : ControllerBase { - private readonly AIChatService _AiChatService; + private readonly IAIChatService _AiChatService; private readonly ILogger _Logger; - public ChatController(ILogger logger, AIChatService aiChatService) + public ChatController(ILogger logger, IAIChatService aiChatService) { _AiChatService = aiChatService; _Logger = logger; diff --git a/EssentialCSharp.Web/Program.cs b/EssentialCSharp.Web/Program.cs index ba88dc5a..b6031ca5 100644 --- a/EssentialCSharp.Web/Program.cs +++ b/EssentialCSharp.Web/Program.cs @@ -239,11 +239,9 @@ private static void Main(string[] args) builder.Services.AddSingleton(); builder.Services.AddScoped(); - // Add AI Chat services - if (!builder.Environment.IsDevelopment()) - { - builder.Services.AddAzureOpenAIServices(configuration); - } + // Add AI Chat services — always registered (Ollama in local mode, Azure OpenAI in production). + // AIOptions__UseLocalAI=true enables Ollama local mode (set via aspire secret or dashboard). + builder.AddAIServices(configuration); // Add Rate Limiting for API endpoints builder.Services.AddRateLimiter(options => From 146e1bed6f9c1bbc3a58ac5dc384ada60ed16f5d Mon Sep 17 00:00:00 2001 From: Benjamin Michaelis Date: Sat, 25 Apr 2026 07:25:18 -0700 Subject: [PATCH 02/12] fix: extend Polly timeouts for local Ollama chat inference qwen2.5-coder:7b consistently takes >30s, causing Polly's default TotalRequestTimeout to reject every chat response. 
Override via PostConfigureAll when UseLocalAI=true (dev-only path): - TotalRequestTimeout: 30s 10min - AttemptTimeout: 10s 5min - CircuitBreaker.SamplingDuration: 30s 11min (Polly requires >= 2x AttemptTimeout) The global override is acceptable here: this code path only runs when the Ollama local-AI flag is set, which is developer-only. --- EssentialCSharp.Web/Program.cs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/EssentialCSharp.Web/Program.cs b/EssentialCSharp.Web/Program.cs index b6031ca5..71f57977 100644 --- a/EssentialCSharp.Web/Program.cs +++ b/EssentialCSharp.Web/Program.cs @@ -19,6 +19,7 @@ using Microsoft.AspNetCore.Diagnostics.HealthChecks; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Http.Resilience; using OpenTelemetry; using OpenTelemetry.Instrumentation.AspNetCore; using OpenTelemetry.Metrics; @@ -243,6 +244,21 @@ private static void Main(string[] args) // AIOptions__UseLocalAI=true enables Ollama local mode (set via aspire secret or dashboard). builder.AddAIServices(configuration); + // When using local Ollama, Polly's default 30s TotalRequestTimeout fires before LLM inference + // completes (qwen2.5-coder:7b consistently takes >30s). Override globally — this code path + // is only reached in local dev when UseLocalAI=true, so widening all clients is acceptable. + var aiOptsForTimeout = configuration.GetSection("AIOptions").Get(); + if (aiOptsForTimeout?.UseLocalAI == true) + { + builder.Services.PostConfigureAll(options => + { + options.TotalRequestTimeout.Timeout = TimeSpan.FromMinutes(10); + options.AttemptTimeout.Timeout = TimeSpan.FromMinutes(5); + // Polly requires SamplingDuration >= 2x AttemptTimeout; default 30s is now invalid. 
+ options.CircuitBreaker.SamplingDuration = TimeSpan.FromMinutes(11); + }); + } + // Add Rate Limiting for API endpoints builder.Services.AddRateLimiter(options => { From 583859d4317089400dc2758acd26c4906d5ac28f Mon Sep 17 00:00:00 2001 From: Benjamin Michaelis Date: Sat, 25 Apr 2026 07:57:32 -0700 Subject: [PATCH 03/12] fix: captcha dev bypass and HTTP/2 connection header - ChatController: inject IOptions; skip captcha check entirely when SiteKey is not configured (local dev without hCaptcha secrets) - ChatController: wrap CaptchaService.VerifyAsync in try-catch to fail-open on InvalidOperationException (missing SecretKey) - ChatController: remove Response.Headers.Connection = keep-alive (invalid in HTTP/2, generated ASP.NET warnings) - chat-module.js: getFreshCaptchaToken returns null (not throws) when HCAPTCHA_SITE_KEY is falsy - chat-module.js: fetchChatStream omits captchaToken from body when null so server bypass fires correctly feat: add hCaptcha test keys for local development Use official hCaptcha test keypair (https://docs.hcaptcha.com/#integration-testing-test-keys) in appsettings.Development.json so all devs get working captcha out of the box without configuring secrets. Test keys always pass silently no challenge is shown. - SiteKey: 10000000-ffff-ffff-ffff-000000000001 - SecretKey: 0x0000000000000000000000000000000000000000 These are public constants from hCaptcha docs; committing them is intentional and safe. Production keys must be set via 'aspire secret set' and will override these defaults. fix: remove unsafe captcha bypass Now that appsettings.Development.json has official hCaptcha test keys, the 'skip when SiteKey not configured' bypass is both unnecessary and dangerous a misconfigured production deploy would silently allow all requests. 
- ChatController: remove IOptions injection and SiteKey bypass block - ChatController: remove try-catch around VerifyAsync (InvalidOperationException from missing SecretKey should surface as 500, not be silently swallowed with fail-open) - chat-module.js: remove null-return bypass in getFreshCaptchaToken - chat-module.js: restore direct captchaToken in fetchChatStream body If hCaptcha is misconfigured in production: server: throws InvalidOperationException -> 500 (loud, ops must fix) client: throws 'Captcha is not configured.' -> shows error to user (not silent) --- .../Controllers/ChatController.cs | 49 +++++- .../Controllers/ChatMessageRequest.cs | 4 +- .../Views/Shared/_Layout.cshtml | 1 + .../appsettings.Development.json | 4 + EssentialCSharp.Web/wwwroot/js/chat-module.js | 151 +++++++++++++++--- 5 files changed, 186 insertions(+), 23 deletions(-) diff --git a/EssentialCSharp.Web/Controllers/ChatController.cs b/EssentialCSharp.Web/Controllers/ChatController.cs index 059deb94..b6d434c6 100644 --- a/EssentialCSharp.Web/Controllers/ChatController.cs +++ b/EssentialCSharp.Web/Controllers/ChatController.cs @@ -1,5 +1,6 @@ using System.Text.Json; using EssentialCSharp.Chat.Common.Services; +using EssentialCSharp.Web.Services; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.RateLimiting; @@ -13,17 +14,22 @@ namespace EssentialCSharp.Web.Controllers; public class ChatController : ControllerBase { private readonly IAIChatService _AiChatService; + private readonly ICaptchaService _CaptchaService; private readonly ILogger _Logger; - public ChatController(ILogger logger, IAIChatService aiChatService) + public ChatController(ILogger logger, IAIChatService aiChatService, ICaptchaService captchaService) { _AiChatService = aiChatService; + _CaptchaService = captchaService; _Logger = logger; } [HttpPost("message")] public async Task SendMessage([FromBody] ChatMessageRequest request, CancellationToken cancellationToken = 
default) { + var (captchaOk, captchaError) = await VerifyCaptchaAsync(request.CaptchaToken, cancellationToken); + if (!captchaOk) return captchaError!; + request.Message = request.Message.Trim(); if (string.IsNullOrEmpty(request.Message)) return BadRequest(new { error = "Message cannot be empty." }); @@ -49,6 +55,18 @@ public async Task SendMessage([FromBody] ChatMessageRequest reque [HttpPost("stream")] public async Task StreamMessage([FromBody] ChatMessageRequest request, CancellationToken cancellationToken = default) { + // Captcha and input validation must happen before SSE headers are set, + // so we can still return a proper HTTP status code on failure. + var (captchaOk, captchaError) = await VerifyCaptchaAsync(request.CaptchaToken, cancellationToken); + if (!captchaOk) + { + Response.StatusCode = captchaError is ObjectResult obj ? obj.StatusCode ?? 403 : 403; + await Response.WriteAsJsonAsync( + captchaError is ObjectResult { Value: not null } r ? r.Value : new { error = "Captcha verification failed." }, + CancellationToken.None); + return; + } + request.Message = request.Message.Trim(); if (string.IsNullOrEmpty(request.Message)) { @@ -63,7 +81,6 @@ public async Task StreamMessage([FromBody] ChatMessageRequest request, Cancellat Response.ContentType = "text/event-stream"; Response.Headers.CacheControl = "no-cache"; - Response.Headers.Connection = "keep-alive"; try { @@ -113,4 +130,32 @@ public async Task StreamMessage([FromBody] ChatMessageRequest request, Cancellat catch { /* client already disconnected */ } } } + + /// + /// Verifies the hCaptcha token. Fails-open on service outage (returns success with warning) + /// since the endpoint is already protected by [Authorize] and rate limiting. + /// + private async Task<(bool Success, IActionResult? Error)> VerifyCaptchaAsync( + string? 
captchaToken, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(captchaToken)) + return (false, StatusCode(StatusCodes.Status403Forbidden, + new { error = "Captcha verification required.", errorCode = "captcha_required", retryable = true })); + + var remoteIp = HttpContext.Connection.RemoteIpAddress?.ToString(); + var result = await _CaptchaService.VerifyAsync(captchaToken, remoteIp, cancellationToken); + + if (result is null) + { + // hCaptcha service is unreachable — fail-open since [Authorize] + rate limiting still protect the endpoint. + _Logger.LogWarning("hCaptcha service unavailable for user {User} — allowing request", User.Identity?.Name); + return (true, null); + } + + if (!result.Success) + return (false, StatusCode(StatusCodes.Status403Forbidden, + new { error = "Captcha verification failed.", errorCode = "captcha_failed", retryable = true })); + + return (true, null); + } } diff --git a/EssentialCSharp.Web/Controllers/ChatMessageRequest.cs b/EssentialCSharp.Web/Controllers/ChatMessageRequest.cs index c797febd..7fa21fee 100644 --- a/EssentialCSharp.Web/Controllers/ChatMessageRequest.cs +++ b/EssentialCSharp.Web/Controllers/ChatMessageRequest.cs @@ -10,5 +10,7 @@ public class ChatMessageRequest [StringLength(200)] public string? PreviousResponseId { get; set; } public bool EnableContextualSearch { get; set; } = true; - public string? CaptchaResponse { get; set; } // For future captcha implementation + + [StringLength(4096)] + public string? 
CaptchaToken { get; set; } } diff --git a/EssentialCSharp.Web/Views/Shared/_Layout.cshtml b/EssentialCSharp.Web/Views/Shared/_Layout.cshtml index 899b3e45..3066c7e2 100644 --- a/EssentialCSharp.Web/Views/Shared/_Layout.cshtml +++ b/EssentialCSharp.Web/Views/Shared/_Layout.cshtml @@ -192,3 +192,4 @@ + diff --git a/EssentialCSharp.Web/appsettings.Development.json b/EssentialCSharp.Web/appsettings.Development.json index f7e1d576..c92c76f5 100644 --- a/EssentialCSharp.Web/appsettings.Development.json +++ b/EssentialCSharp.Web/appsettings.Development.json @@ -11,5 +11,9 @@ }, "SiteSettings": { "BaseUrl": "https://localhost:7184" + }, + "HCaptcha": { + "SiteKey": "10000000-ffff-ffff-ffff-000000000001", + "SecretKey": "0x0000000000000000000000000000000000000000" } } diff --git a/EssentialCSharp.Web/wwwroot/js/chat-module.js b/EssentialCSharp.Web/wwwroot/js/chat-module.js index 1305b354..6a18cdb3 100644 --- a/EssentialCSharp.Web/wwwroot/js/chat-module.js +++ b/EssentialCSharp.Web/wwwroot/js/chat-module.js @@ -16,6 +16,12 @@ export function useChatWidget() { const chatInputField = ref(null); const lastResponseId = ref(null); + // hCaptcha invisible widget state + const captchaContainerEl = ref(null); + let captchaWidgetId = null; + let captchaResolve = null; + let captchaReject = null; + // Load chat history from localStorage on initialization function loadChatHistory() { try { @@ -109,6 +115,67 @@ export function useChatWidget() { } // Remove captcha callback functions as they're no longer needed for chat + + // hCaptcha invisible widget — programmatic callbacks (not string-based data-callback attributes) + function onCaptchaSuccess(token) { + if (captchaResolve) { + captchaResolve(token); + captchaResolve = null; + captchaReject = null; + } + } + + function onCaptchaExpired() { + if (captchaReject) { + captchaReject(new Error('Captcha expired')); + captchaResolve = null; + captchaReject = null; + } + } + + function onCaptchaError() { + if (captchaReject) { + 
captchaReject(new Error('Captcha error')); + captchaResolve = null; + captchaReject = null; + } + } + + async function ensureCaptchaWidget() { + if (!window.HCAPTCHA_SITE_KEY) throw new Error('Captcha is not configured.'); + await nextTick(); + if (!window.hcaptcha?.render) throw new Error('Captcha script is not ready.'); + if (captchaWidgetId !== null) return; + + captchaWidgetId = window.hcaptcha.render(captchaContainerEl.value, { + sitekey: window.HCAPTCHA_SITE_KEY, + size: 'invisible', + callback: onCaptchaSuccess, + 'expired-callback': onCaptchaExpired, + 'error-callback': onCaptchaError + }); + } + + async function getFreshCaptchaToken() { + await ensureCaptchaWidget(); + + return await new Promise((resolve, reject) => { + captchaResolve = resolve; + captchaReject = reject; + + window.hcaptcha.reset(captchaWidgetId); + window.hcaptcha.execute(captchaWidgetId); + + // Safety timeout — should not normally be reached + setTimeout(() => { + if (captchaReject) { + captchaReject(new Error('Captcha timed out')); + captchaResolve = null; + captchaReject = null; + } + }, 15000); + }); + } // The captcha service can still be used elsewhere in the application function scrollToBottom() { @@ -182,7 +249,24 @@ export function useChatWidget() { saveChatHistory(); return; } - + + // Acquire captcha token BEFORE mutating UI state — so if captcha fails the user + // message is still in the input and nothing is incorrectly shown in the chat history. + let captchaToken; + try { + captchaToken = await getFreshCaptchaToken(); + } catch (captchaErr) { + console.warn('Captcha acquisition failed:', captchaErr); + chatMessages.value.push({ + role: 'error', + errorType: 'captcha-error', + content: 'Security verification failed. 
Please refresh the page and try again.', + timestamp: new Date().toISOString() + }); + saveChatHistory(); + return; + } + chatInput.value = ''; // Add user message @@ -191,6 +275,7 @@ export function useChatWidget() { content: userMessage, timestamp: new Date().toISOString() }); + const userMessageIndex = chatMessages.value.length - 1; // Save immediately after adding user message saveChatHistory(); @@ -207,25 +292,37 @@ export function useChatWidget() { let reader = null; try { - const requestBody = { - message: userMessage, - enableContextualSearch: true, - previousResponseId: lastResponseId.value - }; - - const response = await fetch('/api/chat/stream', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(requestBody) - }); + const response = await fetchChatStream(userMessage, captchaToken); if (!response.ok) { if (response.status === 401) { throw new Error('Authentication required'); + } else if (response.status === 403) { + // Captcha failed — try once more with a fresh token + let errorData = {}; + try { errorData = await response.json(); } catch (_) {} + + if (errorData.retryable) { + let retryToken; + try { + retryToken = await getFreshCaptchaToken(); + } catch (_) { + throw new Error('captcha-failed'); + } + + const retryResponse = await fetchChatStream(userMessage, retryToken); + if (!retryResponse.ok) { + // Remove optimistic user message and restore input + chatMessages.value.splice(userMessageIndex, 1); + chatInput.value = userMessage; + throw new Error('captcha-failed'); + } + // Use retry response for the rest of the streaming flow + reader = retryResponse.body.getReader(); + } else { + throw new Error('captcha-failed'); + } } else if (response.status === 429) { - // Handle rate limiting - simple error message without captcha let errorData; try { errorData = await response.json(); @@ -237,11 +334,8 @@ export function useChatWidget() { } const retryAfter = errorData.retryAfter || 60; - const errorMessage = 
`Rate limit exceeded. Please wait ${Math.ceil(retryAfter)} seconds before sending another message.`; - - throw new Error(errorMessage); + throw new Error(`Rate limit exceeded. Please wait ${Math.ceil(retryAfter)} seconds before sending another message.`); } else if (response.status === 400) { - // Handle validation errors const errorData = await response.json(); throw new Error(errorData.error || 'Bad request'); } @@ -249,7 +343,7 @@ export function useChatWidget() { } // Handle streaming response - reader = response.body.getReader(); + if (!reader) reader = response.body.getReader(); const decoder = new TextDecoder(); let assistantMessage = ''; let assistantMessageIndex = -1; @@ -321,6 +415,9 @@ export function useChatWidget() { if (error.name === 'AbortError') { errorMessage = 'Request was cancelled. Please try again.'; errorType = 'error'; + } else if (error.message === 'captcha-failed') { + errorMessage = 'Security verification failed. Please try again.'; + errorType = 'captcha-error'; } else if (error.message?.includes('Authentication required')) { errorMessage = 'You must be logged in to use the chat feature. 
Please log in and try again.'; errorType = 'auth-error'; @@ -365,6 +462,19 @@ export function useChatWidget() { } } + function fetchChatStream(message, captchaToken) { + return fetch('/api/chat/stream', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + message, + enableContextualSearch: true, + previousResponseId: lastResponseId.value, + captchaToken + }) + }); + } + // Clean up old chat sessions (keep only last 7 days) function cleanupOldSessions() { try { @@ -396,6 +506,7 @@ export function useChatWidget() { isTyping, chatMessagesEl, chatInputField, + captchaContainerEl, // Methods openChatDialog, From 18c40f45af7ebfe2676a3ed449f0db7ff797be46 Mon Sep 17 00:00:00 2001 From: Benjamin Michaelis Date: Sat, 25 Apr 2026 09:45:28 -0700 Subject: [PATCH 04/12] Fix chat captcha after rebase: restore HCAPTCHA_SITE_KEY global and captcha container - _Layout.cshtml: emit window.HCAPTCHA_SITE_KEY from HCaptcha:SiteKey config - ChatWidget.vue: destructure captchaContainerEl ref, add hidden container div - chat-module.js: use whenHcaptchaReady() from hcaptcha-form.js for robustness (waits for hcaptcha.js to load before rendering the invisible widget) --- EssentialCSharp.Web/Views/Shared/_Layout.cshtml | 1 + EssentialCSharp.Web/src/components/ChatWidget.vue | 3 +++ EssentialCSharp.Web/wwwroot/js/chat-module.js | 10 +++++++++- 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/EssentialCSharp.Web/Views/Shared/_Layout.cshtml b/EssentialCSharp.Web/Views/Shared/_Layout.cshtml index 3066c7e2..a6d8c997 100644 --- a/EssentialCSharp.Web/Views/Shared/_Layout.cshtml +++ b/EssentialCSharp.Web/Views/Shared/_Layout.cshtml @@ -188,6 +188,7 @@ window.TRYDOTNET_ORIGIN = @Json.Serialize(Configuration["TryDotNet:Origin"]); window.BUILD_LABEL = @Json.Serialize(buildLabel); window.ENABLE_CHAT_WIDGET = @Json.Serialize(!Context.Request.Path.StartsWithSegments("/Identity")); + window.HCAPTCHA_SITE_KEY = 
@Json.Serialize(Configuration["HCaptcha:SiteKey"]); diff --git a/EssentialCSharp.Web/src/components/ChatWidget.vue b/EssentialCSharp.Web/src/components/ChatWidget.vue index b7b0820f..247a1e0d 100644 --- a/EssentialCSharp.Web/src/components/ChatWidget.vue +++ b/EssentialCSharp.Web/src/components/ChatWidget.vue @@ -11,6 +11,7 @@ const { isTyping, chatMessagesEl, chatInputField, + captchaContainerEl, openChatDialog, closeChatDialog, clearChatHistory, @@ -189,6 +190,8 @@ const { Type your question and press Enter or click send. Maximum 500 characters. + + diff --git a/EssentialCSharp.Web/wwwroot/js/chat-module.js b/EssentialCSharp.Web/wwwroot/js/chat-module.js index 6a18cdb3..c435759d 100644 --- a/EssentialCSharp.Web/wwwroot/js/chat-module.js +++ b/EssentialCSharp.Web/wwwroot/js/chat-module.js @@ -144,9 +144,17 @@ export function useChatWidget() { async function ensureCaptchaWidget() { if (!window.HCAPTCHA_SITE_KEY) throw new Error('Captcha is not configured.'); await nextTick(); - if (!window.hcaptcha?.render) throw new Error('Captcha script is not ready.'); if (captchaWidgetId !== null) return; + // Wait for hcaptcha.js to load — uses the shared whenHcaptchaReady queue from hcaptcha-form.js + await new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error('Captcha script is not ready.')), 10000); + window.EssentialCSharp.HCaptcha.whenHcaptchaReady(() => { + clearTimeout(timeout); + resolve(); + }); + }); + captchaWidgetId = window.hcaptcha.render(captchaContainerEl.value, { sitekey: window.HCAPTCHA_SITE_KEY, size: 'invisible', From b2061404d9f824ae0614e0586dc6f5261225b6cb Mon Sep 17 00:00:00 2001 From: Benjamin Michaelis Date: Sat, 25 Apr 2026 09:58:21 -0700 Subject: [PATCH 05/12] npm install --- EssentialCSharp.Web/package-lock.json | 23 ++--------------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/EssentialCSharp.Web/package-lock.json b/EssentialCSharp.Web/package-lock.json index 96c40344..e91b5f65 100644 --- 
a/EssentialCSharp.Web/package-lock.json +++ b/EssentialCSharp.Web/package-lock.json @@ -70,27 +70,6 @@ "node": ">=6.9.0" } }, - "node_modules/@emnapi/core": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.10.0.tgz", - "integrity": "sha512-yq6OkJ4p82CAfPl0u9mQebQHKPJkY7WrIuk205cTYnYe+k2Z8YBh11FrbRG/H6ihirqcacOgl2BIO8oyMQLeXw==", - "dev": true, - "optional": true, - "dependencies": { - "@emnapi/wasi-threads": "1.2.1", - "tslib": "^2.4.0" - } - }, - "node_modules/@emnapi/runtime": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.10.0.tgz", - "integrity": "sha512-ewvYlk86xUoGI0zQRNq/mC+16R1QeDlKQy21Ki3oSYXNgLb45GV1P6A0M+/s6nyCuNDqe5VpaY84BzXGwVbwFA==", - "dev": true, - "optional": true, - "dependencies": { - "tslib": "^2.4.0" - } - }, "node_modules/@emnapi/wasi-threads": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.1.tgz", @@ -920,6 +899,7 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, + "peer": true, "engines": { "node": ">=12" }, @@ -1029,6 +1009,7 @@ "resolved": "https://registry.npmjs.org/vite/-/vite-8.0.10.tgz", "integrity": "sha512-rZuUu9j6J5uotLDs+cAA4O5H4K1SfPliUlQwqa6YEwSrWDZzP4rhm00oJR5snMewjxF5V/K3D4kctsUTsIU9Mw==", "dev": true, + "peer": true, "dependencies": { "lightningcss": "^1.32.0", "picomatch": "^4.0.4", From 67cb7fd579489440eec4ab99dd77d9f9384fd9e2 Mon Sep 17 00:00:00 2001 From: Benjamin Michaelis Date: Sat, 25 Apr 2026 18:33:24 -0700 Subject: [PATCH 06/12] cleanup --- .../Extensions/ServiceCollectionExtensions.cs | 12 ++- .../Services/LocalAIChatService.cs | 4 + .../ServiceCollectionExtensionsTests.cs | 79 ++++++++++++++++ .../ChatControllerTests.cs | 90 +++++++++++++++++++ .../Controllers/ChatController.cs | 32 +++++-- EssentialCSharp.Web/Program.cs | 7 
+- EssentialCSharp.Web/wwwroot/js/chat-module.js | 51 +++++++---- 7 files changed, 236 insertions(+), 39 deletions(-) create mode 100644 EssentialCSharp.Chat.Tests/ServiceCollectionExtensionsTests.cs create mode 100644 EssentialCSharp.Web.Tests/ChatControllerTests.cs diff --git a/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs b/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs index f300297e..520aec3e 100644 --- a/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs +++ b/EssentialCSharp.Chat.Shared/Extensions/ServiceCollectionExtensions.cs @@ -17,13 +17,14 @@ public static class ServiceCollectionExtensions /// /// Dispatches to or - /// based on AIOptions:UseLocalAI. Replaces the if (!IsDevelopment()) guard in - /// Program.cs so that AI services are always registered regardless of environment. + /// based on AIOptions:UseLocalAI. AI chat requires either local AI mode + /// or a configured Azure/Foundry endpoint in every environment. /// public static IHostApplicationBuilder AddAIServices( this IHostApplicationBuilder builder, IConfiguration configuration) { + builder.Services.Configure(configuration.GetSection("AIOptions")); var aiOptions = configuration.GetSection("AIOptions").Get() ?? new AIOptions(); if (aiOptions.UseLocalAI) @@ -34,14 +35,11 @@ public static IHostApplicationBuilder AddAIServices( { builder.Services.AddAzureOpenAIServices(configuration); } - else if (!builder.Environment.IsDevelopment()) + else { - // Non-development without an endpoint is a misconfiguration — fail loudly. throw new InvalidOperationException( - "AIOptions:Endpoint is required when UseLocalAI=false in non-development environments. " + - "Set the endpoint or enable local AI mode with aspire secret set Parameters:UseLocalAI true"); + "AI chat requires either AIOptions:UseLocalAI=true or AIOptions:Endpoint to be configured."); } - // else: development + no config — graceful degradation, chat endpoints unavailable. 
return builder; } diff --git a/EssentialCSharp.Chat.Shared/Services/LocalAIChatService.cs b/EssentialCSharp.Chat.Shared/Services/LocalAIChatService.cs index 2e7df548..02ac8594 100644 --- a/EssentialCSharp.Chat.Shared/Services/LocalAIChatService.cs +++ b/EssentialCSharp.Chat.Shared/Services/LocalAIChatService.cs @@ -94,10 +94,14 @@ private void WarnUnsupportedFeatures( #pragma warning restore OPENAI001 { if (tools is not null || reasoningEffortLevel is not null) + { _logger.LogWarning("LocalAIChatService: ResponseTool and ReasoningEffortLevel are Azure-specific and are ignored in local mode."); + } if (enableContextualSearch) + { _logger.LogWarning("LocalAIChatService: Vector search (RAG) is disabled in local mode (Phase 1). Run in Azure mode to enable contextual search."); + } } private List BuildMessages(string prompt, string? systemPrompt, string? previousResponseId) diff --git a/EssentialCSharp.Chat.Tests/ServiceCollectionExtensionsTests.cs b/EssentialCSharp.Chat.Tests/ServiceCollectionExtensionsTests.cs new file mode 100644 index 00000000..3e0ea82d --- /dev/null +++ b/EssentialCSharp.Chat.Tests/ServiceCollectionExtensionsTests.cs @@ -0,0 +1,79 @@ +using EssentialCSharp.Chat.Common.Extensions; +using EssentialCSharp.Chat.Common.Services; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +namespace EssentialCSharp.Chat.Tests; + +public class ServiceCollectionExtensionsTests +{ + [Test] + public async Task AddAIServices_WhenDevelopmentWithoutConfiguration_ThrowsInvalidOperationException() + { + var builder = CreateBuilder(Environments.Development); + + await Assert.That(() => builder.AddAIServices(builder.Configuration)) + .Throws(); + } + + [Test] + public async Task AddAIServices_WhenUseLocalAI_RegistersLocalAIService() + { + var builder = CreateBuilder( + Environments.Development, + new Dictionary + { + ["AIOptions:UseLocalAI"] = bool.TrueString, + ["ConnectionStrings:ollama-chat"] 
= "Endpoint=http://localhost:11434;Model=qwen2.5-coder:7b" + }); + + builder.AddAIServices(builder.Configuration); + + var descriptor = builder.Services.LastOrDefault(service => service.ServiceType == typeof(IAIChatService)); + await Assert.That(descriptor).IsNotNull(); + await Assert.That(descriptor!.ImplementationType).IsEqualTo(typeof(LocalAIChatService)); + } + + [Test] + public async Task AddAIServices_WhenAzureEndpointConfigured_RegistersAzureAIService() + { + var builder = CreateBuilder( + Environments.Production, + new Dictionary + { + ["AIOptions:Endpoint"] = "https://example.openai.azure.com/", + ["AIOptions:ChatDeploymentName"] = "chat", + ["AIOptions:VectorGenerationDeploymentName"] = "embeddings", + ["ConnectionStrings:PostgresVectorStore"] = "Host=test.postgres.database.azure.com;Database=app;Username=user" + }); + + builder.AddAIServices(builder.Configuration); + + await Assert.That(builder.Services.Any(service => service.ServiceType == typeof(AIChatService))).IsTrue(); + await Assert.That(builder.Services.Any(service => service.ServiceType == typeof(IAIChatService))).IsTrue(); + } + + [Test] + public async Task AddAIServices_WhenProductionWithoutConfiguration_ThrowsInvalidOperationException() + { + var builder = CreateBuilder(Environments.Production); + + await Assert.That(() => builder.AddAIServices(builder.Configuration)) + .Throws(); + } + + private static HostApplicationBuilder CreateBuilder( + string environmentName, + Dictionary? settings = null) + { + var builder = new HostApplicationBuilder(new HostApplicationBuilderSettings + { + EnvironmentName = environmentName + }); + + builder.Configuration.Sources.Clear(); + builder.Configuration.AddInMemoryCollection(settings ?? 
[]); + return builder; + } +} diff --git a/EssentialCSharp.Web.Tests/ChatControllerTests.cs b/EssentialCSharp.Web.Tests/ChatControllerTests.cs new file mode 100644 index 00000000..1356643a --- /dev/null +++ b/EssentialCSharp.Web.Tests/ChatControllerTests.cs @@ -0,0 +1,90 @@ +using System.Security.Claims; +using System.Text.Json; +using EssentialCSharp.Chat.Common.Services; +using EssentialCSharp.Web.Controllers; +using EssentialCSharp.Web.Models; +using EssentialCSharp.Web.Services; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; +using Moq; + +namespace EssentialCSharp.Web.Tests; + +public class ChatControllerTests +{ + [Test] + public async Task StreamMessage_MissingCaptchaToken_Returns403WithCaptchaRequired() + { + var controller = CreateController(); + + await controller.StreamMessage(new ChatMessageRequest { Message = "hello" }); + + var body = await ReadJsonResponse(controller.HttpContext.Response); + await Assert.That(controller.HttpContext.Response.StatusCode).IsEqualTo(StatusCodes.Status403Forbidden); + await Assert.That(body["errorCode"].GetString()).IsEqualTo("captcha_required"); + } + + [Test] + public async Task StreamMessage_InvalidCaptcha_Returns403WithCaptchaFailed() + { + var captchaService = new Mock(); + captchaService + .Setup(service => service.VerifyAsync("bad-token", It.IsAny(), It.IsAny())) + .ReturnsAsync(new HCaptchaResult { Success = false }); + + var controller = CreateController(captchaService: captchaService.Object); + + await controller.StreamMessage(new ChatMessageRequest { Message = "hello", CaptchaToken = "bad-token" }); + + var body = await ReadJsonResponse(controller.HttpContext.Response); + await Assert.That(controller.HttpContext.Response.StatusCode).IsEqualTo(StatusCodes.Status403Forbidden); + await Assert.That(body["errorCode"].GetString()).IsEqualTo("captcha_failed"); + } + + [Test] + public async Task 
StreamMessage_CaptchaServiceUnavailable_Returns503WithCaptchaUnavailable() + { + var captchaService = new Mock(); + captchaService + .Setup(service => service.VerifyAsync("token", It.IsAny(), It.IsAny())) + .ReturnsAsync((HCaptchaResult?)null); + + var controller = CreateController(captchaService: captchaService.Object); + + await controller.StreamMessage(new ChatMessageRequest { Message = "hello", CaptchaToken = "token" }); + + var body = await ReadJsonResponse(controller.HttpContext.Response); + await Assert.That(controller.HttpContext.Response.StatusCode).IsEqualTo(StatusCodes.Status503ServiceUnavailable); + await Assert.That(body["errorCode"].GetString()).IsEqualTo("captcha_unavailable"); + } + + private static ChatController CreateController( + IAIChatService? aiChatService = null, + ICaptchaService? captchaService = null) + { + var httpContext = new DefaultHttpContext + { + User = new ClaimsPrincipal(new ClaimsIdentity([new Claim(ClaimTypes.Name, "test-user")], "TestAuth")) + }; + httpContext.Response.Body = new MemoryStream(); + + var controller = new ChatController( + Mock.Of>(), + aiChatService ?? new Mock(MockBehavior.Strict).Object, + captchaService ?? new Mock(MockBehavior.Strict).Object) + { + ControllerContext = new ControllerContext { HttpContext = httpContext } + }; + + return controller; + } + + private static async Task> ReadJsonResponse(HttpResponse response) + { + response.Body.Position = 0; + using var reader = new StreamReader(response.Body, leaveOpen: true); + var json = await reader.ReadToEndAsync(); + return JsonSerializer.Deserialize>(json)!; + } +} diff --git a/EssentialCSharp.Web/Controllers/ChatController.cs b/EssentialCSharp.Web/Controllers/ChatController.cs index b6d434c6..ebc17785 100644 --- a/EssentialCSharp.Web/Controllers/ChatController.cs +++ b/EssentialCSharp.Web/Controllers/ChatController.cs @@ -132,30 +132,44 @@ await Response.WriteAsJsonAsync( } /// - /// Verifies the hCaptcha token. 
Fails-open on service outage (returns success with warning) - /// since the endpoint is already protected by [Authorize] and rate limiting. + /// Verifies the hCaptcha token and denies chat access when verification cannot be completed. /// private async Task<(bool Success, IActionResult? Error)> VerifyCaptchaAsync( string? captchaToken, CancellationToken cancellationToken) { if (string.IsNullOrWhiteSpace(captchaToken)) - return (false, StatusCode(StatusCodes.Status403Forbidden, - new { error = "Captcha verification required.", errorCode = "captcha_required", retryable = true })); + return (false, CreateCaptchaRequiredResult()); var remoteIp = HttpContext.Connection.RemoteIpAddress?.ToString(); var result = await _CaptchaService.VerifyAsync(captchaToken, remoteIp, cancellationToken); if (result is null) { - // hCaptcha service is unreachable — fail-open since [Authorize] + rate limiting still protect the endpoint. - _Logger.LogWarning("hCaptcha service unavailable for user {User} — allowing request", User.Identity?.Name); - return (true, null); + _Logger.LogWarning("hCaptcha service unavailable for user {User} — denying request", User.Identity?.Name); + return (false, CreateCaptchaUnavailableResult()); } if (!result.Success) - return (false, StatusCode(StatusCodes.Status403Forbidden, - new { error = "Captcha verification failed.", errorCode = "captcha_failed", retryable = true })); + return (false, CreateCaptchaFailedResult()); return (true, null); } + + private ObjectResult CreateCaptchaRequiredResult() => + StatusCode(StatusCodes.Status403Forbidden, + new { error = "Captcha verification required.", errorCode = "captcha_required", retryable = true }); + + private ObjectResult CreateCaptchaFailedResult() => + StatusCode(StatusCodes.Status403Forbidden, + new { error = "Captcha verification failed.", errorCode = "captcha_failed", retryable = true }); + + private ObjectResult CreateCaptchaUnavailableResult() => + StatusCode(StatusCodes.Status503ServiceUnavailable, + 
new + { + error = "Captcha verification is temporarily unavailable. Please try again later.", + errorCode = "captcha_unavailable", + retryable = true + }); + } diff --git a/EssentialCSharp.Web/Program.cs b/EssentialCSharp.Web/Program.cs index 71f57977..219ceb87 100644 --- a/EssentialCSharp.Web/Program.cs +++ b/EssentialCSharp.Web/Program.cs @@ -244,11 +244,10 @@ private static void Main(string[] args) // AIOptions__UseLocalAI=true enables Ollama local mode (set via aspire secret or dashboard). builder.AddAIServices(configuration); - // When using local Ollama, Polly's default 30s TotalRequestTimeout fires before LLM inference - // completes (qwen2.5-coder:7b consistently takes >30s). Override globally — this code path - // is only reached in local dev when UseLocalAI=true, so widening all clients is acceptable. + // When using local Ollama in development, Polly's default 30s TotalRequestTimeout fires + // before LLM inference completes (qwen2.5-coder:7b consistently takes >30s). var aiOptsForTimeout = configuration.GetSection("AIOptions").Get(); - if (aiOptsForTimeout?.UseLocalAI == true) + if (builder.Environment.IsDevelopment() && aiOptsForTimeout?.UseLocalAI == true) { builder.Services.PostConfigureAll(options => { diff --git a/EssentialCSharp.Web/wwwroot/js/chat-module.js b/EssentialCSharp.Web/wwwroot/js/chat-module.js index c435759d..0abc4779 100644 --- a/EssentialCSharp.Web/wwwroot/js/chat-module.js +++ b/EssentialCSharp.Web/wwwroot/js/chat-module.js @@ -114,9 +114,7 @@ export function useChatWidget() { }); } - // Remove captcha callback functions as they're no longer needed for chat - - // hCaptcha invisible widget — programmatic callbacks (not string-based data-callback attributes) + // Captcha callbacks used by the hCaptcha invisible widget during chat requests. 
function onCaptchaSuccess(token) { if (captchaResolve) { captchaResolve(token); @@ -142,9 +140,11 @@ export function useChatWidget() { } async function ensureCaptchaWidget() { - if (!window.HCAPTCHA_SITE_KEY) throw new Error('Captcha is not configured.'); + const siteKey = window.HCAPTCHA_SITE_KEY?.trim(); + if (!siteKey) throw new Error('Captcha is not configured.'); await nextTick(); if (captchaWidgetId !== null) return; + if (!captchaContainerEl.value) throw new Error('Captcha container is missing.'); // Wait for hcaptcha.js to load — uses the shared whenHcaptchaReady queue from hcaptcha-form.js await new Promise((resolve, reject) => { @@ -156,7 +156,7 @@ export function useChatWidget() { }); captchaWidgetId = window.hcaptcha.render(captchaContainerEl.value, { - sitekey: window.HCAPTCHA_SITE_KEY, + sitekey: siteKey, size: 'invisible', callback: onCaptchaSuccess, 'expired-callback': onCaptchaExpired, @@ -300,15 +300,15 @@ export function useChatWidget() { let reader = null; try { - const response = await fetchChatStream(userMessage, captchaToken); + let streamResponse = await fetchChatStream(userMessage, captchaToken); - if (!response.ok) { - if (response.status === 401) { + if (!streamResponse.ok) { + if (streamResponse.status === 401) { throw new Error('Authentication required'); - } else if (response.status === 403) { + } else if (streamResponse.status === 403) { // Captcha failed — try once more with a fresh token let errorData = {}; - try { errorData = await response.json(); } catch (_) {} + try { errorData = await streamResponse.json(); } catch (_) {} if (errorData.retryable) { let retryToken; @@ -325,15 +325,14 @@ export function useChatWidget() { chatInput.value = userMessage; throw new Error('captcha-failed'); } - // Use retry response for the rest of the streaming flow - reader = retryResponse.body.getReader(); + streamResponse = retryResponse; } else { throw new Error('captcha-failed'); } - } else if (response.status === 429) { + } else if 
(streamResponse.status === 429) { let errorData; try { - errorData = await response.json(); + errorData = await streamResponse.json(); } catch (e) { errorData = { error: 'Rate limit exceeded. Please wait before sending another message.', @@ -343,15 +342,23 @@ export function useChatWidget() { const retryAfter = errorData.retryAfter || 60; throw new Error(`Rate limit exceeded. Please wait ${Math.ceil(retryAfter)} seconds before sending another message.`); - } else if (response.status === 400) { - const errorData = await response.json(); + } else if (streamResponse.status === 400) { + const errorData = await streamResponse.json(); throw new Error(errorData.error || 'Bad request'); + } else if (streamResponse.status === 503) { + let errorData = {}; + try { errorData = await streamResponse.json(); } catch (_) {} + + if (errorData.errorCode === 'captcha_unavailable') { + throw new Error('captcha-unavailable'); + } + + throw new Error(errorData.error || 'Service unavailable'); } - throw new Error(`HTTP error! status: ${response.status}`); + throw new Error(`HTTP error! status: ${streamResponse.status}`); } - // Handle streaming response - if (!reader) reader = response.body.getReader(); + reader = streamResponse.body.getReader(); const decoder = new TextDecoder(); let assistantMessage = ''; let assistantMessageIndex = -1; @@ -426,6 +433,9 @@ export function useChatWidget() { } else if (error.message === 'captcha-failed') { errorMessage = 'Security verification failed. Please try again.'; errorType = 'captcha-error'; + } else if (error.message === 'captcha-unavailable') { + errorMessage = 'Security verification is temporarily unavailable. Please try again later.'; + errorType = 'captcha-error'; } else if (error.message?.includes('Authentication required')) { errorMessage = 'You must be logged in to use the chat feature. 
Please log in and try again.'; errorType = 'auth-error'; @@ -433,6 +443,9 @@ export function useChatWidget() { } else if (error.message?.includes('Rate limit exceeded')) { errorMessage = error.message; // Use the specific rate limit message with timing errorType = 'rate-limit'; + } else if (error.message?.includes('Service unavailable')) { + errorMessage = error.message; + errorType = 'connection-error'; } else if (error.message?.includes('HTTP error')) { errorMessage = 'Unable to connect to the chat service. Please check your connection and try again.'; errorType = 'connection-error'; From edbac6841c142bf32d0c0423da99b486b60ab685 Mon Sep 17 00:00:00 2001 From: Benjamin Michaelis Date: Sat, 25 Apr 2026 18:36:23 -0700 Subject: [PATCH 07/12] lock --- EssentialCSharp.Web/package-lock.json | 1 + 1 file changed, 1 insertion(+) diff --git a/EssentialCSharp.Web/package-lock.json b/EssentialCSharp.Web/package-lock.json index e91b5f65..30062b4e 100644 --- a/EssentialCSharp.Web/package-lock.json +++ b/EssentialCSharp.Web/package-lock.json @@ -1087,6 +1087,7 @@ "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.33.tgz", "integrity": "sha512-1AgChhx5w3ALgT4oK3acm2Es/7jyZhWSVUfs3rOBlGQC0rjEDkS7G4lWlJJGGNQD+BV3reCwbQrOe1mPNwKHBQ==", "license": "MIT", + "peer": true, "dependencies": { "@vue/compiler-dom": "3.5.33", "@vue/compiler-sfc": "3.5.33", From 6cbcf056946fb48902ca3d41eb2486484876a85c Mon Sep 17 00:00:00 2001 From: Benjamin Michaelis Date: Sat, 25 Apr 2026 18:49:36 -0700 Subject: [PATCH 08/12] ci: add setup-node@v6 (Node 24) to PR workflow npm ci fails on ubuntu-latest (Node 20) because package.json requires node >=22.0.0. Build-Test-And-Deploy.yml already uses Node 24 via setup-node@v6 apply the same step to the PR workflow. 
--- .github/workflows/PR-Build-And-Test.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/PR-Build-And-Test.yml b/.github/workflows/PR-Build-And-Test.yml index af2f3b70..4a4e0c3a 100644 --- a/.github/workflows/PR-Build-And-Test.yml +++ b/.github/workflows/PR-Build-And-Test.yml @@ -11,6 +11,11 @@ jobs: steps: - uses: actions/checkout@v6 + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version: 24 + - name: Set up .NET Core uses: actions/setup-dotnet@v5 with: From 0a37adec4cba17c4a5a918588848a2411de6848d Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 26 Apr 2026 02:12:27 +0000 Subject: [PATCH 09/12] fix: refresh frontend npm lockfile Agent-Logs-Url: https://github.com/IntelliTect/EssentialCSharp.Web/sessions/f6003b4b-0dd2-45a6-80c8-28c78fdafbb6 Co-authored-by: BenjaminMichaelis <22186029+BenjaminMichaelis@users.noreply.github.com> --- EssentialCSharp.Web/package-lock.json | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/EssentialCSharp.Web/package-lock.json b/EssentialCSharp.Web/package-lock.json index 30062b4e..a6386886 100644 --- a/EssentialCSharp.Web/package-lock.json +++ b/EssentialCSharp.Web/package-lock.json @@ -70,6 +70,29 @@ "node": ">=6.9.0" } }, + "node_modules/@emnapi/core": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.10.0.tgz", + "integrity": "sha512-yq6OkJ4p82CAfPl0u9mQebQHKPJkY7WrIuk205cTYnYe+k2Z8YBh11FrbRG/H6ihirqcacOgl2BIO8oyMQLeXw==", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/wasi-threads": "1.2.1", + "tslib": "^2.4.0" + } + }, + "node_modules/@emnapi/runtime": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.10.0.tgz", + "integrity": "sha512-ewvYlk86xUoGI0zQRNq/mC+16R1QeDlKQy21Ki3oSYXNgLb45GV1P6A0M+/s6nyCuNDqe5VpaY84BzXGwVbwFA==", + "dev": true, + "license": 
"MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@emnapi/wasi-threads": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.1.tgz", @@ -899,7 +922,6 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, - "peer": true, "engines": { "node": ">=12" }, @@ -1009,7 +1031,6 @@ "resolved": "https://registry.npmjs.org/vite/-/vite-8.0.10.tgz", "integrity": "sha512-rZuUu9j6J5uotLDs+cAA4O5H4K1SfPliUlQwqa6YEwSrWDZzP4rhm00oJR5snMewjxF5V/K3D4kctsUTsIU9Mw==", "dev": true, - "peer": true, "dependencies": { "lightningcss": "^1.32.0", "picomatch": "^4.0.4", @@ -1087,7 +1108,6 @@ "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.33.tgz", "integrity": "sha512-1AgChhx5w3ALgT4oK3acm2Es/7jyZhWSVUfs3rOBlGQC0rjEDkS7G4lWlJJGGNQD+BV3reCwbQrOe1mPNwKHBQ==", "license": "MIT", - "peer": true, "dependencies": { "@vue/compiler-dom": "3.5.33", "@vue/compiler-sfc": "3.5.33", From afe77140af33997fbea0b355a64d7002074cb3cb Mon Sep 17 00:00:00 2001 From: Benjamin Michaelis Date: Sat, 25 Apr 2026 19:42:50 -0700 Subject: [PATCH 10/12] cleanup --- .../src/components/ChatWidget.vue | 12 +- EssentialCSharp.Web/wwwroot/js/chat-module.js | 793 ++++++++++-------- 2 files changed, 465 insertions(+), 340 deletions(-) diff --git a/EssentialCSharp.Web/src/components/ChatWidget.vue b/EssentialCSharp.Web/src/components/ChatWidget.vue index 247a1e0d..6aaf69cf 100644 --- a/EssentialCSharp.Web/src/components/ChatWidget.vue +++ b/EssentialCSharp.Web/src/components/ChatWidget.vue @@ -9,6 +9,7 @@ const { chatMessages, chatInput, isTyping, + isSubmitting, chatMessagesEl, chatInputField, captchaContainerEl, @@ -16,6 +17,7 @@ const { closeChatDialog, clearChatHistory, formatMessage, + getErrorHeading, getErrorMessageClass, getErrorIconClass, sendChatMessage @@ -122,11 +124,7 @@ const {
-

Rate Limit Reached

-

Authentication Required

-

Verification Required

-

Invalid Input

-

Error

+

{{ getErrorHeading(message.errorType) }}

Please wait before sending another message @@ -171,7 +169,7 @@ const { v-model="chatInput" class="chat-input" placeholder="Ask me about C#..." - :disabled="isTyping || !isAuthenticated" + :disabled="isSubmitting || isTyping || !isAuthenticated" autocomplete="off" aria-describedby="chat-input-help" maxlength="500" @@ -179,7 +177,7 @@ const {