From 2d9615192408687c8170f9042f8a430e43bfc075 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sat, 28 Feb 2026 05:55:59 +0000
Subject: [PATCH 01/43] Initial plan
From eaaa5229773661a4aa6008aa185b4438bea8f37e Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sat, 28 Feb 2026 06:15:55 +0000
Subject: [PATCH 02/43] Changes before error encountered
Co-authored-by: JerryNixon <1749983+JerryNixon@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 594 +++++++++++++++++
src/Cli/Commands/ConfigureOptions.cs | 3 +
src/Cli/ConfigGenerator.cs | 12 +-
.../Converters/DmlToolsConfigConverter.cs | 18 +-
src/Config/ObjectModel/DmlToolsConfig.cs | 25 +-
.../Mcp/AggregateRecordsToolTests.cs | 596 ++++++++++++++++++
.../EntityLevelDmlToolConfigurationTests.cs | 2 +
7 files changed, 1243 insertions(+), 7 deletions(-)
create mode 100644 src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
create mode 100644 src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
new file mode 100644
index 0000000000..e64710e46e
--- /dev/null
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -0,0 +1,594 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+using System.Data.Common;
+using System.Text.Json;
+using Azure.DataApiBuilder.Auth;
+using Azure.DataApiBuilder.Config.DatabasePrimitives;
+using Azure.DataApiBuilder.Config.ObjectModel;
+using Azure.DataApiBuilder.Core.Authorization;
+using Azure.DataApiBuilder.Core.Configurations;
+using Azure.DataApiBuilder.Core.Models;
+using Azure.DataApiBuilder.Core.Parsers;
+using Azure.DataApiBuilder.Core.Resolvers;
+using Azure.DataApiBuilder.Core.Resolvers.Factories;
+using Azure.DataApiBuilder.Core.Services;
+using Azure.DataApiBuilder.Core.Services.MetadataProviders;
+using Azure.DataApiBuilder.Mcp.Model;
+using Azure.DataApiBuilder.Mcp.Utils;
+using Azure.DataApiBuilder.Service.Exceptions;
+using Microsoft.AspNetCore.Authorization;
+using Microsoft.AspNetCore.Http;
+using Microsoft.AspNetCore.Mvc;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using ModelContextProtocol.Protocol;
+using static Azure.DataApiBuilder.Mcp.Model.McpEnums;
+
+namespace Azure.DataApiBuilder.Mcp.BuiltInTools
+{
+ /// <summary>
+ /// Tool to aggregate records from a table/view entity configured in DAB.
+ /// Supports count, avg, sum, min, max with optional distinct, filter, groupby, having, orderby.
+ /// </summary>
+ public class AggregateRecordsTool : IMcpTool
+ {
+ public ToolType ToolType { get; } = ToolType.BuiltIn;
+
+ private static readonly HashSet<string> ValidFunctions = new(StringComparer.OrdinalIgnoreCase) { "count", "avg", "sum", "min", "max" };
+
+ public Tool GetToolMetadata()
+ {
+ return new Tool
+ {
+ Name = "aggregate_records",
+ Description = "STEP 1: describe_entities -> find entities with READ permission and their fields. STEP 2: call this tool to compute aggregations (count, avg, sum, min, max) with optional filter, groupby, having, and orderby.",
+ InputSchema = JsonSerializer.Deserialize<JsonElement>(
+ @"{
+ ""type"": ""object"",
+ ""properties"": {
+ ""entity"": {
+ ""type"": ""string"",
+ ""description"": ""Entity name with READ permission.""
+ },
+ ""function"": {
+ ""type"": ""string"",
+ ""enum"": [""count"", ""avg"", ""sum"", ""min"", ""max""],
+ ""description"": ""Aggregation function to apply.""
+ },
+ ""field"": {
+ ""type"": ""string"",
+ ""description"": ""Field to aggregate. Use '*' for count.""
+ },
+ ""distinct"": {
+ ""type"": ""boolean"",
+ ""description"": ""Apply DISTINCT before aggregating."",
+ ""default"": false
+ },
+ ""filter"": {
+ ""type"": ""string"",
+ ""description"": ""OData filter applied before aggregating (WHERE). Example: 'unitPrice lt 10'"",
+ ""default"": """"
+ },
+ ""groupby"": {
+ ""type"": ""array"",
+ ""items"": { ""type"": ""string"" },
+ ""description"": ""Fields to group by, e.g., ['category', 'region']. Grouped field values are included in the response."",
+ ""default"": []
+ },
+ ""orderby"": {
+ ""type"": ""string"",
+ ""enum"": [""asc"", ""desc""],
+ ""description"": ""Sort aggregated results by the computed value. Only applies with groupby."",
+ ""default"": ""desc""
+ },
+ ""having"": {
+ ""type"": ""object"",
+ ""description"": ""Filter applied after aggregating on the result (HAVING). Operators are AND-ed together."",
+ ""properties"": {
+ ""eq"": { ""type"": ""number"", ""description"": ""Aggregated value equals."" },
+ ""neq"": { ""type"": ""number"", ""description"": ""Aggregated value not equals."" },
+ ""gt"": { ""type"": ""number"", ""description"": ""Aggregated value greater than."" },
+ ""gte"": { ""type"": ""number"", ""description"": ""Aggregated value greater than or equal."" },
+ ""lt"": { ""type"": ""number"", ""description"": ""Aggregated value less than."" },
+ ""lte"": { ""type"": ""number"", ""description"": ""Aggregated value less than or equal."" },
+ ""in"": {
+ ""type"": ""array"",
+ ""items"": { ""type"": ""number"" },
+ ""description"": ""Aggregated value is in the given list.""
+ }
+ }
+ }
+ },
+ ""required"": [""entity"", ""function"", ""field""]
+ }"
+ )
+ };
+ }
+
+ public async Task<CallToolResult> ExecuteAsync(
+ JsonDocument? arguments,
+ IServiceProvider serviceProvider,
+ CancellationToken cancellationToken = default)
+ {
+ ILogger<AggregateRecordsTool>? logger = serviceProvider.GetService<ILogger<AggregateRecordsTool>>();
+ string toolName = GetToolMetadata().Name;
+
+ RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService<RuntimeConfigProvider>();
+ RuntimeConfig runtimeConfig = runtimeConfigProvider.GetConfig();
+
+ if (runtimeConfig.McpDmlTools?.AggregateRecords is not true)
+ {
+ return McpErrorHelpers.ToolDisabled(toolName, logger);
+ }
+
+ try
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+
+ if (arguments == null)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "No arguments provided.", logger);
+ }
+
+ JsonElement root = arguments.RootElement;
+
+ // Parse required arguments
+ if (!McpArgumentParser.TryParseEntity(root, out string entityName, out string parseError))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", parseError, logger);
+ }
+
+ if (runtimeConfig.Entities?.TryGetValue(entityName, out Entity? entity) == true &&
+ entity.Mcp?.DmlToolEnabled == false)
+ {
+ return McpErrorHelpers.ToolDisabled(toolName, logger, $"DML tools are disabled for entity '{entityName}'.");
+ }
+
+ if (!root.TryGetProperty("function", out JsonElement funcEl) || string.IsNullOrWhiteSpace(funcEl.GetString()))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Missing required argument 'function'.", logger);
+ }
+
+ string function = funcEl.GetString()!.ToLowerInvariant();
+ if (!ValidFunctions.Contains(function))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", $"Invalid function '{function}'. Must be one of: count, avg, sum, min, max.", logger);
+ }
+
+ if (!root.TryGetProperty("field", out JsonElement fieldEl) || string.IsNullOrWhiteSpace(fieldEl.GetString()))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Missing required argument 'field'.", logger);
+ }
+
+ string field = fieldEl.GetString()!;
+ bool distinct = root.TryGetProperty("distinct", out JsonElement distinctEl) && distinctEl.GetBoolean();
+ string? filter = root.TryGetProperty("filter", out JsonElement filterEl) ? filterEl.GetString() : null;
+ string orderby = root.TryGetProperty("orderby", out JsonElement orderbyEl) ? (orderbyEl.GetString() ?? "desc") : "desc";
+
+ List<string> groupby = new();
+ if (root.TryGetProperty("groupby", out JsonElement groupbyEl) && groupbyEl.ValueKind == JsonValueKind.Array)
+ {
+ foreach (JsonElement g in groupbyEl.EnumerateArray())
+ {
+ string? gVal = g.GetString();
+ if (!string.IsNullOrWhiteSpace(gVal))
+ {
+ groupby.Add(gVal);
+ }
+ }
+ }
+
+ Dictionary<string, double>? havingOps = null;
+ List<double>? havingIn = null;
+ if (root.TryGetProperty("having", out JsonElement havingEl) && havingEl.ValueKind == JsonValueKind.Object)
+ {
+ havingOps = new Dictionary<string, double>(StringComparer.OrdinalIgnoreCase);
+ foreach (JsonProperty prop in havingEl.EnumerateObject())
+ {
+ if (prop.Name.Equals("in", StringComparison.OrdinalIgnoreCase) && prop.Value.ValueKind == JsonValueKind.Array)
+ {
+ havingIn = new List<double>();
+ foreach (JsonElement item in prop.Value.EnumerateArray())
+ {
+ havingIn.Add(item.GetDouble());
+ }
+ }
+ else if (prop.Value.ValueKind == JsonValueKind.Number)
+ {
+ havingOps[prop.Name] = prop.Value.GetDouble();
+ }
+ }
+ }
+
+ // Resolve metadata
+ if (!McpMetadataHelper.TryResolveMetadata(
+ entityName,
+ runtimeConfig,
+ serviceProvider,
+ out ISqlMetadataProvider sqlMetadataProvider,
+ out DatabaseObject dbObject,
+ out string dataSourceName,
+ out string metadataError))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "EntityNotFound", metadataError, logger);
+ }
+
+ // Authorization
+ IAuthorizationResolver authResolver = serviceProvider.GetRequiredService<IAuthorizationResolver>();
+ IAuthorizationService authorizationService = serviceProvider.GetRequiredService<IAuthorizationService>();
+ IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService<IHttpContextAccessor>();
+ HttpContext? httpContext = httpContextAccessor.HttpContext;
+
+ if (!McpAuthorizationHelper.ValidateRoleContext(httpContext, authResolver, out string roleCtxError))
+ {
+ return McpErrorHelpers.PermissionDenied(toolName, entityName, "read", roleCtxError, logger);
+ }
+
+ if (!McpAuthorizationHelper.TryResolveAuthorizedRole(
+ httpContext!,
+ authResolver,
+ entityName,
+ EntityActionOperation.Read,
+ out string? effectiveRole,
+ out string readAuthError))
+ {
+ string finalError = readAuthError.StartsWith("You do not have permission", StringComparison.OrdinalIgnoreCase)
+ ? $"You do not have permission to read records for entity '{entityName}'."
+ : readAuthError;
+ return McpErrorHelpers.PermissionDenied(toolName, entityName, "read", finalError, logger);
+ }
+
+ // Build select list: groupby fields + aggregation field
+ List<string> selectFields = new(groupby);
+ bool isCountStar = function == "count" && field == "*";
+ if (!isCountStar && !selectFields.Contains(field, StringComparer.OrdinalIgnoreCase))
+ {
+ selectFields.Add(field);
+ }
+
+ // Build and validate Find context
+ RequestValidator requestValidator = new(serviceProvider.GetRequiredService<IMetadataProviderFactory>(), runtimeConfigProvider);
+ FindRequestContext context = new(entityName, dbObject, true);
+ httpContext!.Request.Method = "GET";
+
+ requestValidator.ValidateEntity(entityName);
+
+ if (selectFields.Count > 0)
+ {
+ context.UpdateReturnFields(selectFields);
+ }
+
+ if (!string.IsNullOrWhiteSpace(filter))
+ {
+ string filterQueryString = $"?{RequestParser.FILTER_URL}={filter}";
+ context.FilterClauseInUrl = sqlMetadataProvider.GetODataParser().GetFilterClause(filterQueryString, $"{context.EntityName}.{context.DatabaseObject.FullName}");
+ }
+
+ requestValidator.ValidateRequestContext(context);
+
+ AuthorizationResult authorizationResult = await authorizationService.AuthorizeAsync(
+ user: httpContext.User,
+ resource: context,
+ requirements: new[] { new ColumnsPermissionsRequirement() });
+ if (!authorizationResult.Succeeded)
+ {
+ return McpErrorHelpers.PermissionDenied(toolName, entityName, "read", DataApiBuilderException.AUTHORIZATION_FAILURE, logger);
+ }
+
+ // Execute query to get records
+ IQueryEngineFactory queryEngineFactory = serviceProvider.GetRequiredService<IQueryEngineFactory>();
+ IQueryEngine queryEngine = queryEngineFactory.GetQueryEngine(sqlMetadataProvider.GetDatabaseType());
+ JsonDocument? queryResult = await queryEngine.ExecuteAsync(context);
+
+ IActionResult actionResult = queryResult is null
+ ? SqlResponseHelpers.FormatFindResult(JsonDocument.Parse("[]").RootElement.Clone(), context, sqlMetadataProvider, runtimeConfig, httpContext, true)
+ : SqlResponseHelpers.FormatFindResult(queryResult.RootElement.Clone(), context, sqlMetadataProvider, runtimeConfig, httpContext, true);
+
+ string rawPayloadJson = McpResponseBuilder.ExtractResultJson(actionResult);
+ using JsonDocument resultDoc = JsonDocument.Parse(rawPayloadJson);
+ JsonElement resultRoot = resultDoc.RootElement;
+
+ // Extract the records array from the response
+ JsonElement records;
+ if (resultRoot.TryGetProperty("value", out JsonElement valueArray))
+ {
+ records = valueArray;
+ }
+ else if (resultRoot.ValueKind == JsonValueKind.Array)
+ {
+ records = resultRoot;
+ }
+ else
+ {
+ records = resultRoot;
+ }
+
+ // Compute alias for the response
+ string alias = ComputeAlias(function, field);
+
+ // Perform in-memory aggregation
+ List<Dictionary<string, object?>> aggregatedResults = PerformAggregation(
+ records, function, field, distinct, groupby, havingOps, havingIn, orderby, alias);
+
+ return McpResponseBuilder.BuildSuccessResult(
+ new Dictionary<string, object?>
+ {
+ ["entity"] = entityName,
+ ["result"] = aggregatedResults,
+ ["message"] = $"Successfully aggregated records for entity '{entityName}'"
+ },
+ logger,
+ $"AggregateRecordsTool success for entity {entityName}.");
+ }
+ catch (OperationCanceledException)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "OperationCanceled", "The aggregate operation was canceled.", logger);
+ }
+ catch (DbException argEx)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "DatabaseOperationFailed", argEx.Message, logger);
+ }
+ catch (ArgumentException argEx)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", argEx.Message, logger);
+ }
+ catch (DataApiBuilderException argEx)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, argEx.StatusCode.ToString(), argEx.Message, logger);
+ }
+ catch (Exception ex)
+ {
+ logger?.LogError(ex, "Unexpected error in AggregateRecordsTool.");
+ return McpResponseBuilder.BuildErrorResult(toolName, "UnexpectedError", "Unexpected error occurred in AggregateRecordsTool.", logger);
+ }
+ }
+
+ /// <summary>
+ /// Computes the response alias for the aggregation result.
+ /// For count with "*", the alias is "count". Otherwise it's "{function}_{field}".
+ /// </summary>
+ internal static string ComputeAlias(string function, string field)
+ {
+ if (function == "count" && field == "*")
+ {
+ return "count";
+ }
+
+ return $"{function}_{field}";
+ }
+
+ /// <summary>
+ /// Performs in-memory aggregation over a JSON array of records.
+ /// </summary>
+ internal static List<Dictionary<string, object?>> PerformAggregation(
+ JsonElement records,
+ string function,
+ string field,
+ bool distinct,
+ List<string> groupby,
+ Dictionary<string, double>? havingOps,
+ List<double>? havingIn,
+ string orderby,
+ string alias)
+ {
+ if (records.ValueKind != JsonValueKind.Array)
+ {
+ return new List<Dictionary<string, object?>> { new() { [alias] = null } };
+ }
+
+ bool isCountStar = function == "count" && field == "*";
+
+ if (groupby.Count == 0)
+ {
+ // No groupby - single result
+ List<JsonElement> items = new();
+ foreach (JsonElement record in records.EnumerateArray())
+ {
+ items.Add(record);
+ }
+
+ double? aggregatedValue = ComputeAggregateValue(items, function, field, distinct, isCountStar);
+
+ // Apply having
+ if (!PassesHavingFilter(aggregatedValue, havingOps, havingIn))
+ {
+ return new List<Dictionary<string, object?>>();
+ }
+
+ return new List<Dictionary<string, object?>>
+ {
+ new() { [alias] = aggregatedValue }
+ };
+ }
+ else
+ {
+ // Group by
+ Dictionary<string, List<JsonElement>> groups = new();
+ Dictionary<string, Dictionary<string, object?>> groupKeys = new();
+
+ foreach (JsonElement record in records.EnumerateArray())
+ {
+ string key = BuildGroupKey(record, groupby);
+ if (!groups.ContainsKey(key))
+ {
+ groups[key] = new List<JsonElement>();
+ groupKeys[key] = ExtractGroupFields(record, groupby);
+ }
+
+ groups[key].Add(record);
+ }
+
+ List<Dictionary<string, object?>> results = new();
+ foreach (KeyValuePair<string, List<JsonElement>> group in groups)
+ {
+ double? aggregatedValue = ComputeAggregateValue(group.Value, function, field, distinct, isCountStar);
+
+ if (!PassesHavingFilter(aggregatedValue, havingOps, havingIn))
+ {
+ continue;
+ }
+
+ Dictionary<string, object?> row = new(groupKeys[group.Key])
+ {
+ [alias] = aggregatedValue
+ };
+ results.Add(row);
+ }
+
+ // Apply orderby
+ if (orderby.Equals("asc", StringComparison.OrdinalIgnoreCase))
+ {
+ results.Sort((a, b) => CompareNullableDoubles(a[alias] as double?, b[alias] as double?));
+ }
+ else
+ {
+ results.Sort((a, b) => CompareNullableDoubles(b[alias] as double?, a[alias] as double?));
+ }
+
+ return results;
+ }
+ }
+
+ private static double? ComputeAggregateValue(List<JsonElement> records, string function, string field, bool distinct, bool isCountStar)
+ {
+ if (isCountStar)
+ {
+ return distinct ? 0 : records.Count;
+ }
+
+ List<double> values = new();
+ foreach (JsonElement record in records)
+ {
+ if (record.TryGetProperty(field, out JsonElement val) && val.ValueKind == JsonValueKind.Number)
+ {
+ values.Add(val.GetDouble());
+ }
+ }
+
+ if (distinct)
+ {
+ values = values.Distinct().ToList();
+ }
+
+ if (function == "count")
+ {
+ return values.Count;
+ }
+
+ if (values.Count == 0)
+ {
+ return null;
+ }
+
+ return function switch
+ {
+ "avg" => Math.Round(values.Average(), 2),
+ "sum" => values.Sum(),
+ "min" => values.Min(),
+ "max" => values.Max(),
+ _ => null
+ };
+ }
+
+ private static bool PassesHavingFilter(double? value, Dictionary<string, double>? havingOps, List<double>? havingIn)
+ {
+ if (havingOps == null && havingIn == null)
+ {
+ return true;
+ }
+
+ if (value == null)
+ {
+ return false;
+ }
+
+ double v = value.Value;
+
+ if (havingOps != null)
+ {
+ foreach (KeyValuePair<string, double> op in havingOps)
+ {
+ bool passes = op.Key.ToLowerInvariant() switch
+ {
+ "eq" => v == op.Value,
+ "neq" => v != op.Value,
+ "gt" => v > op.Value,
+ "gte" => v >= op.Value,
+ "lt" => v < op.Value,
+ "lte" => v <= op.Value,
+ _ => true
+ };
+
+ if (!passes)
+ {
+ return false;
+ }
+ }
+ }
+
+ if (havingIn != null && !havingIn.Contains(v))
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ private static string BuildGroupKey(JsonElement record, List<string> groupby)
+ {
+ List<string> parts = new();
+ foreach (string g in groupby)
+ {
+ if (record.TryGetProperty(g, out JsonElement val))
+ {
+ parts.Add(val.ToString());
+ }
+ else
+ {
+ parts.Add("__null__");
+ }
+ }
+
+ return string.Join("|", parts);
+ }
+
+ private static Dictionary<string, object?> ExtractGroupFields(JsonElement record, List<string> groupby)
+ {
+ Dictionary<string, object?> result = new();
+ foreach (string g in groupby)
+ {
+ if (record.TryGetProperty(g, out JsonElement val))
+ {
+ result[g] = McpResponseBuilder.GetJsonValue(val);
+ }
+ else
+ {
+ result[g] = null;
+ }
+ }
+
+ return result;
+ }
+
+ private static int CompareNullableDoubles(double? a, double? b)
+ {
+ if (a == null && b == null)
+ {
+ return 0;
+ }
+
+ if (a == null)
+ {
+ return -1;
+ }
+
+ if (b == null)
+ {
+ return 1;
+ }
+
+ return a.Value.CompareTo(b.Value);
+ }
+ }
+}
diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs
index 262cbc9145..ecd5ecd185 100644
--- a/src/Cli/Commands/ConfigureOptions.cs
+++ b/src/Cli/Commands/ConfigureOptions.cs
@@ -224,6 +224,9 @@ public ConfigureOptions(
[Option("runtime.mcp.dml-tools.execute-entity.enabled", Required = false, HelpText = "Enable DAB's MCP execute entity tool. Default: true (boolean).")]
public bool? RuntimeMcpDmlToolsExecuteEntityEnabled { get; }
+ [Option("runtime.mcp.dml-tools.aggregate-records.enabled", Required = false, HelpText = "Enable DAB's MCP aggregate records tool. Default: true (boolean).")]
+ public bool? RuntimeMcpDmlToolsAggregateRecordsEnabled { get; }
+
[Option("runtime.cache.enabled", Required = false, HelpText = "Enable DAB's cache globally. (You must also enable each entity's cache separately.). Default: false (boolean).")]
public bool? RuntimeCacheEnabled { get; }
diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs
index 6c51f002b7..2eaf50a822 100644
--- a/src/Cli/ConfigGenerator.cs
+++ b/src/Cli/ConfigGenerator.cs
@@ -1181,6 +1181,7 @@ private static bool TryUpdateConfiguredMcpValues(
bool? updateRecord = currentDmlTools?.UpdateRecord;
bool? deleteRecord = currentDmlTools?.DeleteRecord;
bool? executeEntity = currentDmlTools?.ExecuteEntity;
+ bool? aggregateRecords = currentDmlTools?.AggregateRecords;
updatedValue = options?.RuntimeMcpDmlToolsDescribeEntitiesEnabled;
if (updatedValue != null)
@@ -1230,6 +1231,14 @@ private static bool TryUpdateConfiguredMcpValues(
_logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.execute-entity as '{updatedValue}'", updatedValue);
}
+ updatedValue = options?.RuntimeMcpDmlToolsAggregateRecordsEnabled;
+ if (updatedValue != null)
+ {
+ aggregateRecords = (bool)updatedValue;
+ hasToolUpdates = true;
+ _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.aggregate-records as '{updatedValue}'", updatedValue);
+ }
+
if (hasToolUpdates)
{
updatedMcpOptions = updatedMcpOptions! with
@@ -1242,7 +1251,8 @@ private static bool TryUpdateConfiguredMcpValues(
ReadRecords = readRecord,
UpdateRecord = updateRecord,
DeleteRecord = deleteRecord,
- ExecuteEntity = executeEntity
+ ExecuteEntity = executeEntity,
+ AggregateRecords = aggregateRecords
}
};
}
diff --git a/src/Config/Converters/DmlToolsConfigConverter.cs b/src/Config/Converters/DmlToolsConfigConverter.cs
index 82ac3f6069..7e049c7926 100644
--- a/src/Config/Converters/DmlToolsConfigConverter.cs
+++ b/src/Config/Converters/DmlToolsConfigConverter.cs
@@ -44,6 +44,7 @@ internal class DmlToolsConfigConverter : JsonConverter<DmlToolsConfig?>
bool? updateRecord = null;
bool? deleteRecord = null;
bool? executeEntity = null;
+ bool? aggregateRecords = null;
while (reader.Read())
{
@@ -82,6 +83,9 @@ internal class DmlToolsConfigConverter : JsonConverter
case "execute-entity":
executeEntity = value;
break;
+ case "aggregate-records":
+ aggregateRecords = value;
+ break;
default:
// Skip unknown properties
break;
@@ -91,7 +95,8 @@ internal class DmlToolsConfigConverter : JsonConverter
{
// Error on non-boolean values for known properties
if (property?.ToLowerInvariant() is "describe-entities" or "create-record"
- or "read-records" or "update-record" or "delete-record" or "execute-entity")
+ or "read-records" or "update-record" or "delete-record" or "execute-entity"
+ or "aggregate-records")
{
throw new JsonException($"Property '{property}' must be a boolean value.");
}
@@ -110,7 +115,8 @@ internal class DmlToolsConfigConverter : JsonConverter
readRecords: readRecords,
updateRecord: updateRecord,
deleteRecord: deleteRecord,
- executeEntity: executeEntity);
+ executeEntity: executeEntity,
+ aggregateRecords: aggregateRecords);
}
// For any other unexpected token type, return default (all enabled)
@@ -135,7 +141,8 @@ public override void Write(Utf8JsonWriter writer, DmlToolsConfig? value, JsonSer
value.UserProvidedReadRecords ||
value.UserProvidedUpdateRecord ||
value.UserProvidedDeleteRecord ||
- value.UserProvidedExecuteEntity;
+ value.UserProvidedExecuteEntity ||
+ value.UserProvidedAggregateRecords;
// Only write the boolean value if it's provided by user
// This prevents writing "dml-tools": true when it's the default
@@ -181,6 +188,11 @@ public override void Write(Utf8JsonWriter writer, DmlToolsConfig? value, JsonSer
writer.WriteBoolean("execute-entity", value.ExecuteEntity.Value);
}
+ if (value.UserProvidedAggregateRecords && value.AggregateRecords.HasValue)
+ {
+ writer.WriteBoolean("aggregate-records", value.AggregateRecords.Value);
+ }
+
writer.WriteEndObject();
}
}
diff --git a/src/Config/ObjectModel/DmlToolsConfig.cs b/src/Config/ObjectModel/DmlToolsConfig.cs
index 2a09e9d53c..c1f8b278cd 100644
--- a/src/Config/ObjectModel/DmlToolsConfig.cs
+++ b/src/Config/ObjectModel/DmlToolsConfig.cs
@@ -51,6 +51,11 @@ public record DmlToolsConfig
///
public bool? ExecuteEntity { get; init; }
+ /// <summary>
+ /// Whether aggregate-records tool is enabled
+ /// </summary>
+ public bool? AggregateRecords { get; init; }
+
[JsonConstructor]
public DmlToolsConfig(
bool? allToolsEnabled = null,
@@ -59,7 +64,8 @@ public DmlToolsConfig(
bool? readRecords = null,
bool? updateRecord = null,
bool? deleteRecord = null,
- bool? executeEntity = null)
+ bool? executeEntity = null,
+ bool? aggregateRecords = null)
{
if (allToolsEnabled is not null)
{
@@ -75,6 +81,7 @@ public DmlToolsConfig(
UpdateRecord = updateRecord ?? toolDefault;
DeleteRecord = deleteRecord ?? toolDefault;
ExecuteEntity = executeEntity ?? toolDefault;
+ AggregateRecords = aggregateRecords ?? toolDefault;
}
else
{
@@ -87,6 +94,7 @@ public DmlToolsConfig(
UpdateRecord = updateRecord ?? DEFAULT_ENABLED;
DeleteRecord = deleteRecord ?? DEFAULT_ENABLED;
ExecuteEntity = executeEntity ?? DEFAULT_ENABLED;
+ AggregateRecords = aggregateRecords ?? DEFAULT_ENABLED;
}
// Track user-provided status - only true if the parameter was not null
@@ -96,6 +104,7 @@ public DmlToolsConfig(
UserProvidedUpdateRecord = updateRecord is not null;
UserProvidedDeleteRecord = deleteRecord is not null;
UserProvidedExecuteEntity = executeEntity is not null;
+ UserProvidedAggregateRecords = aggregateRecords is not null;
}
///
@@ -112,7 +121,8 @@ public static DmlToolsConfig FromBoolean(bool enabled)
readRecords: null,
updateRecord: null,
deleteRecord: null,
- executeEntity: null
+ executeEntity: null,
+ aggregateRecords: null
);
}
@@ -127,7 +137,8 @@ public static DmlToolsConfig FromBoolean(bool enabled)
readRecords: null,
updateRecord: null,
deleteRecord: null,
- executeEntity: null
+ executeEntity: null,
+ aggregateRecords: null
);
///
@@ -185,4 +196,12 @@ public static DmlToolsConfig FromBoolean(bool enabled)
[JsonIgnore(Condition = JsonIgnoreCondition.Always)]
[MemberNotNullWhen(true, nameof(ExecuteEntity))]
public bool UserProvidedExecuteEntity { get; init; } = false;
+
+ /// <summary>
+ /// Flag which informs CLI and JSON serializer whether to write aggregate-records
+ /// property/value to the runtime config file.
+ /// </summary>
+ [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
+ [MemberNotNullWhen(true, nameof(AggregateRecords))]
+ public bool UserProvidedAggregateRecords { get; init; } = false;
}
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
new file mode 100644
index 0000000000..a1fb2b691c
--- /dev/null
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -0,0 +1,596 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.DataApiBuilder.Auth;
+using Azure.DataApiBuilder.Config.ObjectModel;
+using Azure.DataApiBuilder.Core.Authorization;
+using Azure.DataApiBuilder.Core.Configurations;
+using Azure.DataApiBuilder.Mcp.BuiltInTools;
+using Azure.DataApiBuilder.Mcp.Model;
+using Microsoft.AspNetCore.Http;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using ModelContextProtocol.Protocol;
+using Moq;
+
+namespace Azure.DataApiBuilder.Service.Tests.Mcp
+{
+ /// <summary>
+ /// Tests for the AggregateRecordsTool MCP tool.
+ /// Covers:
+ /// - Tool metadata and schema validation
+ /// - Runtime-level enabled/disabled configuration
+ /// - Entity-level DML tool configuration
+ /// - Input validation (missing/invalid arguments)
+ /// - In-memory aggregation logic (count, avg, sum, min, max)
+ /// - distinct, groupby, having, orderby
+ /// - Alias convention
+ /// </summary>
+ [TestClass]
+ public class AggregateRecordsToolTests
+ {
+ #region Tool Metadata Tests
+
+ [TestMethod]
+ public void GetToolMetadata_ReturnsCorrectName()
+ {
+ AggregateRecordsTool tool = new();
+ Tool metadata = tool.GetToolMetadata();
+ Assert.AreEqual("aggregate_records", metadata.Name);
+ }
+
+ [TestMethod]
+ public void GetToolMetadata_ReturnsCorrectToolType()
+ {
+ AggregateRecordsTool tool = new();
+ Assert.AreEqual(McpEnums.ToolType.BuiltIn, tool.ToolType);
+ }
+
+ [TestMethod]
+ public void GetToolMetadata_HasInputSchema()
+ {
+ AggregateRecordsTool tool = new();
+ Tool metadata = tool.GetToolMetadata();
+ Assert.AreEqual(JsonValueKind.Object, metadata.InputSchema.ValueKind);
+ Assert.IsTrue(metadata.InputSchema.TryGetProperty("properties", out _));
+ Assert.IsTrue(metadata.InputSchema.TryGetProperty("required", out JsonElement required));
+
+ List<string> requiredFields = new();
+ foreach (JsonElement r in required.EnumerateArray())
+ {
+ requiredFields.Add(r.GetString()!);
+ }
+
+ CollectionAssert.Contains(requiredFields, "entity");
+ CollectionAssert.Contains(requiredFields, "function");
+ CollectionAssert.Contains(requiredFields, "field");
+ }
+
+ #endregion
+
+ #region Configuration Tests
+
+ [TestMethod]
+ public async Task AggregateRecords_DisabledAtRuntimeLevel_ReturnsToolDisabledError()
+ {
+ RuntimeConfig config = CreateConfig(aggregateRecordsEnabled: false);
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ AssertToolDisabledError(content);
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_DisabledAtEntityLevel_ReturnsToolDisabledError()
+ {
+ RuntimeConfig config = CreateConfigWithEntityDmlDisabled();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ AssertToolDisabledError(content);
+ }
+
+ #endregion
+
+ #region Input Validation Tests
+
+ [TestMethod]
+ public async Task AggregateRecords_NullArguments_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ CallToolResult result = await tool.ExecuteAsync(null, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.IsTrue(content.TryGetProperty("error", out JsonElement error));
+ Assert.AreEqual("InvalidArguments", error.GetProperty("type").GetString());
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_MissingEntity_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"function\": \"count\", \"field\": \"*\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_MissingFunction_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"field\": \"*\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_MissingField_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_InvalidFunction_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"median\", \"field\": \"price\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("median"));
+ }
+
+ #endregion
+
+ #region Alias Convention Tests
+
+ [TestMethod]
+ public void ComputeAlias_CountStar_ReturnsCount()
+ {
+ Assert.AreEqual("count", AggregateRecordsTool.ComputeAlias("count", "*"));
+ }
+
+ [TestMethod]
+ public void ComputeAlias_CountField_ReturnsFunctionField()
+ {
+ Assert.AreEqual("count_supplierId", AggregateRecordsTool.ComputeAlias("count", "supplierId"));
+ }
+
+ [TestMethod]
+ public void ComputeAlias_AvgField_ReturnsFunctionField()
+ {
+ Assert.AreEqual("avg_unitPrice", AggregateRecordsTool.ComputeAlias("avg", "unitPrice"));
+ }
+
+ [TestMethod]
+ public void ComputeAlias_SumField_ReturnsFunctionField()
+ {
+ Assert.AreEqual("sum_unitPrice", AggregateRecordsTool.ComputeAlias("sum", "unitPrice"));
+ }
+
+ [TestMethod]
+ public void ComputeAlias_MinField_ReturnsFunctionField()
+ {
+ Assert.AreEqual("min_price", AggregateRecordsTool.ComputeAlias("min", "price"));
+ }
+
+ [TestMethod]
+ public void ComputeAlias_MaxField_ReturnsFunctionField()
+ {
+ Assert.AreEqual("max_price", AggregateRecordsTool.ComputeAlias("max", "price"));
+ }
+
+ #endregion
+
+ #region In-Memory Aggregation Tests
+
+ [TestMethod]
+ public void PerformAggregation_CountStar_ReturnsCount()
+ {
+ JsonElement records = ParseArray("[{\"id\":1},{\"id\":2},{\"id\":3}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new(), null, null, "desc", "count");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual(3.0, result[0]["count"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_Avg_ReturnsAverage()
+ {
+ JsonElement records = ParseArray("[{\"price\":10},{\"price\":20},{\"price\":30}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "avg", "price", false, new(), null, null, "desc", "avg_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual(20.0, result[0]["avg_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_Sum_ReturnsSum()
+ {
+ JsonElement records = ParseArray("[{\"price\":10},{\"price\":20},{\"price\":30}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new(), null, null, "desc", "sum_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual(60.0, result[0]["sum_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_Min_ReturnsMin()
+ {
+ JsonElement records = ParseArray("[{\"price\":10},{\"price\":20},{\"price\":5}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "min", "price", false, new(), null, null, "desc", "min_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual(5.0, result[0]["min_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_Max_ReturnsMax()
+ {
+ JsonElement records = ParseArray("[{\"price\":10},{\"price\":20},{\"price\":5}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "max", "price", false, new(), null, null, "desc", "max_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual(20.0, result[0]["max_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_CountDistinct_ReturnsDistinctCount()
+ {
+ JsonElement records = ParseArray("[{\"supplierId\":1},{\"supplierId\":2},{\"supplierId\":1},{\"supplierId\":3}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "supplierId", true, new(), null, null, "desc", "count_supplierId");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual(3.0, result[0]["count_supplierId"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_AvgDistinct_ReturnsDistinctAvg()
+ {
+ JsonElement records = ParseArray("[{\"price\":10},{\"price\":10},{\"price\":20},{\"price\":30}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "avg", "price", true, new(), null, null, "desc", "avg_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual(20.0, result[0]["avg_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_GroupBy_ReturnsGroupedResults()
+ {
+ JsonElement records = ParseArray("[{\"category\":\"A\",\"price\":10},{\"category\":\"A\",\"price\":20},{\"category\":\"B\",\"price\":50}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new() { "category" }, null, null, "desc", "sum_price");
+
+ Assert.AreEqual(2, result.Count);
+ // Desc order: B(50) first, then A(30)
+ Assert.AreEqual("B", result[0]["category"]?.ToString());
+ Assert.AreEqual(50.0, result[0]["sum_price"]);
+ Assert.AreEqual("A", result[1]["category"]?.ToString());
+ Assert.AreEqual(30.0, result[1]["sum_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_GroupBy_Asc_ReturnsSortedAsc()
+ {
+ JsonElement records = ParseArray("[{\"category\":\"A\",\"price\":10},{\"category\":\"B\",\"price\":30},{\"category\":\"A\",\"price\":20}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new() { "category" }, null, null, "asc", "sum_price");
+
+ Assert.AreEqual(2, result.Count);
+ Assert.AreEqual("A", result[0]["category"]?.ToString());
+ Assert.AreEqual(30.0, result[0]["sum_price"]);
+ Assert.AreEqual("B", result[1]["category"]?.ToString());
+ Assert.AreEqual(30.0, result[1]["sum_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_CountStar_GroupBy_ReturnsGroupCounts()
+ {
+ JsonElement records = ParseArray("[{\"category\":\"A\"},{\"category\":\"A\"},{\"category\":\"B\"}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new() { "category" }, null, null, "desc", "count");
+
+ Assert.AreEqual(2, result.Count);
+ Assert.AreEqual("A", result[0]["category"]?.ToString());
+ Assert.AreEqual(2.0, result[0]["count"]);
+ Assert.AreEqual("B", result[1]["category"]?.ToString());
+ Assert.AreEqual(1.0, result[1]["count"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_HavingGt_FiltersResults()
+ {
+ JsonElement records = ParseArray("[{\"category\":\"A\",\"price\":10},{\"category\":\"A\",\"price\":20},{\"category\":\"B\",\"price\":5}]");
+ var having = new Dictionary<string, object> { ["gt"] = 10 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new() { "category" }, having, null, "desc", "sum_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual("A", result[0]["category"]?.ToString());
+ Assert.AreEqual(30.0, result[0]["sum_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_HavingGteLte_FiltersRange()
+ {
+ JsonElement records = ParseArray("[{\"category\":\"A\",\"price\":100},{\"category\":\"B\",\"price\":20},{\"category\":\"C\",\"price\":1}]");
+ var having = new Dictionary<string, object> { ["gte"] = 10, ["lte"] = 50 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new() { "category" }, having, null, "desc", "sum_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual("B", result[0]["category"]?.ToString());
+ }
+
+ [TestMethod]
+ public void PerformAggregation_HavingIn_FiltersExactValues()
+ {
+ JsonElement records = ParseArray("[{\"category\":\"A\"},{\"category\":\"A\"},{\"category\":\"B\"},{\"category\":\"C\"},{\"category\":\"C\"},{\"category\":\"C\"}]");
+ var havingIn = new List<object> { 2, 3 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new() { "category" }, null, havingIn, "desc", "count");
+
+ Assert.AreEqual(2, result.Count);
+ // C(3) desc, A(2)
+ Assert.AreEqual("C", result[0]["category"]?.ToString());
+ Assert.AreEqual(3.0, result[0]["count"]);
+ Assert.AreEqual("A", result[1]["category"]?.ToString());
+ Assert.AreEqual(2.0, result[1]["count"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_HavingEq_FiltersSingleValue()
+ {
+ JsonElement records = ParseArray("[{\"category\":\"A\",\"price\":10},{\"category\":\"B\",\"price\":20}]");
+ var having = new Dictionary<string, object> { ["eq"] = 10 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new() { "category" }, having, null, "desc", "sum_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual("A", result[0]["category"]?.ToString());
+ }
+
+ [TestMethod]
+ public void PerformAggregation_HavingNeq_FiltersOutValue()
+ {
+ JsonElement records = ParseArray("[{\"category\":\"A\",\"price\":10},{\"category\":\"B\",\"price\":20}]");
+ var having = new Dictionary<string, object> { ["neq"] = 10 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new() { "category" }, having, null, "desc", "sum_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual("B", result[0]["category"]?.ToString());
+ }
+
+ [TestMethod]
+ public void PerformAggregation_EmptyRecords_ReturnsNull()
+ {
+ JsonElement records = ParseArray("[]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "avg", "price", false, new(), null, null, "desc", "avg_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.IsNull(result[0]["avg_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_EmptyRecordsCountStar_ReturnsZero()
+ {
+ JsonElement records = ParseArray("[]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new(), null, null, "desc", "count");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual(0.0, result[0]["count"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_MultipleGroupByFields_ReturnsCorrectGroups()
+ {
+ JsonElement records = ParseArray("[{\"cat\":\"A\",\"region\":\"East\",\"price\":10},{\"cat\":\"A\",\"region\":\"East\",\"price\":20},{\"cat\":\"A\",\"region\":\"West\",\"price\":5}]");
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new() { "cat", "region" }, null, null, "desc", "sum_price");
+
+ Assert.AreEqual(2, result.Count);
+ // (A,East)=30 desc, (A,West)=5
+ Assert.AreEqual("A", result[0]["cat"]?.ToString());
+ Assert.AreEqual("East", result[0]["region"]?.ToString());
+ Assert.AreEqual(30.0, result[0]["sum_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_HavingNoResults_ReturnsEmpty()
+ {
+ JsonElement records = ParseArray("[{\"category\":\"A\",\"price\":10}]");
+ var having = new Dictionary<string, object> { ["gt"] = 100 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new() { "category" }, having, null, "desc", "sum_price");
+
+ Assert.AreEqual(0, result.Count);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_HavingOnSingleResult_Passes()
+ {
+ JsonElement records = ParseArray("[{\"price\":50},{\"price\":60}]");
+ var having = new Dictionary<string, object> { ["gte"] = 100 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new(), having, null, "desc", "sum_price");
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual(110.0, result[0]["sum_price"]);
+ }
+
+ [TestMethod]
+ public void PerformAggregation_HavingOnSingleResult_Fails()
+ {
+ JsonElement records = ParseArray("[{\"price\":50},{\"price\":60}]");
+ var having = new Dictionary<string, object> { ["gt"] = 200 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "price", false, new(), having, null, "desc", "sum_price");
+
+ Assert.AreEqual(0, result.Count);
+ }
+
+ #endregion
+
+ #region Helper Methods
+
+ private static JsonElement ParseArray(string json)
+ {
+ return JsonDocument.Parse(json).RootElement;
+ }
+
+ private static JsonElement ParseContent(CallToolResult result)
+ {
+ TextContentBlock firstContent = (TextContentBlock)result.Content[0];
+ return JsonDocument.Parse(firstContent.Text).RootElement;
+ }
+
+ private static void AssertToolDisabledError(JsonElement content)
+ {
+ Assert.IsTrue(content.TryGetProperty("error", out JsonElement error));
+ Assert.IsTrue(error.TryGetProperty("type", out JsonElement errorType));
+ Assert.AreEqual("ToolDisabled", errorType.GetString());
+ }
+
+ private static RuntimeConfig CreateConfig(bool aggregateRecordsEnabled = true)
+ {
+ Dictionary<string, Entity> entities = new()
+ {
+ ["Book"] = new Entity(
+ Source: new("books", EntitySourceType.Table, null, null),
+ GraphQL: new("Book", "Books"),
+ Fields: null,
+ Rest: new(Enabled: true),
+ Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: new[] {
+ new EntityAction(Action: EntityActionOperation.Read, Fields: null, Policy: null)
+ }) },
+ Mappings: null,
+ Relationships: null,
+ Mcp: null
+ )
+ };
+
+ return new RuntimeConfig(
+ Schema: "test-schema",
+ DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
+ Runtime: new(
+ Rest: new(),
+ GraphQL: new(),
+ Mcp: new(
+ Enabled: true,
+ Path: "/mcp",
+ DmlTools: new(
+ describeEntities: true,
+ readRecords: true,
+ createRecord: true,
+ updateRecord: true,
+ deleteRecord: true,
+ executeEntity: true,
+ aggregateRecords: aggregateRecordsEnabled
+ )
+ ),
+ Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
+ ),
+ Entities: new(entities)
+ );
+ }
+
+ private static RuntimeConfig CreateConfigWithEntityDmlDisabled()
+ {
+ Dictionary<string, Entity> entities = new()
+ {
+ ["Book"] = new Entity(
+ Source: new("books", EntitySourceType.Table, null, null),
+ GraphQL: new("Book", "Books"),
+ Fields: null,
+ Rest: new(Enabled: true),
+ Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: new[] {
+ new EntityAction(Action: EntityActionOperation.Read, Fields: null, Policy: null)
+ }) },
+ Mappings: null,
+ Relationships: null,
+ Mcp: new EntityMcpOptions(customToolEnabled: false, dmlToolsEnabled: false)
+ )
+ };
+
+ return new RuntimeConfig(
+ Schema: "test-schema",
+ DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
+ Runtime: new(
+ Rest: new(),
+ GraphQL: new(),
+ Mcp: new(
+ Enabled: true,
+ Path: "/mcp",
+ DmlTools: new(
+ describeEntities: true,
+ readRecords: true,
+ createRecord: true,
+ updateRecord: true,
+ deleteRecord: true,
+ executeEntity: true,
+ aggregateRecords: true
+ )
+ ),
+ Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
+ ),
+ Entities: new(entities)
+ );
+ }
+
+ private static IServiceProvider CreateServiceProvider(RuntimeConfig config)
+ {
+ ServiceCollection services = new();
+
+ RuntimeConfigProvider configProvider = TestHelper.GenerateInMemoryRuntimeConfigProvider(config);
+ services.AddSingleton(configProvider);
+
+ Mock<IAuthorizationResolver> mockAuthResolver = new();
+ mockAuthResolver.Setup(x => x.IsValidRoleContext(It.IsAny<HttpContext>())).Returns(true);
+ services.AddSingleton(mockAuthResolver.Object);
+
+ Mock<HttpContext> mockHttpContext = new();
+ Mock<HttpRequest> mockRequest = new();
+ mockRequest.Setup(x => x.Headers[AuthorizationResolver.CLIENT_ROLE_HEADER]).Returns("anonymous");
+ mockHttpContext.Setup(x => x.Request).Returns(mockRequest.Object);
+
+ Mock<IHttpContextAccessor> mockHttpContextAccessor = new();
+ mockHttpContextAccessor.Setup(x => x.HttpContext).Returns(mockHttpContext.Object);
+ services.AddSingleton(mockHttpContextAccessor.Object);
+
+ services.AddLogging();
+
+ return services.BuildServiceProvider();
+ }
+
+ #endregion
+ }
+}
diff --git a/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs b/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs
index d2f6554cd3..b4ae074207 100644
--- a/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs
+++ b/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs
@@ -48,6 +48,7 @@ public class EntityLevelDmlToolConfigurationTests
[DataRow("UpdateRecord", "{\"entity\": \"Book\", \"keys\": {\"id\": 1}, \"fields\": {\"title\": \"Updated\"}}", false, DisplayName = "UpdateRecord respects entity-level DmlToolEnabled=false")]
[DataRow("DeleteRecord", "{\"entity\": \"Book\", \"keys\": {\"id\": 1}}", false, DisplayName = "DeleteRecord respects entity-level DmlToolEnabled=false")]
[DataRow("ExecuteEntity", "{\"entity\": \"GetBook\"}", true, DisplayName = "ExecuteEntity respects entity-level DmlToolEnabled=false")]
+ [DataRow("AggregateRecords", "{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\"}", false, DisplayName = "AggregateRecords respects entity-level DmlToolEnabled=false")]
public async Task DmlTool_RespectsEntityLevelDmlToolDisabled(string toolType, string jsonArguments, bool isStoredProcedure)
{
// Arrange
@@ -238,6 +239,7 @@ private static IMcpTool CreateTool(string toolType)
"UpdateRecord" => new UpdateRecordTool(),
"DeleteRecord" => new DeleteRecordTool(),
"ExecuteEntity" => new ExecuteEntityTool(),
+ "AggregateRecords" => new AggregateRecordsTool(),
_ => throw new ArgumentException($"Unknown tool type: {toolType}", nameof(toolType))
};
}
From f855e96eba1cf6de7cc782b4d014fb795b703e48 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 2 Mar 2026 17:22:14 +0000
Subject: [PATCH 03/43] Add first/after pagination support to aggregate_records
tool
Co-authored-by: JerryNixon <1749983+JerryNixon@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 102 +++++++++++
.../Mcp/AggregateRecordsToolTests.cs | 158 +++++++++++++++++-
2 files changed, 259 insertions(+), 1 deletion(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index e64710e46e..2fa3bfa89c 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -98,6 +98,15 @@ public Tool GetToolMetadata()
""description"": ""Aggregated value is in the given list.""
}
}
+ },
+ ""first"": {
+ ""type"": ""integer"",
+ ""description"": ""Maximum number of results to return. Used for pagination. Only applies with groupby."",
+ ""minimum"": 1
+ },
+ ""after"": {
+ ""type"": ""string"",
+ ""description"": ""Cursor for pagination. Returns results after this cursor. Only applies with groupby and first.""
}
},
""required"": [""entity"", ""function"", ""field""]
@@ -166,6 +175,18 @@ public async Task<CallToolResult> ExecuteAsync(
string? filter = root.TryGetProperty("filter", out JsonElement filterEl) ? filterEl.GetString() : null;
string orderby = root.TryGetProperty("orderby", out JsonElement orderbyEl) ? (orderbyEl.GetString() ?? "desc") : "desc";
+ int? first = null;
+ if (root.TryGetProperty("first", out JsonElement firstEl) && firstEl.ValueKind == JsonValueKind.Number)
+ {
+ first = firstEl.GetInt32();
+ if (first < 1)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Argument 'first' must be at least 1.", logger);
+ }
+ }
+
+ string? after = root.TryGetProperty("after", out JsonElement afterEl) ? afterEl.GetString() : null;
+
List<string> groupby = new();
if (root.TryGetProperty("groupby", out JsonElement groupbyEl) && groupbyEl.ValueKind == JsonValueKind.Array)
{
@@ -311,6 +332,26 @@ public async Task ExecuteAsync(
List<Dictionary<string, object?>> aggregatedResults = PerformAggregation(
records, function, field, distinct, groupby, havingOps, havingIn, orderby, alias);
+ // Apply pagination if first is specified with groupby
+ if (first.HasValue && groupby.Count > 0)
+ {
+ PaginationResult paginatedResult = ApplyPagination(aggregatedResults, first.Value, after);
+ return McpResponseBuilder.BuildSuccessResult(
+ new Dictionary<string, object?>
+ {
+ ["entity"] = entityName,
+ ["result"] = new Dictionary
+ {
+ ["items"] = paginatedResult.Items,
+ ["endCursor"] = paginatedResult.EndCursor,
+ ["hasNextPage"] = paginatedResult.HasNextPage
+ },
+ ["message"] = $"Successfully aggregated records for entity '{entityName}'"
+ },
+ logger,
+ $"AggregateRecordsTool success for entity {entityName}.");
+ }
+
return McpResponseBuilder.BuildSuccessResult(
new Dictionary<string, object?>
{
@@ -450,6 +491,67 @@ internal static string ComputeAlias(string function, string field)
}
}
+ /// <summary>
+ /// Represents the result of applying pagination to aggregated results.
+ /// </summary>
+ internal sealed class PaginationResult
+ {
+ public List<Dictionary<string, object?>> Items { get; set; } = new();
+ public string? EndCursor { get; set; }
+ public bool HasNextPage { get; set; }
+ }
+
+ /// <summary>
+ /// Applies cursor-based pagination to aggregated results.
+ /// The cursor is an opaque base64-encoded offset integer.
+ /// </summary>
+ internal static PaginationResult ApplyPagination(
+ List<Dictionary<string, object?>> allResults,
+ int first,
+ string? after)
+ {
+ int startIndex = 0;
+
+ if (!string.IsNullOrWhiteSpace(after))
+ {
+ try
+ {
+ byte[] bytes = Convert.FromBase64String(after);
+ string decoded = System.Text.Encoding.UTF8.GetString(bytes);
+ if (int.TryParse(decoded, out int cursorOffset))
+ {
+ startIndex = cursorOffset;
+ }
+ }
+ catch (FormatException)
+ {
+ // Invalid cursor format; start from beginning
+ }
+ }
+
+ List<Dictionary<string, object?>> pageItems = allResults
+ .Skip(startIndex)
+ .Take(first)
+ .ToList();
+
+ bool hasNextPage = startIndex + first < allResults.Count;
+ string? endCursor = null;
+
+ if (pageItems.Count > 0)
+ {
+ int lastItemIndex = startIndex + pageItems.Count;
+ endCursor = Convert.ToBase64String(
+ System.Text.Encoding.UTF8.GetBytes(lastItemIndex.ToString()));
+ }
+
+ return new PaginationResult
+ {
+ Items = pageItems,
+ EndCursor = endCursor,
+ HasNextPage = hasNextPage
+ };
+ }
+
private static double? ComputeAggregateValue(List<JsonElement> records, string function, string field, bool distinct, bool isCountStar)
{
if (isCountStar)
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index a1fb2b691c..f7e3930d7b 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -57,7 +57,7 @@ public void GetToolMetadata_HasInputSchema()
AggregateRecordsTool tool = new();
Tool metadata = tool.GetToolMetadata();
Assert.AreEqual(JsonValueKind.Object, metadata.InputSchema.ValueKind);
- Assert.IsTrue(metadata.InputSchema.TryGetProperty("properties", out _));
+ Assert.IsTrue(metadata.InputSchema.TryGetProperty("properties", out JsonElement properties));
Assert.IsTrue(metadata.InputSchema.TryGetProperty("required", out JsonElement required));
List<string> requiredFields = new();
@@ -69,6 +69,12 @@ public void GetToolMetadata_HasInputSchema()
CollectionAssert.Contains(requiredFields, "entity");
CollectionAssert.Contains(requiredFields, "function");
CollectionAssert.Contains(requiredFields, "field");
+
+ // Verify first and after properties exist in schema
+ Assert.IsTrue(properties.TryGetProperty("first", out JsonElement firstProp));
+ Assert.AreEqual("integer", firstProp.GetProperty("type").GetString());
+ Assert.IsTrue(properties.TryGetProperty("after", out JsonElement afterProp));
+ Assert.AreEqual("string", afterProp.GetProperty("type").GetString());
}
#endregion
@@ -460,6 +466,156 @@ public void PerformAggregation_HavingOnSingleResult_Fails()
#endregion
+ #region Pagination Tests
+
+ [TestMethod]
+ public void ApplyPagination_FirstOnly_ReturnsFirstNItems()
+ {
+ List<Dictionary<string, object?>> allResults = new()
+ {
+ new() { ["category"] = "A", ["count"] = 10.0 },
+ new() { ["category"] = "B", ["count"] = 8.0 },
+ new() { ["category"] = "C", ["count"] = 6.0 },
+ new() { ["category"] = "D", ["count"] = 4.0 },
+ new() { ["category"] = "E", ["count"] = 2.0 }
+ };
+
+ AggregateRecordsTool.PaginationResult result = AggregateRecordsTool.ApplyPagination(allResults, 3, null);
+
+ Assert.AreEqual(3, result.Items.Count);
+ Assert.AreEqual("A", result.Items[0]["category"]?.ToString());
+ Assert.AreEqual("C", result.Items[2]["category"]?.ToString());
+ Assert.IsTrue(result.HasNextPage);
+ Assert.IsNotNull(result.EndCursor);
+ }
+
+ [TestMethod]
+ public void ApplyPagination_FirstWithAfter_ReturnsNextPage()
+ {
+ List<Dictionary<string, object?>> allResults = new()
+ {
+ new() { ["category"] = "A", ["count"] = 10.0 },
+ new() { ["category"] = "B", ["count"] = 8.0 },
+ new() { ["category"] = "C", ["count"] = 6.0 },
+ new() { ["category"] = "D", ["count"] = 4.0 },
+ new() { ["category"] = "E", ["count"] = 2.0 }
+ };
+
+ // First page
+ AggregateRecordsTool.PaginationResult firstPage = AggregateRecordsTool.ApplyPagination(allResults, 3, null);
+ Assert.AreEqual(3, firstPage.Items.Count);
+ Assert.IsTrue(firstPage.HasNextPage);
+
+ // Second page using cursor from first page
+ AggregateRecordsTool.PaginationResult secondPage = AggregateRecordsTool.ApplyPagination(allResults, 3, firstPage.EndCursor);
+ Assert.AreEqual(2, secondPage.Items.Count);
+ Assert.AreEqual("D", secondPage.Items[0]["category"]?.ToString());
+ Assert.AreEqual("E", secondPage.Items[1]["category"]?.ToString());
+ Assert.IsFalse(secondPage.HasNextPage);
+ }
+
+ [TestMethod]
+ public void ApplyPagination_FirstExceedsTotalCount_ReturnsAllItems()
+ {
+ List<Dictionary<string, object?>> allResults = new()
+ {
+ new() { ["category"] = "A", ["count"] = 10.0 },
+ new() { ["category"] = "B", ["count"] = 8.0 }
+ };
+
+ AggregateRecordsTool.PaginationResult result = AggregateRecordsTool.ApplyPagination(allResults, 5, null);
+
+ Assert.AreEqual(2, result.Items.Count);
+ Assert.IsFalse(result.HasNextPage);
+ }
+
+ [TestMethod]
+ public void ApplyPagination_FirstExactlyMatchesTotalCount_HasNextPageIsFalse()
+ {
+ List<Dictionary<string, object?>> allResults = new()
+ {
+ new() { ["category"] = "A", ["count"] = 10.0 },
+ new() { ["category"] = "B", ["count"] = 8.0 },
+ new() { ["category"] = "C", ["count"] = 6.0 }
+ };
+
+ AggregateRecordsTool.PaginationResult result = AggregateRecordsTool.ApplyPagination(allResults, 3, null);
+
+ Assert.AreEqual(3, result.Items.Count);
+ Assert.IsFalse(result.HasNextPage);
+ }
+
+ [TestMethod]
+ public void ApplyPagination_EmptyResults_ReturnsEmptyPage()
+ {
+ List<Dictionary<string, object?>> allResults = new();
+
+ AggregateRecordsTool.PaginationResult result = AggregateRecordsTool.ApplyPagination(allResults, 5, null);
+
+ Assert.AreEqual(0, result.Items.Count);
+ Assert.IsFalse(result.HasNextPage);
+ Assert.IsNull(result.EndCursor);
+ }
+
+ [TestMethod]
+ public void ApplyPagination_InvalidCursor_StartsFromBeginning()
+ {
+ List<Dictionary<string, object?>> allResults = new()
+ {
+ new() { ["category"] = "A", ["count"] = 10.0 },
+ new() { ["category"] = "B", ["count"] = 8.0 }
+ };
+
+ AggregateRecordsTool.PaginationResult result = AggregateRecordsTool.ApplyPagination(allResults, 5, "not-valid-base64!!!");
+
+ Assert.AreEqual(2, result.Items.Count);
+ Assert.AreEqual("A", result.Items[0]["category"]?.ToString());
+ }
+
+ [TestMethod]
+ public void ApplyPagination_CursorBeyondResults_ReturnsEmptyPage()
+ {
+ List<Dictionary<string, object?>> allResults = new()
+ {
+ new() { ["category"] = "A", ["count"] = 10.0 }
+ };
+
+ // Cursor pointing beyond the end
+ string cursor = Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes("100"));
+ AggregateRecordsTool.PaginationResult result = AggregateRecordsTool.ApplyPagination(allResults, 5, cursor);
+
+ Assert.AreEqual(0, result.Items.Count);
+ Assert.IsFalse(result.HasNextPage);
+ Assert.IsNull(result.EndCursor);
+ }
+
+ [TestMethod]
+ public void ApplyPagination_MultiplePages_TraversesAllResults()
+ {
+ List<Dictionary<string, object?>> allResults = new();
+ for (int i = 0; i < 8; i++)
+ {
+ allResults.Add(new() { ["category"] = $"Cat{i}", ["count"] = (double)(8 - i) });
+ }
+
+ // Page 1
+ AggregateRecordsTool.PaginationResult page1 = AggregateRecordsTool.ApplyPagination(allResults, 3, null);
+ Assert.AreEqual(3, page1.Items.Count);
+ Assert.IsTrue(page1.HasNextPage);
+
+ // Page 2
+ AggregateRecordsTool.PaginationResult page2 = AggregateRecordsTool.ApplyPagination(allResults, 3, page1.EndCursor);
+ Assert.AreEqual(3, page2.Items.Count);
+ Assert.IsTrue(page2.HasNextPage);
+
+ // Page 3 (last page)
+ AggregateRecordsTool.PaginationResult page3 = AggregateRecordsTool.ApplyPagination(allResults, 3, page2.EndCursor);
+ Assert.AreEqual(2, page3.Items.Count);
+ Assert.IsFalse(page3.HasNextPage);
+ }
+
+ #endregion
+
#region Helper Methods
private static JsonElement ParseArray(string json)
From 35733211c35a2f1eab877f9cfdadbdb03b8fd53b Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 2 Mar 2026 17:29:15 +0000
Subject: [PATCH 04/43] Add exhaustive tool instructions and all 13 spec
example tests
Co-authored-by: JerryNixon <1749983+JerryNixon@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 55 ++-
.../Mcp/AggregateRecordsToolTests.cs | 439 ++++++++++++++++++
2 files changed, 476 insertions(+), 18 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 2fa3bfa89c..c6fbd08198 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -42,71 +42,90 @@ public Tool GetToolMetadata()
return new Tool
{
Name = "aggregate_records",
- Description = "STEP 1: describe_entities -> find entities with READ permission and their fields. STEP 2: call this tool to compute aggregations (count, avg, sum, min, max) with optional filter, groupby, having, and orderby.",
+ Description = "Computes aggregations (count, avg, sum, min, max) on entity data. "
+ + "STEP 1: Call describe_entities to discover entities with READ permission and their field names. "
+ + "STEP 2: Call this tool with the exact entity name, an aggregation function, and a field name from STEP 1. "
+ + "REQUIRED: entity (exact entity name), function (one of: count, avg, sum, min, max), field (exact field name, or '*' ONLY for count). "
+ + "OPTIONAL: filter (OData WHERE clause applied before aggregating, e.g. 'unitPrice lt 10'), "
+ + "distinct (true to deduplicate values before aggregating), "
+ + "groupby (array of field names to group results by, e.g. ['categoryName']), "
+ + "orderby ('asc' or 'desc' to sort grouped results by aggregated value; requires groupby), "
+ + "having (object to filter groups after aggregating, operators: eq, neq, gt, gte, lt, lte, in; requires groupby), "
+ + "first (integer >= 1, maximum grouped results to return; requires groupby), "
+ + "after (opaque cursor string from a previous response's endCursor; requires first and groupby). "
+ + "RESPONSE: The aggregated value is aliased as '{function}_{field}' (e.g. avg_unitPrice, sum_revenue). "
+ + "For count with field '*', the alias is 'count'. "
+ + "When first is used with groupby, response contains: items (array), endCursor (string), hasNextPage (boolean). "
+ + "RULES: 1) ALWAYS call describe_entities first to get valid entity and field names. "
+ + "2) Use field '*' ONLY with function 'count'. "
+ + "3) For avg, sum, min, max: field MUST be a numeric field name from describe_entities. "
+ + "4) orderby, having, first, and after ONLY apply when groupby is provided. "
+ + "5) after REQUIRES first to also be set. "
+ + "6) Use first and after for paginating large grouped result sets.",
InputSchema = JsonSerializer.Deserialize<JsonElement>(
@"{
""type"": ""object"",
""properties"": {
""entity"": {
""type"": ""string"",
- ""description"": ""Entity name with READ permission.""
+ ""description"": ""Exact entity name from describe_entities that has READ permission. Must match exactly (case-sensitive).""
},
""function"": {
""type"": ""string"",
""enum"": [""count"", ""avg"", ""sum"", ""min"", ""max""],
- ""description"": ""Aggregation function to apply.""
+ ""description"": ""Aggregation function to apply. Use 'count' to count records, 'avg' for average, 'sum' for total, 'min' for minimum, 'max' for maximum. For count use field '*' or a specific field name. For avg, sum, min, max the field must be numeric.""
},
""field"": {
""type"": ""string"",
- ""description"": ""Field to aggregate. Use '*' for count.""
+ ""description"": ""Exact field name from describe_entities to aggregate. Use '*' ONLY with function 'count' to count all records. For avg, sum, min, max, provide a numeric field name.""
},
""distinct"": {
""type"": ""boolean"",
- ""description"": ""Apply DISTINCT before aggregating."",
+ ""description"": ""When true, removes duplicate values before applying the aggregation function. For example, count with distinct counts unique values only. Default is false."",
""default"": false
},
""filter"": {
""type"": ""string"",
- ""description"": ""OData filter applied before aggregating (WHERE). Example: 'unitPrice lt 10'"",
+ ""description"": ""OData filter expression applied before aggregating (acts as a WHERE clause). Supported operators: eq, ne, gt, ge, lt, le, and, or, not. Example: 'unitPrice lt 10' filters to rows where unitPrice is less than 10 before aggregating. Example: 'discontinued eq true and categoryName eq ''Seafood''' filters discontinued seafood products."",
""default"": """"
},
""groupby"": {
""type"": ""array"",
""items"": { ""type"": ""string"" },
- ""description"": ""Fields to group by, e.g., ['category', 'region']. Grouped field values are included in the response."",
+ ""description"": ""Array of exact field names from describe_entities to group results by. Each unique combination of grouped field values produces one aggregated row. Grouped field values are included in the response alongside the aggregated value. Example: ['categoryName'] groups by category. Example: ['categoryName', 'region'] groups by both fields."",
""default"": []
},
""orderby"": {
""type"": ""string"",
""enum"": [""asc"", ""desc""],
- ""description"": ""Sort aggregated results by the computed value. Only applies with groupby."",
+ ""description"": ""Sort direction for grouped results by the computed aggregated value. 'desc' returns highest values first, 'asc' returns lowest first. ONLY applies when groupby is provided. Default is 'desc'."",
""default"": ""desc""
},
""having"": {
""type"": ""object"",
- ""description"": ""Filter applied after aggregating on the result (HAVING). Operators are AND-ed together."",
+ ""description"": ""Filter applied AFTER aggregating to filter grouped results by the computed aggregated value (acts as a HAVING clause). ONLY applies when groupby is provided. Multiple operators are AND-ed together. For example, use gt with value 20 to keep groups where the aggregated value exceeds 20. Combine gte and lte to define a range."",
""properties"": {
- ""eq"": { ""type"": ""number"", ""description"": ""Aggregated value equals."" },
- ""neq"": { ""type"": ""number"", ""description"": ""Aggregated value not equals."" },
- ""gt"": { ""type"": ""number"", ""description"": ""Aggregated value greater than."" },
- ""gte"": { ""type"": ""number"", ""description"": ""Aggregated value greater than or equal."" },
- ""lt"": { ""type"": ""number"", ""description"": ""Aggregated value less than."" },
- ""lte"": { ""type"": ""number"", ""description"": ""Aggregated value less than or equal."" },
+ ""eq"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value equals this number."" },
+ ""neq"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value does not equal this number."" },
+ ""gt"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value is greater than this number."" },
+ ""gte"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value is greater than or equal to this number."" },
+ ""lt"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value is less than this number."" },
+ ""lte"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value is less than or equal to this number."" },
""in"": {
""type"": ""array"",
""items"": { ""type"": ""number"" },
- ""description"": ""Aggregated value is in the given list.""
+ ""description"": ""Keep groups where the aggregated value matches any number in this list. Example: [5, 10] keeps groups with aggregated value 5 or 10.""
}
}
},
""first"": {
""type"": ""integer"",
- ""description"": ""Maximum number of results to return. Used for pagination. Only applies with groupby."",
+ ""description"": ""Maximum number of grouped results to return. Used for pagination of grouped results. ONLY applies when groupby is provided. Must be >= 1. When set, the response includes 'items', 'endCursor', and 'hasNextPage' fields for pagination."",
""minimum"": 1
},
""after"": {
""type"": ""string"",
- ""description"": ""Cursor for pagination. Returns results after this cursor. Only applies with groupby and first.""
+ ""description"": ""Opaque cursor string for pagination. Pass the 'endCursor' value from a previous response to get the next page of results. REQUIRES both groupby and first to be set. Do not construct this value manually; always use the endCursor from a previous response.""
}
},
""required"": [""entity"", ""function"", ""field""]
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index f7e3930d7b..dce07fff80 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -570,6 +570,8 @@ public void ApplyPagination_InvalidCursor_StartsFromBeginning()
Assert.AreEqual(2, result.Items.Count);
Assert.AreEqual("A", result.Items[0]["category"]?.ToString());
+ Assert.IsFalse(result.HasNextPage);
+ Assert.IsNotNull(result.EndCursor);
}
[TestMethod]
@@ -616,6 +618,443 @@ public void ApplyPagination_MultiplePages_TraversesAllResults()
#endregion
+ #region Spec Example Tests
+
+ /// <summary>
+ /// Spec Example 1: "How many products are there?"
+ /// COUNT(*) → 77
+ /// </summary>
+ [TestMethod]
+ public void SpecExample01_CountStar_ReturnsTotal()
+ {
+ // Build 77 product records
+ List<string> items = new();
+ for (int i = 1; i <= 77; i++)
+ {
+ items.Add($"{{\"id\":{i}}}");
+ }
+
+ JsonElement records = ParseArray($"[{string.Join(",", items)}]");
+ string alias = AggregateRecordsTool.ComputeAlias("count", "*");
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new(), null, null, "desc", alias);
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual("count", alias);
+ Assert.AreEqual(77.0, result[0]["count"]);
+ }
+
+ /// <summary>
+ /// Spec Example 2: "What is the average price of products under $10?"
+ /// AVG(unitPrice) WHERE unitPrice < 10 → 6.74
+ /// Filter is applied at DB level; we supply pre-filtered records.
+ /// </summary>
+ [TestMethod]
+ public void SpecExample02_AvgWithFilter_ReturnsFilteredAverage()
+ {
+ // Pre-filtered records (unitPrice < 10) that average to 6.74
+ // 4.50 + 6.00 + 9.72 = 20.22 / 3 = 6.74
+ JsonElement records = ParseArray("[{\"unitPrice\":4.5},{\"unitPrice\":6.0},{\"unitPrice\":9.72}]");
+ string alias = AggregateRecordsTool.ComputeAlias("avg", "unitPrice");
+ var result = AggregateRecordsTool.PerformAggregation(records, "avg", "unitPrice", false, new(), null, null, "desc", alias);
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual("avg_unitPrice", alias);
+ Assert.AreEqual(6.74, result[0]["avg_unitPrice"]);
+ }
+
+ /// <summary>
+ /// Spec Example 3: "Which categories have more than 20 products?"
+ /// COUNT(*) GROUP BY categoryName HAVING COUNT(*) > 20
+ /// Expected: Beverages=24, Condiments=22
+ /// </summary>
+ [TestMethod]
+ public void SpecExample03_CountGroupByHavingGt_FiltersGroups()
+ {
+ List<string> items = new();
+ for (int i = 0; i < 24; i++)
+ {
+ items.Add("{\"categoryName\":\"Beverages\"}");
+ }
+
+ for (int i = 0; i < 22; i++)
+ {
+ items.Add("{\"categoryName\":\"Condiments\"}");
+ }
+
+ for (int i = 0; i < 12; i++)
+ {
+ items.Add("{\"categoryName\":\"Seafood\"}");
+ }
+
+ JsonElement records = ParseArray($"[{string.Join(",", items)}]");
+ string alias = AggregateRecordsTool.ComputeAlias("count", "*");
+ var having = new Dictionary<string, double> { ["gt"] = 20 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new() { "categoryName" }, having, null, "desc", alias);
+
+ Assert.AreEqual(2, result.Count);
+ // Desc order: Beverages(24), Condiments(22)
+ Assert.AreEqual("Beverages", result[0]["categoryName"]?.ToString());
+ Assert.AreEqual(24.0, result[0]["count"]);
+ Assert.AreEqual("Condiments", result[1]["categoryName"]?.ToString());
+ Assert.AreEqual(22.0, result[1]["count"]);
+ }
+
+ /// <summary>
+ /// Spec Example 4: "For discontinued products, which categories have a total revenue between $500 and $10,000?"
+ /// SUM(unitPrice) WHERE discontinued=1 GROUP BY categoryName HAVING SUM >= 500 AND <= 10000
+ /// Expected: Seafood=1834.50, Produce=742.00
+ /// </summary>
+ [TestMethod]
+ public void SpecExample04_SumFilterGroupByHavingRange_ReturnsMatchingGroups()
+ {
+ // Pre-filtered (discontinued) records with prices summing per category
+ JsonElement records = ParseArray(
+ "[" +
+ "{\"categoryName\":\"Seafood\",\"unitPrice\":900}," +
+ "{\"categoryName\":\"Seafood\",\"unitPrice\":934.5}," +
+ "{\"categoryName\":\"Produce\",\"unitPrice\":400}," +
+ "{\"categoryName\":\"Produce\",\"unitPrice\":342}," +
+ "{\"categoryName\":\"Dairy\",\"unitPrice\":50}" + // Sum 50, below 500
+ "]");
+ string alias = AggregateRecordsTool.ComputeAlias("sum", "unitPrice");
+ var having = new Dictionary<string, double> { ["gte"] = 500, ["lte"] = 10000 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "unitPrice", false, new() { "categoryName" }, having, null, "desc", alias);
+
+ Assert.AreEqual(2, result.Count);
+ Assert.AreEqual("sum_unitPrice", alias);
+ // Desc order: Seafood(1834.5), Produce(742)
+ Assert.AreEqual("Seafood", result[0]["categoryName"]?.ToString());
+ Assert.AreEqual(1834.5, result[0]["sum_unitPrice"]);
+ Assert.AreEqual("Produce", result[1]["categoryName"]?.ToString());
+ Assert.AreEqual(742.0, result[1]["sum_unitPrice"]);
+ }
+
+ /// <summary>
+ /// Spec Example 5: "How many distinct suppliers do we have?"
+ /// COUNT(DISTINCT supplierId) → 29
+ /// </summary>
+ [TestMethod]
+ public void SpecExample05_CountDistinct_ReturnsDistinctCount()
+ {
+ // Build records with 29 distinct supplierIds plus duplicates
+ List<string> items = new();
+ for (int i = 1; i <= 29; i++)
+ {
+ items.Add($"{{\"supplierId\":{i}}}");
+ }
+
+ // Add duplicates
+ items.Add("{\"supplierId\":1}");
+ items.Add("{\"supplierId\":5}");
+ items.Add("{\"supplierId\":10}");
+
+ JsonElement records = ParseArray($"[{string.Join(",", items)}]");
+ string alias = AggregateRecordsTool.ComputeAlias("count", "supplierId");
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "supplierId", true, new(), null, null, "desc", alias);
+
+ Assert.AreEqual(1, result.Count);
+ Assert.AreEqual("count_supplierId", alias);
+ Assert.AreEqual(29.0, result[0]["count_supplierId"]);
+ }
+
+ /// <summary>
+ /// Spec Example 6: "Which categories have exactly 5 or 10 products?"
+ /// COUNT(*) GROUP BY categoryName HAVING COUNT(*) IN (5, 10)
+ /// Expected: Grains=5, Produce=5
+ /// </summary>
+ [TestMethod]
+ public void SpecExample06_CountGroupByHavingIn_FiltersExactCounts()
+ {
+ List<string> items = new();
+ for (int i = 0; i < 5; i++)
+ {
+ items.Add("{\"categoryName\":\"Grains\"}");
+ }
+
+ for (int i = 0; i < 5; i++)
+ {
+ items.Add("{\"categoryName\":\"Produce\"}");
+ }
+
+ for (int i = 0; i < 12; i++)
+ {
+ items.Add("{\"categoryName\":\"Beverages\"}");
+ }
+
+ JsonElement records = ParseArray($"[{string.Join(",", items)}]");
+ string alias = AggregateRecordsTool.ComputeAlias("count", "*");
+ var havingIn = new List<double> { 5, 10 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new() { "categoryName" }, null, havingIn, "desc", alias);
+
+ Assert.AreEqual(2, result.Count);
+ // Both have count=5, same order as grouped
+ Assert.AreEqual(5.0, result[0]["count"]);
+ Assert.AreEqual(5.0, result[1]["count"]);
+ }
+
+ /// <summary>
+ /// Spec Example 7: "What is the average distinct unit price per category, for categories averaging over $25?"
+ /// AVG(DISTINCT unitPrice) GROUP BY categoryName HAVING AVG(DISTINCT unitPrice) > 25
+ /// Expected: Meat/Poultry=54.01, Beverages=32.50
+ /// </summary>
+ [TestMethod]
+ public void SpecExample07_AvgDistinctGroupByHavingGt_FiltersAboveThreshold()
+ {
+ // Meat/Poultry: distinct prices {40.00, 68.02} → avg = 54.01
+ // Beverages: distinct prices {25.00, 40.00} → avg = 32.50
+ // Condiments: distinct prices {10.00, 15.00} → avg = 12.50 (below threshold)
+ JsonElement records = ParseArray(
+ "[" +
+ "{\"categoryName\":\"Meat/Poultry\",\"unitPrice\":40.00}," +
+ "{\"categoryName\":\"Meat/Poultry\",\"unitPrice\":68.02}," +
+ "{\"categoryName\":\"Meat/Poultry\",\"unitPrice\":40.00}," + // duplicate
+ "{\"categoryName\":\"Beverages\",\"unitPrice\":25.00}," +
+ "{\"categoryName\":\"Beverages\",\"unitPrice\":40.00}," +
+ "{\"categoryName\":\"Beverages\",\"unitPrice\":25.00}," + // duplicate
+ "{\"categoryName\":\"Condiments\",\"unitPrice\":10.00}," +
+ "{\"categoryName\":\"Condiments\",\"unitPrice\":15.00}" +
+ "]");
+ string alias = AggregateRecordsTool.ComputeAlias("avg", "unitPrice");
+ var having = new Dictionary<string, double> { ["gt"] = 25 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "avg", "unitPrice", true, new() { "categoryName" }, having, null, "desc", alias);
+
+ Assert.AreEqual(2, result.Count);
+ Assert.AreEqual("avg_unitPrice", alias);
+ // Desc order: Meat/Poultry(54.01), Beverages(32.5)
+ Assert.AreEqual("Meat/Poultry", result[0]["categoryName"]?.ToString());
+ Assert.AreEqual(54.01, result[0]["avg_unitPrice"]);
+ Assert.AreEqual("Beverages", result[1]["categoryName"]?.ToString());
+ Assert.AreEqual(32.5, result[1]["avg_unitPrice"]);
+ }
+
+ /// <summary>
+ /// Spec Example 8: "Which categories have the most products?"
+ /// COUNT(*) GROUP BY categoryName ORDER BY DESC
+ /// Expected: Confections=13, Beverages=12, Condiments=12, Seafood=12
+ /// </summary>
+ [TestMethod]
+ public void SpecExample08_CountGroupByOrderByDesc_ReturnsSortedDesc()
+ {
+ List<string> items = new();
+ for (int i = 0; i < 13; i++)
+ {
+ items.Add("{\"categoryName\":\"Confections\"}");
+ }
+
+ for (int i = 0; i < 12; i++)
+ {
+ items.Add("{\"categoryName\":\"Beverages\"}");
+ }
+
+ for (int i = 0; i < 12; i++)
+ {
+ items.Add("{\"categoryName\":\"Condiments\"}");
+ }
+
+ for (int i = 0; i < 12; i++)
+ {
+ items.Add("{\"categoryName\":\"Seafood\"}");
+ }
+
+ JsonElement records = ParseArray($"[{string.Join(",", items)}]");
+ string alias = AggregateRecordsTool.ComputeAlias("count", "*");
+ var result = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new() { "categoryName" }, null, null, "desc", alias);
+
+ Assert.AreEqual(4, result.Count);
+ Assert.AreEqual("Confections", result[0]["categoryName"]?.ToString());
+ Assert.AreEqual(13.0, result[0]["count"]);
+ // Remaining 3 all have count=12
+ Assert.AreEqual(12.0, result[1]["count"]);
+ Assert.AreEqual(12.0, result[2]["count"]);
+ Assert.AreEqual(12.0, result[3]["count"]);
+ }
+
+ /// <summary>
+ /// Spec Example 9: "What are the cheapest categories by average price?"
+ /// AVG(unitPrice) GROUP BY categoryName ORDER BY ASC
+ /// Expected: Grains/Cereals=20.25, Condiments=23.06, Produce=32.37
+ /// </summary>
+ [TestMethod]
+ public void SpecExample09_AvgGroupByOrderByAsc_ReturnsSortedAsc()
+ {
+ // Grains/Cereals: {15.50, 25.00} → avg = 20.25
+ // Condiments: {20.12, 26.00} → avg = 23.06
+ // Produce: {28.74, 36.00} → avg = 32.37
+ JsonElement records = ParseArray(
+ "[" +
+ "{\"categoryName\":\"Grains/Cereals\",\"unitPrice\":15.50}," +
+ "{\"categoryName\":\"Grains/Cereals\",\"unitPrice\":25.00}," +
+ "{\"categoryName\":\"Condiments\",\"unitPrice\":20.12}," +
+ "{\"categoryName\":\"Condiments\",\"unitPrice\":26.00}," +
+ "{\"categoryName\":\"Produce\",\"unitPrice\":28.74}," +
+ "{\"categoryName\":\"Produce\",\"unitPrice\":36.00}" +
+ "]");
+ string alias = AggregateRecordsTool.ComputeAlias("avg", "unitPrice");
+ var result = AggregateRecordsTool.PerformAggregation(records, "avg", "unitPrice", false, new() { "categoryName" }, null, null, "asc", alias);
+
+ Assert.AreEqual(3, result.Count);
+ // Asc order: Grains/Cereals(20.25), Condiments(23.06), Produce(32.37)
+ Assert.AreEqual("Grains/Cereals", result[0]["categoryName"]?.ToString());
+ Assert.AreEqual(20.25, result[0]["avg_unitPrice"]);
+ Assert.AreEqual("Condiments", result[1]["categoryName"]?.ToString());
+ Assert.AreEqual(23.06, result[1]["avg_unitPrice"]);
+ Assert.AreEqual("Produce", result[2]["categoryName"]?.ToString());
+ Assert.AreEqual(32.37, result[2]["avg_unitPrice"]);
+ }
+
+ /// <summary>
+ /// Spec Example 10: "For categories with over $500 revenue from discontinued products, which has the highest total?"
+ /// SUM(unitPrice) WHERE discontinued=1 GROUP BY categoryName HAVING SUM > 500 ORDER BY DESC
+ /// Expected: Seafood=1834.50, Meat/Poultry=1062.50, Produce=742.00
+ /// </summary>
+ [TestMethod]
+ public void SpecExample10_SumFilterGroupByHavingGtOrderByDesc_ReturnsSortedFiltered()
+ {
+ // Pre-filtered (discontinued) records
+ JsonElement records = ParseArray(
+ "[" +
+ "{\"categoryName\":\"Seafood\",\"unitPrice\":900}," +
+ "{\"categoryName\":\"Seafood\",\"unitPrice\":934.5}," +
+ "{\"categoryName\":\"Meat/Poultry\",\"unitPrice\":500}," +
+ "{\"categoryName\":\"Meat/Poultry\",\"unitPrice\":562.5}," +
+ "{\"categoryName\":\"Produce\",\"unitPrice\":400}," +
+ "{\"categoryName\":\"Produce\",\"unitPrice\":342}," +
+ "{\"categoryName\":\"Dairy\",\"unitPrice\":50}" + // Sum 50, below 500
+ "]");
+ string alias = AggregateRecordsTool.ComputeAlias("sum", "unitPrice");
+ var having = new Dictionary<string, double> { ["gt"] = 500 };
+ var result = AggregateRecordsTool.PerformAggregation(records, "sum", "unitPrice", false, new() { "categoryName" }, having, null, "desc", alias);
+
+ Assert.AreEqual(3, result.Count);
+ // Desc order: Seafood(1834.5), Meat/Poultry(1062.5), Produce(742)
+ Assert.AreEqual("Seafood", result[0]["categoryName"]?.ToString());
+ Assert.AreEqual(1834.5, result[0]["sum_unitPrice"]);
+ Assert.AreEqual("Meat/Poultry", result[1]["categoryName"]?.ToString());
+ Assert.AreEqual(1062.5, result[1]["sum_unitPrice"]);
+ Assert.AreEqual("Produce", result[2]["categoryName"]?.ToString());
+ Assert.AreEqual(742.0, result[2]["sum_unitPrice"]);
+ }
+
+ /// <summary>
+ /// Spec Example 11: "Show me the first 5 categories by product count"
+ /// COUNT(*) GROUP BY categoryName ORDER BY DESC FIRST 5
+ /// Expected: 5 items with hasNextPage=true, endCursor set
+ /// </summary>
+ [TestMethod]
+ public void SpecExample11_CountGroupByOrderByDescFirst5_ReturnsPaginatedResults()
+ {
+ List<string> items = new();
+ string[] categories = { "Confections", "Beverages", "Condiments", "Seafood", "Dairy", "Grains/Cereals", "Meat/Poultry", "Produce" };
+ int[] counts = { 13, 12, 12, 12, 10, 7, 6, 5 };
+ for (int c = 0; c < categories.Length; c++)
+ {
+ for (int i = 0; i < counts[c]; i++)
+ {
+ items.Add($"{{\"categoryName\":\"{categories[c]}\"}}");
+ }
+ }
+
+ JsonElement records = ParseArray($"[{string.Join(",", items)}]");
+ string alias = AggregateRecordsTool.ComputeAlias("count", "*");
+ var allResults = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new() { "categoryName" }, null, null, "desc", alias);
+
+ Assert.AreEqual(8, allResults.Count);
+
+ // Apply pagination: first=5
+ AggregateRecordsTool.PaginationResult page1 = AggregateRecordsTool.ApplyPagination(allResults, 5, null);
+
+ Assert.AreEqual(5, page1.Items.Count);
+ Assert.AreEqual("Confections", page1.Items[0]["categoryName"]?.ToString());
+ Assert.AreEqual(13.0, page1.Items[0]["count"]);
+ Assert.AreEqual("Dairy", page1.Items[4]["categoryName"]?.ToString());
+ Assert.AreEqual(10.0, page1.Items[4]["count"]);
+ Assert.IsTrue(page1.HasNextPage);
+ Assert.IsNotNull(page1.EndCursor);
+ }
+
+ /// <summary>
+ /// Spec Example 12: "Show me the next 5 categories" (continuation of Example 11)
+ /// COUNT(*) GROUP BY categoryName ORDER BY DESC FIRST 5 AFTER cursor
+ /// Expected: 3 items (remaining), hasNextPage=false
+ /// </summary>
+ [TestMethod]
+ public void SpecExample12_CountGroupByOrderByDescFirst5After_ReturnsNextPage()
+ {
+ List<string> items = new();
+ string[] categories = { "Confections", "Beverages", "Condiments", "Seafood", "Dairy", "Grains/Cereals", "Meat/Poultry", "Produce" };
+ int[] counts = { 13, 12, 12, 12, 10, 7, 6, 5 };
+ for (int c = 0; c < categories.Length; c++)
+ {
+ for (int i = 0; i < counts[c]; i++)
+ {
+ items.Add($"{{\"categoryName\":\"{categories[c]}\"}}");
+ }
+ }
+
+ JsonElement records = ParseArray($"[{string.Join(",", items)}]");
+ string alias = AggregateRecordsTool.ComputeAlias("count", "*");
+ var allResults = AggregateRecordsTool.PerformAggregation(records, "count", "*", false, new() { "categoryName" }, null, null, "desc", alias);
+
+ // Page 1
+ AggregateRecordsTool.PaginationResult page1 = AggregateRecordsTool.ApplyPagination(allResults, 5, null);
+ Assert.IsTrue(page1.HasNextPage);
+
+ // Page 2 (continuation)
+ AggregateRecordsTool.PaginationResult page2 = AggregateRecordsTool.ApplyPagination(allResults, 5, page1.EndCursor);
+
+ Assert.AreEqual(3, page2.Items.Count);
+ Assert.AreEqual("Grains/Cereals", page2.Items[0]["categoryName"]?.ToString());
+ Assert.AreEqual(7.0, page2.Items[0]["count"]);
+ Assert.AreEqual("Meat/Poultry", page2.Items[1]["categoryName"]?.ToString());
+ Assert.AreEqual(6.0, page2.Items[1]["count"]);
+ Assert.AreEqual("Produce", page2.Items[2]["categoryName"]?.ToString());
+ Assert.AreEqual(5.0, page2.Items[2]["count"]);
+ Assert.IsFalse(page2.HasNextPage);
+ }
+
+ /// <summary>
+ /// Spec Example 13: "Show me the top 3 most expensive categories by average price"
+ /// AVG(unitPrice) GROUP BY categoryName ORDER BY DESC FIRST 3
+ /// Expected: Meat/Poultry=54.01, Beverages=37.98, Seafood=37.08
+ /// </summary>
+ [TestMethod]
+ public void SpecExample13_AvgGroupByOrderByDescFirst3_ReturnsTop3()
+ {
+ // Meat/Poultry: {40.00, 68.02} → avg = 54.01
+ // Beverages: {30.96, 45.00} → avg = 37.98
+ // Seafood: {25.16, 49.00} → avg = 37.08
+ // Condiments: {10.00, 15.00} → avg = 12.50
+ JsonElement records = ParseArray(
+ "[" +
+ "{\"categoryName\":\"Meat/Poultry\",\"unitPrice\":40.00}," +
+ "{\"categoryName\":\"Meat/Poultry\",\"unitPrice\":68.02}," +
+ "{\"categoryName\":\"Beverages\",\"unitPrice\":30.96}," +
+ "{\"categoryName\":\"Beverages\",\"unitPrice\":45.00}," +
+ "{\"categoryName\":\"Seafood\",\"unitPrice\":25.16}," +
+ "{\"categoryName\":\"Seafood\",\"unitPrice\":49.00}," +
+ "{\"categoryName\":\"Condiments\",\"unitPrice\":10.00}," +
+ "{\"categoryName\":\"Condiments\",\"unitPrice\":15.00}" +
+ "]");
+ string alias = AggregateRecordsTool.ComputeAlias("avg", "unitPrice");
+ var allResults = AggregateRecordsTool.PerformAggregation(records, "avg", "unitPrice", false, new() { "categoryName" }, null, null, "desc", alias);
+
+ Assert.AreEqual(4, allResults.Count);
+
+ // Apply pagination: first=3
+ AggregateRecordsTool.PaginationResult page = AggregateRecordsTool.ApplyPagination(allResults, 3, null);
+
+ Assert.AreEqual(3, page.Items.Count);
+ Assert.AreEqual("Meat/Poultry", page.Items[0]["categoryName"]?.ToString());
+ Assert.AreEqual(54.01, page.Items[0]["avg_unitPrice"]);
+ Assert.AreEqual("Beverages", page.Items[1]["categoryName"]?.ToString());
+ Assert.AreEqual(37.98, page.Items[1]["avg_unitPrice"]);
+ Assert.AreEqual("Seafood", page.Items[2]["categoryName"]?.ToString());
+ Assert.AreEqual(37.08, page.Items[2]["avg_unitPrice"]);
+ Assert.IsTrue(page.HasNextPage);
+ }
+
+ #endregion
+
#region Helper Methods
private static JsonElement ParseArray(string json)
From f66bf3f8289c3b199c90f93dfdbbfcea96f6f2e9 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 2 Mar 2026 17:48:11 +0000
Subject: [PATCH 05/43] Changes before error encountered
Co-authored-by: JerryNixon <1749983+JerryNixon@users.noreply.github.com>
---
schemas/dab.draft.schema.json | 5 +
.../BuiltInTools/AggregateRecordsTool.cs | 43 +++++-
.../Utils/McpTelemetryErrorCodes.cs | 5 +
.../Utils/McpTelemetryHelper.cs | 2 +
.../Mcp/AggregateRecordsToolTests.cs | 122 ++++++++++++++++++
5 files changed, 173 insertions(+), 4 deletions(-)
diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json
index cbe38b7d72..ec1afc063a 100644
--- a/schemas/dab.draft.schema.json
+++ b/schemas/dab.draft.schema.json
@@ -315,6 +315,11 @@
"type": "boolean",
"description": "Enable/disable the execute-entity tool.",
"default": false
+ },
+ "aggregate-records": {
+ "type": "boolean",
+ "description": "Enable/disable the aggregate-records tool.",
+ "default": false
}
}
}
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index c6fbd08198..59bd465ad0 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -150,6 +150,8 @@ public async Task ExecuteAsync(
return McpErrorHelpers.ToolDisabled(toolName, logger);
}
+ string entityName = string.Empty;
+
try
{
cancellationToken.ThrowIfCancellationRequested();
@@ -162,11 +164,13 @@ public async Task ExecuteAsync(
JsonElement root = arguments.RootElement;
// Parse required arguments
- if (!McpArgumentParser.TryParseEntity(root, out string entityName, out string parseError))
+ if (!McpArgumentParser.TryParseEntity(root, out string parsedEntityName, out string parseError))
{
return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", parseError, logger);
}
+ entityName = parsedEntityName;
+
if (runtimeConfig.Entities?.TryGetValue(entityName, out Entity? entity) == true &&
entity.Mcp?.DmlToolEnabled == false)
{
@@ -381,13 +385,44 @@ public async Task ExecuteAsync(
logger,
$"AggregateRecordsTool success for entity {entityName}.");
}
+ catch (TimeoutException timeoutEx)
+ {
+ logger?.LogError(timeoutEx, "Aggregation operation timed out for entity {Entity}.", entityName);
+ return McpResponseBuilder.BuildErrorResult(
+ toolName,
+ "TimeoutError",
+ $"The aggregation query for entity '{entityName}' timed out. "
+ + "This is NOT a tool error. The database did not respond in time. "
+ + "This may occur with large datasets or complex aggregations. "
+ + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.",
+ logger);
+ }
+ catch (TaskCanceledException taskEx)
+ {
+ logger?.LogError(taskEx, "Aggregation task was canceled for entity {Entity}.", entityName);
+ return McpResponseBuilder.BuildErrorResult(
+ toolName,
+ "TimeoutError",
+ $"The aggregation query for entity '{entityName}' was canceled, likely due to a timeout. "
+ + "This is NOT a tool error. The database did not respond in time. "
+ + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.",
+ logger);
+ }
catch (OperationCanceledException)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "OperationCanceled", "The aggregate operation was canceled.", logger);
+ logger?.LogWarning("Aggregation operation was canceled for entity {Entity}.", entityName);
+ return McpResponseBuilder.BuildErrorResult(
+ toolName,
+ "OperationCanceled",
+ $"The aggregation query for entity '{entityName}' was canceled before completion. "
+ + "This is NOT a tool error. The operation was interrupted, possibly due to a timeout or client disconnect. "
+ + "No results were returned. You may retry the same request.",
+ logger);
}
- catch (DbException argEx)
+ catch (DbException dbEx)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "DatabaseOperationFailed", argEx.Message, logger);
+ logger?.LogError(dbEx, "Database error during aggregation for entity {Entity}.", entityName);
+ return McpResponseBuilder.BuildErrorResult(toolName, "DatabaseOperationFailed", dbEx.Message, logger);
}
catch (ArgumentException argEx)
{
diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryErrorCodes.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryErrorCodes.cs
index f69a26fa5d..3ef3aa4d74 100644
--- a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryErrorCodes.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryErrorCodes.cs
@@ -37,5 +37,10 @@ internal static class McpTelemetryErrorCodes
/// Operation cancelled error code.
///
public const string OPERATION_CANCELLED = "OperationCancelled";
+
+ /// <summary>
+ /// Operation timed out error code.
+ /// </summary>
+ public const string TIMEOUT = "Timeout";
}
}
diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
index 2a60557f8d..eabbdc62d8 100644
--- a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
@@ -124,6 +124,7 @@ public static string InferOperationFromTool(IMcpTool tool, string toolName)
"delete_record" => "delete",
"describe_entities" => "describe",
"execute_entity" => "execute",
+ "aggregate_records" => "aggregate",
_ => "execute" // Fallback for any unknown built-in tools
};
}
@@ -188,6 +189,7 @@ public static string MapExceptionToErrorCode(Exception ex)
return ex switch
{
OperationCanceledException => McpTelemetryErrorCodes.OPERATION_CANCELLED,
+ TimeoutException => McpTelemetryErrorCodes.TIMEOUT,
DataApiBuilderException dabEx when dabEx.SubStatusCode == DataApiBuilderException.SubStatusCodes.AuthenticationChallenge
=> McpTelemetryErrorCodes.AUTHENTICATION_FAILED,
DataApiBuilderException dabEx when dabEx.SubStatusCode == DataApiBuilderException.SubStatusCodes.AuthorizationCheckFailed
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index dce07fff80..9255a2e9c5 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -618,6 +618,128 @@ public void ApplyPagination_MultiplePages_TraversesAllResults()
#endregion
+ #region Timeout and Cancellation Tests
+
+ /// <summary>
+ /// Verifies that OperationCanceledException produces a model-explicit error
+ /// that clearly states the operation was canceled, not errored.
+ /// </summary>
+ [TestMethod]
+ public async Task AggregateRecords_OperationCanceled_ReturnsExplicitCanceledMessage()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ // Create a pre-canceled token
+ CancellationTokenSource cts = new();
+ cts.Cancel();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, cts.Token);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.IsTrue(content.TryGetProperty("error", out JsonElement error));
+ string errorType = error.GetProperty("type").GetString();
+ string errorMessage = error.GetProperty("message").GetString();
+
+ // Verify the error type identifies it as a cancellation
+ Assert.AreEqual("OperationCanceled", errorType);
+
+ // Verify the message explicitly tells the model this is NOT a tool error
+ Assert.IsTrue(errorMessage.Contains("NOT a tool error"), "Message must explicitly state this is NOT a tool error.");
+
+ // Verify the message tells the model what happened
+ Assert.IsTrue(errorMessage.Contains("canceled"), "Message must mention the operation was canceled.");
+
+ // Verify the message tells the model it can retry
+ Assert.IsTrue(errorMessage.Contains("retry"), "Message must tell the model it can retry.");
+ }
+
+ /// <summary>
+ /// Verifies that the timeout error message provides explicit guidance to the model
+ /// about what happened and what to do next.
+ /// </summary>
+ [TestMethod]
+ public void TimeoutErrorMessage_ContainsModelGuidance()
+ {
+ // Simulate what the tool builds for a TimeoutException response
+ string entityName = "Product";
+ string expectedMessage = $"The aggregation query for entity '{entityName}' timed out. "
+ + "This is NOT a tool error. The database did not respond in time. "
+ + "This may occur with large datasets or complex aggregations. "
+ + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
+
+ // Verify message explicitly states it's NOT a tool error
+ Assert.IsTrue(expectedMessage.Contains("NOT a tool error"), "Timeout message must state this is NOT a tool error.");
+
+ // Verify message explains the cause
+ Assert.IsTrue(expectedMessage.Contains("database did not respond"), "Timeout message must explain the database didn't respond.");
+
+ // Verify message mentions large datasets
+ Assert.IsTrue(expectedMessage.Contains("large datasets"), "Timeout message must mention large datasets as a possible cause.");
+
+ // Verify message provides actionable remediation steps
+ Assert.IsTrue(expectedMessage.Contains("filter"), "Timeout message must suggest using a filter.");
+ Assert.IsTrue(expectedMessage.Contains("groupby"), "Timeout message must suggest reducing groupby fields.");
+ Assert.IsTrue(expectedMessage.Contains("first"), "Timeout message must suggest using pagination with first.");
+ }
+
+ /// <summary>
+ /// Verifies that TaskCanceledException (which typically signals HTTP/DB timeout)
+ /// produces a TimeoutError, not a cancellation error.
+ /// </summary>
+ [TestMethod]
+ public void TaskCanceledErrorMessage_ContainsTimeoutGuidance()
+ {
+ // Simulate what the tool builds for a TaskCanceledException response
+ string entityName = "Product";
+ string expectedMessage = $"The aggregation query for entity '{entityName}' was canceled, likely due to a timeout. "
+ + "This is NOT a tool error. The database did not respond in time. "
+ + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
+
+ // TaskCanceledException should produce a TimeoutError, not OperationCanceled
+ Assert.IsTrue(expectedMessage.Contains("NOT a tool error"), "TaskCanceled message must state this is NOT a tool error.");
+ Assert.IsTrue(expectedMessage.Contains("timeout"), "TaskCanceled message must reference timeout as the cause.");
+ Assert.IsTrue(expectedMessage.Contains("filter"), "TaskCanceled message must suggest filter as remediation.");
+ Assert.IsTrue(expectedMessage.Contains("first"), "TaskCanceled message must suggest first for pagination.");
+ }
+
+ /// <summary>
+ /// Verifies that the OperationCanceled error message for a specific entity
+ /// includes the entity name so the model knows which aggregation failed.
+ /// </summary>
+ [TestMethod]
+ public void CanceledErrorMessage_IncludesEntityName()
+ {
+ string entityName = "LargeProductCatalog";
+ string expectedMessage = $"The aggregation query for entity '{entityName}' was canceled before completion. "
+ + "This is NOT a tool error. The operation was interrupted, possibly due to a timeout or client disconnect. "
+ + "No results were returned. You may retry the same request.";
+
+ Assert.IsTrue(expectedMessage.Contains(entityName), "Canceled message must include the entity name.");
+ Assert.IsTrue(expectedMessage.Contains("No results were returned"), "Canceled message must state no results were returned.");
+ }
+
+ /// <summary>
+ /// Verifies that the timeout error message for a specific entity
+ /// includes the entity name so the model knows which aggregation timed out.
+ /// </summary>
+ [TestMethod]
+ public void TimeoutErrorMessage_IncludesEntityName()
+ {
+ string entityName = "HugeTransactionLog";
+ string expectedMessage = $"The aggregation query for entity '{entityName}' timed out. "
+ + "This is NOT a tool error. The database did not respond in time. "
+ + "This may occur with large datasets or complex aggregations. "
+ + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
+
+ Assert.IsTrue(expectedMessage.Contains(entityName), "Timeout message must include the entity name.");
+ }
+
+ #endregion
+
#region Spec Example Tests
///
From 829a630ad8b9d6e427996c3c33d87ecb3d4a8653 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 2 Mar 2026 17:56:23 +0000
Subject: [PATCH 06/43] Changes before error encountered
Co-authored-by: JerryNixon <210500244+JerryNixon@users.noreply.github.com>
---
schemas/dab.draft.schema.json | 6 ++++
src/Cli/Commands/ConfigureOptions.cs | 5 +++
src/Cli/ConfigGenerator.cs | 12 ++++++-
src/Config/ObjectModel/McpRuntimeOptions.cs | 31 ++++++++++++++++++-
.../Configurations/RuntimeConfigValidator.cs | 10 ++++++
5 files changed, 62 insertions(+), 2 deletions(-)
diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json
index ec1afc063a..94df7ca77c 100644
--- a/schemas/dab.draft.schema.json
+++ b/schemas/dab.draft.schema.json
@@ -275,6 +275,12 @@
"description": "Allow enabling/disabling MCP requests for all entities.",
"default": true
},
+ "query-timeout": {
+ "type": "integer",
+ "description": "Query timeout in seconds for MCP tool operations. Applies to all MCP tools that execute database queries.",
+ "default": 10,
+ "minimum": 1
+ },
"dml-tools": {
"oneOf": [
{
diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs
index ecd5ecd185..fc0ab7b8e5 100644
--- a/src/Cli/Commands/ConfigureOptions.cs
+++ b/src/Cli/Commands/ConfigureOptions.cs
@@ -42,6 +42,7 @@ public ConfigureOptions(
bool? runtimeMcpEnabled = null,
string? runtimeMcpPath = null,
string? runtimeMcpDescription = null,
+ int? runtimeMcpQueryTimeout = null,
bool? runtimeMcpDmlToolsEnabled = null,
bool? runtimeMcpDmlToolsDescribeEntitiesEnabled = null,
bool? runtimeMcpDmlToolsCreateRecordEnabled = null,
@@ -102,6 +103,7 @@ public ConfigureOptions(
RuntimeMcpEnabled = runtimeMcpEnabled;
RuntimeMcpPath = runtimeMcpPath;
RuntimeMcpDescription = runtimeMcpDescription;
+ RuntimeMcpQueryTimeout = runtimeMcpQueryTimeout;
RuntimeMcpDmlToolsEnabled = runtimeMcpDmlToolsEnabled;
RuntimeMcpDmlToolsDescribeEntitiesEnabled = runtimeMcpDmlToolsDescribeEntitiesEnabled;
RuntimeMcpDmlToolsCreateRecordEnabled = runtimeMcpDmlToolsCreateRecordEnabled;
@@ -203,6 +205,9 @@ public ConfigureOptions(
[Option("runtime.mcp.description", Required = false, HelpText = "Set the MCP server description to be exposed in the initialize response.")]
public string? RuntimeMcpDescription { get; }
+ [Option("runtime.mcp.query-timeout", Required = false, HelpText = "Set the query timeout in seconds for MCP tool operations. Default: 10 (integer). Must be >= 1.")]
+ public int? RuntimeMcpQueryTimeout { get; }
+
[Option("runtime.mcp.dml-tools.enabled", Required = false, HelpText = "Enable DAB's MCP DML tools endpoint. Default: true (boolean).")]
public bool? RuntimeMcpDmlToolsEnabled { get; }
diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs
index 2eaf50a822..fa632f10ac 100644
--- a/src/Cli/ConfigGenerator.cs
+++ b/src/Cli/ConfigGenerator.cs
@@ -876,13 +876,15 @@ private static bool TryUpdateConfiguredRuntimeOptions(
if (options.RuntimeMcpEnabled != null ||
options.RuntimeMcpPath != null ||
options.RuntimeMcpDescription != null ||
+ options.RuntimeMcpQueryTimeout != null ||
options.RuntimeMcpDmlToolsEnabled != null ||
options.RuntimeMcpDmlToolsDescribeEntitiesEnabled != null ||
options.RuntimeMcpDmlToolsCreateRecordEnabled != null ||
options.RuntimeMcpDmlToolsReadRecordsEnabled != null ||
options.RuntimeMcpDmlToolsUpdateRecordEnabled != null ||
options.RuntimeMcpDmlToolsDeleteRecordEnabled != null ||
- options.RuntimeMcpDmlToolsExecuteEntityEnabled != null)
+ options.RuntimeMcpDmlToolsExecuteEntityEnabled != null ||
+ options.RuntimeMcpDmlToolsAggregateRecordsEnabled != null)
{
McpRuntimeOptions updatedMcpOptions = runtimeConfig?.Runtime?.Mcp ?? new();
bool status = TryUpdateConfiguredMcpValues(options, ref updatedMcpOptions);
@@ -1161,6 +1163,14 @@ private static bool TryUpdateConfiguredMcpValues(
_logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.Description as '{updatedValue}'", updatedValue);
}
+ // Runtime.Mcp.QueryTimeout
+ updatedValue = options?.RuntimeMcpQueryTimeout;
+ if (updatedValue != null)
+ {
+ updatedMcpOptions = updatedMcpOptions! with { QueryTimeout = (int)updatedValue };
+ _logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.QueryTimeout as '{updatedValue}'", updatedValue);
+ }
+
// Handle DML tools configuration
bool hasToolUpdates = false;
DmlToolsConfig? currentDmlTools = updatedMcpOptions?.DmlTools;
diff --git a/src/Config/ObjectModel/McpRuntimeOptions.cs b/src/Config/ObjectModel/McpRuntimeOptions.cs
index e17d53fc8f..324e0caa55 100644
--- a/src/Config/ObjectModel/McpRuntimeOptions.cs
+++ b/src/Config/ObjectModel/McpRuntimeOptions.cs
@@ -10,6 +10,7 @@ namespace Azure.DataApiBuilder.Config.ObjectModel;
public record McpRuntimeOptions
{
public const string DEFAULT_PATH = "/mcp";
+ public const int DEFAULT_QUERY_TIMEOUT_SECONDS = 10;
///
/// Whether MCP endpoints are enabled
@@ -36,12 +37,21 @@ public record McpRuntimeOptions
[JsonPropertyName("description")]
public string? Description { get; init; }
+ /// <summary>
+ /// Query timeout in seconds for MCP tool operations.
+ /// This timeout is applied to database queries executed by MCP tools.
+ /// Default: 10 seconds.
+ /// </summary>
+ [JsonPropertyName("query-timeout")]
+ public int? QueryTimeout { get; init; }
+
[JsonConstructor]
public McpRuntimeOptions(
bool? Enabled = null,
string? Path = null,
DmlToolsConfig? DmlTools = null,
- string? Description = null)
+ string? Description = null,
+ int? QueryTimeout = null)
{
this.Enabled = Enabled ?? true;
@@ -67,6 +77,12 @@ public McpRuntimeOptions(
}
this.Description = Description;
+
+ if (QueryTimeout is not null)
+ {
+ this.QueryTimeout = QueryTimeout;
+ UserProvidedQueryTimeout = true;
+ }
}
///
@@ -78,4 +94,17 @@ public McpRuntimeOptions(
[JsonIgnore(Condition = JsonIgnoreCondition.Always)]
[MemberNotNullWhen(true, nameof(Enabled))]
public bool UserProvidedPath { get; init; } = false;
+
+ /// <summary>
+ /// Flag which informs CLI and JSON serializer whether to write query-timeout
+ /// property and value to the runtime config file.
+ /// </summary>
+ [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
+ public bool UserProvidedQueryTimeout { get; init; } = false;
+
+ /// <summary>
+ /// Gets the effective query timeout in seconds, using the default if not specified.
+ /// </summary>
+ [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
+ public int EffectiveQueryTimeoutSeconds => QueryTimeout ?? DEFAULT_QUERY_TIMEOUT_SECONDS;
}
diff --git a/src/Core/Configurations/RuntimeConfigValidator.cs b/src/Core/Configurations/RuntimeConfigValidator.cs
index f5112844da..ea2299bc6f 100644
--- a/src/Core/Configurations/RuntimeConfigValidator.cs
+++ b/src/Core/Configurations/RuntimeConfigValidator.cs
@@ -914,6 +914,16 @@ public void ValidateMcpUri(RuntimeConfig runtimeConfig)
statusCode: HttpStatusCode.ServiceUnavailable,
subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError));
}
+
+ // Validate query-timeout if provided
+ if (runtimeConfig.Runtime.Mcp.QueryTimeout is not null && runtimeConfig.Runtime.Mcp.QueryTimeout < 1)
+ {
+ HandleOrRecordException(new DataApiBuilderException(
+ message: "MCP query-timeout must be a positive integer (>= 1 second). " +
+ $"Provided value: {runtimeConfig.Runtime.Mcp.QueryTimeout}.",
+ statusCode: HttpStatusCode.ServiceUnavailable,
+ subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError));
+ }
}
private void ValidateAuthenticationOptions(RuntimeConfig runtimeConfig)
From 3ccc7482951ef006eecfb390a1d2ab2aec159e55 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 2 Mar 2026 18:20:20 +0000
Subject: [PATCH 07/43] Update query-timeout default to 30s, add converter
support, apply timeout to all MCP tools, add tests
Co-authored-by: JerryNixon <1749983+JerryNixon@users.noreply.github.com>
---
schemas/dab.draft.schema.json | 4 +-
.../BuiltInTools/AggregateRecordsTool.cs | 4 +-
.../Utils/McpTelemetryHelper.cs | 29 +-
src/Cli/Commands/ConfigureOptions.cs | 2 +-
.../McpRuntimeOptionsConverterFactory.cs | 17 +-
src/Config/ObjectModel/McpRuntimeOptions.cs | 9 +-
src/Service.Tests/Mcp/McpQueryTimeoutTests.cs | 452 ++++++++++++++++++
7 files changed, 505 insertions(+), 12 deletions(-)
create mode 100644 src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json
index 94df7ca77c..e78861807d 100644
--- a/schemas/dab.draft.schema.json
+++ b/schemas/dab.draft.schema.json
@@ -277,8 +277,8 @@
},
"query-timeout": {
"type": "integer",
- "description": "Query timeout in seconds for MCP tool operations. Applies to all MCP tools that execute database queries.",
- "default": 10,
+ "description": "Execution timeout in seconds for MCP tool operations. Applies to all MCP tools.",
+ "default": 30,
"minimum": 1
},
"dml-tools": {
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 59bd465ad0..fa75cd2fb9 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -35,7 +35,7 @@ public class AggregateRecordsTool : IMcpTool
{
public ToolType ToolType { get; } = ToolType.BuiltIn;
- private static readonly HashSet<string> ValidFunctions = new(StringComparer.OrdinalIgnoreCase) { "count", "avg", "sum", "min", "max" };
+ private static readonly HashSet<string> _validFunctions = new(StringComparer.OrdinalIgnoreCase) { "count", "avg", "sum", "min", "max" };
public Tool GetToolMetadata()
{
@@ -183,7 +183,7 @@ public async Task ExecuteAsync(
}
string function = funcEl.GetString()!.ToLowerInvariant();
- if (!ValidFunctions.Contains(function))
+ if (!_validFunctions.Contains(function))
{
return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", $"Invalid function '{function}'. Must be one of: count, avg, sum, min, max.", logger);
}
diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
index eabbdc62d8..105bb57ced 100644
--- a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
@@ -60,8 +60,33 @@ public static async Task ExecuteWithTelemetryAsync(
operation: operation,
dbProcedure: dbProcedure);
- // Execute the tool
- CallToolResult result = await tool.ExecuteAsync(arguments, serviceProvider, cancellationToken);
+ // Read query-timeout from current config per invocation (hot-reload safe).
+ int timeoutSeconds = McpRuntimeOptions.DEFAULT_QUERY_TIMEOUT_SECONDS;
+ RuntimeConfigProvider? runtimeConfigProvider = serviceProvider.GetService<RuntimeConfigProvider>();
+ if (runtimeConfigProvider is not null)
+ {
+ RuntimeConfig config = runtimeConfigProvider.GetConfig();
+ timeoutSeconds = config.Runtime?.Mcp?.EffectiveQueryTimeoutSeconds ?? McpRuntimeOptions.DEFAULT_QUERY_TIMEOUT_SECONDS;
+ }
+
+ // Wrap tool execution with the configured timeout using a linked CancellationTokenSource.
+ using CancellationTokenSource timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
+ timeoutCts.CancelAfter(TimeSpan.FromSeconds(timeoutSeconds));
+
+ CallToolResult result;
+ try
+ {
+ result = await tool.ExecuteAsync(arguments, serviceProvider, timeoutCts.Token);
+ }
+ catch (OperationCanceledException) when (!cancellationToken.IsCancellationRequested)
+ {
+ // The timeout CTS fired, not the caller's token. Surface as TimeoutException
+ // so downstream telemetry and tool handlers see TIMEOUT, not cancellation.
+ throw new TimeoutException(
+ $"The MCP tool '{toolName}' did not complete within {timeoutSeconds} seconds. "
+ + "This is NOT a tool error. The operation exceeded the configured query-timeout. "
+ + "Try narrowing results with a filter, reducing groupby fields, or using pagination.");
+ }
// Check if the tool returned an error result (tools catch exceptions internally
// and return CallToolResult with IsError=true instead of throwing)
diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs
index fc0ab7b8e5..bf12cd5199 100644
--- a/src/Cli/Commands/ConfigureOptions.cs
+++ b/src/Cli/Commands/ConfigureOptions.cs
@@ -205,7 +205,7 @@ public ConfigureOptions(
[Option("runtime.mcp.description", Required = false, HelpText = "Set the MCP server description to be exposed in the initialize response.")]
public string? RuntimeMcpDescription { get; }
- [Option("runtime.mcp.query-timeout", Required = false, HelpText = "Set the query timeout in seconds for MCP tool operations. Default: 10 (integer). Must be >= 1.")]
+ [Option("runtime.mcp.query-timeout", Required = false, HelpText = "Set the execution timeout in seconds for MCP tool operations. Applies to all MCP tools. Default: 30 (integer). Must be >= 1.")]
public int? RuntimeMcpQueryTimeout { get; }
[Option("runtime.mcp.dml-tools.enabled", Required = false, HelpText = "Enable DAB's MCP DML tools endpoint. Default: true (boolean).")]
diff --git a/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs b/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs
index 8b3c640725..ad4edc229e 100644
--- a/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs
+++ b/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs
@@ -66,12 +66,13 @@ internal McpRuntimeOptionsConverter(DeserializationVariableReplacementSettings?
string? path = null;
DmlToolsConfig? dmlTools = null;
string? description = null;
+ int? queryTimeout = null;
while (reader.Read())
{
if (reader.TokenType == JsonTokenType.EndObject)
{
- return new McpRuntimeOptions(enabled, path, dmlTools, description);
+ return new McpRuntimeOptions(enabled, path, dmlTools, description, queryTimeout);
}
string? propertyName = reader.GetString();
@@ -107,6 +108,14 @@ internal McpRuntimeOptionsConverter(DeserializationVariableReplacementSettings?
break;
+ case "query-timeout":
+ if (reader.TokenType is not JsonTokenType.Null)
+ {
+ queryTimeout = reader.GetInt32();
+ }
+
+ break;
+
default:
throw new JsonException($"Unexpected property {propertyName}");
}
@@ -150,6 +159,12 @@ public override void Write(Utf8JsonWriter writer, McpRuntimeOptions value, JsonS
JsonSerializer.Serialize(writer, value.Description, options);
}
+ // Write query-timeout if it's user provided
+ if (value?.UserProvidedQueryTimeout is true && value.QueryTimeout.HasValue)
+ {
+ writer.WriteNumber("query-timeout", value.QueryTimeout.Value);
+ }
+
writer.WriteEndObject();
}
}
diff --git a/src/Config/ObjectModel/McpRuntimeOptions.cs b/src/Config/ObjectModel/McpRuntimeOptions.cs
index 324e0caa55..f4b4281a14 100644
--- a/src/Config/ObjectModel/McpRuntimeOptions.cs
+++ b/src/Config/ObjectModel/McpRuntimeOptions.cs
@@ -10,7 +10,7 @@ namespace Azure.DataApiBuilder.Config.ObjectModel;
public record McpRuntimeOptions
{
public const string DEFAULT_PATH = "/mcp";
- public const int DEFAULT_QUERY_TIMEOUT_SECONDS = 10;
+ public const int DEFAULT_QUERY_TIMEOUT_SECONDS = 30;
///
/// Whether MCP endpoints are enabled
@@ -38,9 +38,10 @@ public record McpRuntimeOptions
public string? Description { get; init; }
///
- /// Query timeout in seconds for MCP tool operations.
- /// This timeout is applied to database queries executed by MCP tools.
- /// Default: 10 seconds.
+ /// Execution timeout in seconds for MCP tool operations.
+ /// This timeout wraps the entire tool execution including database queries.
+ /// It applies to all MCP tools, not just aggregation.
+ /// Default: 30 seconds.
///
[JsonPropertyName("query-timeout")]
public int? QueryTimeout { get; init; }
diff --git a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
new file mode 100644
index 0000000000..f5b29f2b8a
--- /dev/null
+++ b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
@@ -0,0 +1,452 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+#nullable enable
+
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.DataApiBuilder.Config;
+using Azure.DataApiBuilder.Config.ObjectModel;
+using Azure.DataApiBuilder.Core.Configurations;
+using Azure.DataApiBuilder.Mcp.Model;
+using Azure.DataApiBuilder.Mcp.Utils;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using ModelContextProtocol.Protocol;
+using static Azure.DataApiBuilder.Mcp.Model.McpEnums;
+
+namespace Azure.DataApiBuilder.Service.Tests.Mcp
+{
+ /// <summary>
+ /// Tests for the MCP query-timeout configuration property.
+ /// Verifies:
+ /// - Default value of 30 seconds when not configured
+ /// - Custom value overrides default
+ /// - Timeout wrapping applies to all MCP tools via ExecuteWithTelemetryAsync
+ /// - Hot reload: changing config value updates behavior without restart
+ /// - Timeout surfaces as TimeoutException, not generic cancellation
+ /// - Telemetry maps timeout to TIMEOUT error code
+ /// </summary>
+ [TestClass]
+ public class McpQueryTimeoutTests
+ {
+ #region Default Value Tests
+
+ [TestMethod]
+ public void McpRuntimeOptions_DefaultQueryTimeout_Is30Seconds()
+ {
+ Assert.AreEqual(30, McpRuntimeOptions.DEFAULT_QUERY_TIMEOUT_SECONDS);
+ }
+
+ [TestMethod]
+ public void McpRuntimeOptions_EffectiveTimeout_ReturnsDefault_WhenNotConfigured()
+ {
+ McpRuntimeOptions options = new();
+ Assert.IsNull(options.QueryTimeout);
+ Assert.AreEqual(30, options.EffectiveQueryTimeoutSeconds);
+ }
+
+ [TestMethod]
+ public void McpRuntimeOptions_EffectiveTimeout_ReturnsConfiguredValue()
+ {
+ McpRuntimeOptions options = new(QueryTimeout: 60);
+ Assert.AreEqual(60, options.QueryTimeout);
+ Assert.AreEqual(60, options.EffectiveQueryTimeoutSeconds);
+ }
+
+ [TestMethod]
+ public void McpRuntimeOptions_UserProvidedQueryTimeout_FalseByDefault()
+ {
+ McpRuntimeOptions options = new();
+ Assert.IsFalse(options.UserProvidedQueryTimeout);
+ }
+
+ [TestMethod]
+ public void McpRuntimeOptions_UserProvidedQueryTimeout_TrueWhenSet()
+ {
+ McpRuntimeOptions options = new(QueryTimeout: 45);
+ Assert.IsTrue(options.UserProvidedQueryTimeout);
+ }
+
+ #endregion
+
+ #region Custom Value Tests
+
+ [TestMethod]
+ public void McpRuntimeOptions_CustomTimeout_1Second()
+ {
+ McpRuntimeOptions options = new(QueryTimeout: 1);
+ Assert.AreEqual(1, options.EffectiveQueryTimeoutSeconds);
+ }
+
+ [TestMethod]
+ public void McpRuntimeOptions_CustomTimeout_120Seconds()
+ {
+ McpRuntimeOptions options = new(QueryTimeout: 120);
+ Assert.AreEqual(120, options.EffectiveQueryTimeoutSeconds);
+ }
+
+ [TestMethod]
+ public void RuntimeConfig_McpQueryTimeout_ExposedInConfig()
+ {
+ RuntimeConfig config = CreateConfigWithQueryTimeout(45);
+ Assert.AreEqual(45, config.Runtime?.Mcp?.QueryTimeout);
+ Assert.AreEqual(45, config.Runtime?.Mcp?.EffectiveQueryTimeoutSeconds);
+ }
+
+ [TestMethod]
+ public void RuntimeConfig_McpQueryTimeout_DefaultWhenNotSet()
+ {
+ RuntimeConfig config = CreateConfigWithoutQueryTimeout();
+ Assert.IsNull(config.Runtime?.Mcp?.QueryTimeout);
+ Assert.AreEqual(30, config.Runtime?.Mcp?.EffectiveQueryTimeoutSeconds);
+ }
+
+ #endregion
+
+ #region Timeout Wrapping Tests
+
+ [TestMethod]
+ public async Task ExecuteWithTelemetry_CompletesSuccessfully_WithinTimeout()
+ {
+ // A tool that completes immediately should succeed
+ RuntimeConfig config = CreateConfigWithQueryTimeout(30);
+ IServiceProvider sp = CreateServiceProviderWithConfig(config);
+ IMcpTool tool = new ImmediateCompletionTool();
+
+ CallToolResult result = await McpTelemetryHelper.ExecuteWithTelemetryAsync(
+ tool, "test_tool", null, sp, CancellationToken.None);
+
+ // Tool should complete without throwing TimeoutException
+ Assert.IsNotNull(result);
+ Assert.IsTrue(result.IsError != true, "Tool result should not be an error");
+ }
+
+ [TestMethod]
+ public async Task ExecuteWithTelemetry_ThrowsTimeoutException_WhenToolExceedsTimeout()
+ {
+ // Configure a very short timeout (1 second) and a tool that takes longer
+ RuntimeConfig config = CreateConfigWithQueryTimeout(1);
+ IServiceProvider sp = CreateServiceProviderWithConfig(config);
+ IMcpTool tool = new SlowTool(delaySeconds: 30);
+
+ await Assert.ThrowsExceptionAsync<TimeoutException>(async () =>
+ {
+ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
+ tool, "slow_tool", null, sp, CancellationToken.None);
+ });
+ }
+
+ [TestMethod]
+ public async Task ExecuteWithTelemetry_TimeoutMessage_ContainsToolName()
+ {
+ RuntimeConfig config = CreateConfigWithQueryTimeout(1);
+ IServiceProvider sp = CreateServiceProviderWithConfig(config);
+ IMcpTool tool = new SlowTool(delaySeconds: 30);
+
+ try
+ {
+ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
+ tool, "aggregate_records", null, sp, CancellationToken.None);
+ Assert.Fail("Expected TimeoutException");
+ }
+ catch (TimeoutException ex)
+ {
+ Assert.IsTrue(ex.Message.Contains("aggregate_records"), "Message should contain tool name");
+ Assert.IsTrue(ex.Message.Contains("1 seconds"), "Message should contain timeout value");
+ Assert.IsTrue(ex.Message.Contains("NOT a tool error"), "Message should clarify it is not a tool error");
+ }
+ }
+
+ [TestMethod]
+ public async Task ExecuteWithTelemetry_ClientCancellation_PropagatesAsCancellation()
+ {
+ // Client cancellation (not timeout) should propagate as OperationCanceledException
+ // rather than being converted to TimeoutException.
+ RuntimeConfig config = CreateConfigWithQueryTimeout(30);
+ IServiceProvider sp = CreateServiceProviderWithConfig(config);
+ IMcpTool tool = new SlowTool(delaySeconds: 30);
+
+ using CancellationTokenSource cts = new();
+ cts.Cancel(); // Cancel immediately
+
+ try
+ {
+ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
+ tool, "test_tool", null, sp, cts.Token);
+ Assert.Fail("Expected OperationCanceledException or subclass to be thrown");
+ }
+ catch (TimeoutException)
+ {
+ Assert.Fail("Client cancellation should NOT be converted to TimeoutException");
+ }
+ catch (OperationCanceledException)
+ {
+ // Expected: client-initiated cancellation propagates as OperationCanceledException
+ // (or subclass TaskCanceledException)
+ }
+ }
+
+ [TestMethod]
+ public async Task ExecuteWithTelemetry_AppliesTimeout_ToAllToolTypes()
+ {
+ // Verify timeout applies to both built-in and custom tool types
+ RuntimeConfig config = CreateConfigWithQueryTimeout(1);
+ IServiceProvider sp = CreateServiceProviderWithConfig(config);
+
+ // Test with built-in tool type
+ IMcpTool builtInTool = new SlowTool(delaySeconds: 30, toolType: ToolType.BuiltIn);
+ await Assert.ThrowsExceptionAsync<TimeoutException>(async () =>
+ {
+ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
+ builtInTool, "builtin_slow", null, sp, CancellationToken.None);
+ });
+
+ // Test with custom tool type
+ IMcpTool customTool = new SlowTool(delaySeconds: 30, toolType: ToolType.Custom);
+ await Assert.ThrowsExceptionAsync<TimeoutException>(async () =>
+ {
+ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
+ customTool, "custom_slow", null, sp, CancellationToken.None);
+ });
+ }
+
+ #endregion
+
+ #region Hot Reload Tests
+
+ [TestMethod]
+ public async Task ExecuteWithTelemetry_ReadsConfigPerInvocation_HotReload()
+ {
+ // First invocation with long timeout should succeed
+ RuntimeConfig config1 = CreateConfigWithQueryTimeout(30);
+ IServiceProvider sp1 = CreateServiceProviderWithConfig(config1);
+
+ IMcpTool fastTool = new ImmediateCompletionTool();
+ CallToolResult result1 = await McpTelemetryHelper.ExecuteWithTelemetryAsync(
+ fastTool, "test_tool", null, sp1, CancellationToken.None);
+ Assert.IsNotNull(result1);
+
+ // Second invocation with very short timeout and a slow tool should timeout.
+ // This demonstrates that each invocation reads the current config value.
+ RuntimeConfig config2 = CreateConfigWithQueryTimeout(1);
+ IServiceProvider sp2 = CreateServiceProviderWithConfig(config2);
+
+ IMcpTool slowTool = new SlowTool(delaySeconds: 30);
+ await Assert.ThrowsExceptionAsync<TimeoutException>(async () =>
+ {
+ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
+ slowTool, "test_tool", null, sp2, CancellationToken.None);
+ });
+ }
+
+ #endregion
+
+ #region Telemetry Tests
+
+ [TestMethod]
+ public void MapExceptionToErrorCode_TimeoutException_ReturnsTIMEOUT()
+ {
+ string errorCode = McpTelemetryHelper.MapExceptionToErrorCode(new TimeoutException());
+ Assert.AreEqual(McpTelemetryErrorCodes.TIMEOUT, errorCode);
+ }
+
+ [TestMethod]
+ public void MapExceptionToErrorCode_OperationCanceled_ReturnsOPERATION_CANCELLED()
+ {
+ string errorCode = McpTelemetryHelper.MapExceptionToErrorCode(new OperationCanceledException());
+ Assert.AreEqual(McpTelemetryErrorCodes.OPERATION_CANCELLED, errorCode);
+ }
+
+ [TestMethod]
+ public void MapExceptionToErrorCode_TaskCanceled_ReturnsOPERATION_CANCELLED()
+ {
+ string errorCode = McpTelemetryHelper.MapExceptionToErrorCode(new TaskCanceledException());
+ Assert.AreEqual(McpTelemetryErrorCodes.OPERATION_CANCELLED, errorCode);
+ }
+
+ #endregion
+
+ #region JSON Serialization Tests
+
+ [TestMethod]
+ public void McpRuntimeOptions_Serialization_IncludesQueryTimeout_WhenUserProvided()
+ {
+ McpRuntimeOptions options = new(QueryTimeout: 45);
+ JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
+ string json = JsonSerializer.Serialize(options, serializerOptions);
+ Assert.IsTrue(json.Contains("\"query-timeout\": 45") || json.Contains("\"query-timeout\":45"));
+ }
+
+ [TestMethod]
+ public void McpRuntimeOptions_Deserialization_ReadsQueryTimeout()
+ {
+ string json = @"{""enabled"": true, ""query-timeout"": 60}";
+ JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
+ McpRuntimeOptions? options = JsonSerializer.Deserialize<McpRuntimeOptions>(json, serializerOptions);
+ Assert.IsNotNull(options);
+ Assert.AreEqual(60, options.QueryTimeout);
+ Assert.AreEqual(60, options.EffectiveQueryTimeoutSeconds);
+ }
+
+ [TestMethod]
+ public void McpRuntimeOptions_Deserialization_DefaultsWhenOmitted()
+ {
+ string json = @"{""enabled"": true}";
+ JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
+ McpRuntimeOptions? options = JsonSerializer.Deserialize(json, serializerOptions);
+ Assert.IsNotNull(options);
+ Assert.IsNull(options.QueryTimeout);
+ Assert.AreEqual(30, options.EffectiveQueryTimeoutSeconds);
+ }
+
+ #endregion
+
+ #region Helpers
+
+ private static RuntimeConfig CreateConfigWithQueryTimeout(int timeoutSeconds)
+ {
+ return new RuntimeConfig(
+ Schema: "test-schema",
+ DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
+ Runtime: new(
+ Rest: new(),
+ GraphQL: new(),
+ Mcp: new(
+ Enabled: true,
+ Path: "/mcp",
+ QueryTimeout: timeoutSeconds,
+ DmlTools: new(
+ describeEntities: true,
+ readRecords: true,
+ createRecord: true,
+ updateRecord: true,
+ deleteRecord: true,
+ executeEntity: true,
+ aggregateRecords: true
+ )
+ ),
+ Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
+ ),
+ Entities: new(new Dictionary())
+ );
+ }
+
+ private static RuntimeConfig CreateConfigWithoutQueryTimeout()
+ {
+ return new RuntimeConfig(
+ Schema: "test-schema",
+ DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
+ Runtime: new(
+ Rest: new(),
+ GraphQL: new(),
+ Mcp: new(
+ Enabled: true,
+ Path: "/mcp",
+ DmlTools: new(
+ describeEntities: true,
+ readRecords: true,
+ createRecord: true,
+ updateRecord: true,
+ deleteRecord: true,
+ executeEntity: true,
+ aggregateRecords: true
+ )
+ ),
+ Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
+ ),
+ Entities: new(new Dictionary())
+ );
+ }
+
+ private static IServiceProvider CreateServiceProviderWithConfig(RuntimeConfig config)
+ {
+ ServiceCollection services = new();
+ RuntimeConfigProvider configProvider = TestHelper.GenerateInMemoryRuntimeConfigProvider(config);
+ services.AddSingleton(configProvider);
+ services.AddLogging();
+ return services.BuildServiceProvider();
+ }
+
+ ///
+ /// A mock tool that completes immediately with a success result.
+ ///
+ private class ImmediateCompletionTool : IMcpTool
+ {
+ public ToolType ToolType { get; } = ToolType.BuiltIn;
+
+ public Tool GetToolMetadata()
+ {
+ using JsonDocument doc = JsonDocument.Parse("{\"type\": \"object\"}");
+ return new Tool
+ {
+ Name = "test_tool",
+ Description = "A test tool that completes immediately",
+ InputSchema = doc.RootElement.Clone()
+ };
+ }
+
+ public Task ExecuteAsync(
+ JsonDocument? arguments,
+ IServiceProvider serviceProvider,
+ CancellationToken cancellationToken = default)
+ {
+ return Task.FromResult(new CallToolResult
+ {
+ Content = new List
+ {
+ new TextContentBlock { Text = "{\"result\": \"success\"}" }
+ }
+ });
+ }
+ }
+
+ ///
+ /// A mock tool that delays for a specified duration, respecting cancellation.
+ /// Used to test timeout behavior.
+ ///
+ private class SlowTool : IMcpTool
+ {
+ private readonly int _delaySeconds;
+
+ public SlowTool(int delaySeconds, ToolType toolType = ToolType.BuiltIn)
+ {
+ _delaySeconds = delaySeconds;
+ ToolType = toolType;
+ }
+
+ public ToolType ToolType { get; }
+
+ public Tool GetToolMetadata()
+ {
+ using JsonDocument doc = JsonDocument.Parse("{\"type\": \"object\"}");
+ return new Tool
+ {
+ Name = "slow_tool",
+ Description = "A test tool that takes a long time",
+ InputSchema = doc.RootElement.Clone()
+ };
+ }
+
+ public async Task ExecuteAsync(
+ JsonDocument? arguments,
+ IServiceProvider serviceProvider,
+ CancellationToken cancellationToken = default)
+ {
+ await Task.Delay(TimeSpan.FromSeconds(_delaySeconds), cancellationToken);
+ return new CallToolResult
+ {
+ Content = new List
+ {
+ new TextContentBlock { Text = "{\"result\": \"completed\"}" }
+ }
+ };
+ }
+ }
+
+ #endregion
+ }
+}
From 381899d84dbb8ff0827b7ebcafce947f9c230ccc Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 2 Mar 2026 18:33:35 +0000
Subject: [PATCH 08/43] Fix group key collision using \\0 delimiter, add
#nullable enable to test file
Co-authored-by: JerryNixon <1749983+JerryNixon@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 4 +++-
src/Service.Tests/Mcp/AggregateRecordsToolTests.cs | 2 ++
2 files changed, 5 insertions(+), 1 deletion(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index fa75cd2fb9..f19de23eeb 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -706,7 +706,9 @@ private static string BuildGroupKey(JsonElement record, List groupby)
}
}
- return string.Join("|", parts);
+ // Use null character (\0) as delimiter to avoid collisions with
+ // field values that may contain printable characters like '|'.
+ return string.Join("\0", parts);
}
private static Dictionary ExtractGroupFields(JsonElement record, List groupby)
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 9255a2e9c5..4a23a8c1fc 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -1,6 +1,8 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
+#nullable enable
+
using System;
using System.Collections.Generic;
using System.Text.Json;
From fde4d65a84da58b2c2aa009afe72dfbb3c71afa1 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 2 Mar 2026 19:53:42 +0000
Subject: [PATCH 09/43] Fix nullable warnings in AggregateRecordsToolTests.cs
Co-authored-by: anushakolan <45540936+anushakolan@users.noreply.github.com>
---
src/Service.Tests/Mcp/AggregateRecordsToolTests.cs | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 4a23a8c1fc..67477a9d2f 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -643,14 +643,15 @@ public async Task AggregateRecords_OperationCanceled_ReturnsExplicitCanceledMess
Assert.IsTrue(result.IsError == true);
JsonElement content = ParseContent(result);
Assert.IsTrue(content.TryGetProperty("error", out JsonElement error));
- string errorType = error.GetProperty("type").GetString();
- string errorMessage = error.GetProperty("message").GetString();
+ string? errorType = error.GetProperty("type").GetString();
+ string? errorMessage = error.GetProperty("message").GetString();
// Verify the error type identifies it as a cancellation
Assert.AreEqual("OperationCanceled", errorType);
// Verify the message explicitly tells the model this is NOT a tool error
- Assert.IsTrue(errorMessage.Contains("NOT a tool error"), "Message must explicitly state this is NOT a tool error.");
+ Assert.IsNotNull(errorMessage);
+ Assert.IsTrue(errorMessage!.Contains("NOT a tool error"), "Message must explicitly state this is NOT a tool error.");
// Verify the message tells the model what happened
Assert.IsTrue(errorMessage.Contains("canceled"), "Message must mention the operation was canceled.");
From ba371d52bd7dd5d9d8d7c65e7dbd648f53b40b05 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 2 Mar 2026 19:54:58 +0000
Subject: [PATCH 10/43] Add null check for errorType in
AggregateRecordsToolTests
Co-authored-by: anushakolan <45540936+anushakolan@users.noreply.github.com>
---
src/Service.Tests/Mcp/AggregateRecordsToolTests.cs | 1 +
1 file changed, 1 insertion(+)
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 67477a9d2f..ce578e746e 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -647,6 +647,7 @@ public async Task AggregateRecords_OperationCanceled_ReturnsExplicitCanceledMess
string? errorMessage = error.GetProperty("message").GetString();
// Verify the error type identifies it as a cancellation
+ Assert.IsNotNull(errorType);
Assert.AreEqual("OperationCanceled", errorType);
// Verify the message explicitly tells the model this is NOT a tool error
From d340cb43bd3b9682113cc36aaaaaad410bb5afd4 Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 13:52:27 -0700
Subject: [PATCH 11/43] Apply validation fixes and additional tests from
copilot/update-aggregate-records-tool-fixes
---
.../BuiltInTools/AggregateRecordsTool.cs | 29 +-
.../Utils/McpTelemetryHelper.cs | 2 +-
src/Service.Tests/Mcp/McpQueryTimeoutTests.cs | 2 +-
.../UnitTests/AggregateRecordsToolTests.cs | 411 ++++++++++++++++++
.../UnitTests/McpTelemetryTests.cs | 168 ++++++-
5 files changed, 603 insertions(+), 9 deletions(-)
create mode 100644 src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index f19de23eeb..b8dd85c175 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -52,16 +52,15 @@ public Tool GetToolMetadata()
+ "orderby ('asc' or 'desc' to sort grouped results by aggregated value; requires groupby), "
+ "having (object to filter groups after aggregating, operators: eq, neq, gt, gte, lt, lte, in; requires groupby), "
+ "first (integer >= 1, maximum grouped results to return; requires groupby), "
- + "after (opaque cursor string from a previous response's endCursor; requires first and groupby). "
+ + "after (opaque cursor string from a previous response's endCursor for pagination). "
+ "RESPONSE: The aggregated value is aliased as '{function}_{field}' (e.g. avg_unitPrice, sum_revenue). "
+ "For count with field '*', the alias is 'count'. "
+ "When first is used with groupby, response contains: items (array), endCursor (string), hasNextPage (boolean). "
+ "RULES: 1) ALWAYS call describe_entities first to get valid entity and field names. "
+ "2) Use field '*' ONLY with function 'count'. "
+ "3) For avg, sum, min, max: field MUST be a numeric field name from describe_entities. "
- + "4) orderby, having, first, and after ONLY apply when groupby is provided. "
- + "5) after REQUIRES first to also be set. "
- + "6) Use first and after for paginating large grouped result sets.",
+ + "4) orderby, having, and first ONLY apply when groupby is provided. "
+ + "5) Use first and after for paginating large grouped result sets.",
InputSchema = JsonSerializer.Deserialize(
@"{
""type"": ""object"",
@@ -194,7 +193,25 @@ public async Task ExecuteAsync(
}
string field = fieldEl.GetString()!;
+
+ // Validate field/function compatibility
+ bool isCountStar = function == "count" && field == "*";
+
+ if (field == "*" && function != "count")
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"Field '*' is only valid with function 'count'. For function '{function}', provide a specific field name.", logger);
+ }
+
bool distinct = root.TryGetProperty("distinct", out JsonElement distinctEl) && distinctEl.GetBoolean();
+
+ // Reject count(*) with distinct as it is semantically undefined
+ if (isCountStar && distinct)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "Cannot use distinct=true with field='*'. DISTINCT requires a specific field name. Use a field name instead of '*' to count distinct values.", logger);
+ }
+
string? filter = root.TryGetProperty("filter", out JsonElement filterEl) ? filterEl.GetString() : null;
string orderby = root.TryGetProperty("orderby", out JsonElement orderbyEl) ? (orderbyEl.GetString() ?? "desc") : "desc";
@@ -285,7 +302,6 @@ public async Task ExecuteAsync(
// Build select list: groupby fields + aggregation field
List selectFields = new(groupby);
- bool isCountStar = function == "count" && field == "*";
if (!isCountStar && !selectFields.Contains(field, StringComparer.OrdinalIgnoreCase))
{
selectFields.Add(field);
@@ -610,7 +626,8 @@ internal static PaginationResult ApplyPagination(
{
if (isCountStar)
{
- return distinct ? 0 : records.Count;
+ // count(*) always counts all rows; distinct is rejected at ExecuteAsync validation level
+ return records.Count;
}
List values = new();
diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
index 105bb57ced..ac567d4d8c 100644
--- a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
@@ -83,7 +83,7 @@ public static async Task ExecuteWithTelemetryAsync(
// The timeout CTS fired, not the caller's token. Surface as TimeoutException
// so downstream telemetry and tool handlers see TIMEOUT, not cancellation.
throw new TimeoutException(
- $"The MCP tool '{toolName}' did not complete within {timeoutSeconds} seconds. "
+ $"The MCP tool '{toolName}' did not complete within {timeoutSeconds} {(timeoutSeconds == 1 ? "second" : "seconds")}. "
+ "This is NOT a tool error. The operation exceeded the configured query-timeout. "
+ "Try narrowing results with a filter, reducing groupby fields, or using pagination.");
}
diff --git a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
index f5b29f2b8a..0f5ee3951a 100644
--- a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
+++ b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
@@ -156,7 +156,7 @@ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
catch (TimeoutException ex)
{
Assert.IsTrue(ex.Message.Contains("aggregate_records"), "Message should contain tool name");
- Assert.IsTrue(ex.Message.Contains("1 seconds"), "Message should contain timeout value");
+ Assert.IsTrue(ex.Message.Contains("1 second"), "Message should contain timeout value");
Assert.IsTrue(ex.Message.Contains("NOT a tool error"), "Message should clarify it is not a tool error");
}
}
diff --git a/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs b/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
new file mode 100644
index 0000000000..dee8842a0d
--- /dev/null
+++ b/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
@@ -0,0 +1,411 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+#nullable enable
+
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.DataApiBuilder.Mcp.BuiltInTools;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace Azure.DataApiBuilder.Service.Tests.UnitTests
+{
+ ///
+ /// Unit tests for AggregateRecordsTool's internal helper methods.
+ /// Covers validation paths, aggregation logic, and pagination behavior.
+ ///
+ [TestClass]
+ public class AggregateRecordsToolTests
+ {
+ #region ComputeAlias tests
+
+ [TestMethod]
+ [DataRow("count", "*", "count", DisplayName = "count(*) alias is 'count'")]
+ [DataRow("count", "userId", "count_userId", DisplayName = "count(field) alias is 'count_field'")]
+ [DataRow("avg", "price", "avg_price", DisplayName = "avg alias")]
+ [DataRow("sum", "amount", "sum_amount", DisplayName = "sum alias")]
+ [DataRow("min", "age", "min_age", DisplayName = "min alias")]
+ [DataRow("max", "score", "max_score", DisplayName = "max alias")]
+ public void ComputeAlias_ReturnsExpectedAlias(string function, string field, string expectedAlias)
+ {
+ string result = AggregateRecordsTool.ComputeAlias(function, field);
+ Assert.AreEqual(expectedAlias, result);
+ }
+
+ #endregion
+
+ #region PerformAggregation tests - no groupby
+
+ private static JsonElement CreateRecordsArray(params double[] values)
+ {
+ var list = new List
[TestMethod]
- public void BlogScenario_ProductDiscontinuation_SqlContainsOrderByAsc()
+ public void BlogScenario_ProductDiscontinuation_AliasAndTypeCorrect()
{
- Mock qb = CreateMockQueryBuilder();
-
- string aggExpr = AggregateRecordsTool.BuildAggregateExpression("sum", "totalRevenue", false, false, qb.Object);
- Assert.AreEqual("SUM([totalRevenue])", aggExpr);
-
string alias = AggregateRecordsTool.ComputeAlias("sum", "totalRevenue");
Assert.AreEqual("sum_totalRevenue", alias);
}
@@ -262,13 +161,8 @@ public void BlogScenario_ProductDiscontinuation_SqlContainsOrderByAsc()
/// AVG quarterlyRevenue with HAVING gt 2000000
///
[TestMethod]
- public void BlogScenario_QuarterlyPerformance_AvgWithHaving()
+ public void BlogScenario_QuarterlyPerformance_AliasAndTypeCorrect()
{
- Mock qb = CreateMockQueryBuilder();
-
- string aggExpr = AggregateRecordsTool.BuildAggregateExpression("avg", "quarterlyRevenue", false, false, qb.Object);
- Assert.AreEqual("AVG([quarterlyRevenue])", aggExpr);
-
string alias = AggregateRecordsTool.ComputeAlias("avg", "quarterlyRevenue");
Assert.AreEqual("avg_quarterlyRevenue", alias);
}
@@ -278,13 +172,8 @@ public void BlogScenario_QuarterlyPerformance_AvgWithHaving()
/// SUM totalRevenue grouped by region and customerTier, HAVING gt 5000000
///
[TestMethod]
- public void BlogScenario_RevenueConcentration_MultipleGroupByFields()
+ public void BlogScenario_RevenueConcentration_AliasAndTypeCorrect()
{
- Mock qb = CreateMockQueryBuilder();
-
- string aggExpr = AggregateRecordsTool.BuildAggregateExpression("sum", "totalRevenue", false, false, qb.Object);
- Assert.AreEqual("SUM([totalRevenue])", aggExpr);
-
string alias = AggregateRecordsTool.ComputeAlias("sum", "totalRevenue");
Assert.AreEqual("sum_totalRevenue", alias);
}
@@ -294,13 +183,8 @@ public void BlogScenario_RevenueConcentration_MultipleGroupByFields()
/// SUM onHandValue grouped by productLine and warehouseRegion, HAVING gt 2500000
///
[TestMethod]
- public void BlogScenario_RiskExposure_SumWithMultiGroupByAndHaving()
+ public void BlogScenario_RiskExposure_AliasAndTypeCorrect()
{
- Mock qb = CreateMockQueryBuilder();
-
- string aggExpr = AggregateRecordsTool.BuildAggregateExpression("sum", "onHandValue", false, false, qb.Object);
- Assert.AreEqual("SUM([onHandValue])", aggExpr);
-
string alias = AggregateRecordsTool.ComputeAlias("sum", "onHandValue");
Assert.AreEqual("sum_onHandValue", alias);
}
From c5920c7bb33714f013907f8d0820d4abb189945a Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 18:28:49 -0700
Subject: [PATCH 17/43] Fix SQL generation bugs in AggregateRecordsTool
- Fix COUNT(*): Use primary key column (PK is NOT NULL, so COUNT(pk) ≡
  COUNT(*)) instead of AggregationColumn with empty schema/table/'*'
which produced invalid SQL like count([].[*])
- Fix TOP + OFFSET/FETCH conflict: Remove TOP N when pagination is
used since SQL Server forbids both in the same query
- Add database type validation: Return error for PostgreSQL/MySQL/
CosmosDB since engine only supports aggregation for MsSql/DWSQL
- Add HAVING validation: Reject having without groupby
- Add tests for star-field-with-avg, distinct-count-star, and
having-without-groupby validation
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 91 +++++++++++--------
.../Mcp/AggregateRecordsToolTests.cs | 45 +++++++++
2 files changed, 100 insertions(+), 36 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index bb2aa4efad..2509629231 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -5,6 +5,7 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
+using System.Text.RegularExpressions;
using Azure.DataApiBuilder.Auth;
using Azure.DataApiBuilder.Config.DatabasePrimitives;
using Azure.DataApiBuilder.Config.ObjectModel;
@@ -251,6 +252,12 @@ public async Task ExecuteAsync(
List? havingIn = null;
if (root.TryGetProperty("having", out JsonElement havingEl) && havingEl.ValueKind == JsonValueKind.Object)
{
+ if (groupby.Count == 0)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'having' parameter requires 'groupby' to be specified. HAVING filters groups after aggregation.", logger);
+ }
+
havingOps = new Dictionary(StringComparer.OrdinalIgnoreCase);
foreach (JsonProperty prop in havingEl.EnumerateObject())
{
@@ -350,6 +357,14 @@ public async Task ExecuteAsync(
// Get database-specific components
DatabaseType databaseType = runtimeConfig.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType;
+
+ // Aggregation is only supported for MsSql/DWSQL (matching engine's GraphQL aggregation support)
+ if (databaseType != DatabaseType.MSSQL && databaseType != DatabaseType.DWSQL)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "UnsupportedDatabase",
+ $"Aggregation is not supported for database type '{databaseType}'. Aggregation is only available for Azure SQL, SQL Server, and SQL Data Warehouse.", logger);
+ }
+
IAbstractQueryManagerFactory queryManagerFactory = serviceProvider.GetRequiredService();
IQueryBuilder queryBuilder = queryManagerFactory.GetQueryBuilder(databaseType);
IQueryExecutor queryExecutor = queryManagerFactory.GetQueryExecutor(databaseType);
@@ -364,6 +379,17 @@ public async Task ExecuteAsync(
$"Field '{field}' not found for entity '{entityName}'.", logger);
}
}
+ else
+ {
+ // For COUNT(*), use primary key column since PK is always NOT NULL,
+ // making COUNT(pk) equivalent to COUNT(*). The engine's Build(AggregationColumn)
+ // does not support "*" as a column name (it would produce invalid SQL like count([].[*])).
+ SourceDefinition sourceDefinition = sqlMetadataProvider.GetSourceDefinition(entityName);
+ if (sourceDefinition.PrimaryKey.Count > 0)
+ {
+ backingField = sourceDefinition.PrimaryKey[0];
+ }
+ }
// Resolve backing column names for groupby fields
List<(string entityField, string backingCol)> groupbyMapping = new();
@@ -392,11 +418,11 @@ public async Task ExecuteAsync(
dbObject.SchemaName, dbObject.Name, backingCol, structure.SourceAlias);
}
- // Build aggregation column using engine's AggregationColumn type
+ // Build aggregation column using engine's AggregationColumn type.
+ // For COUNT(*), we use the primary key column (PK is always NOT NULL, so COUNT(pk) ≡ COUNT(*)).
AggregationType aggType = Enum.Parse(function);
- AggregationColumn aggColumn = isCountStar
- ? new AggregationColumn("", "", "*", AggregationType.count, alias, false)
- : new AggregationColumn(dbObject.SchemaName, dbObject.Name, backingField!, aggType, alias, distinct, structure.SourceAlias);
+ AggregationColumn aggColumn = new(
+ dbObject.SchemaName, dbObject.Name, backingField!, aggType, alias, distinct, structure.SourceAlias);
// Build HAVING predicates using engine's Predicate model
List havingPredicates = new();
@@ -464,36 +490,20 @@ public async Task ExecuteAsync(
// Use engine's query builder to generate SQL
string sql = queryBuilder.Build(structure);
- // For groupby queries: add ORDER BY aggregate expression before FOR JSON PATH
+ // For groupby queries: add ORDER BY aggregate expression and pagination
if (groupbyMapping.Count > 0)
{
string direction = orderby.Equals("asc", StringComparison.OrdinalIgnoreCase) ? "ASC" : "DESC";
- string orderByAggExpr = isCountStar
- ? "COUNT(*)"
- : distinct
- ? $"{function.ToUpperInvariant()}(DISTINCT {queryBuilder.QuoteIdentifier(structure.SourceAlias)}.{queryBuilder.QuoteIdentifier(backingField!)})"
- : $"{function.ToUpperInvariant()}({queryBuilder.QuoteIdentifier(structure.SourceAlias)}.{queryBuilder.QuoteIdentifier(backingField!)})";
+ string quotedCol = $"{queryBuilder.QuoteIdentifier(structure.SourceAlias)}.{queryBuilder.QuoteIdentifier(backingField!)}";
+ string orderByAggExpr = distinct
+ ? $"{function.ToUpperInvariant()}(DISTINCT {quotedCol})"
+ : $"{function.ToUpperInvariant()}({quotedCol})";
string orderByClause = $" ORDER BY {orderByAggExpr} {direction}";
- // Insert ORDER BY before FOR JSON PATH (MsSql/DWSQL) or before LIMIT (PG/MySQL)
- int insertIdx = sql.IndexOf(" FOR JSON PATH", StringComparison.OrdinalIgnoreCase);
- if (insertIdx < 0)
- {
- insertIdx = sql.IndexOf(" LIMIT ", StringComparison.OrdinalIgnoreCase);
- }
-
- if (insertIdx > 0)
- {
- sql = sql.Insert(insertIdx, orderByClause);
- }
- else
- {
- sql += orderByClause;
- }
-
- // Add pagination (OFFSET/FETCH or LIMIT/OFFSET) for grouped results
if (first.HasValue)
{
+ // With pagination: SQL Server requires ORDER BY for OFFSET/FETCH and
+ // does not allow both TOP and OFFSET/FETCH. Remove TOP and add ORDER BY + OFFSET/FETCH.
int offset = DecodeCursorOffset(after);
int fetchCount = first.Value + 1;
string offsetParam = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
@@ -501,24 +511,33 @@ public async Task ExecuteAsync(
string limitParam = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
structure.Parameters.Add(limitParam, new DbConnectionParam(fetchCount));
- int paginationIdx = sql.IndexOf(" FOR JSON PATH", StringComparison.OrdinalIgnoreCase);
- string paginationClause;
- if (databaseType == DatabaseType.MSSQL || databaseType == DatabaseType.DWSQL)
+ string paginationClause = $" OFFSET {offsetParam} ROWS FETCH NEXT {limitParam} ROWS ONLY";
+
+ // Remove TOP N from the SELECT clause (TOP conflicts with OFFSET/FETCH)
+ sql = Regex.Replace(sql, @"SELECT TOP \d+", "SELECT");
+
+ // Insert ORDER BY + pagination before FOR JSON PATH
+ int jsonPathIdx = sql.IndexOf(" FOR JSON PATH", StringComparison.OrdinalIgnoreCase);
+ if (jsonPathIdx > 0)
{
- paginationClause = $" OFFSET {offsetParam} ROWS FETCH NEXT {limitParam} ROWS ONLY";
+ sql = sql.Insert(jsonPathIdx, orderByClause + paginationClause);
}
else
{
- paginationClause = $" LIMIT {limitParam} OFFSET {offsetParam}";
+ sql += orderByClause + paginationClause;
}
-
- if (paginationIdx > 0)
+ }
+ else
+ {
+ // Without pagination: insert ORDER BY before FOR JSON PATH
+ int jsonPathIdx = sql.IndexOf(" FOR JSON PATH", StringComparison.OrdinalIgnoreCase);
+ if (jsonPathIdx > 0)
{
- sql = sql.Insert(paginationIdx, paginationClause);
+ sql = sql.Insert(jsonPathIdx, orderByClause);
}
else
{
- sql += paginationClause;
+ sql += orderByClause;
}
}
}
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index dd94ae593d..9cc2790430 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -204,6 +204,51 @@ public async Task AggregateRecords_FirstExceedsMax_ReturnsInvalidArguments()
Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("100000"));
}
+ [TestMethod]
+ public async Task AggregateRecords_StarFieldWithAvg_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"avg\", \"field\": \"*\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("count"));
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_DistinctCountStar_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"distinct\": true}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("DISTINCT"));
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_HavingWithoutGroupBy_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"having\": {\"gt\": 5}}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("groupby"));
+ }
+
#endregion
#region Alias Convention Tests
From 203fde1bb9a31bbdcc36d24af94a80ce9b5c6115 Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 18:31:21 -0700
Subject: [PATCH 18/43] Add comprehensive blog scenario tests from DAB MCP blog
Add 8 tests covering all 5 scenarios from the DAB MCP blog post
(devblogs.microsoft.com/azure-sql/data-api-builder-mcp-questions):
1. Strategic customer importance (sum/groupby/orderby desc/first 1)
2. Product discontinuation (sum/groupby/orderby asc/first 1)
3. Quarterly performance (avg/groupby/having gt/orderby desc)
4. Revenue concentration (sum/complex filter/multi-groupby/having)
5. Risk exposure (sum/filter/multi-groupby/having gt)
Each test verifies the exact blog JSON payload passes input validation,
plus tests for schema completeness, describe_entities instruction,
and alias convention documentation.
80 tests pass.
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../Mcp/AggregateRecordsToolTests.cs | 225 ++++++++++++++++++
1 file changed, 225 insertions(+)
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 9cc2790430..78c83d12be 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -631,6 +631,231 @@ public void SpecExample13_AvgGroupByOrderByDescFirst3_CorrectAlias()
#endregion
+ #region Blog Scenario Tests (devblogs.microsoft.com/azure-sql/data-api-builder-mcp-questions)
+
+ // These tests verify that the exact JSON payloads from the DAB MCP blog
+ // pass input validation. The tool will fail at metadata resolution (no real DB)
+ // but must NOT return "InvalidArguments", proving the input shape is valid.
+
+ ///
+ /// Blog Scenario 1: Strategic customer importance
+ /// "Who is our most important customer based on total revenue?"
+ /// Uses: sum, totalRevenue, filter, groupby [customerId, customerName], orderby desc, first 1
+ ///
+ [TestMethod]
+ public async Task BlogScenario1_StrategicCustomerImportance_PassesInputValidation()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ string json = @"{
+ ""entity"": ""Book"",
+ ""function"": ""sum"",
+ ""field"": ""totalRevenue"",
+ ""filter"": ""isActive eq true and orderDate ge 2025-01-01"",
+ ""groupby"": [""customerId"", ""customerName""],
+ ""orderby"": ""desc"",
+ ""first"": 1
+ }";
+
+ JsonDocument args = JsonDocument.Parse(json);
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
+ Assert.AreNotEqual("InvalidArguments", errorType,
+ "Blog scenario 1 JSON must pass input validation (sum/totalRevenue/groupby/orderby/first).");
+ Assert.AreEqual("sum_totalRevenue", AggregateRecordsTool.ComputeAlias("sum", "totalRevenue"));
+ }
+
+ ///
+ /// Blog Scenario 2: Product discontinuation candidate
+ /// "Which product should we consider discontinuing based on lowest totalRevenue?"
+ /// Uses: sum, totalRevenue, filter, groupby [productId, productName], orderby asc, first 1
+ ///
+ [TestMethod]
+ public async Task BlogScenario2_ProductDiscontinuation_PassesInputValidation()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ string json = @"{
+ ""entity"": ""Book"",
+ ""function"": ""sum"",
+ ""field"": ""totalRevenue"",
+ ""filter"": ""isActive eq true and inStock gt 0 and orderDate ge 2025-01-01"",
+ ""groupby"": [""productId"", ""productName""],
+ ""orderby"": ""asc"",
+ ""first"": 1
+ }";
+
+ JsonDocument args = JsonDocument.Parse(json);
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
+ Assert.AreNotEqual("InvalidArguments", errorType,
+ "Blog scenario 2 JSON must pass input validation (sum/totalRevenue/groupby/orderby asc/first).");
+ Assert.AreEqual("sum_totalRevenue", AggregateRecordsTool.ComputeAlias("sum", "totalRevenue"));
+ }
+
+ ///
+ /// Blog Scenario 3: Forward-looking performance expectation
+ /// "Average quarterlyRevenue per region, regions averaging > $2,000,000?"
+ /// Uses: avg, quarterlyRevenue, filter, groupby [region], having {gt: 2000000}, orderby desc
+ ///
+ [TestMethod]
+ public async Task BlogScenario3_QuarterlyPerformance_PassesInputValidation()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ string json = @"{
+ ""entity"": ""Book"",
+ ""function"": ""avg"",
+ ""field"": ""quarterlyRevenue"",
+ ""filter"": ""fiscalYear eq 2025"",
+ ""groupby"": [""region""],
+ ""having"": { ""gt"": 2000000 },
+ ""orderby"": ""desc""
+ }";
+
+ JsonDocument args = JsonDocument.Parse(json);
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
+ Assert.AreNotEqual("InvalidArguments", errorType,
+ "Blog scenario 3 JSON must pass input validation (avg/quarterlyRevenue/groupby/having gt).");
+ Assert.AreEqual("avg_quarterlyRevenue", AggregateRecordsTool.ComputeAlias("avg", "quarterlyRevenue"));
+ }
+
+ ///
+ /// Blog Scenario 4: Revenue concentration across regions
+ /// "Total revenue of active retail customers in Midwest/Southwest, >$5M, by region and customerTier"
+ /// Uses: sum, totalRevenue, complex filter with OR, groupby [region, customerTier], having {gt: 5000000}, orderby desc
+ ///
+ [TestMethod]
+ public async Task BlogScenario4_RevenueConcentration_PassesInputValidation()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ string json = @"{
+ ""entity"": ""Book"",
+ ""function"": ""sum"",
+ ""field"": ""totalRevenue"",
+ ""filter"": ""isActive eq true and customerType eq 'Retail' and (region eq 'Midwest' or region eq 'Southwest')"",
+ ""groupby"": [""region"", ""customerTier""],
+ ""having"": { ""gt"": 5000000 },
+ ""orderby"": ""desc""
+ }";
+
+ JsonDocument args = JsonDocument.Parse(json);
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
+ Assert.AreNotEqual("InvalidArguments", errorType,
+ "Blog scenario 4 JSON must pass input validation (sum/totalRevenue/complex filter/multi-groupby/having).");
+ Assert.AreEqual("sum_totalRevenue", AggregateRecordsTool.ComputeAlias("sum", "totalRevenue"));
+ }
+
+ ///
+ /// Blog Scenario 5: Risk exposure by product line
+ /// "For discontinued products, total onHandValue by productLine and warehouseRegion, >$2.5M"
+ /// Uses: sum, onHandValue, filter, groupby [productLine, warehouseRegion], having {gt: 2500000}, orderby desc
+ ///
+ [TestMethod]
+ public async Task BlogScenario5_RiskExposure_PassesInputValidation()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ string json = @"{
+ ""entity"": ""Book"",
+ ""function"": ""sum"",
+ ""field"": ""onHandValue"",
+ ""filter"": ""discontinued eq true and onHandValue gt 0"",
+ ""groupby"": [""productLine"", ""warehouseRegion""],
+ ""having"": { ""gt"": 2500000 },
+ ""orderby"": ""desc""
+ }";
+
+ JsonDocument args = JsonDocument.Parse(json);
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
+ Assert.AreNotEqual("InvalidArguments", errorType,
+ "Blog scenario 5 JSON must pass input validation (sum/onHandValue/filter/multi-groupby/having).");
+ Assert.AreEqual("sum_onHandValue", AggregateRecordsTool.ComputeAlias("sum", "onHandValue"));
+ }
+
+ ///
+ /// Verifies that the tool schema supports all properties used across the 5 blog scenarios.
+ ///
+ [TestMethod]
+ public void BlogScenarios_ToolSchema_SupportsAllRequiredProperties()
+ {
+ AggregateRecordsTool tool = new();
+ Tool metadata = tool.GetToolMetadata();
+ JsonElement properties = metadata.InputSchema.GetProperty("properties");
+
+ string[] blogProperties = { "entity", "function", "field", "filter", "groupby", "orderby", "having", "first" };
+ foreach (string prop in blogProperties)
+ {
+ Assert.IsTrue(properties.TryGetProperty(prop, out _),
+ $"Tool schema must include '{prop}' property used in blog scenarios.");
+ }
+
+ // Additional schema properties used in spec but not blog
+ Assert.IsTrue(properties.TryGetProperty("distinct", out _), "Tool schema must include 'distinct'.");
+ Assert.IsTrue(properties.TryGetProperty("after", out _), "Tool schema must include 'after'.");
+ }
+
+ ///
+ /// Verifies that the tool description instructs models to call describe_entities first.
+ ///
+ [TestMethod]
+ public void BlogScenarios_ToolDescription_ForcesDescribeEntitiesFirst()
+ {
+ AggregateRecordsTool tool = new();
+ Tool metadata = tool.GetToolMetadata();
+
+ Assert.IsTrue(metadata.Description!.Contains("describe_entities"),
+ "Tool description must instruct models to call describe_entities first.");
+ Assert.IsTrue(metadata.Description.Contains("STEP 1"),
+ "Tool description must use numbered steps starting with STEP 1.");
+ }
+
+ ///
+ /// Verifies that the tool description documents the alias convention used in blog examples.
+ ///
+ [TestMethod]
+ public void BlogScenarios_ToolDescription_DocumentsAliasConvention()
+ {
+ AggregateRecordsTool tool = new();
+ Tool metadata = tool.GetToolMetadata();
+
+ Assert.IsTrue(metadata.Description!.Contains("{function}_{field}"),
+ "Tool description must document the alias pattern '{function}_{field}'.");
+ Assert.IsTrue(metadata.Description.Contains("'count'"),
+ "Tool description must mention the special 'count' alias for count(*).");
+ }
+
+ #endregion
+
#region Helper Methods
private static JsonElement ParseContent(CallToolResult result)
From fab587f0a96961a59fa2b9ed0cd869233ab40c68 Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 18:34:04 -0700
Subject: [PATCH 19/43] Tighten tool description and parameter docs to remove
duplication
Remove redundant parameter listings from Description (already in
InputSchema). Description now covers only: workflow steps, rules
not expressed elsewhere, and response alias convention.
Parameter descriptions simplified to one sentence each, removing
repeated phrases like 'from describe_entities' and 'ONLY applies
when groupby is provided' (stated once in groupby description).
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 59 ++++++++-----------
.../Mcp/AggregateRecordsToolTests.cs | 4 +-
2 files changed, 26 insertions(+), 37 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 2509629231..2b028c9ec4 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -46,88 +46,77 @@ public Tool GetToolMetadata()
{
Name = "aggregate_records",
Description = "Computes aggregations (count, avg, sum, min, max) on entity data. "
- + "STEP 1: Call describe_entities to discover entities with READ permission and their field names. "
- + "STEP 2: Call this tool with the exact entity name, an aggregation function, and a field name from STEP 1. "
- + "REQUIRED: entity (exact entity name), function (one of: count, avg, sum, min, max), field (exact field name, or '*' ONLY for count). "
- + "OPTIONAL: filter (OData WHERE clause applied before aggregating, e.g. 'unitPrice lt 10'), "
- + "distinct (true to deduplicate values before aggregating), "
- + "groupby (array of field names to group results by, e.g. ['categoryName']), "
- + "orderby ('asc' or 'desc' to sort grouped results by aggregated value; requires groupby), "
- + "having (object to filter groups after aggregating, operators: eq, neq, gt, gte, lt, lte, in; requires groupby), "
- + "first (integer >= 1, maximum grouped results to return; requires groupby), "
- + "after (opaque cursor string from a previous response's endCursor for pagination). "
- + "RESPONSE: The aggregated value is aliased as '{function}_{field}' (e.g. avg_unitPrice, sum_revenue). "
- + "For count with field '*', the alias is 'count'. "
- + "When first is used with groupby, response contains: items (array), endCursor (string), hasNextPage (boolean). "
- + "RULES: 1) ALWAYS call describe_entities first to get valid entity and field names. "
- + "2) Use field '*' ONLY with function 'count'. "
- + "3) For avg, sum, min, max: field MUST be a numeric field name from describe_entities. "
- + "4) orderby, having, and first ONLY apply when groupby is provided. "
- + "5) Use first and after for paginating large grouped result sets.",
+ + "WORKFLOW: 1) Call describe_entities first to get entity names and field names. "
+ + "2) Call this tool with entity, function, and field from step 1. "
+ + "RULES: field '*' is ONLY valid with count. "
+ + "orderby, having, first, and after ONLY apply when groupby is provided. "
+ + "RESPONSE: Result is aliased as '{function}_{field}' (e.g. avg_unitPrice). "
+ + "For count(*), the alias is 'count'. "
+ + "With groupby and first, response includes items, endCursor, and hasNextPage for pagination.",
InputSchema = JsonSerializer.Deserialize(
@"{
""type"": ""object"",
""properties"": {
""entity"": {
""type"": ""string"",
- ""description"": ""Exact entity name from describe_entities that has READ permission. Must match exactly (case-sensitive).""
+ ""description"": ""Entity name from describe_entities with READ permission (case-sensitive).""
},
""function"": {
""type"": ""string"",
""enum"": [""count"", ""avg"", ""sum"", ""min"", ""max""],
- ""description"": ""Aggregation function to apply. Use 'count' to count records, 'avg' for average, 'sum' for total, 'min' for minimum, 'max' for maximum. For count use field '*' or a specific field name. For avg, sum, min, max the field must be numeric.""
+ ""description"": ""Aggregation function. count supports field '*'; avg, sum, min, max require a numeric field.""
},
""field"": {
""type"": ""string"",
- ""description"": ""Exact field name from describe_entities to aggregate. Use '*' ONLY with function 'count' to count all records. For avg, sum, min, max, provide a numeric field name.""
+ ""description"": ""Field name to aggregate, or '*' with count to count all rows.""
},
""distinct"": {
""type"": ""boolean"",
- ""description"": ""When true, removes duplicate values before applying the aggregation function. For example, count with distinct counts unique values only. Default is false."",
+ ""description"": ""Remove duplicate values before aggregating. Not valid with field '*'."",
""default"": false
},
""filter"": {
""type"": ""string"",
- ""description"": ""OData filter expression applied before aggregating (acts as a WHERE clause). Supported operators: eq, ne, gt, ge, lt, le, and, or, not. Example: 'unitPrice lt 10' filters to rows where unitPrice is less than 10 before aggregating. Example: 'discontinued eq true and categoryName eq ''Seafood''' filters discontinued seafood products."",
+ ""description"": ""OData WHERE clause applied before aggregating. Operators: eq, ne, gt, ge, lt, le, and, or, not. Example: 'unitPrice lt 10'."",
""default"": """"
},
""groupby"": {
""type"": ""array"",
""items"": { ""type"": ""string"" },
- ""description"": ""Array of exact field names from describe_entities to group results by. Each unique combination of grouped field values produces one aggregated row. Grouped field values are included in the response alongside the aggregated value. Example: ['categoryName'] groups by category. Example: ['categoryName', 'region'] groups by both fields."",
+ ""description"": ""Field names to group by. Each unique combination produces one aggregated row. Enables orderby, having, first, and after."",
""default"": []
},
""orderby"": {
""type"": ""string"",
""enum"": [""asc"", ""desc""],
- ""description"": ""Sort direction for grouped results by the computed aggregated value. 'desc' returns highest values first, 'asc' returns lowest first. ONLY applies when groupby is provided. Default is 'desc'."",
+ ""description"": ""Sort grouped results by the aggregated value. Requires groupby."",
""default"": ""desc""
},
""having"": {
""type"": ""object"",
- ""description"": ""Filter applied AFTER aggregating to filter grouped results by the computed aggregated value (acts as a HAVING clause). ONLY applies when groupby is provided. Multiple operators are AND-ed together. For example, use gt with value 20 to keep groups where the aggregated value exceeds 20. Combine gte and lte to define a range."",
+ ""description"": ""Filter groups by the aggregated value (HAVING clause). Requires groupby. Multiple operators are AND-ed."",
""properties"": {
- ""eq"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value equals this number."" },
- ""neq"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value does not equal this number."" },
- ""gt"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value is greater than this number."" },
- ""gte"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value is greater than or equal to this number."" },
- ""lt"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value is less than this number."" },
- ""lte"": { ""type"": ""number"", ""description"": ""Keep groups where the aggregated value is less than or equal to this number."" },
+ ""eq"": { ""type"": ""number"", ""description"": ""Equals."" },
+ ""neq"": { ""type"": ""number"", ""description"": ""Not equals."" },
+ ""gt"": { ""type"": ""number"", ""description"": ""Greater than."" },
+ ""gte"": { ""type"": ""number"", ""description"": ""Greater than or equal."" },
+ ""lt"": { ""type"": ""number"", ""description"": ""Less than."" },
+ ""lte"": { ""type"": ""number"", ""description"": ""Less than or equal."" },
""in"": {
""type"": ""array"",
""items"": { ""type"": ""number"" },
- ""description"": ""Keep groups where the aggregated value matches any number in this list. Example: [5, 10] keeps groups with aggregated value 5 or 10.""
+ ""description"": ""Matches any value in the list.""
}
}
},
""first"": {
""type"": ""integer"",
- ""description"": ""Maximum number of grouped results to return. Used for pagination of grouped results. ONLY applies when groupby is provided. Must be >= 1. When set, the response includes 'items', 'endCursor', and 'hasNextPage' fields for pagination."",
+ ""description"": ""Max grouped results to return. Requires groupby. Enables paginated response with endCursor and hasNextPage."",
""minimum"": 1
},
""after"": {
""type"": ""string"",
- ""description"": ""Opaque cursor string for pagination. Pass the 'endCursor' value from a previous response to get the next page of results. REQUIRES both groupby and first to be set. Do not construct this value manually; always use the endCursor from a previous response.""
+ ""description"": ""Opaque cursor from a previous endCursor for next-page retrieval. Requires groupby and first. Do not construct manually.""
}
},
""required"": [""entity"", ""function"", ""field""]
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 78c83d12be..242fdb8b6f 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -835,8 +835,8 @@ public void BlogScenarios_ToolDescription_ForcesDescribeEntitiesFirst()
Assert.IsTrue(metadata.Description!.Contains("describe_entities"),
"Tool description must instruct models to call describe_entities first.");
- Assert.IsTrue(metadata.Description.Contains("STEP 1"),
- "Tool description must use numbered steps starting with STEP 1.");
+ Assert.IsTrue(metadata.Description.Contains("1)"),
+ "Tool description must use numbered workflow steps.");
}
///
From 5c93f9283b1018c334a3442f5bf269318c8eab31 Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 18:38:34 -0700
Subject: [PATCH 20/43] Add early field validation and FieldNotFound error
helper
Validate field and groupby field names immediately after metadata
resolution, before authorization or query building. Invalid field
names now return a FieldNotFound error with model-friendly guidance
to call describe_entities for valid field names.
- Add McpErrorHelpers.FieldNotFound() with entity name, field name,
parameter name, and describe_entities guidance
- Move field existence checks before auth in AggregateRecordsTool
- Remove redundant late validation (already caught early)
- Add tests for FieldNotFound error type and message content
82 tests pass.
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 37 ++++++++++------
.../Utils/McpErrorHelpers.cs | 11 +++++
.../Mcp/AggregateRecordsToolTests.cs | 43 +++++++++++++++++++
3 files changed, 77 insertions(+), 14 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 2b028c9ec4..2dee64ccd1 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -278,6 +278,24 @@ public async Task ExecuteAsync(
return McpResponseBuilder.BuildErrorResult(toolName, "EntityNotFound", metadataError, logger);
}
+ // Early field validation: check all user-supplied field names before authorization or query building.
+ // This lets the model discover and fix typos immediately.
+ if (!isCountStar)
+ {
+ if (!sqlMetadataProvider.TryGetBackingColumn(entityName, field, out _))
+ {
+ return McpErrorHelpers.FieldNotFound(toolName, entityName, field, "field", logger);
+ }
+ }
+
+ foreach (string gField in groupby)
+ {
+ if (!sqlMetadataProvider.TryGetBackingColumn(entityName, gField, out _))
+ {
+ return McpErrorHelpers.FieldNotFound(toolName, entityName, gField, "groupby", logger);
+ }
+ }
+
// Authorization
IAuthorizationResolver authResolver = serviceProvider.GetRequiredService();
IAuthorizationService authorizationService = serviceProvider.GetRequiredService();
@@ -358,15 +376,11 @@ public async Task ExecuteAsync(
IQueryBuilder queryBuilder = queryManagerFactory.GetQueryBuilder(databaseType);
IQueryExecutor queryExecutor = queryManagerFactory.GetQueryExecutor(databaseType);
- // Resolve backing column name for the aggregation field
+ // Resolve backing column name for the aggregation field (already validated early)
string? backingField = null;
if (!isCountStar)
{
- if (!sqlMetadataProvider.TryGetBackingColumn(entityName, field, out backingField))
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- $"Field '{field}' not found for entity '{entityName}'.", logger);
- }
+ sqlMetadataProvider.TryGetBackingColumn(entityName, field, out backingField);
}
else
{
@@ -380,17 +394,12 @@ public async Task ExecuteAsync(
}
}
- // Resolve backing column names for groupby fields
+ // Resolve backing column names for groupby fields (already validated early)
List<(string entityField, string backingCol)> groupbyMapping = new();
foreach (string gField in groupby)
{
- if (!sqlMetadataProvider.TryGetBackingColumn(entityName, gField, out string? backingGCol))
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- $"GroupBy field '{gField}' not found for entity '{entityName}'.", logger);
- }
-
- groupbyMapping.Add((gField, backingGCol));
+ sqlMetadataProvider.TryGetBackingColumn(entityName, gField, out string? backingGCol);
+ groupbyMapping.Add((gField, backingGCol!));
}
string alias = ComputeAlias(function, field);
diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpErrorHelpers.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpErrorHelpers.cs
index 1a5c223798..13835b2fa9 100644
--- a/src/Azure.DataApiBuilder.Mcp/Utils/McpErrorHelpers.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpErrorHelpers.cs
@@ -24,5 +24,16 @@ public static CallToolResult ToolDisabled(string toolName, ILogger? logger, stri
string message = customMessage ?? $"The {toolName} tool is disabled in the configuration.";
return McpResponseBuilder.BuildErrorResult(toolName, Model.McpErrorCode.ToolDisabled.ToString(), message, logger);
}
+
+ ///
+ /// Returns a model-friendly error when a field name is not found for an entity.
+ /// Guides the model to call describe_entities to discover valid field names.
+ ///
+ public static CallToolResult FieldNotFound(string toolName, string entityName, string fieldName, string parameterName, ILogger? logger)
+ {
+ string message = $"Field '{fieldName}' in '{parameterName}' was not found for entity '{entityName}'. "
+ + $"Call describe_entities to get valid field names for '{entityName}'.";
+ return McpResponseBuilder.BuildErrorResult(toolName, "FieldNotFound", message, logger);
+ }
}
}
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 242fdb8b6f..41e028e28a 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -15,6 +15,7 @@
using Azure.DataApiBuilder.Core.Configurations;
using Azure.DataApiBuilder.Mcp.BuiltInTools;
using Azure.DataApiBuilder.Mcp.Model;
+using Azure.DataApiBuilder.Mcp.Utils;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.VisualStudio.TestTools.UnitTesting;
@@ -856,6 +857,48 @@ public void BlogScenarios_ToolDescription_DocumentsAliasConvention()
#endregion
+ #region FieldNotFound Error Helper Tests
+
+ ///
+ /// Verifies the FieldNotFound error helper produces the correct error type
+ /// and a model-friendly message that includes the field name, entity, and guidance.
+ ///
+ [TestMethod]
+ public void FieldNotFound_ReturnsCorrectErrorTypeAndMessage()
+ {
+ CallToolResult result = McpErrorHelpers.FieldNotFound("aggregate_records", "Product", "badField", "field", null);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ JsonElement error = content.GetProperty("error");
+
+ Assert.AreEqual("FieldNotFound", error.GetProperty("type").GetString());
+ string message = error.GetProperty("message").GetString()!;
+ Assert.IsTrue(message.Contains("badField"), "Message must include the invalid field name.");
+ Assert.IsTrue(message.Contains("Product"), "Message must include the entity name.");
+ Assert.IsTrue(message.Contains("field"), "Message must identify which parameter was invalid.");
+ Assert.IsTrue(message.Contains("describe_entities"), "Message must guide the model to call describe_entities.");
+ }
+
+ ///
+ /// Verifies the FieldNotFound error helper identifies the groupby parameter.
+ ///
+ [TestMethod]
+ public void FieldNotFound_GroupBy_IdentifiesParameter()
+ {
+ CallToolResult result = McpErrorHelpers.FieldNotFound("aggregate_records", "Product", "invalidCol", "groupby", null);
+
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ string message = content.GetProperty("error").GetProperty("message").GetString()!;
+
+ Assert.IsTrue(message.Contains("invalidCol"), "Message must include the invalid field name.");
+ Assert.IsTrue(message.Contains("groupby"), "Message must identify 'groupby' as the parameter.");
+ Assert.IsTrue(message.Contains("describe_entities"), "Message must guide the model to call describe_entities.");
+ }
+
+ #endregion
+
#region Helper Methods
private static JsonElement ParseContent(CallToolResult result)
From d1268f2f34c2cc8f3c40343cca2d795e2f401758 Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 18:40:14 -0700
Subject: [PATCH 21/43] Rename truncated variables to descriptive names
Rename abbreviated variable names to their full, readable forms:
funcEl -> functionElement, fieldEl -> fieldElement, distinctEl -> distinctElement,
filterEl -> filterElement, orderbyEl -> orderbyElement, firstEl -> firstElement,
afterEl -> afterElement, groupbyEl -> groupbyElement, g -> groupbyItem,
gVal -> groupbyFieldName, gField -> groupbyField, havingEl -> havingElement,
havingOps -> havingOperators, havingIn -> havingInValues, aggType -> aggregationType,
aggColumn -> aggregationColumn, predOp -> predicateOperation, op -> havingOperator,
pred -> predicate, backingCol -> backingColumn, backingGCol -> backingGroupbyColumn,
timeoutEx -> timeoutException, taskEx -> taskCanceledException,
dbEx -> dbException, argEx -> argumentException/dabException.
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 124 +++++++++---------
1 file changed, 62 insertions(+), 62 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 2dee64ccd1..debce4f090 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -168,23 +168,23 @@ public async Task ExecuteAsync(
return McpErrorHelpers.ToolDisabled(toolName, logger, $"DML tools are disabled for entity '{entityName}'.");
}
- if (!root.TryGetProperty("function", out JsonElement funcEl) || string.IsNullOrWhiteSpace(funcEl.GetString()))
+ if (!root.TryGetProperty("function", out JsonElement functionElement) || string.IsNullOrWhiteSpace(functionElement.GetString()))
{
return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Missing required argument 'function'.", logger);
}
- string function = funcEl.GetString()!.ToLowerInvariant();
+ string function = functionElement.GetString()!.ToLowerInvariant();
if (!_validFunctions.Contains(function))
{
return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", $"Invalid function '{function}'. Must be one of: count, avg, sum, min, max.", logger);
}
- if (!root.TryGetProperty("field", out JsonElement fieldEl) || string.IsNullOrWhiteSpace(fieldEl.GetString()))
+ if (!root.TryGetProperty("field", out JsonElement fieldElement) || string.IsNullOrWhiteSpace(fieldElement.GetString()))
{
return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Missing required argument 'field'.", logger);
}
- string field = fieldEl.GetString()!;
+ string field = fieldElement.GetString()!;
// Validate field/function compatibility
bool isCountStar = function == "count" && field == "*";
@@ -195,7 +195,7 @@ public async Task ExecuteAsync(
$"Field '*' is only valid with function 'count'. For function '{function}', provide a specific field name.", logger);
}
- bool distinct = root.TryGetProperty("distinct", out JsonElement distinctEl) && distinctEl.GetBoolean();
+ bool distinct = root.TryGetProperty("distinct", out JsonElement distinctElement) && distinctElement.GetBoolean();
// Reject count(*) with distinct as it is semantically undefined
if (isCountStar && distinct)
@@ -204,13 +204,13 @@ public async Task ExecuteAsync(
"Cannot use distinct=true with field='*'. DISTINCT requires a specific field name. Use a field name instead of '*' to count distinct values.", logger);
}
- string? filter = root.TryGetProperty("filter", out JsonElement filterEl) ? filterEl.GetString() : null;
- string orderby = root.TryGetProperty("orderby", out JsonElement orderbyEl) ? (orderbyEl.GetString() ?? "desc") : "desc";
+ string? filter = root.TryGetProperty("filter", out JsonElement filterElement) ? filterElement.GetString() : null;
+ string orderby = root.TryGetProperty("orderby", out JsonElement orderbyElement) ? (orderbyElement.GetString() ?? "desc") : "desc";
int? first = null;
- if (root.TryGetProperty("first", out JsonElement firstEl) && firstEl.ValueKind == JsonValueKind.Number)
+ if (root.TryGetProperty("first", out JsonElement firstElement) && firstElement.ValueKind == JsonValueKind.Number)
{
- first = firstEl.GetInt32();
+ first = firstElement.GetInt32();
if (first < 1)
{
return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Argument 'first' must be at least 1.", logger);
@@ -222,24 +222,24 @@ public async Task ExecuteAsync(
}
}
- string? after = root.TryGetProperty("after", out JsonElement afterEl) ? afterEl.GetString() : null;
+ string? after = root.TryGetProperty("after", out JsonElement afterElement) ? afterElement.GetString() : null;
List<string> groupby = new();
- if (root.TryGetProperty("groupby", out JsonElement groupbyEl) && groupbyEl.ValueKind == JsonValueKind.Array)
+ if (root.TryGetProperty("groupby", out JsonElement groupbyElement) && groupbyElement.ValueKind == JsonValueKind.Array)
{
- foreach (JsonElement g in groupbyEl.EnumerateArray())
+ foreach (JsonElement groupbyItem in groupbyElement.EnumerateArray())
{
- string? gVal = g.GetString();
- if (!string.IsNullOrWhiteSpace(gVal))
+ string? groupbyFieldName = groupbyItem.GetString();
+ if (!string.IsNullOrWhiteSpace(groupbyFieldName))
{
- groupby.Add(gVal);
+ groupby.Add(groupbyFieldName);
}
}
}
- Dictionary<string, double>? havingOps = null;
- List<double>? havingIn = null;
- if (root.TryGetProperty("having", out JsonElement havingEl) && havingEl.ValueKind == JsonValueKind.Object)
+ Dictionary<string, double>? havingOperators = null;
+ List<double>? havingInValues = null;
+ if (root.TryGetProperty("having", out JsonElement havingElement) && havingElement.ValueKind == JsonValueKind.Object)
{
if (groupby.Count == 0)
{
@@ -247,20 +247,20 @@ public async Task ExecuteAsync(
"The 'having' parameter requires 'groupby' to be specified. HAVING filters groups after aggregation.", logger);
}
- havingOps = new Dictionary<string, double>(StringComparer.OrdinalIgnoreCase);
- foreach (JsonProperty prop in havingEl.EnumerateObject())
+ havingOperators = new Dictionary<string, double>(StringComparer.OrdinalIgnoreCase);
+ foreach (JsonProperty prop in havingElement.EnumerateObject())
{
if (prop.Name.Equals("in", StringComparison.OrdinalIgnoreCase) && prop.Value.ValueKind == JsonValueKind.Array)
{
- havingIn = new List<double>();
+ havingInValues = new List<double>();
foreach (JsonElement item in prop.Value.EnumerateArray())
{
- havingIn.Add(item.GetDouble());
+ havingInValues.Add(item.GetDouble());
}
}
else if (prop.Value.ValueKind == JsonValueKind.Number)
{
- havingOps[prop.Name] = prop.Value.GetDouble();
+ havingOperators[prop.Name] = prop.Value.GetDouble();
}
}
}
@@ -288,11 +288,11 @@ public async Task ExecuteAsync(
}
}
- foreach (string gField in groupby)
+ foreach (string groupbyField in groupby)
{
- if (!sqlMetadataProvider.TryGetBackingColumn(entityName, gField, out _))
+ if (!sqlMetadataProvider.TryGetBackingColumn(entityName, groupbyField, out _))
{
- return McpErrorHelpers.FieldNotFound(toolName, entityName, gField, "groupby", logger);
+ return McpErrorHelpers.FieldNotFound(toolName, entityName, groupbyField, "groupby", logger);
}
}
@@ -395,11 +395,11 @@ public async Task ExecuteAsync(
}
// Resolve backing column names for groupby fields (already validated early)
- List<(string entityField, string backingCol)> groupbyMapping = new();
- foreach (string gField in groupby)
+ List<(string entityField, string backingColumn)> groupbyMapping = new();
+ foreach (string groupbyField in groupby)
{
- sqlMetadataProvider.TryGetBackingColumn(entityName, gField, out string? backingGCol);
- groupbyMapping.Add((gField, backingGCol!));
+ sqlMetadataProvider.TryGetBackingColumn(entityName, groupbyField, out string? backingGroupbyColumn);
+ groupbyMapping.Add((groupbyField, backingGroupbyColumn!));
}
string alias = ComputeAlias(function, field);
@@ -408,27 +408,27 @@ public async Task ExecuteAsync(
structure.Columns.Clear();
// Add groupby columns as LabelledColumns and GroupByMetadata.Fields
- foreach (var (entityField, backingCol) in groupbyMapping)
+ foreach (var (entityField, backingColumn) in groupbyMapping)
{
structure.Columns.Add(new LabelledColumn(
- dbObject.SchemaName, dbObject.Name, backingCol, entityField, structure.SourceAlias));
- structure.GroupByMetadata.Fields[backingCol] = new Column(
- dbObject.SchemaName, dbObject.Name, backingCol, structure.SourceAlias);
+ dbObject.SchemaName, dbObject.Name, backingColumn, entityField, structure.SourceAlias));
+ structure.GroupByMetadata.Fields[backingColumn] = new Column(
+ dbObject.SchemaName, dbObject.Name, backingColumn, structure.SourceAlias);
}
// Build aggregation column using engine's AggregationColumn type.
// For COUNT(*), we use the primary key column (PK is always NOT NULL, so COUNT(pk) ≡ COUNT(*)).
- AggregationType aggType = Enum.Parse<AggregationType>(function);
- AggregationColumn aggColumn = new(
- dbObject.SchemaName, dbObject.Name, backingField!, aggType, alias, distinct, structure.SourceAlias);
+ AggregationType aggregationType = Enum.Parse<AggregationType>(function);
+ AggregationColumn aggregationColumn = new(
+ dbObject.SchemaName, dbObject.Name, backingField!, aggregationType, alias, distinct, structure.SourceAlias);
// Build HAVING predicates using engine's Predicate model
List<Predicate> havingPredicates = new();
- if (havingOps != null)
+ if (havingOperators != null)
{
- foreach (var op in havingOps)
+ foreach (var havingOperator in havingOperators)
{
- PredicateOperation predOp = op.Key.ToLowerInvariant() switch
+ PredicateOperation predicateOperation = havingOperator.Key.ToLowerInvariant() switch
{
"eq" => PredicateOperation.Equal,
"neq" => PredicateOperation.NotEqual,
@@ -436,21 +436,21 @@ public async Task ExecuteAsync(
"gte" => PredicateOperation.GreaterThanOrEqual,
"lt" => PredicateOperation.LessThan,
"lte" => PredicateOperation.LessThanOrEqual,
- _ => throw new ArgumentException($"Invalid having operator: {op.Key}")
+ _ => throw new ArgumentException($"Invalid having operator: {havingOperator.Key}")
};
string paramName = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
- structure.Parameters.Add(paramName, new DbConnectionParam(op.Value));
+ structure.Parameters.Add(paramName, new DbConnectionParam(havingOperator.Value));
havingPredicates.Add(new Predicate(
- new PredicateOperand(aggColumn),
- predOp,
+ new PredicateOperand(aggregationColumn),
+ predicateOperation,
new PredicateOperand(paramName)));
}
}
- if (havingIn != null && havingIn.Count > 0)
+ if (havingInValues != null && havingInValues.Count > 0)
{
List<string> inParams = new();
- foreach (double val in havingIn)
+ foreach (double val in havingInValues)
{
string paramName = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
structure.Parameters.Add(paramName, new DbConnectionParam(val));
@@ -458,22 +458,22 @@ public async Task ExecuteAsync(
}
havingPredicates.Add(new Predicate(
- new PredicateOperand(aggColumn),
+ new PredicateOperand(aggregationColumn),
PredicateOperation.IN,
new PredicateOperand($"({string.Join(", ", inParams)})")));
}
// Combine multiple HAVING predicates with AND
Predicate? combinedHaving = null;
- foreach (var pred in havingPredicates)
+ foreach (var predicate in havingPredicates)
{
combinedHaving = combinedHaving == null
- ? pred
- : new Predicate(new PredicateOperand(combinedHaving), PredicateOperation.AND, new PredicateOperand(pred));
+ ? predicate
+ : new Predicate(new PredicateOperand(combinedHaving), PredicateOperation.AND, new PredicateOperand(predicate));
}
structure.GroupByMetadata.Aggregations.Add(
- new AggregationOperation(aggColumn, having: combinedHaving != null ? new List<Predicate> { combinedHaving } : null));
+ new AggregationOperation(aggregationColumn, having: combinedHaving != null ? new List<Predicate> { combinedHaving } : null));
structure.GroupByMetadata.RequestedAggregations = true;
// Clear default OrderByColumns (PK-based)
@@ -564,9 +564,9 @@ public async Task ExecuteAsync(
return BuildSimpleResponse(resultArray, entityName, alias, logger);
}
- catch (TimeoutException timeoutEx)
+ catch (TimeoutException timeoutException)
{
- logger?.LogError(timeoutEx, "Aggregation operation timed out for entity {Entity}.", entityName);
+ logger?.LogError(timeoutException, "Aggregation operation timed out for entity {Entity}.", entityName);
return McpResponseBuilder.BuildErrorResult(
toolName,
"TimeoutError",
@@ -576,9 +576,9 @@ public async Task ExecuteAsync(
+ "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.",
logger);
}
- catch (TaskCanceledException taskEx)
+ catch (TaskCanceledException taskCanceledException)
{
- logger?.LogError(taskEx, "Aggregation task was canceled for entity {Entity}.", entityName);
+ logger?.LogError(taskCanceledException, "Aggregation task was canceled for entity {Entity}.", entityName);
return McpResponseBuilder.BuildErrorResult(
toolName,
"TimeoutError",
@@ -598,18 +598,18 @@ public async Task ExecuteAsync(
+ "No results were returned. You may retry the same request.",
logger);
}
- catch (DbException dbEx)
+ catch (DbException dbException)
{
- logger?.LogError(dbEx, "Database error during aggregation for entity {Entity}.", entityName);
- return McpResponseBuilder.BuildErrorResult(toolName, "DatabaseOperationFailed", dbEx.Message, logger);
+ logger?.LogError(dbException, "Database error during aggregation for entity {Entity}.", entityName);
+ return McpResponseBuilder.BuildErrorResult(toolName, "DatabaseOperationFailed", dbException.Message, logger);
}
- catch (ArgumentException argEx)
+ catch (ArgumentException argumentException)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", argEx.Message, logger);
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", argumentException.Message, logger);
}
- catch (DataApiBuilderException argEx)
+ catch (DataApiBuilderException dabException)
{
- return McpResponseBuilder.BuildErrorResult(toolName, argEx.StatusCode.ToString(), argEx.Message, logger);
+ return McpResponseBuilder.BuildErrorResult(toolName, dabException.StatusCode.ToString(), dabException.Message, logger);
}
catch (Exception ex)
{
From 539047153949d14f124098b1f102cba62dec9994 Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 18:41:35 -0700
Subject: [PATCH 22/43] Remove hallucinated first > 100000 validation
DAB config already has MaxResponseSize property that handles this
downstream through structure.Limit(). The engine applies the configured
limit automatically, making this artificial cap redundant.
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 5 +----
.../Mcp/AggregateRecordsToolTests.cs | 15 ---------------
2 files changed, 1 insertion(+), 19 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index debce4f090..11d629378e 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -216,10 +216,7 @@ public async Task ExecuteAsync(
return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Argument 'first' must be at least 1.", logger);
}
- if (first > 100_000)
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Argument 'first' must not exceed 100000.", logger);
- }
+
}
string? after = root.TryGetProperty("after", out JsonElement afterElement) ? afterElement.GetString() : null;
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 41e028e28a..9898b16c68 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -190,21 +190,6 @@ public async Task AggregateRecords_InvalidFunction_ReturnsInvalidArguments()
Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("median"));
}
- [TestMethod]
- public async Task AggregateRecords_FirstExceedsMax_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"first\": 200000, \"groupby\": [\"title\"]}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("100000"));
- }
-
[TestMethod]
public async Task AggregateRecords_StarFieldWithAvg_ReturnsInvalidArguments()
{
From b55cdde6efee4ee30df648b7a34edfc417ab1d73 Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 18:43:04 -0700
Subject: [PATCH 23/43] Clean up extra blank line from validation removal
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.cs | 2 --
1 file changed, 2 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 11d629378e..efccffdb3a 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -215,8 +215,6 @@ public async Task ExecuteAsync(
{
return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Argument 'first' must be at least 1.", logger);
}
-
-
}
string? after = root.TryGetProperty("after", out JsonElement afterElement) ? afterElement.GetString() : null;
From 7f4e2596d568e858cca93524ab5b46a1b12d55ee Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 18:43:54 -0700
Subject: [PATCH 24/43] Add AggregateRecordsTool documentation for SQL-level
aggregations
---
.../BuiltInTools/AggregateRecordsTool.md | 104 ++++++++++++++++++
1 file changed, 104 insertions(+)
create mode 100644 src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md
new file mode 100644
index 0000000000..002dad9a89
--- /dev/null
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md
@@ -0,0 +1,104 @@
+# AggregateRecordsTool
+
+MCP tool that computes SQL-level aggregations (COUNT, AVG, SUM, MIN, MAX) on DAB entities. All aggregation is pushed to the database engine — no in-memory computation.
+
+## Class Structure
+
+| Member | Kind | Purpose |
+|---|---|---|
+| `ToolType` | Property | Returns `ToolType.BuiltIn` for reflection-based discovery. |
+| `_validFunctions` | Static field | Allowlist of aggregation functions: count, avg, sum, min, max. |
+| `GetToolMetadata()` | Method | Returns the MCP `Tool` descriptor (name, description, JSON input schema). |
+| `ExecuteAsync()` | Method | Main entry point — validates input, resolves metadata, authorizes, builds the SQL query via the engine's `IQueryBuilder.Build(SqlQueryStructure)`, executes it, and formats the response. |
+| `ComputeAlias()` | Static method | Produces the result column alias: `"count"` for count(\*), otherwise `"{function}_{field}"`. |
+| `DecodeCursorOffset()` | Static method | Decodes a base64 opaque cursor string to an integer offset for OFFSET/FETCH pagination. Returns 0 on any invalid input. |
+| `BuildPaginatedResponse()` | Private method | Formats a grouped result set into `{ items, endCursor, hasNextPage }` when `first` is provided. |
+| `BuildSimpleResponse()` | Private method | Formats a scalar or grouped result set without pagination. |
+
+## ExecuteAsync Sequence
+
+```mermaid
+sequenceDiagram
+ participant Model as LLM / MCP Client
+ participant Tool as AggregateRecordsTool
+ participant Config as RuntimeConfigProvider
+ participant Meta as ISqlMetadataProvider
+ participant Auth as IAuthorizationService
+ participant QB as IQueryBuilder (engine)
+ participant QE as IQueryExecutor
+ participant DB as Database
+
+ Model->>Tool: ExecuteAsync(arguments, serviceProvider, cancellationToken)
+
+ Note over Tool: 1. Input validation
+ Tool->>Config: GetConfig()
+ Config-->>Tool: RuntimeConfig
+ Tool->>Tool: Validate tool enabled (runtime + entity level)
+ Tool->>Tool: Parse & validate arguments (entity, function, field, distinct, filter, groupby, having, first, after)
+
+ Note over Tool: 2. Metadata resolution
+ Tool->>Meta: TryResolveMetadata(entityName)
+ Meta-->>Tool: sqlMetadataProvider, dbObject, dataSourceName
+
+ Note over Tool: 3. Early field validation
+ Tool->>Meta: TryGetBackingColumn(entityName, field)
+ Meta-->>Tool: backingColumn (or FieldNotFound error)
+ loop Each groupby field
+ Tool->>Meta: TryGetBackingColumn(entityName, groupbyField)
+ Meta-->>Tool: backingColumn (or FieldNotFound error)
+ end
+
+ Note over Tool: 4. Authorization
+ Tool->>Auth: AuthorizeAsync(user, FindRequestContext, ColumnsPermissionsRequirement)
+ Auth-->>Tool: AuthorizationResult
+
+ Note over Tool: 5. Build SqlQueryStructure
+ Tool->>Tool: Create SqlQueryStructure from FindRequestContext
+ Tool->>Tool: Populate GroupByMetadata (fields, AggregationColumn, HAVING predicates)
+ Tool->>Tool: Clear default columns/OrderBy, set aggregation flag
+
+ Note over Tool: 6. Generate SQL via engine
+ Tool->>QB: Build(SqlQueryStructure)
+ QB-->>Tool: SQL string (SELECT ... GROUP BY ... HAVING ... FOR JSON PATH)
+
+ Note over Tool: 7. Post-process SQL
+ Tool->>Tool: Insert ORDER BY aggregate expression before FOR JSON PATH
+ opt Pagination (first provided)
+ Tool->>Tool: Remove TOP N (conflicts with OFFSET/FETCH)
+ Tool->>Tool: Append OFFSET/FETCH NEXT
+ end
+
+ Note over Tool: 8. Execute query
+ Tool->>QE: ExecuteQueryAsync(sql, parameters, GetJsonResultAsync, dataSourceName)
+ QE->>DB: Execute SQL
+ DB-->>QE: JSON result
+ QE-->>Tool: JsonDocument
+
+ Note over Tool: 9. Format response
+ alt first provided (paginated)
+ Tool->>Tool: BuildPaginatedResponse(resultArray, first, after)
+ Tool-->>Model: { items, endCursor, hasNextPage }
+ else simple
+ Tool->>Tool: BuildSimpleResponse(resultArray, alias)
+ Tool-->>Model: { entity, result: [{alias: value}] }
+ end
+
+ Note over Tool: Exception handling
+ alt TimeoutException
+ Tool-->>Model: TimeoutError — "query timed out, narrow filters or paginate"
+ else TaskCanceledException
+ Tool-->>Model: TimeoutError — "canceled, likely timeout"
+ else OperationCanceledException
+ Tool-->>Model: OperationCanceled — "interrupted, retry"
+ else DbException
+ Tool-->>Model: DatabaseOperationFailed
+ end
+```
+
+## Key Design Decisions
+
+- **No in-memory aggregation.** The engine's `GroupByMetadata` / `AggregationColumn` types drive SQL generation via `queryBuilder.Build(structure)`.
+- **COUNT(\*) workaround.** The engine's `Build(AggregationColumn)` doesn't support `*` as a column name, so the primary key column is used instead (`COUNT(pk)` ≡ `COUNT(*)` since PK is NOT NULL).
+- **ORDER BY aggregate.** Neither the GraphQL nor REST paths support ORDER BY on an aggregate expression, so the tool post-processes the generated SQL to insert it before `FOR JSON PATH`.
+- **TOP vs OFFSET/FETCH.** SQL Server forbids both in the same query. When pagination is used, `TOP N` is stripped via regex.
+- **Database support.** Only MsSql / DWSQL — matches the engine's GraphQL aggregation support. PostgreSQL, MySQL, and CosmosDB return an `UnsupportedDatabase` error.
From d83ded22f36afaa2eeb6f5d5b46083f3558af7ce Mon Sep 17 00:00:00 2001
From: Jerry Nixon
Date: Mon, 2 Mar 2026 18:55:23 -0700
Subject: [PATCH 25/43] Simplify sequence diagram and expand design decisions
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
---
.../BuiltInTools/AggregateRecordsTool.md | 93 ++++---------------
1 file changed, 20 insertions(+), 73 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md
index 002dad9a89..718aa360e1 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md
@@ -19,86 +19,33 @@ MCP tool that computes SQL-level aggregations (COUNT, AVG, SUM, MIN, MAX) on DAB
```mermaid
sequenceDiagram
- participant Model as LLM / MCP Client
+ participant Client as MCP Client
participant Tool as AggregateRecordsTool
- participant Config as RuntimeConfigProvider
- participant Meta as ISqlMetadataProvider
- participant Auth as IAuthorizationService
- participant QB as IQueryBuilder (engine)
- participant QE as IQueryExecutor
+ participant Engine as DAB Engine
participant DB as Database
- Model->>Tool: ExecuteAsync(arguments, serviceProvider, cancellationToken)
-
- Note over Tool: 1. Input validation
- Tool->>Config: GetConfig()
- Config-->>Tool: RuntimeConfig
- Tool->>Tool: Validate tool enabled (runtime + entity level)
- Tool->>Tool: Parse & validate arguments (entity, function, field, distinct, filter, groupby, having, first, after)
-
- Note over Tool: 2. Metadata resolution
- Tool->>Meta: TryResolveMetadata(entityName)
- Meta-->>Tool: sqlMetadataProvider, dbObject, dataSourceName
-
- Note over Tool: 3. Early field validation
- Tool->>Meta: TryGetBackingColumn(entityName, field)
- Meta-->>Tool: backingColumn (or FieldNotFound error)
- loop Each groupby field
- Tool->>Meta: TryGetBackingColumn(entityName, groupbyField)
- Meta-->>Tool: backingColumn (or FieldNotFound error)
- end
-
- Note over Tool: 4. Authorization
- Tool->>Auth: AuthorizeAsync(user, FindRequestContext, ColumnsPermissionsRequirement)
- Auth-->>Tool: AuthorizationResult
-
- Note over Tool: 5. Build SqlQueryStructure
- Tool->>Tool: Create SqlQueryStructure from FindRequestContext
- Tool->>Tool: Populate GroupByMetadata (fields, AggregationColumn, HAVING predicates)
- Tool->>Tool: Clear default columns/OrderBy, set aggregation flag
-
- Note over Tool: 6. Generate SQL via engine
- Tool->>QB: Build(SqlQueryStructure)
- QB-->>Tool: SQL string (SELECT ... GROUP BY ... HAVING ... FOR JSON PATH)
-
- Note over Tool: 7. Post-process SQL
- Tool->>Tool: Insert ORDER BY aggregate expression before FOR JSON PATH
- opt Pagination (first provided)
- Tool->>Tool: Remove TOP N (conflicts with OFFSET/FETCH)
- Tool->>Tool: Append OFFSET/FETCH NEXT
- end
-
- Note over Tool: 8. Execute query
- Tool->>QE: ExecuteQueryAsync(sql, parameters, GetJsonResultAsync, dataSourceName)
- QE->>DB: Execute SQL
- DB-->>QE: JSON result
- QE-->>Tool: JsonDocument
-
- Note over Tool: 9. Format response
- alt first provided (paginated)
- Tool->>Tool: BuildPaginatedResponse(resultArray, first, after)
- Tool-->>Model: { items, endCursor, hasNextPage }
- else simple
- Tool->>Tool: BuildSimpleResponse(resultArray, alias)
- Tool-->>Model: { entity, result: [{alias: value}] }
+ Client->>Tool: ExecuteAsync(arguments)
+ Tool->>Tool: Validate inputs & check tool enabled
+ Tool->>Engine: Resolve entity metadata & validate fields
+ Tool->>Engine: Authorize (column-level permissions)
+ Tool->>Engine: Build SQL via queryBuilder.Build(SqlQueryStructure)
+ Tool->>Tool: Post-process SQL (ORDER BY, pagination)
+ Tool->>DB: ExecuteQueryAsync → JSON result
+ alt Paginated (first provided)
+ Tool-->>Client: { items, endCursor, hasNextPage }
+ else Simple
+ Tool-->>Client: { entity, result: [{alias: value}] }
end
- Note over Tool: Exception handling
- alt TimeoutException
- Tool-->>Model: TimeoutError — "query timed out, narrow filters or paginate"
- else TaskCanceledException
- Tool-->>Model: TimeoutError — "canceled, likely timeout"
- else OperationCanceledException
- Tool-->>Model: OperationCanceled — "interrupted, retry"
- else DbException
- Tool-->>Model: DatabaseOperationFailed
- end
+ Note over Tool,Client: On error: TimeoutError, OperationCanceled, or DatabaseOperationFailed
```
## Key Design Decisions
-- **No in-memory aggregation.** The engine's `GroupByMetadata` / `AggregationColumn` types drive SQL generation via `queryBuilder.Build(structure)`.
-- **COUNT(\*) workaround.** The engine's `Build(AggregationColumn)` doesn't support `*` as a column name, so the primary key column is used instead (`COUNT(pk)` ≡ `COUNT(*)` since PK is NOT NULL).
-- **ORDER BY aggregate.** Neither the GraphQL nor REST paths support ORDER BY on an aggregate expression, so the tool post-processes the generated SQL to insert it before `FOR JSON PATH`.
-- **TOP vs OFFSET/FETCH.** SQL Server forbids both in the same query. When pagination is used, `TOP N` is stripped via regex.
+- **No in-memory aggregation.** The engine's `GroupByMetadata` / `AggregationColumn` types drive SQL generation via `queryBuilder.Build(structure)`. All aggregation is performed by the database.
+- **COUNT(\*) workaround.** The engine's `Build(AggregationColumn)` doesn't support `*` as a column name (it produces invalid SQL like `count([].[*])`), so the primary key column is used instead. `COUNT(pk)` ≡ `COUNT(*)` since PK is NOT NULL.
+- **ORDER BY post-processing.** Neither the GraphQL nor REST code paths support ORDER BY on an aggregate expression, so this tool inserts `ORDER BY {func}({col}) ASC|DESC` into the generated SQL before `FOR JSON PATH`.
+- **TOP vs OFFSET/FETCH.** SQL Server forbids both in the same query. When pagination (`first`) is used, `TOP N` is stripped via regex before appending `OFFSET/FETCH NEXT`.
+- **Early field validation.** All user-supplied field names (aggregation field, groupby fields) are validated against the entity's metadata before authorization or query building, so typos surface immediately with actionable guidance.
+- **Timeout vs cancellation.** `TimeoutException` (from `query-timeout` config) and `OperationCanceledException` (from client disconnect) are handled separately with distinct model-facing messages. Timeouts guide the model to narrow filters or paginate; cancellations suggest retry.
- **Database support.** Only MsSql / DWSQL — matches the engine's GraphQL aggregation support. PostgreSQL, MySQL, and CosmosDB return an `UnsupportedDatabase` error.
From 6815b656a54b4b074704462ecbfdc8d28cbebb5b Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 3 Mar 2026 10:13:21 +0000
Subject: [PATCH 26/43] Changes before error encountered
Co-authored-by: souvikghosh04 <210500244+souvikghosh04@users.noreply.github.com>
---
schemas/dab.draft.schema.json | 5 +-
.../BuiltInTools/AggregateRecordsTool.cs | 182 ++++++++++--------
.../Utils/McpTelemetryHelper.cs | 6 +
src/Config/ObjectModel/McpRuntimeOptions.cs | 1 +
.../Configurations/RuntimeConfigValidator.cs | 5 +-
5 files changed, 116 insertions(+), 83 deletions(-)
diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json
index e78861807d..8f283fba36 100644
--- a/schemas/dab.draft.schema.json
+++ b/schemas/dab.draft.schema.json
@@ -277,9 +277,10 @@
},
"query-timeout": {
"type": "integer",
- "description": "Execution timeout in seconds for MCP tool operations. Applies to all MCP tools.",
+ "description": "Execution timeout in seconds for MCP tool operations. Applies to all MCP tools. Range: 1-600.",
"default": 30,
- "minimum": 1
+ "minimum": 1,
+ "maximum": 600
},
"dml-tools": {
"oneOf": [
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index efccffdb3a..806671fdaa 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -40,89 +40,91 @@ public class AggregateRecordsTool : IMcpTool
private static readonly HashSet<string> _validFunctions = new(StringComparer.OrdinalIgnoreCase) { "count", "avg", "sum", "min", "max" };
- public Tool GetToolMetadata()
+ private static readonly Tool _cachedToolMetadata = new()
{
- return new Tool
- {
- Name = "aggregate_records",
- Description = "Computes aggregations (count, avg, sum, min, max) on entity data. "
- + "WORKFLOW: 1) Call describe_entities first to get entity names and field names. "
- + "2) Call this tool with entity, function, and field from step 1. "
- + "RULES: field '*' is ONLY valid with count. "
- + "orderby, having, first, and after ONLY apply when groupby is provided. "
- + "RESPONSE: Result is aliased as '{function}_{field}' (e.g. avg_unitPrice). "
- + "For count(*), the alias is 'count'. "
- + "With groupby and first, response includes items, endCursor, and hasNextPage for pagination.",
- InputSchema = JsonSerializer.Deserialize<JsonElement>(
- @"{
- ""type"": ""object"",
- ""properties"": {
- ""entity"": {
- ""type"": ""string"",
- ""description"": ""Entity name from describe_entities with READ permission (case-sensitive).""
- },
- ""function"": {
- ""type"": ""string"",
- ""enum"": [""count"", ""avg"", ""sum"", ""min"", ""max""],
- ""description"": ""Aggregation function. count supports field '*'; avg, sum, min, max require a numeric field.""
- },
- ""field"": {
- ""type"": ""string"",
- ""description"": ""Field name to aggregate, or '*' with count to count all rows.""
- },
- ""distinct"": {
- ""type"": ""boolean"",
- ""description"": ""Remove duplicate values before aggregating. Not valid with field '*'."",
- ""default"": false
- },
- ""filter"": {
- ""type"": ""string"",
- ""description"": ""OData WHERE clause applied before aggregating. Operators: eq, ne, gt, ge, lt, le, and, or, not. Example: 'unitPrice lt 10'."",
- ""default"": """"
- },
- ""groupby"": {
- ""type"": ""array"",
- ""items"": { ""type"": ""string"" },
- ""description"": ""Field names to group by. Each unique combination produces one aggregated row. Enables orderby, having, first, and after."",
- ""default"": []
- },
- ""orderby"": {
- ""type"": ""string"",
- ""enum"": [""asc"", ""desc""],
- ""description"": ""Sort grouped results by the aggregated value. Requires groupby."",
- ""default"": ""desc""
- },
- ""having"": {
- ""type"": ""object"",
- ""description"": ""Filter groups by the aggregated value (HAVING clause). Requires groupby. Multiple operators are AND-ed."",
- ""properties"": {
- ""eq"": { ""type"": ""number"", ""description"": ""Equals."" },
- ""neq"": { ""type"": ""number"", ""description"": ""Not equals."" },
- ""gt"": { ""type"": ""number"", ""description"": ""Greater than."" },
- ""gte"": { ""type"": ""number"", ""description"": ""Greater than or equal."" },
- ""lt"": { ""type"": ""number"", ""description"": ""Less than."" },
- ""lte"": { ""type"": ""number"", ""description"": ""Less than or equal."" },
- ""in"": {
- ""type"": ""array"",
- ""items"": { ""type"": ""number"" },
- ""description"": ""Matches any value in the list.""
- }
+ Name = "aggregate_records",
+ Description = "Computes aggregations (count, avg, sum, min, max) on entity data. "
+ + "WORKFLOW: 1) Call describe_entities first to get entity names and field names. "
+ + "2) Call this tool with entity, function, and field from step 1. "
+ + "RULES: field '*' is ONLY valid with count. "
+ + "orderby, having, first, and after ONLY apply when groupby is provided. "
+ + "RESPONSE: Result is aliased as '{function}_{field}' (e.g. avg_unitPrice). "
+ + "For count(*), the alias is 'count'. "
+ + "With groupby and first, response includes items, endCursor, and hasNextPage for pagination.",
+ InputSchema = JsonSerializer.Deserialize<JsonElement>(
+ @"{
+ ""type"": ""object"",
+ ""properties"": {
+ ""entity"": {
+ ""type"": ""string"",
+ ""description"": ""Entity name from describe_entities with READ permission (case-sensitive).""
+ },
+ ""function"": {
+ ""type"": ""string"",
+ ""enum"": [""count"", ""avg"", ""sum"", ""min"", ""max""],
+ ""description"": ""Aggregation function. count supports field '*'; avg, sum, min, max require a numeric field.""
+ },
+ ""field"": {
+ ""type"": ""string"",
+ ""description"": ""Field name to aggregate, or '*' with count to count all rows.""
+ },
+ ""distinct"": {
+ ""type"": ""boolean"",
+ ""description"": ""Remove duplicate values before aggregating. Not valid with field '*'."",
+ ""default"": false
+ },
+ ""filter"": {
+ ""type"": ""string"",
+ ""description"": ""OData WHERE clause applied before aggregating. Operators: eq, ne, gt, ge, lt, le, and, or, not. Example: 'unitPrice lt 10'."",
+ ""default"": """"
+ },
+ ""groupby"": {
+ ""type"": ""array"",
+ ""items"": { ""type"": ""string"" },
+ ""description"": ""Field names to group by. Each unique combination produces one aggregated row. Enables orderby, having, first, and after."",
+ ""default"": []
+ },
+ ""orderby"": {
+ ""type"": ""string"",
+ ""enum"": [""asc"", ""desc""],
+ ""description"": ""Sort grouped results by the aggregated value. Requires groupby."",
+ ""default"": ""desc""
+ },
+ ""having"": {
+ ""type"": ""object"",
+ ""description"": ""Filter groups by the aggregated value (HAVING clause). Requires groupby. Multiple operators are AND-ed."",
+ ""properties"": {
+ ""eq"": { ""type"": ""number"", ""description"": ""Equals."" },
+ ""neq"": { ""type"": ""number"", ""description"": ""Not equals."" },
+ ""gt"": { ""type"": ""number"", ""description"": ""Greater than."" },
+ ""gte"": { ""type"": ""number"", ""description"": ""Greater than or equal."" },
+ ""lt"": { ""type"": ""number"", ""description"": ""Less than."" },
+ ""lte"": { ""type"": ""number"", ""description"": ""Less than or equal."" },
+ ""in"": {
+ ""type"": ""array"",
+ ""items"": { ""type"": ""number"" },
+ ""description"": ""Matches any value in the list.""
}
- },
- ""first"": {
- ""type"": ""integer"",
- ""description"": ""Max grouped results to return. Requires groupby. Enables paginated response with endCursor and hasNextPage."",
- ""minimum"": 1
- },
- ""after"": {
- ""type"": ""string"",
- ""description"": ""Opaque cursor from a previous endCursor for next-page retrieval. Requires groupby and first. Do not construct manually.""
}
},
- ""required"": [""entity"", ""function"", ""field""]
- }"
- )
- };
+ ""first"": {
+ ""type"": ""integer"",
+ ""description"": ""Max grouped results to return. Requires groupby. Enables paginated response with endCursor and hasNextPage."",
+ ""minimum"": 1
+ },
+ ""after"": {
+ ""type"": ""string"",
+ ""description"": ""Opaque cursor from a previous endCursor for next-page retrieval. Requires groupby and first. Do not construct manually.""
+ }
+ },
+ ""required"": [""entity"", ""function"", ""field""]
+ }"
+ )
+ };
+
+ public Tool GetToolMetadata()
+ {
+ return _cachedToolMetadata;
}
public async Task ExecuteAsync(
@@ -232,6 +234,28 @@ public async Task ExecuteAsync(
}
}
+ // Validate that first, after, and non-default orderby require groupby
+ if (groupby.Count == 0)
+ {
+ if (first.HasValue)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'first' parameter requires 'groupby' to be specified. Pagination applies to grouped aggregation results.", logger);
+ }
+
+ if (!string.IsNullOrEmpty(after))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'after' parameter requires 'groupby' to be specified. Pagination applies to grouped aggregation results.", logger);
+ }
+ }
+
+ if (!string.IsNullOrEmpty(after) && !first.HasValue)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'after' parameter requires 'first' to be specified. Provide 'first' to enable pagination.", logger);
+ }
+
Dictionary? havingOperators = null;
List? havingInValues = null;
if (root.TryGetProperty("having", out JsonElement havingElement) && havingElement.ValueKind == JsonValueKind.Object)
diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
index ac567d4d8c..31a92ef6e4 100644
--- a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
@@ -69,6 +69,12 @@ public static async Task ExecuteWithTelemetryAsync(
timeoutSeconds = config.Runtime?.Mcp?.EffectiveQueryTimeoutSeconds ?? McpRuntimeOptions.DEFAULT_QUERY_TIMEOUT_SECONDS;
}
+ // Defensive runtime guard: clamp timeout to valid range [1, MAX_QUERY_TIMEOUT_SECONDS].
+ if (timeoutSeconds < 1 || timeoutSeconds > McpRuntimeOptions.MAX_QUERY_TIMEOUT_SECONDS)
+ {
+ timeoutSeconds = McpRuntimeOptions.DEFAULT_QUERY_TIMEOUT_SECONDS;
+ }
+
// Wrap tool execution with the configured timeout using a linked CancellationTokenSource.
using CancellationTokenSource timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
timeoutCts.CancelAfter(TimeSpan.FromSeconds(timeoutSeconds));
diff --git a/src/Config/ObjectModel/McpRuntimeOptions.cs b/src/Config/ObjectModel/McpRuntimeOptions.cs
index f4b4281a14..5b48b2fcc3 100644
--- a/src/Config/ObjectModel/McpRuntimeOptions.cs
+++ b/src/Config/ObjectModel/McpRuntimeOptions.cs
@@ -11,6 +11,7 @@ public record McpRuntimeOptions
{
public const string DEFAULT_PATH = "/mcp";
public const int DEFAULT_QUERY_TIMEOUT_SECONDS = 30;
+ public const int MAX_QUERY_TIMEOUT_SECONDS = 600;
///
/// Whether MCP endpoints are enabled
diff --git a/src/Core/Configurations/RuntimeConfigValidator.cs b/src/Core/Configurations/RuntimeConfigValidator.cs
index ea2299bc6f..90550fbaba 100644
--- a/src/Core/Configurations/RuntimeConfigValidator.cs
+++ b/src/Core/Configurations/RuntimeConfigValidator.cs
@@ -916,10 +916,11 @@ public void ValidateMcpUri(RuntimeConfig runtimeConfig)
}
// Validate query-timeout if provided
- if (runtimeConfig.Runtime.Mcp.QueryTimeout is not null && runtimeConfig.Runtime.Mcp.QueryTimeout < 1)
+ if (runtimeConfig.Runtime.Mcp.QueryTimeout is not null &&
+ (runtimeConfig.Runtime.Mcp.QueryTimeout < 1 || runtimeConfig.Runtime.Mcp.QueryTimeout > McpRuntimeOptions.MAX_QUERY_TIMEOUT_SECONDS))
{
HandleOrRecordException(new DataApiBuilderException(
- message: "MCP query-timeout must be a positive integer (>= 1 second). " +
+ message: $"MCP query-timeout must be between 1 and {McpRuntimeOptions.MAX_QUERY_TIMEOUT_SECONDS} seconds. " +
$"Provided value: {runtimeConfig.Runtime.Mcp.QueryTimeout}.",
statusCode: HttpStatusCode.ServiceUnavailable,
subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError));
From c7010ffd6b787e5cf03a7f25a5d919c20a595ca8 Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Tue, 3 Mar 2026 17:13:44 +0530
Subject: [PATCH 27/43] Removing duplicate registration from stdio which is
failing runs
---
src/Service/Utilities/McpStdioHelper.cs | 11 ++---------
1 file changed, 2 insertions(+), 9 deletions(-)
diff --git a/src/Service/Utilities/McpStdioHelper.cs b/src/Service/Utilities/McpStdioHelper.cs
index 043e9dd85d..f22e12b02f 100644
--- a/src/Service/Utilities/McpStdioHelper.cs
+++ b/src/Service/Utilities/McpStdioHelper.cs
@@ -78,15 +78,8 @@ public static bool RunMcpStdioHost(IHost host)
{
host.Start();
- Mcp.Core.McpToolRegistry registry =
- host.Services.GetRequiredService();
- IEnumerable tools =
- host.Services.GetServices();
-
- foreach (Mcp.Model.IMcpTool tool in tools)
- {
- registry.RegisterTool(tool);
- }
+ // Tools are already registered by McpToolRegistryInitializer (IHostedService)
+ // during host.Start(). No need to register them again here.
IHostApplicationLifetime lifetime =
host.Services.GetRequiredService();
From 5038cc71e86f9ce24b5c3041d82c4b619e86f127 Mon Sep 17 00:00:00 2001
From: Souvik Ghosh
Date: Tue, 3 Mar 2026 17:01:08 +0000
Subject: [PATCH 28/43] update snapshot test files
---
...nTests.TestReadingRuntimeConfigForCosmos.verified.txt | 9 +++++++--
...onTests.TestReadingRuntimeConfigForMsSql.verified.txt | 9 +++++++--
...onTests.TestReadingRuntimeConfigForMySql.verified.txt | 9 +++++++--
...ts.TestReadingRuntimeConfigForPostgreSql.verified.txt | 9 +++++++--
4 files changed, 28 insertions(+), 8 deletions(-)
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
index 9279da9d59..0b2fd67066 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
@@ -28,14 +28,19 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedPath: false,
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
index 35fd562c87..4ee73b2b4a 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
@@ -32,14 +32,19 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedPath: false,
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
index 1490309ece..5522043d3f 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
@@ -24,14 +24,19 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedPath: false,
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
index ceba40ae63..b52c59df32 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
@@ -24,14 +24,19 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedPath: false,
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
From 88968f6c3ba794d211dfbc4eebb8b7d376da99b9 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sat, 28 Feb 2026 05:55:59 +0000
Subject: [PATCH 29/43] Initial plan
From b87be6f79cf93d4b79d46899bcf15579b2bd2e7e Mon Sep 17 00:00:00 2001
From: Souvik Ghosh
Date: Thu, 5 Mar 2026 06:13:54 +0000
Subject: [PATCH 30/43] Fixes from code reviews
---
.../BuiltInTools/AggregateRecordsTool.cs | 42 +++++++++-
.../Mcp/AggregateRecordsToolTests.cs | 82 ++++++++++++++++++-
src/Service.Tests/Mcp/McpQueryTimeoutTests.cs | 2 -
3 files changed, 116 insertions(+), 10 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 806671fdaa..2d7e8e19d8 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -39,6 +39,7 @@ public class AggregateRecordsTool : IMcpTool
public ToolType ToolType { get; } = ToolType.BuiltIn;
private static readonly HashSet _validFunctions = new(StringComparer.OrdinalIgnoreCase) { "count", "avg", "sum", "min", "max" };
+ private static readonly HashSet _validHavingOperators = new(StringComparer.OrdinalIgnoreCase) { "eq", "neq", "gt", "gte", "lt", "lte", "in" };
private static readonly Tool _cachedToolMetadata = new()
{
@@ -207,7 +208,8 @@ public async Task ExecuteAsync(
}
string? filter = root.TryGetProperty("filter", out JsonElement filterElement) ? filterElement.GetString() : null;
- string orderby = root.TryGetProperty("orderby", out JsonElement orderbyElement) ? (orderbyElement.GetString() ?? "desc") : "desc";
+ bool userProvidedOrderby = root.TryGetProperty("orderby", out JsonElement orderbyElement) && !string.IsNullOrWhiteSpace(orderbyElement.GetString());
+ string orderby = userProvidedOrderby ? (orderbyElement.GetString() ?? "desc") : "desc";
int? first = null;
if (root.TryGetProperty("first", out JsonElement firstElement) && firstElement.ValueKind == JsonValueKind.Number)
@@ -234,9 +236,15 @@ public async Task ExecuteAsync(
}
}
- // Validate that first, after, and non-default orderby require groupby
+ // Validate that first, after, orderby, and having require groupby
if (groupby.Count == 0)
{
+ if (userProvidedOrderby)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'orderby' parameter requires 'groupby' to be specified. Sorting applies to grouped aggregation results.", logger);
+ }
+
if (first.HasValue)
{
return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
@@ -269,16 +277,42 @@ public async Task ExecuteAsync(
havingOperators = new Dictionary(StringComparer.OrdinalIgnoreCase);
foreach (JsonProperty prop in havingElement.EnumerateObject())
{
- if (prop.Name.Equals("in", StringComparison.OrdinalIgnoreCase) && prop.Value.ValueKind == JsonValueKind.Array)
+ // Reject unsupported operators (e.g. between, notIn, like)
+ if (!_validHavingOperators.Contains(prop.Name))
{
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"Unsupported having operator '{prop.Name}'. Supported operators: {string.Join(", ", _validHavingOperators)}.", logger);
+ }
+
+ if (prop.Name.Equals("in", StringComparison.OrdinalIgnoreCase))
+ {
+ if (prop.Value.ValueKind != JsonValueKind.Array)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'having.in' value must be a numeric array. Example: {\"in\": [5, 10]}.", logger);
+ }
+
havingInValues = new List();
foreach (JsonElement item in prop.Value.EnumerateArray())
{
+ if (item.ValueKind != JsonValueKind.Number)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"All values in 'having.in' must be numeric. Found non-numeric value: '{item}'.", logger);
+ }
+
havingInValues.Add(item.GetDouble());
}
}
- else if (prop.Value.ValueKind == JsonValueKind.Number)
+ else
{
+ // Scalar operators (eq, neq, gt, gte, lt, lte) must have numeric values
+ if (prop.Value.ValueKind != JsonValueKind.Number)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"The 'having.{prop.Name}' value must be numeric. Got: '{prop.Value}'. HAVING filters compare aggregated numeric results.", logger);
+ }
+
havingOperators[prop.Name] = prop.Value.GetDouble();
}
}
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 9898b16c68..bab8d68f2c 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -1,8 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
-#nullable enable
-
using System;
using System.Collections.Generic;
using System.Text;
@@ -235,6 +233,82 @@ public async Task AggregateRecords_HavingWithoutGroupBy_ReturnsInvalidArguments(
Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("groupby"));
}
+ [TestMethod]
+ public async Task AggregateRecords_OrderByWithoutGroupBy_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"orderby\": \"desc\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("groupby"));
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_UnsupportedHavingOperator_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"between\": 5}}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("between"));
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("Supported operators"));
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_NonNumericHavingValue_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"eq\": \"ten\"}}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("numeric"));
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_NonNumericHavingInArray_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"in\": [5, \"abc\"]}}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("numeric"));
+ }
+
+ [TestMethod]
+ public async Task AggregateRecords_HavingInNotArray_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"in\": 5}}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("numeric array"));
+ }
+
#endregion
#region Alias Convention Tests
@@ -364,8 +438,8 @@ public async Task AggregateRecords_OperationCanceled_ReturnsExplicitCanceledMess
Assert.IsTrue(result.IsError == true);
JsonElement content = ParseContent(result);
Assert.IsTrue(content.TryGetProperty("error", out JsonElement error));
- string? errorType = error.GetProperty("type").GetString();
- string? errorMessage = error.GetProperty("message").GetString();
+ string errorType = error.GetProperty("type").GetString();
+ string errorMessage = error.GetProperty("message").GetString();
// Verify the error type identifies it as a cancellation
Assert.IsNotNull(errorType);
diff --git a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
index 0f5ee3951a..237e40e57e 100644
--- a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
+++ b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
@@ -1,8 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
-#nullable enable
-
using System;
using System.Collections.Generic;
using System.Text.Json;
From 38c773d3964d2b4c0020aef304913c1095310592 Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Thu, 5 Mar 2026 15:52:22 +0530
Subject: [PATCH 31/43] Snapshot files and test fixes
---
...stMethodsAndGraphQLOperations.verified.txt | 10 +-
...tyWithSourceAsStoredProcedure.verified.txt | 10 +-
...tityWithSourceWithDefaultType.verified.txt | 10 +-
...dingEntityWithoutIEnumerables.verified.txt | 10 +-
...ests.TestInitForCosmosDBNoSql.verified.txt | 10 +-
...toredProcedureWithRestMethods.verified.txt | 8 +-
...stMethodsAndGraphQLOperations.verified.txt | 10 +-
...itTests.CosmosDbNoSqlDatabase.verified.txt | 10 +-
...ts.CosmosDbPostgreSqlDatabase.verified.txt | 10 +-
...ionProviders_171ea8114ff71814.verified.txt | 10 +-
...ionProviders_2df7a1794712f154.verified.txt | 10 +-
...ionProviders_59fe1a10aa78899d.verified.txt | 10 +-
...ionProviders_b95b637ea87f16a7.verified.txt | 10 +-
...ionProviders_daacbd948b7ef72f.verified.txt | 10 +-
...tStartingSlashWillHaveItAdded.verified.txt | 10 +-
.../InitTests.MsSQLDatabase.verified.txt | 10 +-
...tStartingSlashWillHaveItAdded.verified.txt | 10 +-
...ConfigWithoutConnectionString.verified.txt | 10 +-
...lCharactersInConnectionString.verified.txt | 10 +-
...ationOptions_0546bef37027a950.verified.txt | 10 +-
...ationOptions_0ac567dd32a2e8f5.verified.txt | 10 +-
...ationOptions_0c06949221514e77.verified.txt | 10 +-
...ationOptions_18667ab7db033e9d.verified.txt | 10 +-
...ationOptions_2f42f44c328eb020.verified.txt | 10 +-
...ationOptions_3243d3f3441fdcc1.verified.txt | 10 +-
...ationOptions_53350b8b47df2112.verified.txt | 10 +-
...ationOptions_6584e0ec46b8a11d.verified.txt | 10 +-
...ationOptions_81cc88db3d4eecfb.verified.txt | 10 +-
...ationOptions_8ea187616dbb5577.verified.txt | 10 +-
...ationOptions_905845c29560a3ef.verified.txt | 10 +-
...ationOptions_b2fd24fab5b80917.verified.txt | 10 +-
...ationOptions_bd7cd088755287c9.verified.txt | 10 +-
...ationOptions_d2eccba2f836b380.verified.txt | 10 +-
...ationOptions_d463eed7fe5e4bbe.verified.txt | 10 +-
...ationOptions_d5520dd5c33f7b8d.verified.txt | 10 +-
...ationOptions_eab4a6010e602b59.verified.txt | 10 +-
...ationOptions_ecaa688829b4030e.verified.txt | 10 +-
src/Cli/Commands/ConfigureOptions.cs | 2 +
...ReadingRuntimeConfigForCosmos.verified.txt | 25 --
...tReadingRuntimeConfigForMySql.verified.txt | 268 +++++-------------
...ingRuntimeConfigForPostgreSql.verified.txt | 25 --
41 files changed, 332 insertions(+), 356 deletions(-)
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
index 3fa1fbc14e..0fd0030402 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt
index 76ea01dfca..725eed7a83 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt
index 3a8c738a70..70cb42137b 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt
index df2cd4b009..46bec31cc9 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt
index 1b14a3a7f0..0932956d7a 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: planet,
@@ -28,14 +28,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt
index 62d9e237b5..fdda324d36 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
index fa8b16e739..2a4e8653a1 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt
index 9d5458c0ee..4870537837 100644
--- a/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: testcontainer,
@@ -28,14 +28,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt
index 51f6ad8d95..e03973b91e 100644
--- a/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: CosmosDB_PostgreSQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt
index 978d1a253b..d33247dcab 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt
index 402bf4d2bc..fa08aefa62 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt
index ab71a40f03..98fdb25c77 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt
index 25e3976685..74afea9ef6 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt
index 140f017b78..3145f775c0 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt b/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt
index a3a056ac0a..ae32e3b379 100644
--- a/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt
index f40350c4da..0f2c151763 100644
--- a/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt b/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt
index b792d41c9f..d9067e1b43 100644
--- a/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt b/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt
index 173960d7b1..e48b87e1c8 100644
--- a/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt b/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt
index 25e3976685..74afea9ef6 100644
--- a/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt
index 63f0da701c..2cb50a06da 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: DWSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt
index f40350c4da..0f2c151763 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt
index e59070d692..bbea4aadd3 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -32,14 +32,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt
index f7de35b7ae..63f411cdb2 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: PostgreSQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt
index 63f0da701c..2cb50a06da 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: DWSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt
index f7de35b7ae..63f411cdb2 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: PostgreSQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt
index 75613db959..5af597f50a 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MySQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt
index d93aac7dc6..860fa1616c 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: testcontainer,
@@ -28,14 +28,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt
index 640815babb..48f3d0ce51 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -32,14 +32,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt
index 5900015d5a..f56dcad7d7 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: CosmosDB_PostgreSQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt
index 63f0da701c..2cb50a06da 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: DWSQL,
Options: {
@@ -27,14 +27,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt
index d93aac7dc6..860fa1616c 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: testcontainer,
@@ -28,14 +28,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt
index d93aac7dc6..860fa1616c 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: testcontainer,
@@ -28,14 +28,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt
index 75613db959..5af597f50a 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MySQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt
index 5900015d5a..f56dcad7d7 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: CosmosDB_PostgreSQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt
index 75613db959..5af597f50a 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MySQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt
index f7de35b7ae..63f411cdb2 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: PostgreSQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt
index 5900015d5a..f56dcad7d7 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: CosmosDB_PostgreSQL
},
@@ -24,14 +24,18 @@
UpdateRecord: true,
DeleteRecord: true,
ExecuteEntity: true,
+ AggregateRecords: true,
UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false
- }
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
},
Host: {
Cors: {
diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs
index bf12cd5199..99d7efc637 100644
--- a/src/Cli/Commands/ConfigureOptions.cs
+++ b/src/Cli/Commands/ConfigureOptions.cs
@@ -50,6 +50,7 @@ public ConfigureOptions(
bool? runtimeMcpDmlToolsUpdateRecordEnabled = null,
bool? runtimeMcpDmlToolsDeleteRecordEnabled = null,
bool? runtimeMcpDmlToolsExecuteEntityEnabled = null,
+ bool? runtimeMcpDmlToolsAggregateRecordsEnabled = null,
bool? runtimeCacheEnabled = null,
int? runtimeCacheTtl = null,
CompressionLevel? runtimeCompressionLevel = null,
@@ -111,6 +112,7 @@ public ConfigureOptions(
RuntimeMcpDmlToolsUpdateRecordEnabled = runtimeMcpDmlToolsUpdateRecordEnabled;
RuntimeMcpDmlToolsDeleteRecordEnabled = runtimeMcpDmlToolsDeleteRecordEnabled;
RuntimeMcpDmlToolsExecuteEntityEnabled = runtimeMcpDmlToolsExecuteEntityEnabled;
+ RuntimeMcpDmlToolsAggregateRecordsEnabled = runtimeMcpDmlToolsAggregateRecordsEnabled;
// Cache
RuntimeCacheEnabled = runtimeCacheEnabled;
RuntimeCacheTTL = runtimeCacheTtl;
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
index 0b2fd67066..d820e1b124 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
@@ -17,31 +17,6 @@
Path: /graphql,
AllowIntrospection: true
},
- Mcp: {
- Enabled: true,
- Path: /mcp,
- DmlTools: {
- AllToolsEnabled: true,
- DescribeEntities: true,
- CreateRecord: true,
- ReadRecords: true,
- UpdateRecord: true,
- DeleteRecord: true,
- ExecuteEntity: true,
- AggregateRecords: true,
- UserProvidedAllTools: false,
- UserProvidedDescribeEntities: false,
- UserProvidedCreateRecord: false,
- UserProvidedReadRecords: false,
- UserProvidedUpdateRecord: false,
- UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedPath: false,
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
- },
Host: {
Cors: {
Origins: [
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
index 5522043d3f..5320176e4c 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
@@ -13,31 +13,6 @@
Path: /graphql,
AllowIntrospection: true
},
- Mcp: {
- Enabled: true,
- Path: /mcp,
- DmlTools: {
- AllToolsEnabled: true,
- DescribeEntities: true,
- CreateRecord: true,
- ReadRecords: true,
- UpdateRecord: true,
- DeleteRecord: true,
- ExecuteEntity: true,
- AggregateRecords: true,
- UserProvidedAllTools: false,
- UserProvidedDescribeEntities: false,
- UserProvidedCreateRecord: false,
- UserProvidedReadRecords: false,
- UserProvidedUpdateRecord: false,
- UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedPath: false,
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
- },
Host: {
Cors: {
Origins: [
@@ -395,18 +370,6 @@
Object: books,
Type: Table
},
- Fields: [
- {
- Name: id,
- Alias: id,
- PrimaryKey: false
- },
- {
- Name: title,
- Alias: title,
- PrimaryKey: false
- }
- ],
GraphQL: {
Singular: book,
Plural: books,
@@ -768,6 +731,10 @@
]
}
],
+ Mappings: {
+ id: id,
+ title: title
+ },
Relationships: {
authors: {
Cardinality: Many,
@@ -799,41 +766,6 @@
}
}
},
- {
- Default_Books: {
- Source: {
- Object: default_books,
- Type: Table
- },
- GraphQL: {
- Singular: default_book,
- Plural: default_books,
- Enabled: true
- },
- Rest: {
- Enabled: true
- },
- Permissions: [
- {
- Role: anonymous,
- Actions: [
- {
- Action: Create
- },
- {
- Action: Read
- },
- {
- Action: Update
- },
- {
- Action: Delete
- }
- ]
- }
- ]
- }
- },
{
BookNF: {
Source: {
@@ -1172,13 +1104,6 @@
Object: type_table,
Type: Table
},
- Fields: [
- {
- Name: id,
- Alias: typeid,
- PrimaryKey: false
- }
- ],
GraphQL: {
Singular: SupportedType,
Plural: SupportedTypes,
@@ -1222,7 +1147,10 @@
}
]
}
- ]
+ ],
+ Mappings: {
+ id: typeid
+ }
}
},
{
@@ -1274,18 +1202,6 @@
Object: trees,
Type: Table
},
- Fields: [
- {
- Name: species,
- Alias: Scientific Name,
- PrimaryKey: false
- },
- {
- Name: region,
- Alias: United State's Region,
- PrimaryKey: false
- }
- ],
GraphQL: {
Singular: Tree,
Plural: Trees,
@@ -1329,7 +1245,11 @@
}
]
}
- ]
+ ],
+ Mappings: {
+ region: United State's Region,
+ species: Scientific Name
+ }
}
},
{
@@ -1338,13 +1258,6 @@
Object: trees,
Type: Table
},
- Fields: [
- {
- Name: species,
- Alias: fancyName,
- PrimaryKey: false
- }
- ],
GraphQL: {
Singular: Shrub,
Plural: Shrubs,
@@ -1390,6 +1303,9 @@
]
}
],
+ Mappings: {
+ species: fancyName
+ },
Relationships: {
fungus: {
TargetEntity: Fungus,
@@ -1409,13 +1325,6 @@
Object: fungi,
Type: Table
},
- Fields: [
- {
- Name: spores,
- Alias: hazards,
- PrimaryKey: false
- }
- ],
GraphQL: {
Singular: fungus,
Plural: fungi,
@@ -1476,8 +1385,11 @@
]
}
],
+ Mappings: {
+ spores: hazards
+ },
Relationships: {
- Shrub: {
+ shrub: {
TargetEntity: Shrub,
SourceFields: [
habitat
@@ -1493,14 +1405,11 @@
books_view_all: {
Source: {
Object: books_view_all,
- Type: View
+ Type: View,
+ KeyFields: [
+ id
+ ]
},
- Fields: [
- {
- Name: id,
- PrimaryKey: true
- }
- ],
GraphQL: {
Singular: books_view_all,
Plural: books_view_alls,
@@ -1542,15 +1451,11 @@
books_view_with_mapping: {
Source: {
Object: books_view_with_mapping,
- Type: View
+ Type: View,
+ KeyFields: [
+ id
+ ]
},
- Fields: [
- {
- Name: id,
- Alias: book_id,
- PrimaryKey: true
- }
- ],
GraphQL: {
Singular: books_view_with_mapping,
Plural: books_view_with_mappings,
@@ -1568,25 +1473,22 @@
}
]
}
- ]
+ ],
+ Mappings: {
+ id: book_id
+ }
}
},
{
stocks_view_selected: {
Source: {
Object: stocks_view_selected,
- Type: View
+ Type: View,
+ KeyFields: [
+ categoryid,
+ pieceid
+ ]
},
- Fields: [
- {
- Name: categoryid,
- PrimaryKey: true
- },
- {
- Name: pieceid,
- PrimaryKey: true
- }
- ],
GraphQL: {
Singular: stocks_view_selected,
Plural: stocks_view_selecteds,
@@ -1628,18 +1530,12 @@
books_publishers_view_composite: {
Source: {
Object: books_publishers_view_composite,
- Type: View
+ Type: View,
+ KeyFields: [
+ id,
+ pub_id
+ ]
},
- Fields: [
- {
- Name: id,
- PrimaryKey: true
- },
- {
- Name: pub_id,
- PrimaryKey: true
- }
- ],
GraphQL: {
Singular: books_publishers_view_composite,
Plural: books_publishers_view_composites,
@@ -1893,28 +1789,6 @@
Object: aow,
Type: Table
},
- Fields: [
- {
- Name: DetailAssessmentAndPlanning,
- Alias: 始計,
- PrimaryKey: false
- },
- {
- Name: WagingWar,
- Alias: 作戰,
- PrimaryKey: false
- },
- {
- Name: StrategicAttack,
- Alias: 謀攻,
- PrimaryKey: false
- },
- {
- Name: NoteNum,
- Alias: ┬─┬ノ( º _ ºノ),
- PrimaryKey: false
- }
- ],
GraphQL: {
Singular: ArtOfWar,
Plural: ArtOfWars,
@@ -1940,7 +1814,13 @@
}
]
}
- ]
+ ],
+ Mappings: {
+ DetailAssessmentAndPlanning: 始計,
+ NoteNum: ┬─┬ノ( º _ ºノ),
+ StrategicAttack: 謀攻,
+ WagingWar: 作戰
+ }
}
},
{
@@ -2154,18 +2034,6 @@
Object: GQLmappings,
Type: Table
},
- Fields: [
- {
- Name: __column1,
- Alias: column1,
- PrimaryKey: false
- },
- {
- Name: __column2,
- Alias: column2,
- PrimaryKey: false
- }
- ],
GraphQL: {
Singular: GQLmappings,
Plural: GQLmappings,
@@ -2191,7 +2059,11 @@
}
]
}
- ]
+ ],
+ Mappings: {
+ __column1: column1,
+ __column2: column2
+ }
}
},
{
@@ -2234,18 +2106,6 @@
Object: mappedbookmarks,
Type: Table
},
- Fields: [
- {
- Name: id,
- Alias: bkid,
- PrimaryKey: false
- },
- {
- Name: bkname,
- Alias: name,
- PrimaryKey: false
- }
- ],
GraphQL: {
Singular: MappedBookmarks,
Plural: MappedBookmarks,
@@ -2271,7 +2131,11 @@
}
]
}
- ]
+ ],
+ Mappings: {
+ bkname: name,
+ id: bkid
+ }
}
},
{
@@ -2324,6 +2188,9 @@
Exclude: [
current_date,
next_date
+ ],
+ Include: [
+ *
]
}
},
@@ -2360,7 +2227,16 @@
Role: anonymous,
Actions: [
{
- Action: *
+ Action: Read
+ },
+ {
+ Action: Create
+ },
+ {
+ Action: Update
+ },
+ {
+ Action: Delete
}
]
}
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
index b52c59df32..5d8dc31646 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
@@ -13,31 +13,6 @@
Path: /graphql,
AllowIntrospection: true
},
- Mcp: {
- Enabled: true,
- Path: /mcp,
- DmlTools: {
- AllToolsEnabled: true,
- DescribeEntities: true,
- CreateRecord: true,
- ReadRecords: true,
- UpdateRecord: true,
- DeleteRecord: true,
- ExecuteEntity: true,
- AggregateRecords: true,
- UserProvidedAllTools: false,
- UserProvidedDescribeEntities: false,
- UserProvidedCreateRecord: false,
- UserProvidedReadRecords: false,
- UserProvidedUpdateRecord: false,
- UserProvidedDeleteRecord: false,
- UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedPath: false,
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
- },
Host: {
Cors: {
Origins: [
From 7b85658dac307966f79e3bc70f0b3d0aa5b5a79f Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Thu, 5 Mar 2026 18:21:10 +0530
Subject: [PATCH 32/43] Add AggregateRecords and query-timeout properties to
Service.Tests snapshots
---
...ReadingRuntimeConfigForCosmos.verified.txt | 24 ++
...tReadingRuntimeConfigForMsSql.verified.txt | 1 -
...tReadingRuntimeConfigForMySql.verified.txt | 267 +++++++++++++-----
...ingRuntimeConfigForPostgreSql.verified.txt | 24 ++
4 files changed, 243 insertions(+), 73 deletions(-)
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
index d820e1b124..15f242605f 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
@@ -17,6 +17,30 @@
Path: /graphql,
AllowIntrospection: true
},
+ Mcp: {
+ Enabled: true,
+ Path: /mcp,
+ DmlTools: {
+ AllToolsEnabled: true,
+ DescribeEntities: true,
+ CreateRecord: true,
+ ReadRecords: true,
+ UpdateRecord: true,
+ DeleteRecord: true,
+ ExecuteEntity: true,
+ AggregateRecords: true,
+ UserProvidedAllTools: false,
+ UserProvidedDescribeEntities: false,
+ UserProvidedCreateRecord: false,
+ UserProvidedReadRecords: false,
+ UserProvidedUpdateRecord: false,
+ UserProvidedDeleteRecord: false,
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
+ },
Host: {
Cors: {
Origins: [
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
index 4ee73b2b4a..966af2777f 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
@@ -42,7 +42,6 @@
UserProvidedExecuteEntity: false,
UserProvidedAggregateRecords: false
},
- UserProvidedPath: false,
UserProvidedQueryTimeout: false,
EffectiveQueryTimeoutSeconds: 30
},
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
index 5320176e4c..0779215cd0 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
@@ -13,6 +13,30 @@
Path: /graphql,
AllowIntrospection: true
},
+ Mcp: {
+ Enabled: true,
+ Path: /mcp,
+ DmlTools: {
+ AllToolsEnabled: true,
+ DescribeEntities: true,
+ CreateRecord: true,
+ ReadRecords: true,
+ UpdateRecord: true,
+ DeleteRecord: true,
+ ExecuteEntity: true,
+ AggregateRecords: true,
+ UserProvidedAllTools: false,
+ UserProvidedDescribeEntities: false,
+ UserProvidedCreateRecord: false,
+ UserProvidedReadRecords: false,
+ UserProvidedUpdateRecord: false,
+ UserProvidedDeleteRecord: false,
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
+ },
Host: {
Cors: {
Origins: [
@@ -370,6 +394,18 @@
Object: books,
Type: Table
},
+ Fields: [
+ {
+ Name: id,
+ Alias: id,
+ PrimaryKey: false
+ },
+ {
+ Name: title,
+ Alias: title,
+ PrimaryKey: false
+ }
+ ],
GraphQL: {
Singular: book,
Plural: books,
@@ -731,10 +767,6 @@
]
}
],
- Mappings: {
- id: id,
- title: title
- },
Relationships: {
authors: {
Cardinality: Many,
@@ -766,6 +798,41 @@
}
}
},
+ {
+ Default_Books: {
+ Source: {
+ Object: default_books,
+ Type: Table
+ },
+ GraphQL: {
+ Singular: default_book,
+ Plural: default_books,
+ Enabled: true
+ },
+ Rest: {
+ Enabled: true
+ },
+ Permissions: [
+ {
+ Role: anonymous,
+ Actions: [
+ {
+ Action: Create
+ },
+ {
+ Action: Read
+ },
+ {
+ Action: Update
+ },
+ {
+ Action: Delete
+ }
+ ]
+ }
+ ]
+ }
+ },
{
BookNF: {
Source: {
@@ -1104,6 +1171,13 @@
Object: type_table,
Type: Table
},
+ Fields: [
+ {
+ Name: id,
+ Alias: typeid,
+ PrimaryKey: false
+ }
+ ],
GraphQL: {
Singular: SupportedType,
Plural: SupportedTypes,
@@ -1147,10 +1221,7 @@
}
]
}
- ],
- Mappings: {
- id: typeid
- }
+ ]
}
},
{
@@ -1202,6 +1273,18 @@
Object: trees,
Type: Table
},
+ Fields: [
+ {
+ Name: species,
+ Alias: Scientific Name,
+ PrimaryKey: false
+ },
+ {
+ Name: region,
+ Alias: United State's Region,
+ PrimaryKey: false
+ }
+ ],
GraphQL: {
Singular: Tree,
Plural: Trees,
@@ -1245,11 +1328,7 @@
}
]
}
- ],
- Mappings: {
- region: United State's Region,
- species: Scientific Name
- }
+ ]
}
},
{
@@ -1258,6 +1337,13 @@
Object: trees,
Type: Table
},
+ Fields: [
+ {
+ Name: species,
+ Alias: fancyName,
+ PrimaryKey: false
+ }
+ ],
GraphQL: {
Singular: Shrub,
Plural: Shrubs,
@@ -1303,9 +1389,6 @@
]
}
],
- Mappings: {
- species: fancyName
- },
Relationships: {
fungus: {
TargetEntity: Fungus,
@@ -1325,6 +1408,13 @@
Object: fungi,
Type: Table
},
+ Fields: [
+ {
+ Name: spores,
+ Alias: hazards,
+ PrimaryKey: false
+ }
+ ],
GraphQL: {
Singular: fungus,
Plural: fungi,
@@ -1385,11 +1475,8 @@
]
}
],
- Mappings: {
- spores: hazards
- },
Relationships: {
- shrub: {
+ Shrub: {
TargetEntity: Shrub,
SourceFields: [
habitat
@@ -1405,11 +1492,14 @@
books_view_all: {
Source: {
Object: books_view_all,
- Type: View,
- KeyFields: [
- id
- ]
+ Type: View
},
+ Fields: [
+ {
+ Name: id,
+ PrimaryKey: true
+ }
+ ],
GraphQL: {
Singular: books_view_all,
Plural: books_view_alls,
@@ -1451,11 +1541,15 @@
books_view_with_mapping: {
Source: {
Object: books_view_with_mapping,
- Type: View,
- KeyFields: [
- id
- ]
+ Type: View
},
+ Fields: [
+ {
+ Name: id,
+ Alias: book_id,
+ PrimaryKey: true
+ }
+ ],
GraphQL: {
Singular: books_view_with_mapping,
Plural: books_view_with_mappings,
@@ -1473,22 +1567,25 @@
}
]
}
- ],
- Mappings: {
- id: book_id
- }
+ ]
}
},
{
stocks_view_selected: {
Source: {
Object: stocks_view_selected,
- Type: View,
- KeyFields: [
- categoryid,
- pieceid
- ]
+ Type: View
},
+ Fields: [
+ {
+ Name: categoryid,
+ PrimaryKey: true
+ },
+ {
+ Name: pieceid,
+ PrimaryKey: true
+ }
+ ],
GraphQL: {
Singular: stocks_view_selected,
Plural: stocks_view_selecteds,
@@ -1530,12 +1627,18 @@
books_publishers_view_composite: {
Source: {
Object: books_publishers_view_composite,
- Type: View,
- KeyFields: [
- id,
- pub_id
- ]
+ Type: View
},
+ Fields: [
+ {
+ Name: id,
+ PrimaryKey: true
+ },
+ {
+ Name: pub_id,
+ PrimaryKey: true
+ }
+ ],
GraphQL: {
Singular: books_publishers_view_composite,
Plural: books_publishers_view_composites,
@@ -1789,6 +1892,28 @@
Object: aow,
Type: Table
},
+ Fields: [
+ {
+ Name: DetailAssessmentAndPlanning,
+ Alias: 始計,
+ PrimaryKey: false
+ },
+ {
+ Name: WagingWar,
+ Alias: 作戰,
+ PrimaryKey: false
+ },
+ {
+ Name: StrategicAttack,
+ Alias: 謀攻,
+ PrimaryKey: false
+ },
+ {
+ Name: NoteNum,
+ Alias: ┬─┬ノ( º _ ºノ),
+ PrimaryKey: false
+ }
+ ],
GraphQL: {
Singular: ArtOfWar,
Plural: ArtOfWars,
@@ -1814,13 +1939,7 @@
}
]
}
- ],
- Mappings: {
- DetailAssessmentAndPlanning: 始計,
- NoteNum: ┬─┬ノ( º _ ºノ),
- StrategicAttack: 謀攻,
- WagingWar: 作戰
- }
+ ]
}
},
{
@@ -2034,6 +2153,18 @@
Object: GQLmappings,
Type: Table
},
+ Fields: [
+ {
+ Name: __column1,
+ Alias: column1,
+ PrimaryKey: false
+ },
+ {
+ Name: __column2,
+ Alias: column2,
+ PrimaryKey: false
+ }
+ ],
GraphQL: {
Singular: GQLmappings,
Plural: GQLmappings,
@@ -2059,11 +2190,7 @@
}
]
}
- ],
- Mappings: {
- __column1: column1,
- __column2: column2
- }
+ ]
}
},
{
@@ -2106,6 +2233,18 @@
Object: mappedbookmarks,
Type: Table
},
+ Fields: [
+ {
+ Name: id,
+ Alias: bkid,
+ PrimaryKey: false
+ },
+ {
+ Name: bkname,
+ Alias: name,
+ PrimaryKey: false
+ }
+ ],
GraphQL: {
Singular: MappedBookmarks,
Plural: MappedBookmarks,
@@ -2131,11 +2270,7 @@
}
]
}
- ],
- Mappings: {
- bkname: name,
- id: bkid
- }
+ ]
}
},
{
@@ -2188,9 +2323,6 @@
Exclude: [
current_date,
next_date
- ],
- Include: [
- *
]
}
},
@@ -2227,16 +2359,7 @@
Role: anonymous,
Actions: [
{
- Action: Read
- },
- {
- Action: Create
- },
- {
- Action: Update
- },
- {
- Action: Delete
+ Action: *
}
]
}
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
index 5d8dc31646..75077c22fa 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
@@ -13,6 +13,30 @@
Path: /graphql,
AllowIntrospection: true
},
+ Mcp: {
+ Enabled: true,
+ Path: /mcp,
+ DmlTools: {
+ AllToolsEnabled: true,
+ DescribeEntities: true,
+ CreateRecord: true,
+ ReadRecords: true,
+ UpdateRecord: true,
+ DeleteRecord: true,
+ ExecuteEntity: true,
+ AggregateRecords: true,
+ UserProvidedAllTools: false,
+ UserProvidedDescribeEntities: false,
+ UserProvidedCreateRecord: false,
+ UserProvidedReadRecords: false,
+ UserProvidedUpdateRecord: false,
+ UserProvidedDeleteRecord: false,
+ UserProvidedExecuteEntity: false,
+ UserProvidedAggregateRecords: false
+ },
+ UserProvidedQueryTimeout: false,
+ EffectiveQueryTimeoutSeconds: 30
+ },
Host: {
Cors: {
Origins: [
From 46134de27a1fd4e56c444eaf2f7fb41b805ee5ba Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Fri, 6 Mar 2026 14:41:22 +0530
Subject: [PATCH 33/43] Fix initial Github Copilot AI reviews
---
.../BuiltInTools/AggregateRecordsTool.cs | 74 +++++++++++++++----
src/Cli/ConfigGenerator.cs | 21 +++---
.../McpRuntimeOptionsConverterFactory.cs | 5 +-
.../Mcp/AggregateRecordsToolTests.cs | 73 +++++++++---------
.../UnitTests/AggregateRecordsToolTests.cs | 45 -----------
5 files changed, 112 insertions(+), 106 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 2d7e8e19d8..0ecdab7413 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -209,7 +209,21 @@ public async Task ExecuteAsync(
string? filter = root.TryGetProperty("filter", out JsonElement filterElement) ? filterElement.GetString() : null;
bool userProvidedOrderby = root.TryGetProperty("orderby", out JsonElement orderbyElement) && !string.IsNullOrWhiteSpace(orderbyElement.GetString());
- string orderby = userProvidedOrderby ? (orderbyElement.GetString() ?? "desc") : "desc";
+ string orderby = "desc";
+ if (userProvidedOrderby)
+ {
+ string normalizedOrderby = (orderbyElement.GetString() ?? string.Empty).Trim().ToLowerInvariant();
+ if (normalizedOrderby != "asc" && normalizedOrderby != "desc")
+ {
+ return McpResponseBuilder.BuildErrorResult(
+ toolName,
+ "InvalidArguments",
+ $"Argument 'orderby' must be either 'asc' or 'desc' when provided. Got: '{orderbyElement.GetString()}'.",
+ logger);
+ }
+
+ orderby = normalizedOrderby;
+ }
int? first = null;
if (root.TryGetProperty("first", out JsonElement firstElement) && firstElement.ValueKind == JsonValueKind.Number)
@@ -418,6 +432,13 @@ public async Task ExecuteAsync(
// Get database-specific components
DatabaseType databaseType = runtimeConfig.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType;
+ // Aggregation is only supported for tables and views, not stored procedures.
+ if (dbObject.SourceType != EntitySourceType.Table && dbObject.SourceType != EntitySourceType.View)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidEntity",
+ $"Entity '{entityName}' is not a table or view. Aggregation is not supported for stored procedures. Use 'execute_entity' for stored procedures.", logger);
+ }
+
// Aggregation is only supported for MsSql/DWSQL (matching engine's GraphQL aggregation support)
if (databaseType != DatabaseType.MSSQL && databaseType != DatabaseType.DWSQL)
{
@@ -441,10 +462,13 @@ public async Task ExecuteAsync(
// making COUNT(pk) equivalent to COUNT(*). The engine's Build(AggregationColumn)
// does not support "*" as a column name (it would produce invalid SQL like count([].[*])).
SourceDefinition sourceDefinition = sqlMetadataProvider.GetSourceDefinition(entityName);
- if (sourceDefinition.PrimaryKey.Count > 0)
+ if (sourceDefinition.PrimaryKey.Count == 0)
{
- backingField = sourceDefinition.PrimaryKey[0];
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidEntity",
+ $"Entity '{entityName}' has no primary key defined. COUNT(*) requires at least one primary key column.", logger);
}
+
+ backingField = sourceDefinition.PrimaryKey[0];
}
// Resolve backing column names for groupby fields (already validated early)
@@ -623,10 +647,7 @@ public async Task ExecuteAsync(
return McpResponseBuilder.BuildErrorResult(
toolName,
"TimeoutError",
- $"The aggregation query for entity '{entityName}' timed out. "
- + "This is NOT a tool error. The database did not respond in time. "
- + "This may occur with large datasets or complex aggregations. "
- + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.",
+ BuildTimeoutErrorMessage(entityName),
logger);
}
catch (TaskCanceledException taskCanceledException)
@@ -635,9 +656,7 @@ public async Task ExecuteAsync(
return McpResponseBuilder.BuildErrorResult(
toolName,
"TimeoutError",
- $"The aggregation query for entity '{entityName}' was canceled, likely due to a timeout. "
- + "This is NOT a tool error. The database did not respond in time. "
- + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.",
+ BuildTaskCanceledErrorMessage(entityName),
logger);
}
catch (OperationCanceledException)
@@ -646,9 +665,7 @@ public async Task ExecuteAsync(
return McpResponseBuilder.BuildErrorResult(
toolName,
"OperationCanceled",
- $"The aggregation query for entity '{entityName}' was canceled before completion. "
- + "This is NOT a tool error. The operation was interrupted, possibly due to a timeout or client disconnect. "
- + "No results were returned. You may retry the same request.",
+ BuildOperationCanceledErrorMessage(entityName),
logger);
}
catch (DbException dbException)
@@ -779,5 +796,36 @@ private static CallToolResult BuildSimpleResponse(
logger,
$"AggregateRecordsTool success for entity {entityName}.");
}
+
+ ///
+ /// Builds the error message for a TimeoutException during aggregation.
+ ///
+ internal static string BuildTimeoutErrorMessage(string entityName)
+ {
+ return $"The aggregation query for entity '{entityName}' timed out. "
+ + "This is NOT a tool error. The database did not respond in time. "
+ + "This may occur with large datasets or complex aggregations. "
+ + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
+ }
+
+ ///
+ /// Builds the error message for a TaskCanceledException during aggregation (typically a timeout).
+ ///
+ internal static string BuildTaskCanceledErrorMessage(string entityName)
+ {
+ return $"The aggregation query for entity '{entityName}' was canceled, likely due to a timeout. "
+ + "This is NOT a tool error. The database did not respond in time. "
+ + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
+ }
+
+ ///
+ /// Builds the error message for an OperationCanceledException during aggregation.
+ ///
+ internal static string BuildOperationCanceledErrorMessage(string entityName)
+ {
+ return $"The aggregation query for entity '{entityName}' was canceled before completion. "
+ + "This is NOT a tool error. The operation was interrupted, possibly due to a timeout or client disconnect. "
+ + "No results were returned. You may retry the same request.";
+ }
}
}
diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs
index 2dfe14796a..3810385177 100644
--- a/src/Cli/ConfigGenerator.cs
+++ b/src/Cli/ConfigGenerator.cs
@@ -1167,7 +1167,7 @@ private static bool TryUpdateConfiguredMcpValues(
updatedValue = options?.RuntimeMcpQueryTimeout;
if (updatedValue != null)
{
- updatedMcpOptions = updatedMcpOptions! with { QueryTimeout = (int)updatedValue };
+ updatedMcpOptions = updatedMcpOptions! with { QueryTimeout = (int)updatedValue, UserProvidedQueryTimeout = true };
_logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.QueryTimeout as '{updatedValue}'", updatedValue);
}
@@ -1253,17 +1253,14 @@ private static bool TryUpdateConfiguredMcpValues(
{
updatedMcpOptions = updatedMcpOptions! with
{
- DmlTools = new DmlToolsConfig
- {
- AllToolsEnabled = false,
- DescribeEntities = describeEntities,
- CreateRecord = createRecord,
- ReadRecords = readRecord,
- UpdateRecord = updateRecord,
- DeleteRecord = deleteRecord,
- ExecuteEntity = executeEntity,
- AggregateRecords = aggregateRecords
- }
+ DmlTools = new DmlToolsConfig(
+ describeEntities: describeEntities,
+ createRecord: createRecord,
+ readRecords: readRecord,
+ updateRecord: updateRecord,
+ deleteRecord: deleteRecord,
+ executeEntity: executeEntity,
+ aggregateRecords: aggregateRecords)
};
}
diff --git a/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs b/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs
index ad4edc229e..6329236aa8 100644
--- a/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs
+++ b/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs
@@ -159,8 +159,9 @@ public override void Write(Utf8JsonWriter writer, McpRuntimeOptions value, JsonS
JsonSerializer.Serialize(writer, value.Description, options);
}
- // Write query-timeout if it's user provided
- if (value?.UserProvidedQueryTimeout is true && value.QueryTimeout.HasValue)
+ // Write query-timeout whenever a value is present (null = not specified = use default).
+ // This covers both constructor-set (deserialization) and 'with' expression (CLI update) paths.
+ if (value?.QueryTimeout.HasValue is true)
{
writer.WriteNumber("query-timeout", value.QueryTimeout.Value);
}
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index bab8d68f2c..7fa97fa2fe 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -248,6 +248,21 @@ public async Task AggregateRecords_OrderByWithoutGroupBy_ReturnsInvalidArguments
Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("groupby"));
}
+ [TestMethod]
+ public async Task AggregateRecords_InvalidOrderByValue_ReturnsInvalidArguments()
+ {
+ RuntimeConfig config = CreateConfig();
+ IServiceProvider sp = CreateServiceProvider(config);
+ AggregateRecordsTool tool = new();
+
+ JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"orderby\": \"ascending\"}");
+ CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ Assert.IsTrue(result.IsError == true);
+ JsonElement content = ParseContent(result);
+ Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("'asc' or 'desc'"));
+ }
+
[TestMethod]
public async Task AggregateRecords_UnsupportedHavingOperator_ReturnsInvalidArguments()
{
@@ -458,83 +473,73 @@ public async Task AggregateRecords_OperationCanceled_ReturnsExplicitCanceledMess
///
/// Verifies that the timeout error message provides explicit guidance to the model
- /// about what happened and what to do next.
+ /// about what happened and what to do next, using the production message builder.
///
[TestMethod]
public void TimeoutErrorMessage_ContainsModelGuidance()
{
- // Simulate what the tool builds for a TimeoutException response
string entityName = "Product";
- string expectedMessage = $"The aggregation query for entity '{entityName}' timed out. "
- + "This is NOT a tool error. The database did not respond in time. "
- + "This may occur with large datasets or complex aggregations. "
- + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
+ string message = AggregateRecordsTool.BuildTimeoutErrorMessage(entityName);
// Verify message explicitly states it's NOT a tool error
- Assert.IsTrue(expectedMessage.Contains("NOT a tool error"), "Timeout message must state this is NOT a tool error.");
+ Assert.IsTrue(message.Contains("NOT a tool error"), "Timeout message must state this is NOT a tool error.");
// Verify message explains the cause
- Assert.IsTrue(expectedMessage.Contains("database did not respond"), "Timeout message must explain the database didn't respond.");
+ Assert.IsTrue(message.Contains("database did not respond"), "Timeout message must explain the database didn't respond.");
// Verify message mentions large datasets
- Assert.IsTrue(expectedMessage.Contains("large datasets"), "Timeout message must mention large datasets as a possible cause.");
+ Assert.IsTrue(message.Contains("large datasets"), "Timeout message must mention large datasets as a possible cause.");
// Verify message provides actionable remediation steps
- Assert.IsTrue(expectedMessage.Contains("filter"), "Timeout message must suggest using a filter.");
- Assert.IsTrue(expectedMessage.Contains("groupby"), "Timeout message must suggest reducing groupby fields.");
- Assert.IsTrue(expectedMessage.Contains("first"), "Timeout message must suggest using pagination with first.");
+ Assert.IsTrue(message.Contains("filter"), "Timeout message must suggest using a filter.");
+ Assert.IsTrue(message.Contains("groupby"), "Timeout message must suggest reducing groupby fields.");
+ Assert.IsTrue(message.Contains("first"), "Timeout message must suggest using pagination with first.");
}
///
/// Verifies that TaskCanceledException (which typically signals HTTP/DB timeout)
- /// produces a TimeoutError, not a cancellation error.
+ /// produces a message referencing timeout, using the production message builder.
///
[TestMethod]
public void TaskCanceledErrorMessage_ContainsTimeoutGuidance()
{
- // Simulate what the tool builds for a TaskCanceledException response
string entityName = "Product";
- string expectedMessage = $"The aggregation query for entity '{entityName}' was canceled, likely due to a timeout. "
- + "This is NOT a tool error. The database did not respond in time. "
- + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
+ string message = AggregateRecordsTool.BuildTaskCanceledErrorMessage(entityName);
- // TaskCanceledException should produce a TimeoutError, not OperationCanceled
- Assert.IsTrue(expectedMessage.Contains("NOT a tool error"), "TaskCanceled message must state this is NOT a tool error.");
- Assert.IsTrue(expectedMessage.Contains("timeout"), "TaskCanceled message must reference timeout as the cause.");
- Assert.IsTrue(expectedMessage.Contains("filter"), "TaskCanceled message must suggest filter as remediation.");
- Assert.IsTrue(expectedMessage.Contains("first"), "TaskCanceled message must suggest first for pagination.");
+ // TaskCanceledException should produce a message referencing timeout
+ Assert.IsTrue(message.Contains("NOT a tool error"), "TaskCanceled message must state this is NOT a tool error.");
+ Assert.IsTrue(message.Contains("timeout"), "TaskCanceled message must reference timeout as the cause.");
+ Assert.IsTrue(message.Contains("filter"), "TaskCanceled message must suggest filter as remediation.");
+ Assert.IsTrue(message.Contains("first"), "TaskCanceled message must suggest first for pagination.");
}
///
/// Verifies that the OperationCanceled error message for a specific entity
- /// includes the entity name so the model knows which aggregation failed.
+ /// includes the entity name so the model knows which aggregation failed,
+ /// using the production message builder.
///
[TestMethod]
public void CanceledErrorMessage_IncludesEntityName()
{
string entityName = "LargeProductCatalog";
- string expectedMessage = $"The aggregation query for entity '{entityName}' was canceled before completion. "
- + "This is NOT a tool error. The operation was interrupted, possibly due to a timeout or client disconnect. "
- + "No results were returned. You may retry the same request.";
+ string message = AggregateRecordsTool.BuildOperationCanceledErrorMessage(entityName);
- Assert.IsTrue(expectedMessage.Contains(entityName), "Canceled message must include the entity name.");
- Assert.IsTrue(expectedMessage.Contains("No results were returned"), "Canceled message must state no results were returned.");
+ Assert.IsTrue(message.Contains(entityName), "Canceled message must include the entity name.");
+ Assert.IsTrue(message.Contains("No results were returned"), "Canceled message must state no results were returned.");
}
///
/// Verifies that the timeout error message for a specific entity
- /// includes the entity name so the model knows which aggregation timed out.
+ /// includes the entity name so the model knows which aggregation timed out,
+ /// using the production message builder.
///
[TestMethod]
public void TimeoutErrorMessage_IncludesEntityName()
{
string entityName = "HugeTransactionLog";
- string expectedMessage = $"The aggregation query for entity '{entityName}' timed out. "
- + "This is NOT a tool error. The database did not respond in time. "
- + "This may occur with large datasets or complex aggregations. "
- + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
+ string message = AggregateRecordsTool.BuildTimeoutErrorMessage(entityName);
- Assert.IsTrue(expectedMessage.Contains(entityName), "Timeout message must include the entity name.");
+ Assert.IsTrue(message.Contains(entityName), "Timeout message must include the entity name.");
}
#endregion
diff --git a/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs b/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
index c291d87660..92f2c68a63 100644
--- a/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
@@ -86,51 +86,6 @@ public void DecodeCursorOffset_NegativeValue_ReturnsZero()
#endregion
- #region Validation logic tests
-
- [TestMethod]
- [DataRow("avg", "Validation: avg with star field should be rejected")]
- [DataRow("sum", "Validation: sum with star field should be rejected")]
- [DataRow("min", "Validation: min with star field should be rejected")]
- [DataRow("max", "Validation: max with star field should be rejected")]
- public void ValidateFieldFunctionCompat_StarWithNumericFunction_IsInvalid(string function, string description)
- {
- bool isCountStar = function == "count" && "*" == "*";
- bool isInvalidStarUsage = "*" == "*" && function != "count";
-
- Assert.IsFalse(isCountStar, $"{description}: should not be count-star");
- Assert.IsTrue(isInvalidStarUsage, $"{description}: should be identified as invalid star usage");
- }
-
- [TestMethod]
- public void ValidateFieldFunctionCompat_CountStar_IsValid()
- {
- bool isCountStar = "count" == "count" && "*" == "*";
- Assert.IsTrue(isCountStar, "count(*) should be valid");
- }
-
- [TestMethod]
- public void ValidateDistinctCountStar_IsInvalid()
- {
- bool isCountStar = "count" == "count" && "*" == "*";
- bool distinct = true;
-
- bool shouldReject = isCountStar && distinct;
- Assert.IsTrue(shouldReject, "count(*) with distinct=true should be rejected");
- }
-
- [TestMethod]
- public void ValidateDistinctCountField_IsValid()
- {
- bool isCountStar = "count" == "count" && "userId" == "*";
- bool distinct = true;
-
- bool shouldReject = isCountStar && distinct;
- Assert.IsFalse(shouldReject, "count(field) with distinct=true should be valid");
- }
-
- #endregion
-
#region Blog scenario tests - alias and type validation
///
From e3ee23824bdb1cd241b768a3e8db97e46ad7bde9 Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Fri, 6 Mar 2026 15:52:51 +0530
Subject: [PATCH 34/43] Refactor core tool logic and tests
---
.../BuiltInTools/AggregateRecordsTool.cs | 1061 ++++++++++-------
.../BuiltInTools/AggregateRecordsTool.md | 51 -
.../Mcp/AggregateRecordsToolTests.cs | 1007 +++++-----------
.../EntityLevelDmlToolConfigurationTests.cs | 287 +----
src/Service.Tests/Mcp/McpQueryTimeoutTests.cs | 92 +-
.../UnitTests/AggregateRecordsToolTests.cs | 150 +--
6 files changed, 1029 insertions(+), 1619 deletions(-)
delete mode 100644 src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 0ecdab7413..0225d43731 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -123,6 +123,31 @@ public class AggregateRecordsTool : IMcpTool
)
};
+ ///
+ /// Holds all validated arguments parsed from the tool invocation.
+ ///
+ internal sealed record AggregateArguments(
+ string EntityName,
+ string Function,
+ string Field,
+ bool IsCountStar,
+ bool Distinct,
+ string? Filter,
+ bool UserProvidedOrderby,
+ string Orderby,
+ int? First,
+ string? After,
+ List<string> Groupby,
+ Dictionary<string, double>? HavingOperators,
+ List<double>? HavingInValues);
+
+ ///
+ /// Holds the result of a successful authorization and context-building step.
+ ///
+ private sealed record AuthorizedContext(
+ FindRequestContext RequestContext,
+ HttpContext HttpContext);
+
public Tool GetToolMetadata()
{
return _cachedToolMetadata;
@@ -135,6 +160,7 @@ public async Task ExecuteAsync(
{
ILogger? logger = serviceProvider.GetService<ILogger<AggregateRecordsTool>>();
string toolName = GetToolMetadata().Name;
+ string entityName = string.Empty;
RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService<RuntimeConfigProvider>();
RuntimeConfig runtimeConfig = runtimeConfigProvider.GetConfig();
@@ -144,550 +170,739 @@ public async Task ExecuteAsync(
return McpErrorHelpers.ToolDisabled(toolName, logger);
}
- string entityName = string.Empty;
-
try
{
cancellationToken.ThrowIfCancellationRequested();
- if (arguments == null)
+ // 1. Parse and validate all input arguments
+ CallToolResult? parseError = TryParseAndValidateArguments(arguments, runtimeConfig, toolName, out AggregateArguments args, logger);
+ if (parseError != null)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "No arguments provided.", logger);
+ return parseError;
}
- JsonElement root = arguments.RootElement;
+ entityName = args.EntityName;
- // Parse required arguments
- if (!McpArgumentParser.TryParseEntity(root, out string parsedEntityName, out string parseError))
+ // 2. Resolve metadata and validate entity source type
+ if (!McpMetadataHelper.TryResolveMetadata(
+ entityName, runtimeConfig, serviceProvider,
+ out ISqlMetadataProvider sqlMetadataProvider,
+ out DatabaseObject dbObject,
+ out string dataSourceName,
+ out string metadataError))
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", parseError, logger);
+ return McpResponseBuilder.BuildErrorResult(toolName, "EntityNotFound", metadataError, logger);
}
- entityName = parsedEntityName;
-
- if (runtimeConfig.Entities?.TryGetValue(entityName, out Entity? entity) == true &&
- entity.Mcp?.DmlToolEnabled == false)
+ CallToolResult? sourceTypeError = ValidateEntitySourceType(entityName, dbObject, toolName, logger);
+ if (sourceTypeError != null)
{
- return McpErrorHelpers.ToolDisabled(toolName, logger, $"DML tools are disabled for entity '{entityName}'.");
+ return sourceTypeError;
}
- if (!root.TryGetProperty("function", out JsonElement functionElement) || string.IsNullOrWhiteSpace(functionElement.GetString()))
+ // 3. Early field validation: check all user-supplied field names before authorization or query building
+ CallToolResult? fieldError = ValidateFieldsExist(args, entityName, sqlMetadataProvider, toolName, logger);
+ if (fieldError != null)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Missing required argument 'function'.", logger);
+ return fieldError;
}
- string function = functionElement.GetString()!.ToLowerInvariant();
- if (!_validFunctions.Contains(function))
+ // 4. Authorize the request and build the query context
+ (AuthorizedContext? authCtx, CallToolResult? authError) = await AuthorizeRequestAsync(
+ args, entityName, dbObject, serviceProvider, runtimeConfigProvider, sqlMetadataProvider, toolName, logger);
+ if (authError != null)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", $"Invalid function '{function}'. Must be one of: count, avg, sum, min, max.", logger);
+ return authError;
}
- if (!root.TryGetProperty("field", out JsonElement fieldElement) || string.IsNullOrWhiteSpace(fieldElement.GetString()))
+ // 5. Validate database type support
+ DatabaseType databaseType = runtimeConfig.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType;
+ if (databaseType != DatabaseType.MSSQL && databaseType != DatabaseType.DWSQL)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Missing required argument 'field'.", logger);
+ return McpResponseBuilder.BuildErrorResult(toolName, "UnsupportedDatabase",
+ $"Aggregation is not supported for database type '{databaseType}'. Aggregation is only available for Azure SQL, SQL Server, and SQL Data Warehouse.", logger);
}
- string field = fieldElement.GetString()!;
-
- // Validate field/function compatibility
- bool isCountStar = function == "count" && field == "*";
+ // 6. Build SQL query structure with aggregation, groupby, having
+ IAuthorizationResolver authResolver = serviceProvider.GetRequiredService<IAuthorizationResolver>();
+ GQLFilterParser gQLFilterParser = serviceProvider.GetRequiredService<GQLFilterParser>();
+ SqlQueryStructure structure = new(
+ authCtx!.RequestContext, sqlMetadataProvider, authResolver, runtimeConfigProvider, gQLFilterParser, authCtx.HttpContext);
- if (field == "*" && function != "count")
+ string? backingField = ResolveBackingField(args, entityName, sqlMetadataProvider, toolName, out CallToolResult? pkError, logger);
+ if (pkError != null)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- $"Field '*' is only valid with function 'count'. For function '{function}', provide a specific field name.", logger);
+ return pkError;
}
- bool distinct = root.TryGetProperty("distinct", out JsonElement distinctElement) && distinctElement.GetBoolean();
+ string alias = ComputeAlias(args.Function, args.Field);
+ BuildAggregationStructure(args, structure, dbObject, backingField!, alias, entityName, sqlMetadataProvider);
- // Reject count(*) with distinct as it is semantically undefined
- if (isCountStar && distinct)
+ // 7. Generate and post-process SQL
+ IAbstractQueryManagerFactory queryManagerFactory = serviceProvider.GetRequiredService<IAbstractQueryManagerFactory>();
+ IQueryBuilder queryBuilder = queryManagerFactory.GetQueryBuilder(databaseType);
+ IQueryExecutor queryExecutor = queryManagerFactory.GetQueryExecutor(databaseType);
+
+ string sql = queryBuilder.Build(structure);
+ if (args.Groupby.Count > 0)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- "Cannot use distinct=true with field='*'. DISTINCT requires a specific field name. Use a field name instead of '*' to count distinct values.", logger);
+ sql = ApplyOrderByAndPagination(sql, args, structure, queryBuilder, backingField!);
}
- string? filter = root.TryGetProperty("filter", out JsonElement filterElement) ? filterElement.GetString() : null;
- bool userProvidedOrderby = root.TryGetProperty("orderby", out JsonElement orderbyElement) && !string.IsNullOrWhiteSpace(orderbyElement.GetString());
- string orderby = "desc";
- if (userProvidedOrderby)
- {
- string normalizedOrderby = (orderbyElement.GetString() ?? string.Empty).Trim().ToLowerInvariant();
- if (normalizedOrderby != "asc" && normalizedOrderby != "desc")
- {
- return McpResponseBuilder.BuildErrorResult(
- toolName,
- "InvalidArguments",
- $"Argument 'orderby' must be either 'asc' or 'desc' when provided. Got: '{orderbyElement.GetString()}'.",
- logger);
- }
+ // 8. Execute query and return results
+ cancellationToken.ThrowIfCancellationRequested();
+ JsonDocument? queryResult = await queryExecutor.ExecuteQueryAsync(
+ sql, structure.Parameters, queryExecutor.GetJsonResultAsync,
+ dataSourceName, authCtx.HttpContext);
- orderby = normalizedOrderby;
- }
+ JsonArray? resultArray = queryResult != null
+ ? JsonSerializer.Deserialize<JsonArray>(queryResult.RootElement.GetRawText())
+ : null;
- int? first = null;
- if (root.TryGetProperty("first", out JsonElement firstElement) && firstElement.ValueKind == JsonValueKind.Number)
- {
- first = firstElement.GetInt32();
- if (first < 1)
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Argument 'first' must be at least 1.", logger);
- }
- }
+ return args.First.HasValue && args.Groupby.Count > 0
+ ? BuildPaginatedResponse(resultArray, args.First.Value, args.After, entityName, logger)
+ : BuildSimpleResponse(resultArray, entityName, alias, logger);
+ }
+ catch (TimeoutException timeoutException)
+ {
+ logger?.LogError(timeoutException, "Aggregation operation timed out for entity {Entity}.", entityName);
+ return McpResponseBuilder.BuildErrorResult(toolName, "TimeoutError", BuildTimeoutErrorMessage(entityName), logger);
+ }
+ catch (TaskCanceledException taskCanceledException)
+ {
+ logger?.LogError(taskCanceledException, "Aggregation task was canceled for entity {Entity}.", entityName);
+ return McpResponseBuilder.BuildErrorResult(toolName, "TimeoutError", BuildTaskCanceledErrorMessage(entityName), logger);
+ }
+ catch (OperationCanceledException)
+ {
+ logger?.LogWarning("Aggregation operation was canceled for entity {Entity}.", entityName);
+ return McpResponseBuilder.BuildErrorResult(toolName, "OperationCanceled", BuildOperationCanceledErrorMessage(entityName), logger);
+ }
+ catch (DbException dbException)
+ {
+ logger?.LogError(dbException, "Database error during aggregation for entity {Entity}.", entityName);
+ return McpResponseBuilder.BuildErrorResult(toolName, "DatabaseOperationFailed", dbException.Message, logger);
+ }
+ catch (ArgumentException argumentException)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", argumentException.Message, logger);
+ }
+ catch (DataApiBuilderException dabException)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, dabException.StatusCode.ToString(), dabException.Message, logger);
+ }
+ catch (Exception ex)
+ {
+ logger?.LogError(ex, "Unexpected error in AggregateRecordsTool.");
+ return McpResponseBuilder.BuildErrorResult(toolName, "UnexpectedError", "Unexpected error occurred in AggregateRecordsTool.", logger);
+ }
+ }
- string? after = root.TryGetProperty("after", out JsonElement afterElement) ? afterElement.GetString() : null;
+ #region Argument Parsing and Validation
- List groupby = new();
- if (root.TryGetProperty("groupby", out JsonElement groupbyElement) && groupbyElement.ValueKind == JsonValueKind.Array)
- {
- foreach (JsonElement groupbyItem in groupbyElement.EnumerateArray())
- {
- string? groupbyFieldName = groupbyItem.GetString();
- if (!string.IsNullOrWhiteSpace(groupbyFieldName))
- {
- groupby.Add(groupbyFieldName);
- }
- }
- }
+ /// <summary>
+ /// Parses and validates all arguments from the tool invocation.
+ /// Returns null on success with the parsed arguments in the out parameter,
+ /// or returns an error to return to the caller.
+ /// </summary>
+ private static CallToolResult? TryParseAndValidateArguments(
+ JsonDocument? arguments,
+ RuntimeConfig runtimeConfig,
+ string toolName,
+ out AggregateArguments args,
+ ILogger? logger)
+ {
+ args = default!;
- // Validate that first, after, orderby, and having require groupby
- if (groupby.Count == 0)
- {
- if (userProvidedOrderby)
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- "The 'orderby' parameter requires 'groupby' to be specified. Sorting applies to grouped aggregation results.", logger);
- }
+ if (arguments == null)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "No arguments provided.", logger);
+ }
- if (first.HasValue)
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- "The 'first' parameter requires 'groupby' to be specified. Pagination applies to grouped aggregation results.", logger);
- }
+ JsonElement root = arguments.RootElement;
- if (!string.IsNullOrEmpty(after))
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- "The 'after' parameter requires 'groupby' to be specified. Pagination applies to grouped aggregation results.", logger);
- }
- }
+ // Parse entity
+ if (!McpArgumentParser.TryParseEntity(root, out string entityName, out string parseError))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", parseError, logger);
+ }
- if (!string.IsNullOrEmpty(after) && !first.HasValue)
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- "The 'after' parameter requires 'first' to be specified. Provide 'first' to enable pagination.", logger);
- }
+ if (runtimeConfig.Entities?.TryGetValue(entityName, out Entity? entity) == true &&
+ entity.Mcp?.DmlToolEnabled == false)
+ {
+ return McpErrorHelpers.ToolDisabled(toolName, logger, $"DML tools are disabled for entity '{entityName}'.");
+ }
- Dictionary? havingOperators = null;
- List? havingInValues = null;
- if (root.TryGetProperty("having", out JsonElement havingElement) && havingElement.ValueKind == JsonValueKind.Object)
- {
- if (groupby.Count == 0)
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- "The 'having' parameter requires 'groupby' to be specified. HAVING filters groups after aggregation.", logger);
- }
+ // Parse function
+ if (!root.TryGetProperty("function", out JsonElement functionElement) || string.IsNullOrWhiteSpace(functionElement.GetString()))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Missing required argument 'function'.", logger);
+ }
- havingOperators = new Dictionary(StringComparer.OrdinalIgnoreCase);
- foreach (JsonProperty prop in havingElement.EnumerateObject())
- {
- // Reject unsupported operators (e.g. between, notIn, like)
- if (!_validHavingOperators.Contains(prop.Name))
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- $"Unsupported having operator '{prop.Name}'. Supported operators: {string.Join(", ", _validHavingOperators)}.", logger);
- }
+ string function = functionElement.GetString()!.ToLowerInvariant();
+ if (!_validFunctions.Contains(function))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"Invalid function '{function}'. Must be one of: count, avg, sum, min, max.", logger);
+ }
- if (prop.Name.Equals("in", StringComparison.OrdinalIgnoreCase))
- {
- if (prop.Value.ValueKind != JsonValueKind.Array)
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- "The 'having.in' value must be a numeric array. Example: {\"in\": [5, 10]}.", logger);
- }
+ // Parse field
+ if (!root.TryGetProperty("field", out JsonElement fieldElement) || string.IsNullOrWhiteSpace(fieldElement.GetString()))
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Missing required argument 'field'.", logger);
+ }
- havingInValues = new List();
- foreach (JsonElement item in prop.Value.EnumerateArray())
- {
- if (item.ValueKind != JsonValueKind.Number)
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- $"All values in 'having.in' must be numeric. Found non-numeric value: '{item}'.", logger);
- }
+ string field = fieldElement.GetString()!;
+ bool isCountStar = function == "count" && field == "*";
- havingInValues.Add(item.GetDouble());
- }
- }
- else
- {
- // Scalar operators (eq, neq, gt, gte, lt, lte) must have numeric values
- if (prop.Value.ValueKind != JsonValueKind.Number)
- {
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
- $"The 'having.{prop.Name}' value must be numeric. Got: '{prop.Value}'. HAVING filters compare aggregated numeric results.", logger);
- }
+ if (field == "*" && function != "count")
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"Field '*' is only valid with function 'count'. For function '{function}', provide a specific field name.", logger);
+ }
- havingOperators[prop.Name] = prop.Value.GetDouble();
- }
- }
- }
+ // Parse distinct
+ bool distinct = root.TryGetProperty("distinct", out JsonElement distinctElement) && distinctElement.GetBoolean();
- // Resolve metadata
- if (!McpMetadataHelper.TryResolveMetadata(
- entityName,
- runtimeConfig,
- serviceProvider,
- out ISqlMetadataProvider sqlMetadataProvider,
- out DatabaseObject dbObject,
- out string dataSourceName,
- out string metadataError))
+ if (isCountStar && distinct)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "Cannot use distinct=true with field='*'. DISTINCT requires a specific field name. Use a field name instead of '*' to count distinct values.", logger);
+ }
+
+ // Parse filter
+ string? filter = root.TryGetProperty("filter", out JsonElement filterElement) ? filterElement.GetString() : null;
+
+ // Parse orderby
+ bool userProvidedOrderby = root.TryGetProperty("orderby", out JsonElement orderbyElement) && !string.IsNullOrWhiteSpace(orderbyElement.GetString());
+ string orderby = "desc";
+ if (userProvidedOrderby)
+ {
+ string normalizedOrderby = (orderbyElement.GetString() ?? string.Empty).Trim().ToLowerInvariant();
+ if (normalizedOrderby != "asc" && normalizedOrderby != "desc")
{
- return McpResponseBuilder.BuildErrorResult(toolName, "EntityNotFound", metadataError, logger);
+ return McpResponseBuilder.BuildErrorResult(
+ toolName,
+ "InvalidArguments",
+ $"Argument 'orderby' must be either 'asc' or 'desc' when provided. Got: '{orderbyElement.GetString()}'.",
+ logger);
}
- // Early field validation: check all user-supplied field names before authorization or query building.
- // This lets the model discover and fix typos immediately.
- if (!isCountStar)
+ orderby = normalizedOrderby;
+ }
+
+ // Parse first
+ int? first = null;
+ if (root.TryGetProperty("first", out JsonElement firstElement) && firstElement.ValueKind == JsonValueKind.Number)
+ {
+ first = firstElement.GetInt32();
+ if (first < 1)
{
- if (!sqlMetadataProvider.TryGetBackingColumn(entityName, field, out _))
- {
- return McpErrorHelpers.FieldNotFound(toolName, entityName, field, "field", logger);
- }
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", "Argument 'first' must be at least 1.", logger);
}
+ }
+
+ // Parse after
+ string? after = root.TryGetProperty("after", out JsonElement afterElement) ? afterElement.GetString() : null;
- foreach (string groupbyField in groupby)
+ // Parse groupby
+ List<string> groupby = new();
+ if (root.TryGetProperty("groupby", out JsonElement groupbyElement) && groupbyElement.ValueKind == JsonValueKind.Array)
+ {
+ foreach (JsonElement groupbyItem in groupbyElement.EnumerateArray())
{
- if (!sqlMetadataProvider.TryGetBackingColumn(entityName, groupbyField, out _))
+ string? groupbyFieldName = groupbyItem.GetString();
+ if (!string.IsNullOrWhiteSpace(groupbyFieldName))
{
- return McpErrorHelpers.FieldNotFound(toolName, entityName, groupbyField, "groupby", logger);
+ groupby.Add(groupbyFieldName);
}
}
+ }
- // Authorization
- IAuthorizationResolver authResolver = serviceProvider.GetRequiredService();
- IAuthorizationService authorizationService = serviceProvider.GetRequiredService();
- IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService();
- HttpContext? httpContext = httpContextAccessor.HttpContext;
+ // Validate groupby-dependent parameters
+ CallToolResult? dependencyError = ValidateGroupByDependencies(
+ groupby.Count, userProvidedOrderby, first, after, toolName, logger);
+ if (dependencyError != null)
+ {
+ return dependencyError;
+ }
- if (!McpAuthorizationHelper.ValidateRoleContext(httpContext, authResolver, out string roleCtxError))
+ // Parse having clause
+ Dictionary<string, double>? havingOperators = null;
+ List<double>? havingInValues = null;
+ if (root.TryGetProperty("having", out JsonElement havingElement) && havingElement.ValueKind == JsonValueKind.Object)
+ {
+ CallToolResult? havingError = TryParseHaving(
+ havingElement, groupby.Count, toolName, out havingOperators, out havingInValues, logger);
+ if (havingError != null)
{
- return McpErrorHelpers.PermissionDenied(toolName, entityName, "read", roleCtxError, logger);
+ return havingError;
}
+ }
- if (!McpAuthorizationHelper.TryResolveAuthorizedRole(
- httpContext!,
- authResolver,
- entityName,
- EntityActionOperation.Read,
- out string? effectiveRole,
- out string readAuthError))
- {
- string finalError = readAuthError.StartsWith("You do not have permission", StringComparison.OrdinalIgnoreCase)
- ? $"You do not have permission to read records for entity '{entityName}'."
- : readAuthError;
- return McpErrorHelpers.PermissionDenied(toolName, entityName, "read", finalError, logger);
- }
+ args = new AggregateArguments(
+ EntityName: entityName,
+ Function: function,
+ Field: field,
+ IsCountStar: isCountStar,
+ Distinct: distinct,
+ Filter: filter,
+ UserProvidedOrderby: userProvidedOrderby,
+ Orderby: orderby,
+ First: first,
+ After: after,
+ Groupby: groupby,
+ HavingOperators: havingOperators,
+ HavingInValues: havingInValues);
+
+ return null;
+ }
- // Build select list for authorization: groupby fields + aggregation field
- List selectFields = new(groupby);
- if (!isCountStar && !selectFields.Contains(field, StringComparer.OrdinalIgnoreCase))
+ ///
+ /// Validates that parameters requiring groupby (orderby, first, after) are only used when groupby is present.
+ /// Also validates that 'after' requires 'first'.
+ ///
+ private static CallToolResult? ValidateGroupByDependencies(
+ int groupbyCount,
+ bool userProvidedOrderby,
+ int? first,
+ string? after,
+ string toolName,
+ ILogger? logger)
+ {
+ if (groupbyCount == 0)
+ {
+ if (userProvidedOrderby)
{
- selectFields.Add(field);
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'orderby' parameter requires 'groupby' to be specified. Sorting applies to grouped aggregation results.", logger);
}
- // Build and validate Find context (reuse for authorization and OData filter parsing)
- RequestValidator requestValidator = new(serviceProvider.GetRequiredService(), runtimeConfigProvider);
- FindRequestContext context = new(entityName, dbObject, true);
- httpContext!.Request.Method = "GET";
-
- requestValidator.ValidateEntity(entityName);
-
- if (selectFields.Count > 0)
+ if (first.HasValue)
{
- context.UpdateReturnFields(selectFields);
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'first' parameter requires 'groupby' to be specified. Pagination applies to grouped aggregation results.", logger);
}
- if (!string.IsNullOrWhiteSpace(filter))
+ if (!string.IsNullOrEmpty(after))
{
- string filterQueryString = $"?{RequestParser.FILTER_URL}={filter}";
- context.FilterClauseInUrl = sqlMetadataProvider.GetODataParser().GetFilterClause(filterQueryString, $"{context.EntityName}.{context.DatabaseObject.FullName}");
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'after' parameter requires 'groupby' to be specified. Pagination applies to grouped aggregation results.", logger);
}
+ }
- requestValidator.ValidateRequestContext(context);
+ if (!string.IsNullOrEmpty(after) && !first.HasValue)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'after' parameter requires 'first' to be specified. Provide 'first' to enable pagination.", logger);
+ }
- AuthorizationResult authorizationResult = await authorizationService.AuthorizeAsync(
- user: httpContext.User,
- resource: context,
- requirements: new[] { new ColumnsPermissionsRequirement() });
- if (!authorizationResult.Succeeded)
- {
- return McpErrorHelpers.PermissionDenied(toolName, entityName, "read", DataApiBuilderException.AUTHORIZATION_FAILURE, logger);
- }
+ return null;
+ }
- // Build SqlQueryStructure to get OData filter → SQL predicate translation and DB policies
- GQLFilterParser gQLFilterParser = serviceProvider.GetRequiredService();
- SqlQueryStructure structure = new(
- context, sqlMetadataProvider, authResolver, runtimeConfigProvider, gQLFilterParser, httpContext);
+ ///
+ /// Parses and validates the 'having' clause from the tool arguments.
+ ///
+ private static CallToolResult? TryParseHaving(
+ JsonElement havingElement,
+ int groupbyCount,
+ string toolName,
+ out Dictionary<string, double>? havingOperators,
+ out List<double>? havingInValues,
+ ILogger? logger)
+ {
+ havingOperators = null;
+ havingInValues = null;
- // Get database-specific components
- DatabaseType databaseType = runtimeConfig.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType;
+ if (groupbyCount == 0)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'having' parameter requires 'groupby' to be specified. HAVING filters groups after aggregation.", logger);
+ }
- // Aggregation is only supported for tables and views, not stored procedures.
- if (dbObject.SourceType != EntitySourceType.Table && dbObject.SourceType != EntitySourceType.View)
+ havingOperators = new Dictionary<string, double>(StringComparer.OrdinalIgnoreCase);
+ foreach (JsonProperty prop in havingElement.EnumerateObject())
+ {
+ if (!_validHavingOperators.Contains(prop.Name))
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidEntity",
- $"Entity '{entityName}' is not a table or view. Aggregation is not supported for stored procedures. Use 'execute_entity' for stored procedures.", logger);
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"Unsupported having operator '{prop.Name}'. Supported operators: {string.Join(", ", _validHavingOperators)}.", logger);
}
- // Aggregation is only supported for MsSql/DWSQL (matching engine's GraphQL aggregation support)
- if (databaseType != DatabaseType.MSSQL && databaseType != DatabaseType.DWSQL)
+ if (prop.Name.Equals("in", StringComparison.OrdinalIgnoreCase))
{
- return McpResponseBuilder.BuildErrorResult(toolName, "UnsupportedDatabase",
- $"Aggregation is not supported for database type '{databaseType}'. Aggregation is only available for Azure SQL, SQL Server, and SQL Data Warehouse.", logger);
- }
+ if (prop.Value.ValueKind != JsonValueKind.Array)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'having.in' value must be a numeric array. Example: {\"in\": [5, 10]}.", logger);
+ }
- IAbstractQueryManagerFactory queryManagerFactory = serviceProvider.GetRequiredService();
- IQueryBuilder queryBuilder = queryManagerFactory.GetQueryBuilder(databaseType);
- IQueryExecutor queryExecutor = queryManagerFactory.GetQueryExecutor(databaseType);
+ havingInValues = new List<double>();
+ foreach (JsonElement item in prop.Value.EnumerateArray())
+ {
+ if (item.ValueKind != JsonValueKind.Number)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"All values in 'having.in' must be numeric. Found non-numeric value: '{item}'.", logger);
+ }
- // Resolve backing column name for the aggregation field (already validated early)
- string? backingField = null;
- if (!isCountStar)
- {
- sqlMetadataProvider.TryGetBackingColumn(entityName, field, out backingField);
+ havingInValues.Add(item.GetDouble());
+ }
}
else
{
- // For COUNT(*), use primary key column since PK is always NOT NULL,
- // making COUNT(pk) equivalent to COUNT(*). The engine's Build(AggregationColumn)
- // does not support "*" as a column name (it would produce invalid SQL like count([].[*])).
- SourceDefinition sourceDefinition = sqlMetadataProvider.GetSourceDefinition(entityName);
- if (sourceDefinition.PrimaryKey.Count == 0)
+ if (prop.Value.ValueKind != JsonValueKind.Number)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidEntity",
- $"Entity '{entityName}' has no primary key defined. COUNT(*) requires at least one primary key column.", logger);
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"The 'having.{prop.Name}' value must be numeric. Got: '{prop.Value}'. HAVING filters compare aggregated numeric results.", logger);
}
- backingField = sourceDefinition.PrimaryKey[0];
+ havingOperators[prop.Name] = prop.Value.GetDouble();
}
+ }
- // Resolve backing column names for groupby fields (already validated early)
- List<(string entityField, string backingColumn)> groupbyMapping = new();
- foreach (string groupbyField in groupby)
- {
- sqlMetadataProvider.TryGetBackingColumn(entityName, groupbyField, out string? backingGroupbyColumn);
- groupbyMapping.Add((groupbyField, backingGroupbyColumn!));
- }
+ return null;
+ }
- string alias = ComputeAlias(function, field);
+ #endregion
- // Clear default columns from FindRequestContext
- structure.Columns.Clear();
+ #region Entity and Field Validation
- // Add groupby columns as LabelledColumns and GroupByMetadata.Fields
- foreach (var (entityField, backingColumn) in groupbyMapping)
- {
- structure.Columns.Add(new LabelledColumn(
- dbObject.SchemaName, dbObject.Name, backingColumn, entityField, structure.SourceAlias));
- structure.GroupByMetadata.Fields[backingColumn] = new Column(
- dbObject.SchemaName, dbObject.Name, backingColumn, structure.SourceAlias);
- }
+ /// <summary>
+ /// Validates that the entity is a table or view (not a stored procedure).
+ /// </summary>
+ private static CallToolResult? ValidateEntitySourceType(
+ string entityName, DatabaseObject dbObject, string toolName, ILogger? logger)
+ {
+ if (dbObject.SourceType != EntitySourceType.Table && dbObject.SourceType != EntitySourceType.View)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidEntity",
+ $"Entity '{entityName}' is not a table or view. Aggregation is not supported for stored procedures. Use 'execute_entity' for stored procedures.", logger);
+ }
- // Build aggregation column using engine's AggregationColumn type.
- // For COUNT(*), we use the primary key column (PK is always NOT NULL, so COUNT(pk) ≡ COUNT(*)).
- AggregationType aggregationType = Enum.Parse(function);
- AggregationColumn aggregationColumn = new(
- dbObject.SchemaName, dbObject.Name, backingField!, aggregationType, alias, distinct, structure.SourceAlias);
+ return null;
+ }
- // Build HAVING predicates using engine's Predicate model
- List havingPredicates = new();
- if (havingOperators != null)
+ /// <summary>
+ /// Validates that all user-supplied field names (aggregation field and groupby fields)
+ /// exist in the entity's metadata. This early validation lets the model discover typos immediately.
+ /// </summary>
+ private static CallToolResult? ValidateFieldsExist(
+ AggregateArguments args,
+ string entityName,
+ ISqlMetadataProvider sqlMetadataProvider,
+ string toolName,
+ ILogger? logger)
+ {
+ if (!args.IsCountStar && !sqlMetadataProvider.TryGetBackingColumn(entityName, args.Field, out _))
+ {
+ return McpErrorHelpers.FieldNotFound(toolName, entityName, args.Field, "field", logger);
+ }
+
+ foreach (string groupbyField in args.Groupby)
+ {
+ if (!sqlMetadataProvider.TryGetBackingColumn(entityName, groupbyField, out _))
{
- foreach (var havingOperator in havingOperators)
- {
- PredicateOperation predicateOperation = havingOperator.Key.ToLowerInvariant() switch
- {
- "eq" => PredicateOperation.Equal,
- "neq" => PredicateOperation.NotEqual,
- "gt" => PredicateOperation.GreaterThan,
- "gte" => PredicateOperation.GreaterThanOrEqual,
- "lt" => PredicateOperation.LessThan,
- "lte" => PredicateOperation.LessThanOrEqual,
- _ => throw new ArgumentException($"Invalid having operator: {havingOperator.Key}")
- };
- string paramName = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
- structure.Parameters.Add(paramName, new DbConnectionParam(havingOperator.Value));
- havingPredicates.Add(new Predicate(
- new PredicateOperand(aggregationColumn),
- predicateOperation,
- new PredicateOperand(paramName)));
- }
+ return McpErrorHelpers.FieldNotFound(toolName, entityName, groupbyField, "groupby", logger);
}
+ }
- if (havingInValues != null && havingInValues.Count > 0)
- {
- List inParams = new();
- foreach (double val in havingInValues)
- {
- string paramName = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
- structure.Parameters.Add(paramName, new DbConnectionParam(val));
- inParams.Add(paramName);
- }
+ return null;
+ }
- havingPredicates.Add(new Predicate(
- new PredicateOperand(aggregationColumn),
- PredicateOperation.IN,
- new PredicateOperand($"({string.Join(", ", inParams)})")));
- }
+ #endregion
- // Combine multiple HAVING predicates with AND
- Predicate? combinedHaving = null;
- foreach (var predicate in havingPredicates)
- {
- combinedHaving = combinedHaving == null
- ? predicate
- : new Predicate(new PredicateOperand(combinedHaving), PredicateOperation.AND, new PredicateOperand(predicate));
- }
+ #region Authorization
- structure.GroupByMetadata.Aggregations.Add(
- new AggregationOperation(aggregationColumn, having: combinedHaving != null ? new List { combinedHaving } : null));
- structure.GroupByMetadata.RequestedAggregations = true;
+ /// <summary>
+ /// Authorizes the request and builds the <see cref="FindRequestContext"/> with validated fields and filters.
+ /// Returns a tuple of (AuthorizedContext on success, CallToolResult error on failure).
+ /// </summary>
+ private static async Task<(AuthorizedContext? context, CallToolResult? error)> AuthorizeRequestAsync(
+ AggregateArguments args,
+ string entityName,
+ DatabaseObject dbObject,
+ IServiceProvider serviceProvider,
+ RuntimeConfigProvider runtimeConfigProvider,
+ ISqlMetadataProvider sqlMetadataProvider,
+ string toolName,
+ ILogger? logger)
+ {
+ IAuthorizationResolver authResolver = serviceProvider.GetRequiredService<IAuthorizationResolver>();
+ IAuthorizationService authorizationService = serviceProvider.GetRequiredService<IAuthorizationService>();
+ IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService<IHttpContextAccessor>();
+ HttpContext? httpContext = httpContextAccessor.HttpContext;
- // Clear default OrderByColumns (PK-based)
- structure.OrderByColumns.Clear();
+ if (!McpAuthorizationHelper.ValidateRoleContext(httpContext, authResolver, out string roleCtxError))
+ {
+ return (null, McpErrorHelpers.PermissionDenied(toolName, entityName, "read", roleCtxError, logger));
+ }
- // Set pagination limit if using first
- if (first.HasValue && groupbyMapping.Count > 0)
- {
- structure.IsListQuery = true;
- }
+ if (!McpAuthorizationHelper.TryResolveAuthorizedRole(
+ httpContext!,
+ authResolver,
+ entityName,
+ EntityActionOperation.Read,
+ out string? effectiveRole,
+ out string readAuthError))
+ {
+ string finalError = readAuthError.StartsWith("You do not have permission", StringComparison.OrdinalIgnoreCase)
+ ? $"You do not have permission to read records for entity '{entityName}'."
+ : readAuthError;
+ return (null, McpErrorHelpers.PermissionDenied(toolName, entityName, "read", finalError, logger));
+ }
- // Use engine's query builder to generate SQL
- string sql = queryBuilder.Build(structure);
+ // Build select list for authorization: groupby fields + aggregation field
+ List<string> selectFields = new(args.Groupby);
+ if (!args.IsCountStar && !selectFields.Contains(args.Field, StringComparer.OrdinalIgnoreCase))
+ {
+ selectFields.Add(args.Field);
+ }
- // For groupby queries: add ORDER BY aggregate expression and pagination
- if (groupbyMapping.Count > 0)
- {
- string direction = orderby.Equals("asc", StringComparison.OrdinalIgnoreCase) ? "ASC" : "DESC";
- string quotedCol = $"{queryBuilder.QuoteIdentifier(structure.SourceAlias)}.{queryBuilder.QuoteIdentifier(backingField!)}";
- string orderByAggExpr = distinct
- ? $"{function.ToUpperInvariant()}(DISTINCT {quotedCol})"
- : $"{function.ToUpperInvariant()}({quotedCol})";
- string orderByClause = $" ORDER BY {orderByAggExpr} {direction}";
-
- if (first.HasValue)
- {
- // With pagination: SQL Server requires ORDER BY for OFFSET/FETCH and
- // does not allow both TOP and OFFSET/FETCH. Remove TOP and add ORDER BY + OFFSET/FETCH.
- int offset = DecodeCursorOffset(after);
- int fetchCount = first.Value + 1;
- string offsetParam = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
- structure.Parameters.Add(offsetParam, new DbConnectionParam(offset));
- string limitParam = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
- structure.Parameters.Add(limitParam, new DbConnectionParam(fetchCount));
-
- string paginationClause = $" OFFSET {offsetParam} ROWS FETCH NEXT {limitParam} ROWS ONLY";
-
- // Remove TOP N from the SELECT clause (TOP conflicts with OFFSET/FETCH)
- sql = Regex.Replace(sql, @"SELECT TOP \d+", "SELECT");
-
- // Insert ORDER BY + pagination before FOR JSON PATH
- int jsonPathIdx = sql.IndexOf(" FOR JSON PATH", StringComparison.OrdinalIgnoreCase);
- if (jsonPathIdx > 0)
- {
- sql = sql.Insert(jsonPathIdx, orderByClause + paginationClause);
- }
- else
- {
- sql += orderByClause + paginationClause;
- }
- }
- else
- {
- // Without pagination: insert ORDER BY before FOR JSON PATH
- int jsonPathIdx = sql.IndexOf(" FOR JSON PATH", StringComparison.OrdinalIgnoreCase);
- if (jsonPathIdx > 0)
- {
- sql = sql.Insert(jsonPathIdx, orderByClause);
- }
- else
- {
- sql += orderByClause;
- }
- }
- }
+ // Build and validate FindRequestContext
+ RequestValidator requestValidator = new(serviceProvider.GetRequiredService(), runtimeConfigProvider);
+ FindRequestContext context = new(entityName, dbObject, true);
+ httpContext!.Request.Method = "GET";
- // Execute the SQL aggregate query against the database
- cancellationToken.ThrowIfCancellationRequested();
- JsonDocument? queryResult = await queryExecutor.ExecuteQueryAsync(
- sql,
- structure.Parameters,
- queryExecutor.GetJsonResultAsync,
- dataSourceName,
- httpContext);
-
- // Parse result
- JsonArray? resultArray = null;
- if (queryResult != null)
- {
- resultArray = JsonSerializer.Deserialize(queryResult.RootElement.GetRawText());
- }
+ requestValidator.ValidateEntity(entityName);
- // Format and return results
- if (first.HasValue && groupby.Count > 0)
- {
- return BuildPaginatedResponse(resultArray, first.Value, after, entityName, logger);
- }
+ if (selectFields.Count > 0)
+ {
+ context.UpdateReturnFields(selectFields);
+ }
- return BuildSimpleResponse(resultArray, entityName, alias, logger);
+ if (!string.IsNullOrWhiteSpace(args.Filter))
+ {
+ string filterQueryString = $"?{RequestParser.FILTER_URL}={args.Filter}";
+ context.FilterClauseInUrl = sqlMetadataProvider.GetODataParser().GetFilterClause(
+ filterQueryString, $"{context.EntityName}.{context.DatabaseObject.FullName}");
}
- catch (TimeoutException timeoutException)
+
+ requestValidator.ValidateRequestContext(context);
+
+ AuthorizationResult authorizationResult = await authorizationService.AuthorizeAsync(
+ user: httpContext.User,
+ resource: context,
+ requirements: new[] { new ColumnsPermissionsRequirement() });
+ if (!authorizationResult.Succeeded)
{
- logger?.LogError(timeoutException, "Aggregation operation timed out for entity {Entity}.", entityName);
- return McpResponseBuilder.BuildErrorResult(
- toolName,
- "TimeoutError",
- BuildTimeoutErrorMessage(entityName),
- logger);
+ return (null, McpErrorHelpers.PermissionDenied(toolName, entityName, "read", DataApiBuilderException.AUTHORIZATION_FAILURE, logger));
}
- catch (TaskCanceledException taskCanceledException)
+
+ return (new AuthorizedContext(context, httpContext), null);
+ }
+
+ #endregion
+
+ #region Query Building
+
+ /// <summary>
+ /// Resolves the backing database column name for the aggregation field.
+ /// For COUNT(*), uses the first primary key column (PK is always NOT NULL, so COUNT(pk) ≡ COUNT(*)).
+ /// </summary>
+ private static string? ResolveBackingField(
+ AggregateArguments args,
+ string entityName,
+ ISqlMetadataProvider sqlMetadataProvider,
+ string toolName,
+ out CallToolResult? error,
+ ILogger? logger)
+ {
+ error = null;
+
+ if (!args.IsCountStar)
{
- logger?.LogError(taskCanceledException, "Aggregation task was canceled for entity {Entity}.", entityName);
- return McpResponseBuilder.BuildErrorResult(
- toolName,
- "TimeoutError",
- BuildTaskCanceledErrorMessage(entityName),
- logger);
+ sqlMetadataProvider.TryGetBackingColumn(entityName, args.Field, out string? backingField);
+ return backingField;
}
- catch (OperationCanceledException)
+
+ // For COUNT(*), use primary key column since PK is always NOT NULL,
+ // making COUNT(pk) equivalent to COUNT(*). The engine's Build(AggregationColumn)
+ // does not support "*" as a column name (it would produce invalid SQL like count([].[*])).
+ SourceDefinition sourceDefinition = sqlMetadataProvider.GetSourceDefinition(entityName);
+ if (sourceDefinition.PrimaryKey.Count == 0)
{
- logger?.LogWarning("Aggregation operation was canceled for entity {Entity}.", entityName);
- return McpResponseBuilder.BuildErrorResult(
- toolName,
- "OperationCanceled",
- BuildOperationCanceledErrorMessage(entityName),
- logger);
+ error = McpResponseBuilder.BuildErrorResult(toolName, "InvalidEntity",
+ $"Entity '{entityName}' has no primary key defined. COUNT(*) requires at least one primary key column.", logger);
+ return null;
}
- catch (DbException dbException)
+
+ return sourceDefinition.PrimaryKey[0];
+ }
+
+ /// <summary>
+ /// Configures the <see cref="SqlQueryStructure"/> with groupby columns, aggregation column,
+ /// and HAVING predicates based on the parsed arguments.
+ /// </summary>
+ private static void BuildAggregationStructure(
+ AggregateArguments args,
+ SqlQueryStructure structure,
+ DatabaseObject dbObject,
+ string backingField,
+ string alias,
+ string entityName,
+ ISqlMetadataProvider sqlMetadataProvider)
+ {
+ // Clear default columns from FindRequestContext
+ structure.Columns.Clear();
+
+ // Add groupby columns as LabelledColumns and GroupByMetadata.Fields
+ foreach (string groupbyField in args.Groupby)
{
- logger?.LogError(dbException, "Database error during aggregation for entity {Entity}.", entityName);
- return McpResponseBuilder.BuildErrorResult(toolName, "DatabaseOperationFailed", dbException.Message, logger);
+ sqlMetadataProvider.TryGetBackingColumn(entityName, groupbyField, out string? backingGroupbyColumn);
+ structure.Columns.Add(new LabelledColumn(
+ dbObject.SchemaName, dbObject.Name, backingGroupbyColumn!, groupbyField, structure.SourceAlias));
+ structure.GroupByMetadata.Fields[backingGroupbyColumn!] = new Column(
+ dbObject.SchemaName, dbObject.Name, backingGroupbyColumn!, structure.SourceAlias);
}
- catch (ArgumentException argumentException)
+
+ // Build aggregation column using engine's AggregationColumn type.
+ AggregationType aggregationType = Enum.Parse<AggregationType>(args.Function);
+ AggregationColumn aggregationColumn = new(
+ dbObject.SchemaName, dbObject.Name, backingField, aggregationType, alias, args.Distinct, structure.SourceAlias);
+
+ // Build HAVING predicate and configure aggregation metadata
+ Predicate? combinedHaving = BuildHavingPredicate(args, aggregationColumn, structure);
+ structure.GroupByMetadata.Aggregations.Add(
+ new AggregationOperation(aggregationColumn, having: combinedHaving != null ? new List<Predicate> { combinedHaving } : null));
+ structure.GroupByMetadata.RequestedAggregations = true;
+
+ // Clear default OrderByColumns (PK-based) and configure pagination
+ structure.OrderByColumns.Clear();
+ if (args.First.HasValue && args.Groupby.Count > 0)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments", argumentException.Message, logger);
+ structure.IsListQuery = true;
}
- catch (DataApiBuilderException dabException)
+ }
+
+ /// <summary>
+ /// Builds a combined HAVING predicate from the parsed having operators and IN values.
+ /// Multiple conditions are AND-ed together.
+ /// </summary>
+ private static Predicate? BuildHavingPredicate(
+ AggregateArguments args,
+ AggregationColumn aggregationColumn,
+ SqlQueryStructure structure)
+ {
+ List<Predicate> havingPredicates = new();
+
+ if (args.HavingOperators != null)
{
- return McpResponseBuilder.BuildErrorResult(toolName, dabException.StatusCode.ToString(), dabException.Message, logger);
+ foreach (var havingOperator in args.HavingOperators)
+ {
+ PredicateOperation predicateOperation = havingOperator.Key.ToLowerInvariant() switch
+ {
+ "eq" => PredicateOperation.Equal,
+ "neq" => PredicateOperation.NotEqual,
+ "gt" => PredicateOperation.GreaterThan,
+ "gte" => PredicateOperation.GreaterThanOrEqual,
+ "lt" => PredicateOperation.LessThan,
+ "lte" => PredicateOperation.LessThanOrEqual,
+ _ => throw new ArgumentException($"Invalid having operator: {havingOperator.Key}")
+ };
+ string paramName = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
+ structure.Parameters.Add(paramName, new DbConnectionParam(havingOperator.Value));
+ havingPredicates.Add(new Predicate(
+ new PredicateOperand(aggregationColumn),
+ predicateOperation,
+ new PredicateOperand(paramName)));
+ }
}
- catch (Exception ex)
+
+ if (args.HavingInValues != null && args.HavingInValues.Count > 0)
{
- logger?.LogError(ex, "Unexpected error in AggregateRecordsTool.");
- return McpResponseBuilder.BuildErrorResult(toolName, "UnexpectedError", "Unexpected error occurred in AggregateRecordsTool.", logger);
+ List<string> inParams = new();
+ foreach (double val in args.HavingInValues)
+ {
+ string paramName = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
+ structure.Parameters.Add(paramName, new DbConnectionParam(val));
+ inParams.Add(paramName);
+ }
+
+ havingPredicates.Add(new Predicate(
+ new PredicateOperand(aggregationColumn),
+ PredicateOperation.IN,
+ new PredicateOperand($"({string.Join(", ", inParams)})")));
+ }
+
+ // Combine multiple HAVING predicates with AND
+ Predicate? combinedHaving = null;
+ foreach (var predicate in havingPredicates)
+ {
+ combinedHaving = combinedHaving == null
+ ? predicate
+ : new Predicate(new PredicateOperand(combinedHaving), PredicateOperation.AND, new PredicateOperand(predicate));
+ }
+
+ return combinedHaving;
+ }
+
+ /// <summary>
+ /// Post-processes the generated SQL to add ORDER BY and OFFSET/FETCH pagination
+ /// for grouped aggregation queries.
+ /// </summary>
+ private static string ApplyOrderByAndPagination(
+ string sql,
+ AggregateArguments args,
+ SqlQueryStructure structure,
+ IQueryBuilder queryBuilder,
+ string backingField)
+ {
+ string direction = args.Orderby.Equals("asc", StringComparison.OrdinalIgnoreCase) ? "ASC" : "DESC";
+ string quotedCol = $"{queryBuilder.QuoteIdentifier(structure.SourceAlias)}.{queryBuilder.QuoteIdentifier(backingField)}";
+ string orderByAggExpr = args.Distinct
+ ? $"{args.Function.ToUpperInvariant()}(DISTINCT {quotedCol})"
+ : $"{args.Function.ToUpperInvariant()}({quotedCol})";
+ string orderByClause = $" ORDER BY {orderByAggExpr} {direction}";
+
+ if (args.First.HasValue)
+ {
+ // With pagination: SQL Server requires ORDER BY for OFFSET/FETCH and
+ // does not allow both TOP and OFFSET/FETCH. Remove TOP and add ORDER BY + OFFSET/FETCH.
+ int offset = DecodeCursorOffset(args.After);
+ int fetchCount = args.First.Value + 1;
+ string offsetParam = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
+ structure.Parameters.Add(offsetParam, new DbConnectionParam(offset));
+ string limitParam = BaseQueryStructure.GetEncodedParamName(structure.Counter.Next());
+ structure.Parameters.Add(limitParam, new DbConnectionParam(fetchCount));
+
+ string paginationClause = $" OFFSET {offsetParam} ROWS FETCH NEXT {limitParam} ROWS ONLY";
+
+ // Remove TOP N from the SELECT clause (TOP conflicts with OFFSET/FETCH)
+ sql = Regex.Replace(sql, @"SELECT TOP \d+", "SELECT");
+
+ // Insert ORDER BY + pagination before FOR JSON PATH
+ int jsonPathIdx = sql.IndexOf(" FOR JSON PATH", StringComparison.OrdinalIgnoreCase);
+ if (jsonPathIdx > 0)
+ {
+ sql = sql.Insert(jsonPathIdx, orderByClause + paginationClause);
+ }
+ else
+ {
+ sql += orderByClause + paginationClause;
+ }
+ }
+ else
+ {
+ // Without pagination: insert ORDER BY before FOR JSON PATH
+ int jsonPathIdx = sql.IndexOf(" FOR JSON PATH", StringComparison.OrdinalIgnoreCase);
+ if (jsonPathIdx > 0)
+ {
+ sql = sql.Insert(jsonPathIdx, orderByClause);
+ }
+ else
+ {
+ sql += orderByClause;
+ }
}
+
+ return sql;
}
+ #endregion
+
+ #region Result Formatting and Helpers
+
///
/// Computes the response alias for the aggregation result.
/// For count with "*", the alias is "count". Otherwise it's "{function}_{field}".
@@ -797,6 +1012,10 @@ private static CallToolResult BuildSimpleResponse(
$"AggregateRecordsTool success for entity {entityName}.");
}
+ #endregion
+
+ #region Error Message Builders
+
///
/// Builds the error message for a TimeoutException during aggregation.
///
@@ -827,5 +1046,7 @@ internal static string BuildOperationCanceledErrorMessage(string entityName)
+ "This is NOT a tool error. The operation was interrupted, possibly due to a timeout or client disconnect. "
+ "No results were returned. You may retry the same request.";
}
+
+ #endregion
}
}
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md
deleted file mode 100644
index 718aa360e1..0000000000
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.md
+++ /dev/null
@@ -1,51 +0,0 @@
-# AggregateRecordsTool
-
-MCP tool that computes SQL-level aggregations (COUNT, AVG, SUM, MIN, MAX) on DAB entities. All aggregation is pushed to the database engine — no in-memory computation.
-
-## Class Structure
-
-| Member | Kind | Purpose |
-|---|---|---|
-| `ToolType` | Property | Returns `ToolType.BuiltIn` for reflection-based discovery. |
-| `_validFunctions` | Static field | Allowlist of aggregation functions: count, avg, sum, min, max. |
-| `GetToolMetadata()` | Method | Returns the MCP `Tool` descriptor (name, description, JSON input schema). |
-| `ExecuteAsync()` | Method | Main entry point — validates input, resolves metadata, authorizes, builds the SQL query via the engine's `IQueryBuilder.Build(SqlQueryStructure)`, executes it, and formats the response. |
-| `ComputeAlias()` | Static method | Produces the result column alias: `"count"` for count(\*), otherwise `"{function}_{field}"`. |
-| `DecodeCursorOffset()` | Static method | Decodes a base64 opaque cursor string to an integer offset for OFFSET/FETCH pagination. Returns 0 on any invalid input. |
-| `BuildPaginatedResponse()` | Private method | Formats a grouped result set into `{ items, endCursor, hasNextPage }` when `first` is provided. |
-| `BuildSimpleResponse()` | Private method | Formats a scalar or grouped result set without pagination. |
-
-## ExecuteAsync Sequence
-
-```mermaid
-sequenceDiagram
- participant Client as MCP Client
- participant Tool as AggregateRecordsTool
- participant Engine as DAB Engine
- participant DB as Database
-
- Client->>Tool: ExecuteAsync(arguments)
- Tool->>Tool: Validate inputs & check tool enabled
- Tool->>Engine: Resolve entity metadata & validate fields
- Tool->>Engine: Authorize (column-level permissions)
- Tool->>Engine: Build SQL via queryBuilder.Build(SqlQueryStructure)
- Tool->>Tool: Post-process SQL (ORDER BY, pagination)
- Tool->>DB: ExecuteQueryAsync → JSON result
- alt Paginated (first provided)
- Tool-->>Client: { items, endCursor, hasNextPage }
- else Simple
- Tool-->>Client: { entity, result: [{alias: value}] }
- end
-
- Note over Tool,Client: On error: TimeoutError, OperationCanceled, or DatabaseOperationFailed
-```
-
-## Key Design Decisions
-
-- **No in-memory aggregation.** The engine's `GroupByMetadata` / `AggregationColumn` types drive SQL generation via `queryBuilder.Build(structure)`. All aggregation is performed by the database.
-- **COUNT(\*) workaround.** The engine's `Build(AggregationColumn)` doesn't support `*` as a column name (it produces invalid SQL like `count([].[*])`), so the primary key column is used instead. `COUNT(pk)` ≡ `COUNT(*)` since PK is NOT NULL.
-- **ORDER BY post-processing.** Neither the GraphQL nor REST code paths support ORDER BY on an aggregate expression, so this tool inserts `ORDER BY {func}({col}) ASC|DESC` into the generated SQL before `FOR JSON PATH`.
-- **TOP vs OFFSET/FETCH.** SQL Server forbids both in the same query. When pagination (`first`) is used, `TOP N` is stripped via regex before appending `OFFSET/FETCH NEXT`.
-- **Early field validation.** All user-supplied field names (aggregation field, groupby fields) are validated against the entity's metadata before authorization or query building, so typos surface immediately with actionable guidance.
-- **Timeout vs cancellation.** `TimeoutException` (from `query-timeout` config) and `OperationCanceledException` (from client disconnect) are handled separately with distinct model-facing messages. Timeouts guide the model to narrow filters or paginate; cancellations suggest retry.
-- **Database support.** Only MsSql / DWSQL — matches the engine's GraphQL aggregation support. PostgreSQL, MySQL, and CosmosDB return an `UnsupportedDatabase` error.
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 7fa97fa2fe..f198e9b595 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -23,15 +23,10 @@
namespace Azure.DataApiBuilder.Service.Tests.Mcp
{
///
- /// Tests for the AggregateRecordsTool MCP tool.
- /// Covers:
- /// - Tool metadata and schema validation
- /// - Runtime-level enabled/disabled configuration
- /// - Entity-level DML tool configuration
- /// - Input validation (missing/invalid arguments)
- /// - SQL expression generation (count, avg, sum, min, max, distinct)
- /// - Table reference quoting, cursor/pagination logic
- /// - Alias convention
+ /// Integration tests for the AggregateRecordsTool MCP tool.
+ /// Covers tool metadata/schema, configuration, input validation,
+ /// alias conventions, cursor/pagination, timeout/cancellation, spec examples,
+ /// and blog scenario validation.
///
[TestClass]
public class AggregateRecordsToolTests
@@ -39,29 +34,26 @@ public class AggregateRecordsToolTests
#region Tool Metadata Tests
[TestMethod]
- public void GetToolMetadata_ReturnsCorrectName()
+ public void GetToolMetadata_ReturnsCorrectNameAndType()
{
AggregateRecordsTool tool = new();
Tool metadata = tool.GetToolMetadata();
- Assert.AreEqual("aggregate_records", metadata.Name);
- }
- [TestMethod]
- public void GetToolMetadata_ReturnsCorrectToolType()
- {
- AggregateRecordsTool tool = new();
+ Assert.AreEqual("aggregate_records", metadata.Name);
Assert.AreEqual(McpEnums.ToolType.BuiltIn, tool.ToolType);
}
[TestMethod]
- public void GetToolMetadata_HasInputSchema()
+ public void GetToolMetadata_HasRequiredSchemaProperties()
{
AggregateRecordsTool tool = new();
Tool metadata = tool.GetToolMetadata();
+
Assert.AreEqual(JsonValueKind.Object, metadata.InputSchema.ValueKind);
Assert.IsTrue(metadata.InputSchema.TryGetProperty("properties", out JsonElement properties));
Assert.IsTrue(metadata.InputSchema.TryGetProperty("required", out JsonElement required));
+ // Verify required fields
List requiredFields = new();
foreach (JsonElement r in required.EnumerateArray())
{
@@ -72,325 +64,181 @@ public void GetToolMetadata_HasInputSchema()
CollectionAssert.Contains(requiredFields, "function");
CollectionAssert.Contains(requiredFields, "field");
- // Verify first and after properties exist in schema
- Assert.IsTrue(properties.TryGetProperty("first", out JsonElement firstProp));
- Assert.AreEqual("integer", firstProp.GetProperty("type").GetString());
- Assert.IsTrue(properties.TryGetProperty("after", out JsonElement afterProp));
- Assert.AreEqual("string", afterProp.GetProperty("type").GetString());
+ // Verify all schema properties exist with correct types
+ AssertSchemaProperty(properties, "entity", "string");
+ AssertSchemaProperty(properties, "function", "string");
+ AssertSchemaProperty(properties, "field", "string");
+ AssertSchemaProperty(properties, "distinct", "boolean");
+ AssertSchemaProperty(properties, "filter", "string");
+ AssertSchemaProperty(properties, "groupby", "array");
+ AssertSchemaProperty(properties, "orderby", "string");
+ AssertSchemaProperty(properties, "having", "object");
+ AssertSchemaProperty(properties, "first", "integer");
+ AssertSchemaProperty(properties, "after", "string");
}
- #endregion
-
- #region Configuration Tests
-
[TestMethod]
- public async Task AggregateRecords_DisabledAtRuntimeLevel_ReturnsToolDisabledError()
+ public void GetToolMetadata_DescriptionDocumentsWorkflowAndAlias()
{
- RuntimeConfig config = CreateConfig(aggregateRecordsEnabled: false);
- IServiceProvider sp = CreateServiceProvider(config);
AggregateRecordsTool tool = new();
+ Tool metadata = tool.GetToolMetadata();
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\"}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
-
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- AssertToolDisabledError(content);
- }
-
- [TestMethod]
- public async Task AggregateRecords_DisabledAtEntityLevel_ReturnsToolDisabledError()
- {
- RuntimeConfig config = CreateConfigWithEntityDmlDisabled();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\"}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
-
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- AssertToolDisabledError(content);
+ Assert.IsTrue(metadata.Description!.Contains("describe_entities"),
+ "Tool description must instruct models to call describe_entities first.");
+ Assert.IsTrue(metadata.Description.Contains("1)"),
+ "Tool description must use numbered workflow steps.");
+ Assert.IsTrue(metadata.Description.Contains("{function}_{field}"),
+ "Tool description must document the alias pattern '{function}_{field}'.");
+ Assert.IsTrue(metadata.Description.Contains("'count'"),
+ "Tool description must mention the special 'count' alias for count(*).");
}
#endregion
- #region Input Validation Tests
+ #region Configuration Tests
- [TestMethod]
- public async Task AggregateRecords_NullArguments_ReturnsInvalidArguments()
+ [DataTestMethod]
+ [DataRow(false, true, DisplayName = "Runtime-level disabled")]
+ [DataRow(true, false, DisplayName = "Entity-level DML disabled")]
+ public async Task AggregateRecords_Disabled_ReturnsToolDisabledError(bool runtimeEnabled, bool entityDmlEnabled)
{
- RuntimeConfig config = CreateConfig();
+ RuntimeConfig config = entityDmlEnabled
+ ? CreateConfig(aggregateRecordsEnabled: runtimeEnabled)
+ : CreateConfigWithEntityDmlDisabled();
IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- CallToolResult result = await tool.ExecuteAsync(null, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.IsTrue(content.TryGetProperty("error", out JsonElement error));
- Assert.AreEqual("InvalidArguments", error.GetProperty("type").GetString());
- }
- [TestMethod]
- public async Task AggregateRecords_MissingEntity_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ CallToolResult result = await ExecuteToolAsync(sp, "{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\"}");
- JsonDocument args = JsonDocument.Parse("{\"function\": \"count\", \"field\": \"*\"}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
+ AssertErrorResult(result, "ToolDisabled");
}
- [TestMethod]
- public async Task AggregateRecords_MissingFunction_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"field\": \"*\"}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- }
-
- [TestMethod]
- public async Task AggregateRecords_MissingField_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ #endregion
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\"}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- }
+ #region Input Validation Tests - Missing/Invalid Arguments
- [TestMethod]
- public async Task AggregateRecords_InvalidFunction_ReturnsInvalidArguments()
+ [DataTestMethod]
+ [DataRow("{\"function\": \"count\", \"field\": \"*\"}", null, DisplayName = "Missing entity")]
+ [DataRow("{\"entity\": \"Book\", \"field\": \"*\"}", null, DisplayName = "Missing function")]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"count\"}", null, DisplayName = "Missing field")]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"median\", \"field\": \"price\"}", "median", DisplayName = "Invalid function 'median'")]
+ public async Task AggregateRecords_MissingOrInvalidRequiredArgs_ReturnsInvalidArguments(string json, string? expectedInMessage)
{
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ IServiceProvider sp = CreateDefaultServiceProvider();
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"median\", \"field\": \"price\"}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("median"));
- }
+ CallToolResult result = await ExecuteToolAsync(sp, json);
- [TestMethod]
- public async Task AggregateRecords_StarFieldWithAvg_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"avg\", \"field\": \"*\"}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("count"));
+ string message = AssertErrorResult(result, "InvalidArguments");
+ if (!string.IsNullOrEmpty(expectedInMessage))
+ {
+ Assert.IsTrue(message.Contains(expectedInMessage),
+ $"Error message must contain '{expectedInMessage}'. Actual: '{message}'");
+ }
}
[TestMethod]
- public async Task AggregateRecords_DistinctCountStar_ReturnsInvalidArguments()
+ public async Task AggregateRecords_NullArguments_ReturnsInvalidArguments()
{
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
+ IServiceProvider sp = CreateDefaultServiceProvider();
AggregateRecordsTool tool = new();
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"distinct\": true}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("DISTINCT"));
- }
-
- [TestMethod]
- public async Task AggregateRecords_HavingWithoutGroupBy_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ CallToolResult result = await tool.ExecuteAsync(null, sp, CancellationToken.None);
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"having\": {\"gt\": 5}}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("groupby"));
+ AssertErrorResult(result, "InvalidArguments");
}
- [TestMethod]
- public async Task AggregateRecords_OrderByWithoutGroupBy_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ #endregion
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"orderby\": \"desc\"}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("groupby"));
- }
+ #region Input Validation Tests - Field/Function Compatibility
- [TestMethod]
- public async Task AggregateRecords_InvalidOrderByValue_ReturnsInvalidArguments()
+ [DataTestMethod]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"avg\", \"field\": \"*\"}", "count",
+ DisplayName = "Star field with avg (must mention count)")]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"distinct\": true}", "DISTINCT",
+ DisplayName = "Distinct with count(*)")]
+ public async Task AggregateRecords_InvalidFieldFunctionCombination_ReturnsInvalidArguments(string json, string expectedInMessage)
{
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ IServiceProvider sp = CreateDefaultServiceProvider();
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"orderby\": \"ascending\"}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("'asc' or 'desc'"));
- }
+ CallToolResult result = await ExecuteToolAsync(sp, json);
- [TestMethod]
- public async Task AggregateRecords_UnsupportedHavingOperator_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"between\": 5}}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("between"));
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("Supported operators"));
+ string message = AssertErrorResult(result, "InvalidArguments");
+ Assert.IsTrue(message.Contains(expectedInMessage),
+ $"Error message must contain '{expectedInMessage}'. Actual: '{message}'");
}
- [TestMethod]
- public async Task AggregateRecords_NonNumericHavingValue_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ #endregion
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"eq\": \"ten\"}}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("numeric"));
- }
+ #region Input Validation Tests - GroupBy Dependencies
- [TestMethod]
- public async Task AggregateRecords_NonNumericHavingInArray_ReturnsInvalidArguments()
+ [DataTestMethod]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"orderby\": \"desc\"}", "groupby",
+ DisplayName = "Orderby without groupby")]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"having\": {\"gt\": 5}}", "groupby",
+ DisplayName = "Having without groupby")]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"orderby\": \"ascending\"}", "'asc' or 'desc'",
+ DisplayName = "Invalid orderby value")]
+ public async Task AggregateRecords_GroupByDependencyViolation_ReturnsInvalidArguments(string json, string expectedInMessage)
{
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ IServiceProvider sp = CreateDefaultServiceProvider();
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"in\": [5, \"abc\"]}}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("numeric"));
- }
+ CallToolResult result = await ExecuteToolAsync(sp, json);
- [TestMethod]
- public async Task AggregateRecords_HavingInNotArray_ReturnsInvalidArguments()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- JsonDocument args = JsonDocument.Parse("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"in\": 5}}");
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- Assert.AreEqual("InvalidArguments", content.GetProperty("error").GetProperty("type").GetString());
- Assert.IsTrue(content.GetProperty("error").GetProperty("message").GetString()!.Contains("numeric array"));
+ string message = AssertErrorResult(result, "InvalidArguments");
+ Assert.IsTrue(message.Contains(expectedInMessage),
+ $"Error message must contain '{expectedInMessage}'. Actual: '{message}'");
}
#endregion
- #region Alias Convention Tests
+ #region Input Validation Tests - Having Clause
- [TestMethod]
- public void ComputeAlias_CountStar_ReturnsCount()
+ [DataTestMethod]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"between\": 5}}",
+ "between", DisplayName = "Unsupported having operator")]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"eq\": \"ten\"}}",
+ "numeric", DisplayName = "Non-numeric having scalar")]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"in\": [5, \"abc\"]}}",
+ "numeric", DisplayName = "Non-numeric value in having.in array")]
+ [DataRow("{\"entity\": \"Book\", \"function\": \"count\", \"field\": \"*\", \"groupby\": [\"title\"], \"having\": {\"in\": 5}}",
+ "numeric array", DisplayName = "Having.in not an array")]
+ public async Task AggregateRecords_InvalidHaving_ReturnsInvalidArguments(string json, string expectedInMessage)
{
- Assert.AreEqual("count", AggregateRecordsTool.ComputeAlias("count", "*"));
- }
+ IServiceProvider sp = CreateDefaultServiceProvider();
- [TestMethod]
- public void ComputeAlias_CountField_ReturnsFunctionField()
- {
- Assert.AreEqual("count_supplierId", AggregateRecordsTool.ComputeAlias("count", "supplierId"));
- }
+ CallToolResult result = await ExecuteToolAsync(sp, json);
- [TestMethod]
- public void ComputeAlias_AvgField_ReturnsFunctionField()
- {
- Assert.AreEqual("avg_unitPrice", AggregateRecordsTool.ComputeAlias("avg", "unitPrice"));
+ string message = AssertErrorResult(result, "InvalidArguments");
+ Assert.IsTrue(message.Contains(expectedInMessage),
+ $"Error message must contain '{expectedInMessage}'. Actual: '{message}'");
}
- [TestMethod]
- public void ComputeAlias_SumField_ReturnsFunctionField()
- {
- Assert.AreEqual("sum_unitPrice", AggregateRecordsTool.ComputeAlias("sum", "unitPrice"));
- }
+ #endregion
- [TestMethod]
- public void ComputeAlias_MinField_ReturnsFunctionField()
- {
- Assert.AreEqual("min_price", AggregateRecordsTool.ComputeAlias("min", "price"));
- }
+ #region Alias Convention Tests
- [TestMethod]
- public void ComputeAlias_MaxField_ReturnsFunctionField()
+ [DataTestMethod]
+ [DataRow("count", "*", "count", DisplayName = "count(*) → 'count'")]
+ [DataRow("count", "supplierId", "count_supplierId", DisplayName = "count(supplierId)")]
+ [DataRow("avg", "unitPrice", "avg_unitPrice", DisplayName = "avg(unitPrice)")]
+ [DataRow("sum", "unitPrice", "sum_unitPrice", DisplayName = "sum(unitPrice)")]
+ [DataRow("min", "price", "min_price", DisplayName = "min(price)")]
+ [DataRow("max", "price", "max_price", DisplayName = "max(price)")]
+ public void ComputeAlias_ReturnsExpectedAlias(string function, string field, string expectedAlias)
{
- Assert.AreEqual("max_price", AggregateRecordsTool.ComputeAlias("max", "price"));
+ Assert.AreEqual(expectedAlias, AggregateRecordsTool.ComputeAlias(function, field));
}
#endregion
#region Cursor and Pagination Tests
- [TestMethod]
- public void DecodeCursorOffset_NullCursor_ReturnsZero()
+ [DataTestMethod]
+ [DataRow(null, 0, DisplayName = "null → 0")]
+ [DataRow("", 0, DisplayName = "empty → 0")]
+ [DataRow(" ", 0, DisplayName = "whitespace → 0")]
+ public void DecodeCursorOffset_InvalidCursor_ReturnsZero(string? cursor, int expected)
{
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(null));
- }
-
- [TestMethod]
- public void DecodeCursorOffset_EmptyCursor_ReturnsZero()
- {
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(""));
- }
-
- [TestMethod]
- public void DecodeCursorOffset_WhitespaceCursor_ReturnsZero()
- {
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(" "));
- }
-
- [TestMethod]
- public void DecodeCursorOffset_ValidBase64Cursor_ReturnsDecodedOffset()
- {
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes("5"));
- Assert.AreEqual(5, AggregateRecordsTool.DecodeCursorOffset(cursor));
+ Assert.AreEqual(expected, AggregateRecordsTool.DecodeCursorOffset(cursor));
}
[TestMethod]
@@ -399,51 +247,28 @@ public void DecodeCursorOffset_InvalidBase64_ReturnsZero()
Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset("not-valid-base64!!!"));
}
- [TestMethod]
- public void DecodeCursorOffset_NonNumericBase64_ReturnsZero()
+ [DataTestMethod]
+ [DataRow("abc", 0, DisplayName = "non-numeric → 0")]
+ [DataRow("0", 0, DisplayName = "zero → 0")]
+ [DataRow("5", 5, DisplayName = "5 round-trip")]
+ [DataRow("15", 15, DisplayName = "15 round-trip")]
+ [DataRow("1000", 1000, DisplayName = "1000 round-trip")]
+ public void DecodeCursorOffset_Base64Encoded_ReturnsExpectedOffset(string rawValue, int expectedOffset)
{
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes("abc"));
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(cursor));
- }
-
- [TestMethod]
- public void DecodeCursorOffset_RoundTrip_PreservesOffset()
- {
- int expectedOffset = 15;
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes(expectedOffset.ToString()));
+ string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes(rawValue));
Assert.AreEqual(expectedOffset, AggregateRecordsTool.DecodeCursorOffset(cursor));
}
- [TestMethod]
- public void DecodeCursorOffset_ZeroOffset_ReturnsZero()
- {
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes("0"));
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(cursor));
- }
-
- [TestMethod]
- public void DecodeCursorOffset_LargeOffset_ReturnsCorrectValue()
- {
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes("1000"));
- Assert.AreEqual(1000, AggregateRecordsTool.DecodeCursorOffset(cursor));
- }
-
#endregion
#region Timeout and Cancellation Tests
- ///
- /// Verifies that OperationCanceledException produces a model-explicit error
- /// that clearly states the operation was canceled, not errored.
- ///
[TestMethod]
public async Task AggregateRecords_OperationCanceled_ReturnsExplicitCanceledMessage()
{
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
+ IServiceProvider sp = CreateDefaultServiceProvider();
AggregateRecordsTool tool = new();
- // Create a pre-canceled token
CancellationTokenSource cts = new();
cts.Cancel();
@@ -452,531 +277,241 @@ public async Task AggregateRecords_OperationCanceled_ReturnsExplicitCanceledMess
Assert.IsTrue(result.IsError == true);
JsonElement content = ParseContent(result);
- Assert.IsTrue(content.TryGetProperty("error", out JsonElement error));
- string errorType = error.GetProperty("type").GetString();
- string errorMessage = error.GetProperty("message").GetString();
-
- // Verify the error type identifies it as a cancellation
- Assert.IsNotNull(errorType);
- Assert.AreEqual("OperationCanceled", errorType);
-
- // Verify the message explicitly tells the model this is NOT a tool error
- Assert.IsNotNull(errorMessage);
- Assert.IsTrue(errorMessage!.Contains("NOT a tool error"), "Message must explicitly state this is NOT a tool error.");
+ JsonElement error = content.GetProperty("error");
- // Verify the message tells the model what happened
- Assert.IsTrue(errorMessage.Contains("canceled"), "Message must mention the operation was canceled.");
+ Assert.AreEqual("OperationCanceled", error.GetProperty("type").GetString());
+ string message = error.GetProperty("message").GetString()!;
- // Verify the message tells the model it can retry
- Assert.IsTrue(errorMessage.Contains("retry"), "Message must tell the model it can retry.");
+ AssertContainsAll(message,
+ ("NOT a tool error", "Message must state this is NOT a tool error."),
+ ("canceled", "Message must mention the operation was canceled."),
+ ("retry", "Message must tell the model it can retry."));
}
- ///
- /// Verifies that the timeout error message provides explicit guidance to the model
- /// about what happened and what to do next, using the production message builder.
- ///
- [TestMethod]
- public void TimeoutErrorMessage_ContainsModelGuidance()
+ [DataTestMethod]
+ [DataRow("Product", DisplayName = "Product entity")]
+ [DataRow("HugeTransactionLog", DisplayName = "HugeTransactionLog entity")]
+ public void BuildTimeoutErrorMessage_ContainsGuidance(string entityName)
{
- string entityName = "Product";
string message = AggregateRecordsTool.BuildTimeoutErrorMessage(entityName);
- // Verify message explicitly states it's NOT a tool error
- Assert.IsTrue(message.Contains("NOT a tool error"), "Timeout message must state this is NOT a tool error.");
-
- // Verify message explains the cause
- Assert.IsTrue(message.Contains("database did not respond"), "Timeout message must explain the database didn't respond.");
-
- // Verify message mentions large datasets
- Assert.IsTrue(message.Contains("large datasets"), "Timeout message must mention large datasets as a possible cause.");
-
- // Verify message provides actionable remediation steps
- Assert.IsTrue(message.Contains("filter"), "Timeout message must suggest using a filter.");
- Assert.IsTrue(message.Contains("groupby"), "Timeout message must suggest reducing groupby fields.");
- Assert.IsTrue(message.Contains("first"), "Timeout message must suggest using pagination with first.");
+ AssertContainsAll(message,
+ (entityName, "Must include entity name."),
+ ("NOT a tool error", "Must state this is NOT a tool error."),
+ ("database did not respond", "Must explain the cause."),
+ ("large datasets", "Must mention large datasets."),
+ ("filter", "Must suggest filter."),
+ ("groupby", "Must suggest reducing groupby."),
+ ("first", "Must suggest pagination."));
}
- ///
- /// Verifies that TaskCanceledException (which typically signals HTTP/DB timeout)
- /// produces a message referencing timeout, using the production message builder.
- ///
- [TestMethod]
- public void TaskCanceledErrorMessage_ContainsTimeoutGuidance()
+ [DataTestMethod]
+ [DataRow("Product", DisplayName = "Product entity")]
+ public void BuildTaskCanceledErrorMessage_ContainsGuidance(string entityName)
{
- string entityName = "Product";
string message = AggregateRecordsTool.BuildTaskCanceledErrorMessage(entityName);
- // TaskCanceledException should produce a message referencing timeout
- Assert.IsTrue(message.Contains("NOT a tool error"), "TaskCanceled message must state this is NOT a tool error.");
- Assert.IsTrue(message.Contains("timeout"), "TaskCanceled message must reference timeout as the cause.");
- Assert.IsTrue(message.Contains("filter"), "TaskCanceled message must suggest filter as remediation.");
- Assert.IsTrue(message.Contains("first"), "TaskCanceled message must suggest first for pagination.");
+ AssertContainsAll(message,
+ (entityName, "Must include entity name."),
+ ("NOT a tool error", "Must state this is NOT a tool error."),
+ ("timeout", "Must reference timeout."),
+ ("filter", "Must suggest filter."),
+ ("first", "Must suggest pagination."));
}
- ///
- /// Verifies that the OperationCanceled error message for a specific entity
- /// includes the entity name so the model knows which aggregation failed,
- /// using the production message builder.
- ///
- [TestMethod]
- public void CanceledErrorMessage_IncludesEntityName()
+ [DataTestMethod]
+ [DataRow("LargeProductCatalog", DisplayName = "LargeProductCatalog entity")]
+ public void BuildOperationCanceledErrorMessage_ContainsGuidance(string entityName)
{
- string entityName = "LargeProductCatalog";
string message = AggregateRecordsTool.BuildOperationCanceledErrorMessage(entityName);
- Assert.IsTrue(message.Contains(entityName), "Canceled message must include the entity name.");
- Assert.IsTrue(message.Contains("No results were returned"), "Canceled message must state no results were returned.");
- }
-
- ///
- /// Verifies that the timeout error message for a specific entity
- /// includes the entity name so the model knows which aggregation timed out,
- /// using the production message builder.
- ///
- [TestMethod]
- public void TimeoutErrorMessage_IncludesEntityName()
- {
- string entityName = "HugeTransactionLog";
- string message = AggregateRecordsTool.BuildTimeoutErrorMessage(entityName);
-
- Assert.IsTrue(message.Contains(entityName), "Timeout message must include the entity name.");
+ AssertContainsAll(message,
+ (entityName, "Must include entity name."),
+ ("No results were returned", "Must state no results."));
}
#endregion
- #region Spec Example Tests
-
- ///
- /// Spec Example 1: "How many products are there?"
- /// COUNT(*) - expects alias "count"
- ///
- [TestMethod]
- public void SpecExample01_CountStar_CorrectAlias()
- {
- string alias = AggregateRecordsTool.ComputeAlias("count", "*");
- Assert.AreEqual("count", alias);
- }
-
- ///
- /// Spec Example 2: "What is the average price of products under $10?"
- /// AVG(unitPrice) with filter
- ///
- [TestMethod]
- public void SpecExample02_AvgWithFilter_CorrectAlias()
- {
- string alias = AggregateRecordsTool.ComputeAlias("avg", "unitPrice");
- Assert.AreEqual("avg_unitPrice", alias);
- }
-
- ///
- /// Spec Example 3: "Which categories have more than 20 products?"
- /// COUNT(*) GROUP BY categoryName HAVING gt 20
- ///
- [TestMethod]
- public void SpecExample03_CountGroupByHavingGt_CorrectAlias()
- {
- string alias = AggregateRecordsTool.ComputeAlias("count", "*");
- Assert.AreEqual("count", alias);
- }
+ #region Spec Example Tests - Alias Validation
///
- /// Spec Example 4: "For discontinued products, which categories have total revenue between $500 and $10,000?"
- /// SUM(unitPrice) GROUP BY categoryName HAVING gte 500 AND lte 10000
+ /// Validates the alias convention for all 13 spec examples.
+ /// Examples that compute count(*) expect "count"; all others expect "function_field".
///
- [TestMethod]
- public void SpecExample04_SumFilterGroupByHavingRange_CorrectAlias()
+ [DataTestMethod]
+ // Ex 1, 3, 6, 8, 11, 12: COUNT(*) → "count"
+ [DataRow("count", "*", "count", DisplayName = "Spec 01/03/06/08/11/12: count(*) → 'count'")]
+ // Ex 2, 7, 9, 13: AVG(unitPrice) → "avg_unitPrice"
+ [DataRow("avg", "unitPrice", "avg_unitPrice", DisplayName = "Spec 02/07/09/13: avg(unitPrice)")]
+ // Ex 4, 10: SUM(unitPrice) → "sum_unitPrice"
+ [DataRow("sum", "unitPrice", "sum_unitPrice", DisplayName = "Spec 04/10: sum(unitPrice)")]
+ // Ex 5: COUNT(supplierId) → "count_supplierId"
+ [DataRow("count", "supplierId", "count_supplierId", DisplayName = "Spec 05: count(supplierId)")]
+ public void SpecExamples_AliasConvention_IsCorrect(string function, string field, string expectedAlias)
{
- string alias = AggregateRecordsTool.ComputeAlias("sum", "unitPrice");
- Assert.AreEqual("sum_unitPrice", alias);
+ Assert.AreEqual(expectedAlias, AggregateRecordsTool.ComputeAlias(function, field));
}
///
- /// Spec Example 5: "How many distinct suppliers do we have?"
- /// COUNT(DISTINCT supplierId)
+ /// Spec Example 11-12: Cursor offset for first-page starts at 0, continuation decodes correctly.
///
[TestMethod]
- public void SpecExample05_CountDistinct_CorrectAlias()
+ public void SpecExample_PaginationCursor_DecodesCorrectly()
{
- string alias = AggregateRecordsTool.ComputeAlias("count", "supplierId");
- Assert.AreEqual("count_supplierId", alias);
- }
-
- ///
- /// Spec Example 6: "Which categories have exactly 5 or 10 products?"
- /// COUNT(*) GROUP BY categoryName HAVING IN (5, 10)
- ///
- [TestMethod]
- public void SpecExample06_CountGroupByHavingIn_CorrectAlias()
- {
- string alias = AggregateRecordsTool.ComputeAlias("count", "*");
- Assert.AreEqual("count", alias);
- }
-
- ///
- /// Spec Example 7: "Average distinct unit price per category, for categories averaging over $25"
- /// AVG(DISTINCT unitPrice) GROUP BY categoryName HAVING gt 25
- ///
- [TestMethod]
- public void SpecExample07_AvgDistinctGroupByHavingGt_CorrectAlias()
- {
- string alias = AggregateRecordsTool.ComputeAlias("avg", "unitPrice");
- Assert.AreEqual("avg_unitPrice", alias);
- }
-
- ///
- /// Spec Example 8: "Which categories have the most products?"
- /// COUNT(*) GROUP BY categoryName ORDER BY DESC
- ///
- [TestMethod]
- public void SpecExample08_CountGroupByOrderByDesc_CorrectAlias()
- {
- string alias = AggregateRecordsTool.ComputeAlias("count", "*");
- Assert.AreEqual("count", alias);
- }
-
- ///
- /// Spec Example 9: "What are the cheapest categories by average price?"
- /// AVG(unitPrice) GROUP BY categoryName ORDER BY ASC
- ///
- [TestMethod]
- public void SpecExample09_AvgGroupByOrderByAsc_CorrectAlias()
- {
- string alias = AggregateRecordsTool.ComputeAlias("avg", "unitPrice");
- Assert.AreEqual("avg_unitPrice", alias);
- }
-
- ///
- /// Spec Example 10: "For categories with over $500 revenue, which has the highest total?"
- /// SUM(unitPrice) GROUP BY categoryName HAVING gt 500 ORDER BY DESC
- ///
- [TestMethod]
- public void SpecExample10_SumFilterGroupByHavingGtOrderByDesc_CorrectAlias()
- {
- string alias = AggregateRecordsTool.ComputeAlias("sum", "unitPrice");
- Assert.AreEqual("sum_unitPrice", alias);
- }
-
- ///
- /// Spec Example 11: "Show me the first 5 categories by product count"
- /// COUNT(*) GROUP BY categoryName ORDER BY DESC FIRST 5
- ///
- [TestMethod]
- public void SpecExample11_CountGroupByOrderByDescFirst5_CorrectAliasAndCursor()
- {
- string alias = AggregateRecordsTool.ComputeAlias("count", "*");
- Assert.AreEqual("count", alias);
+ // First page: null cursor → offset 0
Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(null));
- }
- ///
- /// Spec Example 12: "Show me the next 5 categories" (continuation of Example 11)
- /// COUNT(*) GROUP BY categoryName ORDER BY DESC FIRST 5 AFTER cursor
- ///
- [TestMethod]
- public void SpecExample12_CountGroupByOrderByDescFirst5After_CorrectCursorDecode()
- {
+ // Continuation: cursor encoding "5" → offset 5
string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes("5"));
- int offset = AggregateRecordsTool.DecodeCursorOffset(cursor);
- Assert.AreEqual(5, offset);
-
- string alias = AggregateRecordsTool.ComputeAlias("count", "*");
- Assert.AreEqual("count", alias);
- }
-
- ///
- /// Spec Example 13: "Show me the top 3 most expensive categories by average price"
- /// AVG(unitPrice) GROUP BY categoryName ORDER BY DESC FIRST 3
- ///
- [TestMethod]
- public void SpecExample13_AvgGroupByOrderByDescFirst3_CorrectAlias()
- {
- string alias = AggregateRecordsTool.ComputeAlias("avg", "unitPrice");
- Assert.AreEqual("avg_unitPrice", alias);
+ Assert.AreEqual(5, AggregateRecordsTool.DecodeCursorOffset(cursor));
}
#endregion
- #region Blog Scenario Tests (devblogs.microsoft.com/azure-sql/data-api-builder-mcp-questions)
-
- // These tests verify that the exact JSON payloads from the DAB MCP blog
- // pass input validation. The tool will fail at metadata resolution (no real DB)
- // but must NOT return "InvalidArguments", proving the input shape is valid.
+ #region Blog Scenario Tests
///
- /// Blog Scenario 1: Strategic customer importance
- /// "Who is our most important customer based on total revenue?"
- /// Uses: sum, totalRevenue, filter, groupby [customerId, customerName], orderby desc, first 1
+ /// Validates that exact JSON payloads from the DAB MCP blog pass input validation.
+ /// The tool will fail at metadata resolution (no real DB) but must NOT return "InvalidArguments".
///
- [TestMethod]
- public async Task BlogScenario1_StrategicCustomerImportance_PassesInputValidation()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- string json = @"{
- ""entity"": ""Book"",
- ""function"": ""sum"",
- ""field"": ""totalRevenue"",
- ""filter"": ""isActive eq true and orderDate ge 2025-01-01"",
- ""groupby"": [""customerId"", ""customerName""],
- ""orderby"": ""desc"",
- ""first"": 1
- }";
-
- JsonDocument args = JsonDocument.Parse(json);
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ [DataTestMethod]
+ [DataRow(
+ @"{""entity"":""Book"",""function"":""sum"",""field"":""totalRevenue"",""filter"":""isActive eq true and orderDate ge 2025-01-01"",""groupby"":[""customerId"",""customerName""],""orderby"":""desc"",""first"":1}",
+ "sum", "totalRevenue",
+ DisplayName = "Blog 1: Strategic customer importance")]
+ [DataRow(
+ @"{""entity"":""Book"",""function"":""sum"",""field"":""totalRevenue"",""filter"":""isActive eq true and inStock gt 0 and orderDate ge 2025-01-01"",""groupby"":[""productId"",""productName""],""orderby"":""asc"",""first"":1}",
+ "sum", "totalRevenue",
+ DisplayName = "Blog 2: Product discontinuation")]
+ [DataRow(
+ @"{""entity"":""Book"",""function"":""avg"",""field"":""quarterlyRevenue"",""filter"":""fiscalYear eq 2025"",""groupby"":[""region""],""having"":{""gt"":2000000},""orderby"":""desc""}",
+ "avg", "quarterlyRevenue",
+ DisplayName = "Blog 3: Quarterly performance")]
+ [DataRow(
+ @"{""entity"":""Book"",""function"":""sum"",""field"":""totalRevenue"",""filter"":""isActive eq true and customerType eq 'Retail' and (region eq 'Midwest' or region eq 'Southwest')"",""groupby"":[""region"",""customerTier""],""having"":{""gt"":5000000},""orderby"":""desc""}",
+ "sum", "totalRevenue",
+ DisplayName = "Blog 4: Revenue concentration")]
+ [DataRow(
+ @"{""entity"":""Book"",""function"":""sum"",""field"":""onHandValue"",""filter"":""discontinued eq true and onHandValue gt 0"",""groupby"":[""productLine"",""warehouseRegion""],""having"":{""gt"":2500000},""orderby"":""desc""}",
+ "sum", "onHandValue",
+ DisplayName = "Blog 5: Risk exposure")]
+ public async Task BlogScenario_PassesInputValidation(string json, string function, string field)
+ {
+ IServiceProvider sp = CreateDefaultServiceProvider();
+
+ CallToolResult result = await ExecuteToolAsync(sp, json);
Assert.IsTrue(result.IsError == true);
JsonElement content = ParseContent(result);
string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
Assert.AreNotEqual("InvalidArguments", errorType,
- "Blog scenario 1 JSON must pass input validation (sum/totalRevenue/groupby/orderby/first).");
- Assert.AreEqual("sum_totalRevenue", AggregateRecordsTool.ComputeAlias("sum", "totalRevenue"));
- }
-
- ///
- /// Blog Scenario 2: Product discontinuation candidate
- /// "Which product should we consider discontinuing based on lowest totalRevenue?"
- /// Uses: sum, totalRevenue, filter, groupby [productId, productName], orderby asc, first 1
- ///
- [TestMethod]
- public async Task BlogScenario2_ProductDiscontinuation_PassesInputValidation()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- string json = @"{
- ""entity"": ""Book"",
- ""function"": ""sum"",
- ""field"": ""totalRevenue"",
- ""filter"": ""isActive eq true and inStock gt 0 and orderDate ge 2025-01-01"",
- ""groupby"": [""productId"", ""productName""],
- ""orderby"": ""asc"",
- ""first"": 1
- }";
-
- JsonDocument args = JsonDocument.Parse(json);
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ $"Blog scenario JSON must pass input validation. Got error: {errorType}");
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
- Assert.AreNotEqual("InvalidArguments", errorType,
- "Blog scenario 2 JSON must pass input validation (sum/totalRevenue/groupby/orderby asc/first).");
- Assert.AreEqual("sum_totalRevenue", AggregateRecordsTool.ComputeAlias("sum", "totalRevenue"));
+ // Verify alias convention
+ string expectedAlias = $"{function}_{field}";
+ Assert.AreEqual(expectedAlias, AggregateRecordsTool.ComputeAlias(function, field));
}
- ///
- /// Blog Scenario 3: Forward-looking performance expectation
- /// "Average quarterlyRevenue per region, regions averaging > $2,000,000?"
- /// Uses: avg, quarterlyRevenue, filter, groupby [region], having {gt: 2000000}, orderby desc
- ///
- [TestMethod]
- public async Task BlogScenario3_QuarterlyPerformance_PassesInputValidation()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ #endregion
- string json = @"{
- ""entity"": ""Book"",
- ""function"": ""avg"",
- ""field"": ""quarterlyRevenue"",
- ""filter"": ""fiscalYear eq 2025"",
- ""groupby"": [""region""],
- ""having"": { ""gt"": 2000000 },
- ""orderby"": ""desc""
- }";
+ #region FieldNotFound Error Helper Tests
- JsonDocument args = JsonDocument.Parse(json);
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ [DataTestMethod]
+ [DataRow("Product", "badField", "field", DisplayName = "field parameter")]
+ [DataRow("Product", "invalidCol", "groupby", DisplayName = "groupby parameter")]
+ public void FieldNotFound_ReturnsCorrectErrorWithGuidance(string entity, string fieldName, string paramName)
+ {
+ CallToolResult result = McpErrorHelpers.FieldNotFound("aggregate_records", entity, fieldName, paramName, null);
Assert.IsTrue(result.IsError == true);
JsonElement content = ParseContent(result);
- string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
- Assert.AreNotEqual("InvalidArguments", errorType,
- "Blog scenario 3 JSON must pass input validation (avg/quarterlyRevenue/groupby/having gt).");
- Assert.AreEqual("avg_quarterlyRevenue", AggregateRecordsTool.ComputeAlias("avg", "quarterlyRevenue"));
- }
+ JsonElement error = content.GetProperty("error");
- ///
- /// Blog Scenario 4: Revenue concentration across regions
- /// "Total revenue of active retail customers in Midwest/Southwest, >$5M, by region and customerTier"
- /// Uses: sum, totalRevenue, complex filter with OR, groupby [region, customerTier], having {gt: 5000000}, orderby desc
- ///
- [TestMethod]
- public async Task BlogScenario4_RevenueConcentration_PassesInputValidation()
- {
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
+ Assert.AreEqual("FieldNotFound", error.GetProperty("type").GetString());
+ string message = error.GetProperty("message").GetString()!;
- string json = @"{
- ""entity"": ""Book"",
- ""function"": ""sum"",
- ""field"": ""totalRevenue"",
- ""filter"": ""isActive eq true and customerType eq 'Retail' and (region eq 'Midwest' or region eq 'Southwest')"",
- ""groupby"": [""region"", ""customerTier""],
- ""having"": { ""gt"": 5000000 },
- ""orderby"": ""desc""
- }";
+ AssertContainsAll(message,
+ (fieldName, "Must include the invalid field name."),
+ (entity, "Must include the entity name."),
+ (paramName, "Must identify which parameter was invalid."),
+ ("describe_entities", "Must guide the model to call describe_entities."));
+ }
- JsonDocument args = JsonDocument.Parse(json);
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
+ #endregion
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
- Assert.AreNotEqual("InvalidArguments", errorType,
- "Blog scenario 4 JSON must pass input validation (sum/totalRevenue/complex filter/multi-groupby/having).");
- Assert.AreEqual("sum_totalRevenue", AggregateRecordsTool.ComputeAlias("sum", "totalRevenue"));
- }
+ #region Reusable Assertion Helpers
///
- /// Blog Scenario 5: Risk exposure by product line
- /// "For discontinued products, total onHandValue by productLine and warehouseRegion, >$2.5M"
- /// Uses: sum, onHandValue, filter, groupby [productLine, warehouseRegion], having {gt: 2500000}, orderby desc
+    /// Parses the JSON content from the first text block of a CallToolResult.
///
- [TestMethod]
- public async Task BlogScenario5_RiskExposure_PassesInputValidation()
+ private static JsonElement ParseContent(CallToolResult result)
{
- RuntimeConfig config = CreateConfig();
- IServiceProvider sp = CreateServiceProvider(config);
- AggregateRecordsTool tool = new();
-
- string json = @"{
- ""entity"": ""Book"",
- ""function"": ""sum"",
- ""field"": ""onHandValue"",
- ""filter"": ""discontinued eq true and onHandValue gt 0"",
- ""groupby"": [""productLine"", ""warehouseRegion""],
- ""having"": { ""gt"": 2500000 },
- ""orderby"": ""desc""
- }";
-
- JsonDocument args = JsonDocument.Parse(json);
- CallToolResult result = await tool.ExecuteAsync(args, sp, CancellationToken.None);
-
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- string errorType = content.GetProperty("error").GetProperty("type").GetString()!;
- Assert.AreNotEqual("InvalidArguments", errorType,
- "Blog scenario 5 JSON must pass input validation (sum/onHandValue/filter/multi-groupby/having).");
- Assert.AreEqual("sum_onHandValue", AggregateRecordsTool.ComputeAlias("sum", "onHandValue"));
+ TextContentBlock firstContent = (TextContentBlock)result.Content[0];
+ return JsonDocument.Parse(firstContent.Text).RootElement;
}
///
- /// Verifies that the tool schema supports all properties used across the 5 blog scenarios.
+ /// Asserts that the result is an error with the expected error type.
+ /// Returns the error message for further assertions.
///
- [TestMethod]
- public void BlogScenarios_ToolSchema_SupportsAllRequiredProperties()
+ private static string AssertErrorResult(CallToolResult result, string expectedErrorType)
{
- AggregateRecordsTool tool = new();
- Tool metadata = tool.GetToolMetadata();
- JsonElement properties = metadata.InputSchema.GetProperty("properties");
-
- string[] blogProperties = { "entity", "function", "field", "filter", "groupby", "orderby", "having", "first" };
- foreach (string prop in blogProperties)
- {
- Assert.IsTrue(properties.TryGetProperty(prop, out _),
- $"Tool schema must include '{prop}' property used in blog scenarios.");
- }
-
- // Additional schema properties used in spec but not blog
- Assert.IsTrue(properties.TryGetProperty("distinct", out _), "Tool schema must include 'distinct'.");
- Assert.IsTrue(properties.TryGetProperty("after", out _), "Tool schema must include 'after'.");
+ Assert.IsTrue(result.IsError == true, "Result should be an error.");
+ JsonElement content = ParseContent(result);
+ Assert.IsTrue(content.TryGetProperty("error", out JsonElement error), "Content must have an 'error' property.");
+ Assert.AreEqual(expectedErrorType, error.GetProperty("type").GetString(),
+ $"Expected error type '{expectedErrorType}'.");
+ return error.TryGetProperty("message", out JsonElement msg) ? msg.GetString() ?? string.Empty : string.Empty;
}
///
- /// Verifies that the tool description instructs models to call describe_entities first.
+ /// Asserts the schema property exists with the given type.
///
- [TestMethod]
- public void BlogScenarios_ToolDescription_ForcesDescribeEntitiesFirst()
+ private static void AssertSchemaProperty(JsonElement properties, string propertyName, string expectedType)
{
- AggregateRecordsTool tool = new();
- Tool metadata = tool.GetToolMetadata();
-
- Assert.IsTrue(metadata.Description!.Contains("describe_entities"),
- "Tool description must instruct models to call describe_entities first.");
- Assert.IsTrue(metadata.Description.Contains("1)"),
- "Tool description must use numbered workflow steps.");
+ Assert.IsTrue(properties.TryGetProperty(propertyName, out JsonElement prop),
+ $"Schema must include '{propertyName}' property.");
+ Assert.AreEqual(expectedType, prop.GetProperty("type").GetString(),
+ $"Schema property '{propertyName}' must have type '{expectedType}'.");
}
///
- /// Verifies that the tool description documents the alias convention used in blog examples.
+ /// Asserts that the given text contains all expected substrings, with per-assertion failure messages.
///
- [TestMethod]
- public void BlogScenarios_ToolDescription_DocumentsAliasConvention()
+ private static void AssertContainsAll(string text, params (string expected, string failMessage)[] checks)
{
- AggregateRecordsTool tool = new();
- Tool metadata = tool.GetToolMetadata();
-
- Assert.IsTrue(metadata.Description!.Contains("{function}_{field}"),
- "Tool description must document the alias pattern '{function}_{field}'.");
- Assert.IsTrue(metadata.Description.Contains("'count'"),
- "Tool description must mention the special 'count' alias for count(*).");
+ Assert.IsNotNull(text);
+ foreach (var (expected, failMessage) in checks)
+ {
+ Assert.IsTrue(text.Contains(expected), failMessage);
+ }
}
#endregion
- #region FieldNotFound Error Helper Tests
+ #region Reusable Execution Helpers
///
- /// Verifies the FieldNotFound error helper produces the correct error type
- /// and a model-friendly message that includes the field name, entity, and guidance.
+ /// Executes the AggregateRecordsTool with the given JSON arguments.
///
- [TestMethod]
- public void FieldNotFound_ReturnsCorrectErrorTypeAndMessage()
+ private static async Task ExecuteToolAsync(IServiceProvider sp, string json)
{
- CallToolResult result = McpErrorHelpers.FieldNotFound("aggregate_records", "Product", "badField", "field", null);
-
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- JsonElement error = content.GetProperty("error");
-
- Assert.AreEqual("FieldNotFound", error.GetProperty("type").GetString());
- string message = error.GetProperty("message").GetString()!;
- Assert.IsTrue(message.Contains("badField"), "Message must include the invalid field name.");
- Assert.IsTrue(message.Contains("Product"), "Message must include the entity name.");
- Assert.IsTrue(message.Contains("field"), "Message must identify which parameter was invalid.");
- Assert.IsTrue(message.Contains("describe_entities"), "Message must guide the model to call describe_entities.");
+ AggregateRecordsTool tool = new();
+ JsonDocument args = JsonDocument.Parse(json);
+ return await tool.ExecuteAsync(args, sp, CancellationToken.None);
}
///
- /// Verifies the FieldNotFound error helper identifies the groupby parameter.
+ /// Creates a default service provider with aggregate_records enabled.
///
- [TestMethod]
- public void FieldNotFound_GroupBy_IdentifiesParameter()
+ private static IServiceProvider CreateDefaultServiceProvider()
{
- CallToolResult result = McpErrorHelpers.FieldNotFound("aggregate_records", "Product", "invalidCol", "groupby", null);
-
- Assert.IsTrue(result.IsError == true);
- JsonElement content = ParseContent(result);
- string message = content.GetProperty("error").GetProperty("message").GetString()!;
-
- Assert.IsTrue(message.Contains("invalidCol"), "Message must include the invalid field name.");
- Assert.IsTrue(message.Contains("groupby"), "Message must identify 'groupby' as the parameter.");
- Assert.IsTrue(message.Contains("describe_entities"), "Message must guide the model to call describe_entities.");
+ return CreateServiceProvider(CreateConfig());
}
#endregion
- #region Helper Methods
-
- private static JsonElement ParseContent(CallToolResult result)
- {
- TextContentBlock firstContent = (TextContentBlock)result.Content[0];
- return JsonDocument.Parse(firstContent.Text).RootElement;
- }
-
- private static void AssertToolDisabledError(JsonElement content)
- {
- Assert.IsTrue(content.TryGetProperty("error", out JsonElement error));
- Assert.IsTrue(error.TryGetProperty("type", out JsonElement errorType));
- Assert.AreEqual("ToolDisabled", errorType.GetString());
- }
+ #region Test Infrastructure
private static RuntimeConfig CreateConfig(bool aggregateRecordsEnabled = true)
{
diff --git a/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs b/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs
index b4ae074207..6c6a7b2f4b 100644
--- a/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs
+++ b/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs
@@ -53,8 +53,15 @@ public async Task DmlTool_RespectsEntityLevelDmlToolDisabled(string toolType, st
{
// Arrange
RuntimeConfig config = isStoredProcedure
- ? CreateConfigWithDmlToolDisabledStoredProcedure()
- : CreateConfigWithDmlToolDisabledEntity();
+ ? CreateConfig(
+ entityName: "GetBook", sourceObject: "get_book",
+ sourceType: EntitySourceType.StoredProcedure,
+ mcpOptions: new EntityMcpOptions(customToolEnabled: true, dmlToolsEnabled: false),
+ actions: new[] { EntityActionOperation.Execute })
+ : CreateConfig(
+ mcpOptions: new EntityMcpOptions(customToolEnabled: false, dmlToolsEnabled: false),
+ actions: new[] { EntityActionOperation.Read, EntityActionOperation.Create,
+ EntityActionOperation.Update, EntityActionOperation.Delete });
IServiceProvider serviceProvider = CreateServiceProvider(config);
IMcpTool tool = CreateTool(toolType);
@@ -85,8 +92,8 @@ public async Task ReadRecords_WorksWhenNotDisabledAtEntityLevel(string scenario,
{
// Arrange
RuntimeConfig config = useMcpConfig
- ? CreateConfigWithDmlToolEnabledEntity()
- : CreateConfigWithEntityWithoutMcpConfig();
+ ? CreateConfig(mcpOptions: new EntityMcpOptions(customToolEnabled: false, dmlToolsEnabled: true))
+ : CreateConfig();
IServiceProvider serviceProvider = CreateServiceProvider(config);
ReadRecordsTool tool = new();
@@ -121,7 +128,9 @@ public async Task ReadRecords_WorksWhenNotDisabledAtEntityLevel(string scenario,
public async Task ReadRecords_RuntimeDisabledTakesPrecedenceOverEntityEnabled()
{
// Arrange - Runtime has readRecords=false, but entity has DmlToolEnabled=true
- RuntimeConfig config = CreateConfigWithRuntimeDisabledButEntityEnabled();
+ RuntimeConfig config = CreateConfig(
+ mcpOptions: new EntityMcpOptions(customToolEnabled: false, dmlToolsEnabled: true),
+ readRecordsEnabled: false);
IServiceProvider serviceProvider = CreateServiceProvider(config);
ReadRecordsTool tool = new();
@@ -157,7 +166,11 @@ public async Task ReadRecords_RuntimeDisabledTakesPrecedenceOverEntityEnabled()
public async Task DynamicCustomTool_RespectsCustomToolDisabled()
{
// Arrange - Create a stored procedure entity with CustomToolEnabled=false
- RuntimeConfig config = CreateConfigWithCustomToolDisabled();
+ RuntimeConfig config = CreateConfig(
+ entityName: "GetBook", sourceObject: "get_book",
+ sourceType: EntitySourceType.StoredProcedure,
+ mcpOptions: new EntityMcpOptions(customToolEnabled: false, dmlToolsEnabled: true),
+ actions: new[] { EntityActionOperation.Execute });
IServiceProvider serviceProvider = CreateServiceProvider(config);
// Create the DynamicCustomTool with the entity that has CustomToolEnabled initially true
@@ -245,253 +258,37 @@ private static IMcpTool CreateTool(string toolType)
}
///
- /// Creates a runtime config with a table entity that has DmlToolEnabled=false.
+ /// Unified config factory. Creates a RuntimeConfig with a single entity.
+ /// Callers specify only the parameters that differ from their test scenario.
///
- private static RuntimeConfig CreateConfigWithDmlToolDisabledEntity()
+ /// Entity key name (default: "Book").
+ /// Database object (default: "books").
+ /// Table or StoredProcedure (default: Table).
+ /// Entity-level MCP options, or null for no MCP config.
+ /// Entity permissions. Defaults to Read-only.
+ /// Runtime-level readRecords flag (default: true).
+ private static RuntimeConfig CreateConfig(
+ string entityName = "Book",
+ string sourceObject = "books",
+ EntitySourceType sourceType = EntitySourceType.Table,
+ EntityMcpOptions mcpOptions = null,
+ EntityActionOperation[] actions = null,
+ bool readRecordsEnabled = true)
{
- Dictionary entities = new()
- {
- ["Book"] = new Entity(
- Source: new("books", EntitySourceType.Table, null, null),
- GraphQL: new("Book", "Books"),
- Fields: null,
- Rest: new(Enabled: true),
- Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: new[] {
- new EntityAction(Action: EntityActionOperation.Read, Fields: null, Policy: null),
- new EntityAction(Action: EntityActionOperation.Create, Fields: null, Policy: null),
- new EntityAction(Action: EntityActionOperation.Update, Fields: null, Policy: null),
- new EntityAction(Action: EntityActionOperation.Delete, Fields: null, Policy: null)
- }) },
- Mappings: null,
- Relationships: null,
- Mcp: new EntityMcpOptions(customToolEnabled: false, dmlToolsEnabled: false)
- )
- };
-
- return new RuntimeConfig(
- Schema: "test-schema",
- DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
- Runtime: new(
- Rest: new(),
- GraphQL: new(),
- Mcp: new(
- Enabled: true,
- Path: "/mcp",
- DmlTools: new(
- describeEntities: true,
- readRecords: true,
- createRecord: true,
- updateRecord: true,
- deleteRecord: true,
- executeEntity: true
- )
- ),
- Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
- ),
- Entities: new(entities)
- );
- }
-
- ///
- /// Creates a runtime config with a stored procedure that has DmlToolEnabled=false.
- ///
- private static RuntimeConfig CreateConfigWithDmlToolDisabledStoredProcedure()
- {
- Dictionary entities = new()
- {
- ["GetBook"] = new Entity(
- Source: new("get_book", EntitySourceType.StoredProcedure, null, null),
- GraphQL: new("GetBook", "GetBook"),
- Fields: null,
- Rest: new(Enabled: true),
- Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: new[] {
- new EntityAction(Action: EntityActionOperation.Execute, Fields: null, Policy: null)
- }) },
- Mappings: null,
- Relationships: null,
- Mcp: new EntityMcpOptions(customToolEnabled: true, dmlToolsEnabled: false)
- )
- };
-
- return new RuntimeConfig(
- Schema: "test-schema",
- DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
- Runtime: new(
- Rest: new(),
- GraphQL: new(),
- Mcp: new(
- Enabled: true,
- Path: "/mcp",
- DmlTools: new(
- describeEntities: true,
- readRecords: true,
- createRecord: true,
- updateRecord: true,
- deleteRecord: true,
- executeEntity: true
- )
- ),
- Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
- ),
- Entities: new(entities)
- );
- }
-
- ///
- /// Creates a runtime config with a table entity that has DmlToolEnabled=true.
- ///
- private static RuntimeConfig CreateConfigWithDmlToolEnabledEntity()
- {
- Dictionary entities = new()
- {
- ["Book"] = new Entity(
- Source: new("books", EntitySourceType.Table, null, null),
- GraphQL: new("Book", "Books"),
- Fields: null,
- Rest: new(Enabled: true),
- Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: new[] {
- new EntityAction(Action: EntityActionOperation.Read, Fields: null, Policy: null)
- }) },
- Mappings: null,
- Relationships: null,
- Mcp: new EntityMcpOptions(customToolEnabled: false, dmlToolsEnabled: true)
- )
- };
-
- return new RuntimeConfig(
- Schema: "test-schema",
- DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
- Runtime: new(
- Rest: new(),
- GraphQL: new(),
- Mcp: new(
- Enabled: true,
- Path: "/mcp",
- DmlTools: new(
- describeEntities: true,
- readRecords: true,
- createRecord: true,
- updateRecord: true,
- deleteRecord: true,
- executeEntity: true
- )
- ),
- Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
- ),
- Entities: new(entities)
- );
- }
-
- ///
- /// Creates a runtime config with a table entity that has no MCP configuration.
- ///
- private static RuntimeConfig CreateConfigWithEntityWithoutMcpConfig()
- {
- Dictionary entities = new()
- {
- ["Book"] = new Entity(
- Source: new("books", EntitySourceType.Table, null, null),
- GraphQL: new("Book", "Books"),
- Fields: null,
- Rest: new(Enabled: true),
- Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: new[] {
- new EntityAction(Action: EntityActionOperation.Read, Fields: null, Policy: null)
- }) },
- Mappings: null,
- Relationships: null,
- Mcp: null
- )
- };
+ actions ??= new[] { EntityActionOperation.Read };
- return new RuntimeConfig(
- Schema: "test-schema",
- DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
- Runtime: new(
- Rest: new(),
- GraphQL: new(),
- Mcp: new(
- Enabled: true,
- Path: "/mcp",
- DmlTools: new(
- describeEntities: true,
- readRecords: true,
- createRecord: true,
- updateRecord: true,
- deleteRecord: true,
- executeEntity: true
- )
- ),
- Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
- ),
- Entities: new(entities)
- );
- }
-
- ///
- /// Creates a runtime config with a stored procedure that has CustomToolEnabled=false.
- /// Used to test DynamicCustomTool runtime validation.
- ///
- private static RuntimeConfig CreateConfigWithCustomToolDisabled()
- {
- Dictionary entities = new()
- {
- ["GetBook"] = new Entity(
- Source: new("get_book", EntitySourceType.StoredProcedure, null, null),
- GraphQL: new("GetBook", "GetBook"),
- Fields: null,
- Rest: new(Enabled: true),
- Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: new[] {
- new EntityAction(Action: EntityActionOperation.Execute, Fields: null, Policy: null)
- }) },
- Mappings: null,
- Relationships: null,
- Mcp: new EntityMcpOptions(customToolEnabled: false, dmlToolsEnabled: true)
- )
- };
-
- return new RuntimeConfig(
- Schema: "test-schema",
- DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
- Runtime: new(
- Rest: new(),
- GraphQL: new(),
- Mcp: new(
- Enabled: true,
- Path: "/mcp",
- DmlTools: new(
- describeEntities: true,
- readRecords: true,
- createRecord: true,
- updateRecord: true,
- deleteRecord: true,
- executeEntity: true
- )
- ),
- Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
- ),
- Entities: new(entities)
- );
- }
-
- ///
- /// Creates a runtime config where runtime-level readRecords is disabled,
- /// but entity-level DmlToolEnabled is true. This tests precedence behavior.
- ///
- private static RuntimeConfig CreateConfigWithRuntimeDisabledButEntityEnabled()
- {
Dictionary entities = new()
{
- ["Book"] = new Entity(
- Source: new("books", EntitySourceType.Table, null, null),
- GraphQL: new("Book", "Books"),
+ [entityName] = new Entity(
+ Source: new(sourceObject, sourceType, null, null),
+ GraphQL: new(entityName, entityName == "Book" ? "Books" : entityName),
Fields: null,
Rest: new(Enabled: true),
- Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: new[] {
- new EntityAction(Action: EntityActionOperation.Read, Fields: null, Policy: null)
- }) },
+ Permissions: new[] { new EntityPermission(Role: "anonymous",
+ Actions: Array.ConvertAll(actions, a => new EntityAction(Action: a, Fields: null, Policy: null))) },
Mappings: null,
Relationships: null,
- Mcp: new EntityMcpOptions(customToolEnabled: false, dmlToolsEnabled: true)
+ Mcp: mcpOptions
)
};
@@ -506,7 +303,7 @@ private static RuntimeConfig CreateConfigWithRuntimeDisabledButEntityEnabled()
Path: "/mcp",
DmlTools: new(
describeEntities: true,
- readRecords: false, // Runtime-level DISABLED
+ readRecords: readRecordsEnabled,
createRecord: true,
updateRecord: true,
deleteRecord: true,
diff --git a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
index 237e40e57e..c69e3ed1b4 100644
--- a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
+++ b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
@@ -73,24 +73,20 @@ public void McpRuntimeOptions_UserProvidedQueryTimeout_TrueWhenSet()
#region Custom Value Tests
- [TestMethod]
- public void McpRuntimeOptions_CustomTimeout_1Second()
- {
- McpRuntimeOptions options = new(QueryTimeout: 1);
- Assert.AreEqual(1, options.EffectiveQueryTimeoutSeconds);
- }
-
- [TestMethod]
- public void McpRuntimeOptions_CustomTimeout_120Seconds()
+ [DataTestMethod]
+ [DataRow(1, DisplayName = "1 second")]
+ [DataRow(60, DisplayName = "60 seconds")]
+ [DataRow(120, DisplayName = "120 seconds")]
+ public void McpRuntimeOptions_CustomTimeout_ReturnsConfiguredValue(int timeoutSeconds)
{
- McpRuntimeOptions options = new(QueryTimeout: 120);
- Assert.AreEqual(120, options.EffectiveQueryTimeoutSeconds);
+ McpRuntimeOptions options = new(QueryTimeout: timeoutSeconds);
+ Assert.AreEqual(timeoutSeconds, options.EffectiveQueryTimeoutSeconds);
}
[TestMethod]
public void RuntimeConfig_McpQueryTimeout_ExposedInConfig()
{
- RuntimeConfig config = CreateConfigWithQueryTimeout(45);
+ RuntimeConfig config = CreateConfig(queryTimeout: 45);
Assert.AreEqual(45, config.Runtime?.Mcp?.QueryTimeout);
Assert.AreEqual(45, config.Runtime?.Mcp?.EffectiveQueryTimeoutSeconds);
}
@@ -98,7 +94,7 @@ public void RuntimeConfig_McpQueryTimeout_ExposedInConfig()
[TestMethod]
public void RuntimeConfig_McpQueryTimeout_DefaultWhenNotSet()
{
- RuntimeConfig config = CreateConfigWithoutQueryTimeout();
+ RuntimeConfig config = CreateConfig();
Assert.IsNull(config.Runtime?.Mcp?.QueryTimeout);
Assert.AreEqual(30, config.Runtime?.Mcp?.EffectiveQueryTimeoutSeconds);
}
@@ -111,7 +107,7 @@ public void RuntimeConfig_McpQueryTimeout_DefaultWhenNotSet()
public async Task ExecuteWithTelemetry_CompletesSuccessfully_WithinTimeout()
{
// A tool that completes immediately should succeed
- RuntimeConfig config = CreateConfigWithQueryTimeout(30);
+ RuntimeConfig config = CreateConfig(queryTimeout: 30);
IServiceProvider sp = CreateServiceProviderWithConfig(config);
IMcpTool tool = new ImmediateCompletionTool();
@@ -127,7 +123,7 @@ public async Task ExecuteWithTelemetry_CompletesSuccessfully_WithinTimeout()
public async Task ExecuteWithTelemetry_ThrowsTimeoutException_WhenToolExceedsTimeout()
{
// Configure a very short timeout (1 second) and a tool that takes longer
- RuntimeConfig config = CreateConfigWithQueryTimeout(1);
+ RuntimeConfig config = CreateConfig(queryTimeout: 1);
IServiceProvider sp = CreateServiceProviderWithConfig(config);
IMcpTool tool = new SlowTool(delaySeconds: 30);
@@ -141,7 +137,7 @@ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
[TestMethod]
public async Task ExecuteWithTelemetry_TimeoutMessage_ContainsToolName()
{
- RuntimeConfig config = CreateConfigWithQueryTimeout(1);
+ RuntimeConfig config = CreateConfig(queryTimeout: 1);
IServiceProvider sp = CreateServiceProviderWithConfig(config);
IMcpTool tool = new SlowTool(delaySeconds: 30);
@@ -164,7 +160,7 @@ public async Task ExecuteWithTelemetry_ClientCancellation_PropagatesAsCancellati
{
// Client cancellation (not timeout) should propagate as OperationCanceledException
// rather than being converted to TimeoutException.
- RuntimeConfig config = CreateConfigWithQueryTimeout(30);
+ RuntimeConfig config = CreateConfig(queryTimeout: 30);
IServiceProvider sp = CreateServiceProviderWithConfig(config);
IMcpTool tool = new SlowTool(delaySeconds: 30);
@@ -192,7 +188,7 @@ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
public async Task ExecuteWithTelemetry_AppliesTimeout_ToAllToolTypes()
{
// Verify timeout applies to both built-in and custom tool types
- RuntimeConfig config = CreateConfigWithQueryTimeout(1);
+ RuntimeConfig config = CreateConfig(queryTimeout: 1);
IServiceProvider sp = CreateServiceProviderWithConfig(config);
// Test with built-in tool type
@@ -220,7 +216,7 @@ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
public async Task ExecuteWithTelemetry_ReadsConfigPerInvocation_HotReload()
{
// First invocation with long timeout should succeed
- RuntimeConfig config1 = CreateConfigWithQueryTimeout(30);
+ RuntimeConfig config1 = CreateConfig(queryTimeout: 30);
IServiceProvider sp1 = CreateServiceProviderWithConfig(config1);
IMcpTool fastTool = new ImmediateCompletionTool();
@@ -230,7 +226,7 @@ public async Task ExecuteWithTelemetry_ReadsConfigPerInvocation_HotReload()
// Second invocation with very short timeout and a slow tool should timeout.
// This demonstrates that each invocation reads the current config value.
- RuntimeConfig config2 = CreateConfigWithQueryTimeout(1);
+ RuntimeConfig config2 = CreateConfig(queryTimeout: 1);
IServiceProvider sp2 = CreateServiceProviderWithConfig(config2);
IMcpTool slowTool = new SlowTool(delaySeconds: 30);
@@ -243,30 +239,7 @@ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
#endregion
- #region Telemetry Tests
-
- [TestMethod]
- public void MapExceptionToErrorCode_TimeoutException_ReturnsTIMEOUT()
- {
- string errorCode = McpTelemetryHelper.MapExceptionToErrorCode(new TimeoutException());
- Assert.AreEqual(McpTelemetryErrorCodes.TIMEOUT, errorCode);
- }
-
- [TestMethod]
- public void MapExceptionToErrorCode_OperationCanceled_ReturnsOPERATION_CANCELLED()
- {
- string errorCode = McpTelemetryHelper.MapExceptionToErrorCode(new OperationCanceledException());
- Assert.AreEqual(McpTelemetryErrorCodes.OPERATION_CANCELLED, errorCode);
- }
-
- [TestMethod]
- public void MapExceptionToErrorCode_TaskCanceled_ReturnsOPERATION_CANCELLED()
- {
- string errorCode = McpTelemetryHelper.MapExceptionToErrorCode(new TaskCanceledException());
- Assert.AreEqual(McpTelemetryErrorCodes.OPERATION_CANCELLED, errorCode);
- }
-
- #endregion
+ // Note: MapExceptionToErrorCode tests are in McpTelemetryTests (covers all exception types via DataRow).
#region JSON Serialization Tests
@@ -305,35 +278,7 @@ public void McpRuntimeOptions_Deserialization_DefaultsWhenOmitted()
#region Helpers
- private static RuntimeConfig CreateConfigWithQueryTimeout(int timeoutSeconds)
- {
- return new RuntimeConfig(
- Schema: "test-schema",
- DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, ConnectionString: "", Options: null),
- Runtime: new(
- Rest: new(),
- GraphQL: new(),
- Mcp: new(
- Enabled: true,
- Path: "/mcp",
- QueryTimeout: timeoutSeconds,
- DmlTools: new(
- describeEntities: true,
- readRecords: true,
- createRecord: true,
- updateRecord: true,
- deleteRecord: true,
- executeEntity: true,
- aggregateRecords: true
- )
- ),
- Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
- ),
- Entities: new(new Dictionary())
- );
- }
-
- private static RuntimeConfig CreateConfigWithoutQueryTimeout()
+ private static RuntimeConfig CreateConfig(int? queryTimeout = null)
{
return new RuntimeConfig(
Schema: "test-schema",
@@ -344,6 +289,7 @@ private static RuntimeConfig CreateConfigWithoutQueryTimeout()
Mcp: new(
Enabled: true,
Path: "/mcp",
+ QueryTimeout: queryTimeout,
DmlTools: new(
describeEntities: true,
readRecords: true,
diff --git a/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs b/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
index 92f2c68a63..454f9e8c37 100644
--- a/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
@@ -1,8 +1,6 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
-#nullable enable
-
using System;
using System.Text;
using Azure.DataApiBuilder.Mcp.BuiltInTools;
@@ -12,7 +10,7 @@ namespace Azure.DataApiBuilder.Service.Tests.UnitTests
{
///
/// Unit tests for AggregateRecordsTool helper methods.
- /// Validates alias computation, cursor decoding, and input validation logic.
+ /// Validates alias computation, cursor decoding, and error message builders.
/// SQL generation is delegated to the engine's query builder (GroupByMetadata/AggregationColumn).
///
[TestClass]
@@ -20,128 +18,92 @@ public class AggregateRecordsToolTests
{
#region ComputeAlias tests
- [TestMethod]
- [DataRow("count", "*", "count", DisplayName = "count(*) alias is 'count'")]
- [DataRow("count", "userId", "count_userId", DisplayName = "count(field) alias is 'count_field'")]
- [DataRow("avg", "price", "avg_price", DisplayName = "avg alias")]
- [DataRow("sum", "amount", "sum_amount", DisplayName = "sum alias")]
- [DataRow("min", "age", "min_age", DisplayName = "min alias")]
- [DataRow("max", "score", "max_score", DisplayName = "max alias")]
+ [DataTestMethod]
+ [DataRow("count", "*", "count", DisplayName = "count(*) → 'count'")]
+ [DataRow("count", "userId", "count_userId", DisplayName = "count(userId) → 'count_userId'")]
+ [DataRow("avg", "price", "avg_price", DisplayName = "avg(price) → 'avg_price'")]
+ [DataRow("sum", "amount", "sum_amount", DisplayName = "sum(amount) → 'sum_amount'")]
+ [DataRow("min", "age", "min_age", DisplayName = "min(age) → 'min_age'")]
+ [DataRow("max", "score", "max_score", DisplayName = "max(score) → 'max_score'")]
+ // Blog scenario aliases
+ [DataRow("sum", "totalRevenue", "sum_totalRevenue", DisplayName = "Blog: sum(totalRevenue) → 'sum_totalRevenue'")]
+ [DataRow("avg", "quarterlyRevenue", "avg_quarterlyRevenue", DisplayName = "Blog: avg(quarterlyRevenue) → 'avg_quarterlyRevenue'")]
+ [DataRow("sum", "onHandValue", "sum_onHandValue", DisplayName = "Blog: sum(onHandValue) → 'sum_onHandValue'")]
public void ComputeAlias_ReturnsExpectedAlias(string function, string field, string expectedAlias)
{
- string result = AggregateRecordsTool.ComputeAlias(function, field);
- Assert.AreEqual(expectedAlias, result);
+ Assert.AreEqual(expectedAlias, AggregateRecordsTool.ComputeAlias(function, field));
}
#endregion
#region DecodeCursorOffset tests
- [TestMethod]
- public void DecodeCursorOffset_NullCursor_ReturnsZero()
+ [DataTestMethod]
+ [DataRow(null, 0, DisplayName = "null cursor → 0")]
+ [DataRow("", 0, DisplayName = "empty cursor → 0")]
+ [DataRow("not-valid-base64!!", 0, DisplayName = "invalid base64 → 0")]
+ public void DecodeCursorOffset_InvalidInput_ReturnsZero(string? cursor, int expected)
{
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(null));
+ Assert.AreEqual(expected, AggregateRecordsTool.DecodeCursorOffset(cursor));
}
- [TestMethod]
- public void DecodeCursorOffset_EmptyCursor_ReturnsZero()
+ [DataTestMethod]
+ [DataRow("abc", 0, DisplayName = "non-numeric base64 → 0")]
+ [DataRow("-5", 0, DisplayName = "negative offset → 0")]
+ [DataRow("0", 0, DisplayName = "zero offset → 0")]
+ [DataRow("3", 3, DisplayName = "offset 3 round-trip")]
+ [DataRow("5", 5, DisplayName = "offset 5 round-trip")]
+ [DataRow("1000", 1000, DisplayName = "large offset round-trip")]
+ public void DecodeCursorOffset_Base64EncodedValue_ReturnsExpectedOffset(string rawValue, int expectedOffset)
{
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(""));
+ string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes(rawValue));
+ Assert.AreEqual(expectedOffset, AggregateRecordsTool.DecodeCursorOffset(cursor));
}
- [TestMethod]
- public void DecodeCursorOffset_ValidBase64_ReturnsOffset()
- {
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes("5"));
- Assert.AreEqual(5, AggregateRecordsTool.DecodeCursorOffset(cursor));
- }
+ #endregion
- [TestMethod]
- public void DecodeCursorOffset_InvalidBase64_ReturnsZero()
- {
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset("not-valid-base64!!"));
- }
+ #region Error message builder tests
- [TestMethod]
- public void DecodeCursorOffset_NonNumericBase64_ReturnsZero()
+ [DataTestMethod]
+ [DataRow("Product", DisplayName = "Product entity")]
+ [DataRow("LargeProductCatalog", DisplayName = "LargeProductCatalog entity")]
+ public void BuildTimeoutErrorMessage_ContainsExpectedContent(string entityName)
{
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes("abc"));
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(cursor));
+ string message = AggregateRecordsTool.BuildTimeoutErrorMessage(entityName);
+ AssertErrorMessageContains(message, entityName, "NOT a tool error", "filter", "groupby", "first");
}
- [TestMethod]
- public void DecodeCursorOffset_RoundTrip_FirstPage()
+ [DataTestMethod]
+ [DataRow("Product", DisplayName = "Product entity")]
+ public void BuildTaskCanceledErrorMessage_ContainsExpectedContent(string entityName)
{
- int offset = 3;
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes(offset.ToString()));
- Assert.AreEqual(offset, AggregateRecordsTool.DecodeCursorOffset(cursor));
+ string message = AggregateRecordsTool.BuildTaskCanceledErrorMessage(entityName);
+ AssertErrorMessageContains(message, entityName, "NOT a tool error", "timeout", "filter", "first");
}
- [TestMethod]
- public void DecodeCursorOffset_NegativeValue_ReturnsZero()
+ [DataTestMethod]
+ [DataRow("LargeProductCatalog", DisplayName = "LargeProductCatalog entity")]
+ public void BuildOperationCanceledErrorMessage_ContainsExpectedContent(string entityName)
{
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes("-5"));
- Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(cursor));
+ string message = AggregateRecordsTool.BuildOperationCanceledErrorMessage(entityName);
+ AssertErrorMessageContains(message, entityName, "NOT a tool error", "No results were returned");
}
#endregion
- #region Blog scenario tests - alias and type validation
-
- ///
- /// Blog Example 1: Strategic customer importance
- /// "Who is our most important customer based on total revenue?"
- /// SUM(totalRevenue) grouped by customerId, customerName, ORDER BY DESC, FIRST 1
- ///
- [TestMethod]
- public void BlogScenario_StrategicCustomerImportance_AliasAndTypeCorrect()
- {
- string alias = AggregateRecordsTool.ComputeAlias("sum", "totalRevenue");
- Assert.AreEqual("sum_totalRevenue", alias);
- }
-
- ///
- /// Blog Example 2: Product discontinuation candidate
- /// Lowest totalRevenue with orderby=asc, first=1
- ///
- [TestMethod]
- public void BlogScenario_ProductDiscontinuation_AliasAndTypeCorrect()
- {
- string alias = AggregateRecordsTool.ComputeAlias("sum", "totalRevenue");
- Assert.AreEqual("sum_totalRevenue", alias);
- }
-
- ///
- /// Blog Example 3: Forward-looking performance expectation
- /// AVG quarterlyRevenue with HAVING gt 2000000
- ///
- [TestMethod]
- public void BlogScenario_QuarterlyPerformance_AliasAndTypeCorrect()
- {
- string alias = AggregateRecordsTool.ComputeAlias("avg", "quarterlyRevenue");
- Assert.AreEqual("avg_quarterlyRevenue", alias);
- }
-
- ///
- /// Blog Example 4: Revenue concentration across regions
- /// SUM totalRevenue grouped by region and customerTier, HAVING gt 5000000
- ///
- [TestMethod]
- public void BlogScenario_RevenueConcentration_AliasAndTypeCorrect()
- {
- string alias = AggregateRecordsTool.ComputeAlias("sum", "totalRevenue");
- Assert.AreEqual("sum_totalRevenue", alias);
- }
+ #region Helper Methods
///
- /// Blog Example 5: Risk exposure by product line
- /// SUM onHandValue grouped by productLine and warehouseRegion, HAVING gt 2500000
+ /// Asserts that the error message contains all expected substrings.
///
- [TestMethod]
- public void BlogScenario_RiskExposure_AliasAndTypeCorrect()
+ private static void AssertErrorMessageContains(string message, params string[] expectedSubstrings)
{
- string alias = AggregateRecordsTool.ComputeAlias("sum", "onHandValue");
- Assert.AreEqual("sum_onHandValue", alias);
+ Assert.IsNotNull(message);
+ foreach (string expected in expectedSubstrings)
+ {
+ Assert.IsTrue(message.Contains(expected),
+ $"Error message must contain '{expected}'. Actual: '{message}'");
+ }
}
#endregion
From 987294e001bf8a7c96bb68eeace58654c824e146 Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Fri, 6 Mar 2026 16:13:43 +0530
Subject: [PATCH 35/43] Additional self review fixes
---
.../BuiltInTools/AggregateRecordsTool.cs | 30 ++++++++++++++++---
1 file changed, 26 insertions(+), 4 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 0225d43731..31493359b7 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -362,7 +362,17 @@ public async Task ExecuteAsync(
}
// Parse distinct
- bool distinct = root.TryGetProperty("distinct", out JsonElement distinctElement) && distinctElement.GetBoolean();
+ bool distinct = false;
+ if (root.TryGetProperty("distinct", out JsonElement distinctElement))
+ {
+ if (distinctElement.ValueKind != JsonValueKind.True && distinctElement.ValueKind != JsonValueKind.False)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ $"Argument 'distinct' must be a boolean (true or false). Got: '{distinctElement}'.", logger);
+ }
+
+ distinct = distinctElement.GetBoolean();
+ }
if (isCountStar && distinct)
{
@@ -405,14 +415,15 @@ public async Task ExecuteAsync(
// Parse after
string? after = root.TryGetProperty("after", out JsonElement afterElement) ? afterElement.GetString() : null;
- // Parse groupby
+ // Parse groupby (deduplicate to avoid redundant GROUP BY columns)
List groupby = new();
+ HashSet seenGroupby = new(StringComparer.OrdinalIgnoreCase);
if (root.TryGetProperty("groupby", out JsonElement groupbyElement) && groupbyElement.ValueKind == JsonValueKind.Array)
{
foreach (JsonElement groupbyItem in groupbyElement.EnumerateArray())
{
string? groupbyFieldName = groupbyItem.GetString();
- if (!string.IsNullOrWhiteSpace(groupbyFieldName))
+ if (!string.IsNullOrWhiteSpace(groupbyFieldName) && seenGroupby.Add(groupbyFieldName))
{
groupby.Add(groupbyFieldName);
}
@@ -548,6 +559,12 @@ public async Task ExecuteAsync(
havingInValues.Add(item.GetDouble());
}
+
+ if (havingInValues.Count == 0)
+ {
+ return McpResponseBuilder.BuildErrorResult(toolName, "InvalidArguments",
+ "The 'having.in' array must contain at least one numeric value.", logger);
+ }
}
else
{
@@ -712,7 +729,12 @@ public async Task ExecuteAsync(
if (!args.IsCountStar)
{
- sqlMetadataProvider.TryGetBackingColumn(entityName, args.Field, out string? backingField);
+ if (!sqlMetadataProvider.TryGetBackingColumn(entityName, args.Field, out string? backingField))
+ {
+ error = McpErrorHelpers.FieldNotFound(toolName, entityName, args.Field, "field", logger);
+ return null;
+ }
+
return backingField;
}
From d5de2b496fab243c98029433b55756d889a63d13 Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Fri, 6 Mar 2026 16:38:50 +0530
Subject: [PATCH 36/43] Format and consistency fixing
---
.../BuiltInTools/AggregateRecordsTool.cs | 10 +++-------
1 file changed, 3 insertions(+), 7 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index 31493359b7..db46a10ee6 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -263,24 +263,20 @@ public async Task ExecuteAsync(
? BuildPaginatedResponse(resultArray, args.First.Value, args.After, entityName, logger)
: BuildSimpleResponse(resultArray, entityName, alias, logger);
}
- catch (TimeoutException timeoutException)
+ catch (TimeoutException)
{
- logger?.LogError(timeoutException, "Aggregation operation timed out for entity {Entity}.", entityName);
return McpResponseBuilder.BuildErrorResult(toolName, "TimeoutError", BuildTimeoutErrorMessage(entityName), logger);
}
- catch (TaskCanceledException taskCanceledException)
+ catch (TaskCanceledException)
{
- logger?.LogError(taskCanceledException, "Aggregation task was canceled for entity {Entity}.", entityName);
return McpResponseBuilder.BuildErrorResult(toolName, "TimeoutError", BuildTaskCanceledErrorMessage(entityName), logger);
}
catch (OperationCanceledException)
{
- logger?.LogWarning("Aggregation operation was canceled for entity {Entity}.", entityName);
return McpResponseBuilder.BuildErrorResult(toolName, "OperationCanceled", BuildOperationCanceledErrorMessage(entityName), logger);
}
catch (DbException dbException)
{
- logger?.LogError(dbException, "Database error during aggregation for entity {Entity}.", entityName);
return McpResponseBuilder.BuildErrorResult(toolName, "DatabaseOperationFailed", dbException.Message, logger);
}
catch (ArgumentException argumentException)
@@ -660,7 +656,7 @@ public async Task ExecuteAsync(
authResolver,
entityName,
EntityActionOperation.Read,
- out string? effectiveRole,
+ out string? _,
out string readAuthError))
{
string finalError = readAuthError.StartsWith("You do not have permission", StringComparison.OrdinalIgnoreCase)
From a08da11b556bd64b6533a94b5cb26fa6868c8a61 Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Mon, 9 Mar 2026 18:34:37 +0530
Subject: [PATCH 37/43] Review comments fixes
---
.../Core/McpToolRegistry.cs | 8 --
src/Service.Tests/Mcp/McpQueryTimeoutTests.cs | 40 --------
src/Service.Tests/Mcp/McpToolRegistryTests.cs | 44 ---------
.../UnitTests/AggregateRecordsToolTests.cs | 94 +++----------------
src/Service/Utilities/McpStdioHelper.cs | 11 ++-
5 files changed, 22 insertions(+), 175 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs
index 626ddc9125..0ba182b6db 100644
--- a/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs
@@ -37,14 +37,6 @@ public void RegisterTool(IMcpTool tool)
// Check for duplicate tool names (case-insensitive)
if (_tools.TryGetValue(toolName, out IMcpTool? existingTool))
{
- // If the same tool instance is already registered, skip silently.
- // This can happen when both McpToolRegistryInitializer (hosted service)
- // and McpStdioHelper register tools during stdio mode startup.
- if (ReferenceEquals(existingTool, tool))
- {
- return;
- }
-
string existingToolType = existingTool.ToolType == ToolType.BuiltIn ? "built-in" : "custom";
string newToolType = tool.ToolType == ToolType.BuiltIn ? "built-in" : "custom";
diff --git a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
index c69e3ed1b4..6eae5b97fb 100644
--- a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
+++ b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
@@ -31,46 +31,6 @@ namespace Azure.DataApiBuilder.Service.Tests.Mcp
[TestClass]
public class McpQueryTimeoutTests
{
- #region Default Value Tests
-
- [TestMethod]
- public void McpRuntimeOptions_DefaultQueryTimeout_Is30Seconds()
- {
- Assert.AreEqual(30, McpRuntimeOptions.DEFAULT_QUERY_TIMEOUT_SECONDS);
- }
-
- [TestMethod]
- public void McpRuntimeOptions_EffectiveTimeout_ReturnsDefault_WhenNotConfigured()
- {
- McpRuntimeOptions options = new();
- Assert.IsNull(options.QueryTimeout);
- Assert.AreEqual(30, options.EffectiveQueryTimeoutSeconds);
- }
-
- [TestMethod]
- public void McpRuntimeOptions_EffectiveTimeout_ReturnsConfiguredValue()
- {
- McpRuntimeOptions options = new(QueryTimeout: 60);
- Assert.AreEqual(60, options.QueryTimeout);
- Assert.AreEqual(60, options.EffectiveQueryTimeoutSeconds);
- }
-
- [TestMethod]
- public void McpRuntimeOptions_UserProvidedQueryTimeout_FalseByDefault()
- {
- McpRuntimeOptions options = new();
- Assert.IsFalse(options.UserProvidedQueryTimeout);
- }
-
- [TestMethod]
- public void McpRuntimeOptions_UserProvidedQueryTimeout_TrueWhenSet()
- {
- McpRuntimeOptions options = new(QueryTimeout: 45);
- Assert.IsTrue(options.UserProvidedQueryTimeout);
- }
-
- #endregion
-
#region Custom Value Tests
[DataTestMethod]
diff --git a/src/Service.Tests/Mcp/McpToolRegistryTests.cs b/src/Service.Tests/Mcp/McpToolRegistryTests.cs
index 7bbd91341c..c8fa6a9768 100644
--- a/src/Service.Tests/Mcp/McpToolRegistryTests.cs
+++ b/src/Service.Tests/Mcp/McpToolRegistryTests.cs
@@ -141,50 +141,6 @@ public void RegisterTool_WithDifferentCasing_ThrowsException()
Assert.AreEqual(DataApiBuilderException.SubStatusCodes.ErrorInInitialization, exception.SubStatusCode);
}
- ///
- /// Test that registering the same tool instance twice is silently ignored (idempotent).
- /// This supports stdio mode where both McpToolRegistryInitializer and McpStdioHelper may register the same tools.
- ///
- [TestMethod]
- public void RegisterTool_SameInstanceTwice_IsIdempotent()
- {
- // Arrange
- McpToolRegistry registry = new();
- IMcpTool tool = new MockMcpTool("my_tool", ToolType.BuiltIn);
-
- // Act - Register the same instance twice
- registry.RegisterTool(tool);
- registry.RegisterTool(tool);
-
- // Assert - Tool should be registered only once
- IEnumerable allTools = registry.GetAllTools();
- Assert.AreEqual(1, allTools.Count());
- }
-
- ///
- /// Test that registering a different instance with the same name throws an exception,
- /// even though a same-instance re-registration would be allowed.
- ///
- [TestMethod]
- public void RegisterTool_DifferentInstanceSameName_ThrowsException()
- {
- // Arrange
- McpToolRegistry registry = new();
- IMcpTool tool1 = new MockMcpTool("my_tool", ToolType.BuiltIn);
- IMcpTool tool2 = new MockMcpTool("my_tool", ToolType.BuiltIn);
-
- // Act - Register first instance
- registry.RegisterTool(tool1);
-
- // Assert - Different instance with same name should throw
- DataApiBuilderException exception = Assert.ThrowsException(
- () => registry.RegisterTool(tool2)
- );
-
- Assert.IsTrue(exception.Message.Contains("Duplicate MCP tool name 'my_tool' detected"));
- Assert.AreEqual(DataApiBuilderException.SubStatusCodes.ErrorInInitialization, exception.SubStatusCode);
- }
-
///
/// Test that GetAllTools returns all registered tools.
///
diff --git a/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs b/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
index 454f9e8c37..da53e0adcc 100644
--- a/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/UnitTests/AggregateRecordsToolTests.cs
@@ -9,101 +9,33 @@
namespace Azure.DataApiBuilder.Service.Tests.UnitTests
{
///
- /// Unit tests for AggregateRecordsTool helper methods.
- /// Validates alias computation, cursor decoding, and error message builders.
- /// SQL generation is delegated to the engine's query builder (GroupByMetadata/AggregationColumn).
+ /// Unit tests for AggregateRecordsTool helper methods that supplement
+ /// the integration tests in Service.Tests/Mcp/AggregateRecordsToolTests.cs.
+ /// Covers edge cases (blog aliases, negative offsets) not present in the Mcp test suite.
///
[TestClass]
public class AggregateRecordsToolTests
{
- #region ComputeAlias tests
+ #region ComputeAlias - Blog scenario aliases (not covered in Mcp tests)
[DataTestMethod]
- [DataRow("count", "*", "count", DisplayName = "count(*) → 'count'")]
- [DataRow("count", "userId", "count_userId", DisplayName = "count(userId) → 'count_userId'")]
- [DataRow("avg", "price", "avg_price", DisplayName = "avg(price) → 'avg_price'")]
- [DataRow("sum", "amount", "sum_amount", DisplayName = "sum(amount) → 'sum_amount'")]
- [DataRow("min", "age", "min_age", DisplayName = "min(age) → 'min_age'")]
- [DataRow("max", "score", "max_score", DisplayName = "max(score) → 'max_score'")]
- // Blog scenario aliases
- [DataRow("sum", "totalRevenue", "sum_totalRevenue", DisplayName = "Blog: sum(totalRevenue) → 'sum_totalRevenue'")]
- [DataRow("avg", "quarterlyRevenue", "avg_quarterlyRevenue", DisplayName = "Blog: avg(quarterlyRevenue) → 'avg_quarterlyRevenue'")]
- [DataRow("sum", "onHandValue", "sum_onHandValue", DisplayName = "Blog: sum(onHandValue) → 'sum_onHandValue'")]
- public void ComputeAlias_ReturnsExpectedAlias(string function, string field, string expectedAlias)
+ [DataRow("sum", "totalRevenue", "sum_totalRevenue", DisplayName = "Blog: sum(totalRevenue)")]
+ [DataRow("avg", "quarterlyRevenue", "avg_quarterlyRevenue", DisplayName = "Blog: avg(quarterlyRevenue)")]
+ [DataRow("sum", "onHandValue", "sum_onHandValue", DisplayName = "Blog: sum(onHandValue)")]
+ public void ComputeAlias_BlogScenarios_ReturnsExpectedAlias(string function, string field, string expectedAlias)
{
Assert.AreEqual(expectedAlias, AggregateRecordsTool.ComputeAlias(function, field));
}
#endregion
- #region DecodeCursorOffset tests
+ #region DecodeCursorOffset - Negative offset edge case (not covered in Mcp tests)
- [DataTestMethod]
- [DataRow(null, 0, DisplayName = "null cursor → 0")]
- [DataRow("", 0, DisplayName = "empty cursor → 0")]
- [DataRow("not-valid-base64!!", 0, DisplayName = "invalid base64 → 0")]
- public void DecodeCursorOffset_InvalidInput_ReturnsZero(string? cursor, int expected)
- {
- Assert.AreEqual(expected, AggregateRecordsTool.DecodeCursorOffset(cursor));
- }
-
- [DataTestMethod]
- [DataRow("abc", 0, DisplayName = "non-numeric base64 → 0")]
- [DataRow("-5", 0, DisplayName = "negative offset → 0")]
- [DataRow("0", 0, DisplayName = "zero offset → 0")]
- [DataRow("3", 3, DisplayName = "offset 3 round-trip")]
- [DataRow("5", 5, DisplayName = "offset 5 round-trip")]
- [DataRow("1000", 1000, DisplayName = "large offset round-trip")]
- public void DecodeCursorOffset_Base64EncodedValue_ReturnsExpectedOffset(string rawValue, int expectedOffset)
- {
- string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes(rawValue));
- Assert.AreEqual(expectedOffset, AggregateRecordsTool.DecodeCursorOffset(cursor));
- }
-
- #endregion
-
- #region Error message builder tests
-
- [DataTestMethod]
- [DataRow("Product", DisplayName = "Product entity")]
- [DataRow("LargeProductCatalog", DisplayName = "LargeProductCatalog entity")]
- public void BuildTimeoutErrorMessage_ContainsExpectedContent(string entityName)
- {
- string message = AggregateRecordsTool.BuildTimeoutErrorMessage(entityName);
- AssertErrorMessageContains(message, entityName, "NOT a tool error", "filter", "groupby", "first");
- }
-
- [DataTestMethod]
- [DataRow("Product", DisplayName = "Product entity")]
- public void BuildTaskCanceledErrorMessage_ContainsExpectedContent(string entityName)
- {
- string message = AggregateRecordsTool.BuildTaskCanceledErrorMessage(entityName);
- AssertErrorMessageContains(message, entityName, "NOT a tool error", "timeout", "filter", "first");
- }
-
- [DataTestMethod]
- [DataRow("LargeProductCatalog", DisplayName = "LargeProductCatalog entity")]
- public void BuildOperationCanceledErrorMessage_ContainsExpectedContent(string entityName)
- {
- string message = AggregateRecordsTool.BuildOperationCanceledErrorMessage(entityName);
- AssertErrorMessageContains(message, entityName, "NOT a tool error", "No results were returned");
- }
-
- #endregion
-
- #region Helper Methods
-
- ///
- /// Asserts that the error message contains all expected substrings.
- ///
- private static void AssertErrorMessageContains(string message, params string[] expectedSubstrings)
+ [TestMethod]
+ public void DecodeCursorOffset_NegativeOffset_ReturnsZero()
{
- Assert.IsNotNull(message);
- foreach (string expected in expectedSubstrings)
- {
- Assert.IsTrue(message.Contains(expected),
- $"Error message must contain '{expected}'. Actual: '{message}'");
- }
+ string cursor = Convert.ToBase64String(Encoding.UTF8.GetBytes("-5"));
+ Assert.AreEqual(0, AggregateRecordsTool.DecodeCursorOffset(cursor));
}
#endregion
diff --git a/src/Service/Utilities/McpStdioHelper.cs b/src/Service/Utilities/McpStdioHelper.cs
index f22e12b02f..043e9dd85d 100644
--- a/src/Service/Utilities/McpStdioHelper.cs
+++ b/src/Service/Utilities/McpStdioHelper.cs
@@ -78,8 +78,15 @@ public static bool RunMcpStdioHost(IHost host)
{
host.Start();
- // Tools are already registered by McpToolRegistryInitializer (IHostedService)
- // during host.Start(). No need to register them again here.
+ Mcp.Core.McpToolRegistry registry =
+ host.Services.GetRequiredService();
+ IEnumerable tools =
+ host.Services.GetServices();
+
+ foreach (Mcp.Model.IMcpTool tool in tools)
+ {
+ registry.RegisterTool(tool);
+ }
IHostApplicationLifetime lifetime =
host.Services.GetRequiredService();
From 3add20aceb9cddb4eb31cd60b783f50dc1e4bd8e Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Mon, 9 Mar 2026 18:56:40 +0530
Subject: [PATCH 38/43] Revert unwanted changes
---
.../Core/McpToolRegistry.cs | 8 ++++
src/Service.Tests/Mcp/McpToolRegistryTests.cs | 44 +++++++++++++++++++
2 files changed, 52 insertions(+)
diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs
index 0ba182b6db..626ddc9125 100644
--- a/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs
@@ -37,6 +37,14 @@ public void RegisterTool(IMcpTool tool)
// Check for duplicate tool names (case-insensitive)
if (_tools.TryGetValue(toolName, out IMcpTool? existingTool))
{
+ // If the same tool instance is already registered, skip silently.
+ // This can happen when both McpToolRegistryInitializer (hosted service)
+ // and McpStdioHelper register tools during stdio mode startup.
+ if (ReferenceEquals(existingTool, tool))
+ {
+ return;
+ }
+
string existingToolType = existingTool.ToolType == ToolType.BuiltIn ? "built-in" : "custom";
string newToolType = tool.ToolType == ToolType.BuiltIn ? "built-in" : "custom";
diff --git a/src/Service.Tests/Mcp/McpToolRegistryTests.cs b/src/Service.Tests/Mcp/McpToolRegistryTests.cs
index c8fa6a9768..7bbd91341c 100644
--- a/src/Service.Tests/Mcp/McpToolRegistryTests.cs
+++ b/src/Service.Tests/Mcp/McpToolRegistryTests.cs
@@ -141,6 +141,50 @@ public void RegisterTool_WithDifferentCasing_ThrowsException()
Assert.AreEqual(DataApiBuilderException.SubStatusCodes.ErrorInInitialization, exception.SubStatusCode);
}
+ ///
+ /// Test that registering the same tool instance twice is silently ignored (idempotent).
+ /// This supports stdio mode where both McpToolRegistryInitializer and McpStdioHelper may register the same tools.
+ ///
+ [TestMethod]
+ public void RegisterTool_SameInstanceTwice_IsIdempotent()
+ {
+ // Arrange
+ McpToolRegistry registry = new();
+ IMcpTool tool = new MockMcpTool("my_tool", ToolType.BuiltIn);
+
+ // Act - Register the same instance twice
+ registry.RegisterTool(tool);
+ registry.RegisterTool(tool);
+
+ // Assert - Tool should be registered only once
+ IEnumerable allTools = registry.GetAllTools();
+ Assert.AreEqual(1, allTools.Count());
+ }
+
+ ///
+ /// Test that registering a different instance with the same name throws an exception,
+ /// even though a same-instance re-registration would be allowed.
+ ///
+ [TestMethod]
+ public void RegisterTool_DifferentInstanceSameName_ThrowsException()
+ {
+ // Arrange
+ McpToolRegistry registry = new();
+ IMcpTool tool1 = new MockMcpTool("my_tool", ToolType.BuiltIn);
+ IMcpTool tool2 = new MockMcpTool("my_tool", ToolType.BuiltIn);
+
+ // Act - Register first instance
+ registry.RegisterTool(tool1);
+
+ // Assert - Different instance with same name should throw
+ DataApiBuilderException exception = Assert.ThrowsException(
+ () => registry.RegisterTool(tool2)
+ );
+
+ Assert.IsTrue(exception.Message.Contains("Duplicate MCP tool name 'my_tool' detected"));
+ Assert.AreEqual(DataApiBuilderException.SubStatusCodes.ErrorInInitialization, exception.SubStatusCode);
+ }
+
///
/// Test that GetAllTools returns all registered tools.
///
From c5ceded6983c97456566e600240f3b955b09c904 Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Mon, 9 Mar 2026 19:26:09 +0530
Subject: [PATCH 39/43] Fix failing tests
---
src/Service.Tests/Mcp/AggregateRecordsToolTests.cs | 4 ++--
.../Mcp/EntityLevelDmlToolConfigurationTests.cs | 2 +-
src/Service.Tests/Mcp/McpQueryTimeoutTests.cs | 8 ++++----
3 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index f198e9b595..31e9b498cd 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -121,7 +121,7 @@ public async Task AggregateRecords_Disabled_ReturnsToolDisabledError(bool runtim
[DataRow("{\"entity\": \"Book\", \"field\": \"*\"}", null, DisplayName = "Missing function")]
[DataRow("{\"entity\": \"Book\", \"function\": \"count\"}", null, DisplayName = "Missing field")]
[DataRow("{\"entity\": \"Book\", \"function\": \"median\", \"field\": \"price\"}", "median", DisplayName = "Invalid function 'median'")]
- public async Task AggregateRecords_MissingOrInvalidRequiredArgs_ReturnsInvalidArguments(string json, string? expectedInMessage)
+ public async Task AggregateRecords_MissingOrInvalidRequiredArgs_ReturnsInvalidArguments(string json, string expectedInMessage)
{
IServiceProvider sp = CreateDefaultServiceProvider();
@@ -236,7 +236,7 @@ public void ComputeAlias_ReturnsExpectedAlias(string function, string field, str
[DataRow(null, 0, DisplayName = "null → 0")]
[DataRow("", 0, DisplayName = "empty → 0")]
[DataRow(" ", 0, DisplayName = "whitespace → 0")]
- public void DecodeCursorOffset_InvalidCursor_ReturnsZero(string? cursor, int expected)
+ public void DecodeCursorOffset_InvalidCursor_ReturnsZero(string cursor, int expected)
{
Assert.AreEqual(expected, AggregateRecordsTool.DecodeCursorOffset(cursor));
}
diff --git a/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs b/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs
index 7539bbcd42..aa3ae118dc 100644
--- a/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs
+++ b/src/Service.Tests/Mcp/EntityLevelDmlToolConfigurationTests.cs
@@ -231,7 +231,7 @@ public async Task DmlTool_AllowsTablesAndViews(string toolType, string sourceTyp
// Arrange
RuntimeConfig config = sourceType == "View"
? CreateConfigWithViewEntity()
- : CreateConfigWithDmlToolEnabledEntity();
+ : CreateConfig();
IServiceProvider serviceProvider = CreateServiceProvider(config);
IMcpTool tool = CreateTool(toolType);
diff --git a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
index 6eae5b97fb..158402c264 100644
--- a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
+++ b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
@@ -217,7 +217,7 @@ public void McpRuntimeOptions_Deserialization_ReadsQueryTimeout()
{
string json = @"{""enabled"": true, ""query-timeout"": 60}";
JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
- McpRuntimeOptions? options = JsonSerializer.Deserialize(json, serializerOptions);
+ McpRuntimeOptions options = JsonSerializer.Deserialize(json, serializerOptions);
Assert.IsNotNull(options);
Assert.AreEqual(60, options.QueryTimeout);
Assert.AreEqual(60, options.EffectiveQueryTimeoutSeconds);
@@ -228,7 +228,7 @@ public void McpRuntimeOptions_Deserialization_DefaultsWhenOmitted()
{
string json = @"{""enabled"": true}";
JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
- McpRuntimeOptions? options = JsonSerializer.Deserialize(json, serializerOptions);
+ McpRuntimeOptions options = JsonSerializer.Deserialize(json, serializerOptions);
Assert.IsNotNull(options);
Assert.IsNull(options.QueryTimeout);
Assert.AreEqual(30, options.EffectiveQueryTimeoutSeconds);
@@ -294,7 +294,7 @@ public Tool GetToolMetadata()
}
public Task ExecuteAsync(
- JsonDocument? arguments,
+ JsonDocument arguments,
IServiceProvider serviceProvider,
CancellationToken cancellationToken = default)
{
@@ -336,7 +336,7 @@ public Tool GetToolMetadata()
}
public async Task ExecuteAsync(
- JsonDocument? arguments,
+ JsonDocument arguments,
IServiceProvider serviceProvider,
CancellationToken cancellationToken = default)
{
From 8520d53c4940e9bb5a29dffe61e0a2d0272269fb Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Tue, 10 Mar 2026 13:54:32 +0530
Subject: [PATCH 40/43] use query-timeout only for aggregate_records
---
schemas/dab.draft.schema.json | 34 +++-
.../BuiltInTools/AggregateRecordsTool.cs | 31 ++--
.../Utils/McpTelemetryHelper.cs | 34 +---
src/Cli.Tests/InitTests.cs | 68 +++++++
...stMethodsAndGraphQLOperations.verified.txt | 10 +-
...tyWithSourceAsStoredProcedure.verified.txt | 10 +-
...tityWithSourceWithDefaultType.verified.txt | 10 +-
...dingEntityWithoutIEnumerables.verified.txt | 10 +-
...ests.TestInitForCosmosDBNoSql.verified.txt | 10 +-
...toredProcedureWithRestMethods.verified.txt | 10 +-
...stMethodsAndGraphQLOperations.verified.txt | 10 +-
...itTests.CosmosDbNoSqlDatabase.verified.txt | 10 +-
...ts.CosmosDbPostgreSqlDatabase.verified.txt | 10 +-
...ionProviders_171ea8114ff71814.verified.txt | 10 +-
...ionProviders_2df7a1794712f154.verified.txt | 10 +-
...ionProviders_59fe1a10aa78899d.verified.txt | 10 +-
...ionProviders_b95b637ea87f16a7.verified.txt | 10 +-
...ionProviders_daacbd948b7ef72f.verified.txt | 10 +-
...tStartingSlashWillHaveItAdded.verified.txt | 10 +-
.../InitTests.MsSQLDatabase.verified.txt | 10 +-
...tStartingSlashWillHaveItAdded.verified.txt | 10 +-
...ConfigWithoutConnectionString.verified.txt | 10 +-
...lCharactersInConnectionString.verified.txt | 10 +-
...ationOptions_0546bef37027a950.verified.txt | 10 +-
...ationOptions_0ac567dd32a2e8f5.verified.txt | 10 +-
...ationOptions_0c06949221514e77.verified.txt | 10 +-
...ationOptions_18667ab7db033e9d.verified.txt | 10 +-
...ationOptions_2f42f44c328eb020.verified.txt | 10 +-
...ationOptions_3243d3f3441fdcc1.verified.txt | 10 +-
...ationOptions_53350b8b47df2112.verified.txt | 10 +-
...ationOptions_6584e0ec46b8a11d.verified.txt | 10 +-
...ationOptions_81cc88db3d4eecfb.verified.txt | 10 +-
...ationOptions_8ea187616dbb5577.verified.txt | 10 +-
...ationOptions_905845c29560a3ef.verified.txt | 10 +-
...ationOptions_b2fd24fab5b80917.verified.txt | 10 +-
...ationOptions_bd7cd088755287c9.verified.txt | 10 +-
...ationOptions_d2eccba2f836b380.verified.txt | 10 +-
...ationOptions_d463eed7fe5e4bbe.verified.txt | 10 +-
...ationOptions_d5520dd5c33f7b8d.verified.txt | 10 +-
...ationOptions_eab4a6010e602b59.verified.txt | 10 +-
...ationOptions_ecaa688829b4030e.verified.txt | 10 +-
src/Cli/Commands/ConfigureOptions.cs | 10 +-
src/Cli/Commands/InitOptions.cs | 5 +
src/Cli/ConfigGenerator.cs | 31 ++--
.../Converters/DmlToolsConfigConverter.cs | 88 +++++++--
.../McpRuntimeOptionsConverterFactory.cs | 18 +-
src/Config/ObjectModel/DmlToolsConfig.cs | 45 ++++-
src/Config/ObjectModel/McpRuntimeOptions.cs | 33 +---
.../Configurations/RuntimeConfigValidator.cs | 10 +-
.../Mcp/AggregateRecordsToolTests.cs | 14 --
src/Service.Tests/Mcp/McpQueryTimeoutTests.cs | 174 ++++++------------
...ReadingRuntimeConfigForCosmos.verified.txt | 8 +-
...tReadingRuntimeConfigForMsSql.verified.txt | 10 +-
...tReadingRuntimeConfigForMySql.verified.txt | 8 +-
...ingRuntimeConfigForPostgreSql.verified.txt | 8 +-
.../UnitTests/McpTelemetryTests.cs | 153 ---------------
56 files changed, 528 insertions(+), 624 deletions(-)
diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json
index 8f283fba36..fa5208af66 100644
--- a/schemas/dab.draft.schema.json
+++ b/schemas/dab.draft.schema.json
@@ -275,13 +275,6 @@
"description": "Allow enabling/disabling MCP requests for all entities.",
"default": true
},
- "query-timeout": {
- "type": "integer",
- "description": "Execution timeout in seconds for MCP tool operations. Applies to all MCP tools. Range: 1-600.",
- "default": 30,
- "minimum": 1,
- "maximum": 600
- },
"dml-tools": {
"oneOf": [
{
@@ -324,8 +317,31 @@
"default": false
},
"aggregate-records": {
- "type": "boolean",
- "description": "Enable/disable the aggregate-records tool.",
+ "oneOf": [
+ {
+ "type": "boolean",
+ "description": "Enable/disable the aggregate-records tool."
+ },
+ {
+ "type": "object",
+ "description": "Aggregate records tool configuration",
+ "additionalProperties": false,
+ "properties": {
+ "enabled": {
+ "type": "boolean",
+ "description": "Enable/disable the aggregate-records tool.",
+ "default": true
+ },
+ "query-timeout": {
+ "type": "integer",
+ "description": "Execution timeout in seconds for aggregate queries. Range: 1-600.",
+ "default": 30,
+ "minimum": 1,
+ "maximum": 600
+ }
+ }
+ }
+ ],
"default": false
}
}
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index db46a10ee6..a08a54ae75 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -174,6 +174,20 @@ public async Task ExecuteAsync(
{
cancellationToken.ThrowIfCancellationRequested();
+ // Read aggregate-records query-timeout from current config per invocation (hot-reload safe).
+ int timeoutSeconds = runtimeConfig.McpDmlTools?.EffectiveAggregateRecordsQueryTimeoutSeconds
+ ?? DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS;
+
+ // Defensive runtime guard: clamp timeout to valid range [1, MAX_QUERY_TIMEOUT_SECONDS].
+ if (timeoutSeconds < 1 || timeoutSeconds > DmlToolsConfig.MAX_QUERY_TIMEOUT_SECONDS)
+ {
+ timeoutSeconds = DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS;
+ }
+
+ // Wrap tool execution with the configured timeout using a linked CancellationTokenSource.
+ using CancellationTokenSource timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
+ timeoutCts.CancelAfter(TimeSpan.FromSeconds(timeoutSeconds));
+
// 1. Parse and validate all input arguments
CallToolResult? parseError = TryParseAndValidateArguments(arguments, runtimeConfig, toolName, out AggregateArguments args, logger);
if (parseError != null)
@@ -250,7 +264,7 @@ public async Task ExecuteAsync(
}
// 8. Execute query and return results
- cancellationToken.ThrowIfCancellationRequested();
+ timeoutCts.Token.ThrowIfCancellationRequested();
JsonDocument? queryResult = await queryExecutor.ExecuteQueryAsync(
sql, structure.Parameters, queryExecutor.GetJsonResultAsync,
dataSourceName, authCtx.HttpContext);
@@ -267,9 +281,10 @@ public async Task ExecuteAsync(
{
return McpResponseBuilder.BuildErrorResult(toolName, "TimeoutError", BuildTimeoutErrorMessage(entityName), logger);
}
- catch (TaskCanceledException)
+ catch (OperationCanceledException) when (!cancellationToken.IsCancellationRequested)
{
- return McpResponseBuilder.BuildErrorResult(toolName, "TimeoutError", BuildTaskCanceledErrorMessage(entityName), logger);
+ // The timeout CTS fired, not the caller's token. Treat as timeout error.
+ return McpResponseBuilder.BuildErrorResult(toolName, "TimeoutError", BuildTimeoutErrorMessage(entityName), logger);
}
catch (OperationCanceledException)
{
@@ -1045,16 +1060,6 @@ internal static string BuildTimeoutErrorMessage(string entityName)
+ "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
}
- ///
- /// Builds the error message for a TaskCanceledException during aggregation (typically a timeout).
- ///
- internal static string BuildTaskCanceledErrorMessage(string entityName)
- {
- return $"The aggregation query for entity '{entityName}' was canceled, likely due to a timeout. "
- + "This is NOT a tool error. The database did not respond in time. "
- + "Try narrowing results with a 'filter', reducing 'groupby' fields, or adding 'first' for pagination.";
- }
-
///
/// Builds the error message for an OperationCanceledException during aggregation.
///
diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
index 31a92ef6e4..faf8b1d434 100644
--- a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
@@ -60,39 +60,7 @@ public static async Task ExecuteWithTelemetryAsync(
operation: operation,
dbProcedure: dbProcedure);
- // Read query-timeout from current config per invocation (hot-reload safe).
- int timeoutSeconds = McpRuntimeOptions.DEFAULT_QUERY_TIMEOUT_SECONDS;
- RuntimeConfigProvider? runtimeConfigProvider = serviceProvider.GetService();
- if (runtimeConfigProvider is not null)
- {
- RuntimeConfig config = runtimeConfigProvider.GetConfig();
- timeoutSeconds = config.Runtime?.Mcp?.EffectiveQueryTimeoutSeconds ?? McpRuntimeOptions.DEFAULT_QUERY_TIMEOUT_SECONDS;
- }
-
- // Defensive runtime guard: clamp timeout to valid range [1, MAX_QUERY_TIMEOUT_SECONDS].
- if (timeoutSeconds < 1 || timeoutSeconds > McpRuntimeOptions.MAX_QUERY_TIMEOUT_SECONDS)
- {
- timeoutSeconds = McpRuntimeOptions.DEFAULT_QUERY_TIMEOUT_SECONDS;
- }
-
- // Wrap tool execution with the configured timeout using a linked CancellationTokenSource.
- using CancellationTokenSource timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
- timeoutCts.CancelAfter(TimeSpan.FromSeconds(timeoutSeconds));
-
- CallToolResult result;
- try
- {
- result = await tool.ExecuteAsync(arguments, serviceProvider, timeoutCts.Token);
- }
- catch (OperationCanceledException) when (!cancellationToken.IsCancellationRequested)
- {
- // The timeout CTS fired, not the caller's token. Surface as TimeoutException
- // so downstream telemetry and tool handlers see TIMEOUT, not cancellation.
- throw new TimeoutException(
- $"The MCP tool '{toolName}' did not complete within {timeoutSeconds} {(timeoutSeconds == 1 ? "second" : "seconds")}. "
- + "This is NOT a tool error. The operation exceeded the configured query-timeout. "
- + "Try narrowing results with a filter, reducing groupby fields, or using pagination.");
- }
+ CallToolResult result = await tool.ExecuteAsync(arguments, serviceProvider, cancellationToken);
// Check if the tool returned an error result (tools catch exceptions internally
// and return CallToolResult with IsError=true instead of throwing)
diff --git a/src/Cli.Tests/InitTests.cs b/src/Cli.Tests/InitTests.cs
index 051bfdf7a7..e92b6f09d1 100644
--- a/src/Cli.Tests/InitTests.cs
+++ b/src/Cli.Tests/InitTests.cs
@@ -493,6 +493,74 @@ public Task VerifyCorrectConfigGenerationWithMultipleMutationOptions(DatabaseTyp
return ExecuteVerifyTest(options, verifySettings);
}
+ ///
+ /// Test that init with/without --mcp.aggregate-records.query-timeout produces a config
+ /// with the correct aggregate-records query-timeout in the DmlTools section.
+ /// When null (not specified), defaults to 30 seconds. When provided, the config reflects the value.
+ ///
+ [DataTestMethod]
+ [DataRow(null, false, DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS, DisplayName = "Init without query-timeout uses default 30s")]
+ [DataRow(1, true, 1, DisplayName = "Init with query-timeout 1s (minimum)")]
+ [DataRow(120, true, 120, DisplayName = "Init with query-timeout 120s")]
+ [DataRow(600, true, 600, DisplayName = "Init with query-timeout 600s (maximum)")]
+ public void InitWithAggregateRecordsQueryTimeout_SetsOrDefaultsTimeout(int? inputTimeout, bool expectedUserProvided, int expectedEffectiveTimeout)
+ {
+ InitOptions options = new(
+ databaseType: DatabaseType.MSSQL,
+ connectionString: "testconnectionstring",
+ cosmosNoSqlDatabase: null,
+ cosmosNoSqlContainer: null,
+ graphQLSchemaPath: null,
+ setSessionContext: false,
+ hostMode: HostMode.Development,
+ corsOrigin: null,
+ authenticationProvider: EasyAuthType.AppService.ToString(),
+ mcpAggregateRecordsQueryTimeout: inputTimeout,
+ config: TEST_RUNTIME_CONFIG_FILE);
+
+ Assert.IsTrue(TryCreateRuntimeConfig(options, _runtimeConfigLoader!, _fileSystem!, out RuntimeConfig? runtimeConfig));
+ Assert.IsNotNull(runtimeConfig?.Runtime?.Mcp?.DmlTools);
+ Assert.AreEqual(inputTimeout, runtimeConfig.Runtime.Mcp.DmlTools.AggregateRecordsQueryTimeout);
+ Assert.AreEqual(expectedUserProvided, runtimeConfig.Runtime.Mcp.DmlTools.UserProvidedAggregateRecordsQueryTimeout);
+ Assert.AreEqual(expectedEffectiveTimeout, runtimeConfig.Runtime.Mcp.DmlTools.EffectiveAggregateRecordsQueryTimeoutSeconds);
+ }
+
+ ///
+ /// Test that init with --mcp.aggregate-records.query-timeout produces valid JSON
+ /// that round-trips correctly through serialization/deserialization.
+ ///
+ [TestMethod]
+ public void InitWithAggregateRecordsQueryTimeout_RoundTripsCorrectly()
+ {
+ InitOptions options = new(
+ databaseType: DatabaseType.MSSQL,
+ connectionString: "testconnectionstring",
+ cosmosNoSqlDatabase: null,
+ cosmosNoSqlContainer: null,
+ graphQLSchemaPath: null,
+ setSessionContext: false,
+ hostMode: HostMode.Development,
+ corsOrigin: null,
+ authenticationProvider: EasyAuthType.AppService.ToString(),
+ mcpAggregateRecordsQueryTimeout: 90,
+ config: TEST_RUNTIME_CONFIG_FILE);
+
+ Assert.IsTrue(TryCreateRuntimeConfig(options, _runtimeConfigLoader!, _fileSystem!, out RuntimeConfig? runtimeConfig));
+
+ // Serialize to JSON and deserialize back
+ JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
+ string json = JsonSerializer.Serialize(runtimeConfig, serializerOptions);
+ RuntimeConfig? deserialized = JsonSerializer.Deserialize(json, serializerOptions);
+
+ Assert.IsNotNull(deserialized?.Runtime?.Mcp?.DmlTools);
+ Assert.AreEqual(90, deserialized.Runtime.Mcp.DmlTools.AggregateRecordsQueryTimeout);
+ Assert.AreEqual(90, deserialized.Runtime.Mcp.DmlTools.EffectiveAggregateRecordsQueryTimeoutSeconds);
+
+ // Verify the JSON contains the object format for aggregate-records
+ Assert.IsTrue(json.Contains("\"query-timeout\""), $"Expected 'query-timeout' in serialized JSON. Got: {json}");
+ Assert.IsTrue(json.Contains("90"), $"Expected timeout value 90 in serialized JSON. Got: {json}");
+ }
+
private Task ExecuteVerifyTest(InitOptions options, VerifySettings? settings = null)
{
Assert.IsTrue(TryCreateRuntimeConfig(options, _runtimeConfigLoader!, _fileSystem!, out RuntimeConfig? runtimeConfig));
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
index 0fd0030402..11bf762b42 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt
index 725eed7a83..475149674b 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt
index 70cb42137b..19c590a7f7 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt
index 46bec31cc9..2b4583dc36 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt
index 0932956d7a..9be903b3bd 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: planet,
@@ -36,10 +36,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt
index fdda324d36..eae623f5a8 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
index 2a4e8653a1..4be8d89e14 100644
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
+++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt
index 4870537837..034276178e 100644
--- a/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: testcontainer,
@@ -36,10 +36,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt
index e03973b91e..64b71e6c59 100644
--- a/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: CosmosDB_PostgreSQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt
index d33247dcab..b89b9c70d1 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt
index fa08aefa62..ce2d025be4 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt
index 98fdb25c77..02e6387637 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt
index 74afea9ef6..cca682e8c3 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt
index 3145f775c0..657afc9ce6 100644
--- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt b/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt
index ae32e3b379..86ba02fbcd 100644
--- a/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt
index 0f2c151763..777642d9e0 100644
--- a/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt b/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt
index d9067e1b43..5a19301e74 100644
--- a/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt b/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt
index e48b87e1c8..e40b268f89 100644
--- a/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt b/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt
index 74afea9ef6..cca682e8c3 100644
--- a/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt
index 2cb50a06da..e8193e5f14 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: DWSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt
index 0f2c151763..777642d9e0 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt
index bbea4aadd3..d5b44393ec 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -40,10 +40,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt
index 63f411cdb2..9a28bccd06 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: PostgreSQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt
index 2cb50a06da..e8193e5f14 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: DWSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt
index 63f411cdb2..9a28bccd06 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: PostgreSQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt
index 5af597f50a..a8c2329c65 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MySQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt
index 860fa1616c..5ed3cbcffd 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: testcontainer,
@@ -36,10 +36,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt
index 48f3d0ce51..09377f9c51 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MSSQL,
Options: {
@@ -40,10 +40,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt
index f56dcad7d7..51c90e1666 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: CosmosDB_PostgreSQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt
index 2cb50a06da..e8193e5f14 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: DWSQL,
Options: {
@@ -35,10 +35,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt
index 860fa1616c..5ed3cbcffd 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: testcontainer,
@@ -36,10 +36,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt
index 860fa1616c..5ed3cbcffd 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
Options: {
container: testcontainer,
@@ -36,10 +36,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt
index 5af597f50a..a8c2329c65 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MySQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt
index f56dcad7d7..51c90e1666 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: CosmosDB_PostgreSQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt
index 5af597f50a..a8c2329c65 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: MySQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt
index 63f411cdb2..9a28bccd06 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: PostgreSQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt
index f56dcad7d7..51c90e1666 100644
--- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt
+++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt
@@ -1,4 +1,4 @@
-{
+{
DataSource: {
DatabaseType: CosmosDB_PostgreSQL
},
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs
index 99d7efc637..a800591aa3 100644
--- a/src/Cli/Commands/ConfigureOptions.cs
+++ b/src/Cli/Commands/ConfigureOptions.cs
@@ -42,7 +42,6 @@ public ConfigureOptions(
bool? runtimeMcpEnabled = null,
string? runtimeMcpPath = null,
string? runtimeMcpDescription = null,
- int? runtimeMcpQueryTimeout = null,
bool? runtimeMcpDmlToolsEnabled = null,
bool? runtimeMcpDmlToolsDescribeEntitiesEnabled = null,
bool? runtimeMcpDmlToolsCreateRecordEnabled = null,
@@ -51,6 +50,7 @@ public ConfigureOptions(
bool? runtimeMcpDmlToolsDeleteRecordEnabled = null,
bool? runtimeMcpDmlToolsExecuteEntityEnabled = null,
bool? runtimeMcpDmlToolsAggregateRecordsEnabled = null,
+ int? runtimeMcpDmlToolsAggregateRecordsQueryTimeout = null,
bool? runtimeCacheEnabled = null,
int? runtimeCacheTtl = null,
CompressionLevel? runtimeCompressionLevel = null,
@@ -104,7 +104,6 @@ public ConfigureOptions(
RuntimeMcpEnabled = runtimeMcpEnabled;
RuntimeMcpPath = runtimeMcpPath;
RuntimeMcpDescription = runtimeMcpDescription;
- RuntimeMcpQueryTimeout = runtimeMcpQueryTimeout;
RuntimeMcpDmlToolsEnabled = runtimeMcpDmlToolsEnabled;
RuntimeMcpDmlToolsDescribeEntitiesEnabled = runtimeMcpDmlToolsDescribeEntitiesEnabled;
RuntimeMcpDmlToolsCreateRecordEnabled = runtimeMcpDmlToolsCreateRecordEnabled;
@@ -113,6 +112,7 @@ public ConfigureOptions(
RuntimeMcpDmlToolsDeleteRecordEnabled = runtimeMcpDmlToolsDeleteRecordEnabled;
RuntimeMcpDmlToolsExecuteEntityEnabled = runtimeMcpDmlToolsExecuteEntityEnabled;
RuntimeMcpDmlToolsAggregateRecordsEnabled = runtimeMcpDmlToolsAggregateRecordsEnabled;
+ RuntimeMcpDmlToolsAggregateRecordsQueryTimeout = runtimeMcpDmlToolsAggregateRecordsQueryTimeout;
// Cache
RuntimeCacheEnabled = runtimeCacheEnabled;
RuntimeCacheTTL = runtimeCacheTtl;
@@ -207,9 +207,6 @@ public ConfigureOptions(
[Option("runtime.mcp.description", Required = false, HelpText = "Set the MCP server description to be exposed in the initialize response.")]
public string? RuntimeMcpDescription { get; }
- [Option("runtime.mcp.query-timeout", Required = false, HelpText = "Set the execution timeout in seconds for MCP tool operations. Applies to all MCP tools. Default: 30 (integer). Must be >= 1.")]
- public int? RuntimeMcpQueryTimeout { get; }
-
[Option("runtime.mcp.dml-tools.enabled", Required = false, HelpText = "Enable DAB's MCP DML tools endpoint. Default: true (boolean).")]
public bool? RuntimeMcpDmlToolsEnabled { get; }
@@ -234,6 +231,9 @@ public ConfigureOptions(
[Option("runtime.mcp.dml-tools.aggregate-records.enabled", Required = false, HelpText = "Enable DAB's MCP aggregate records tool. Default: true (boolean).")]
public bool? RuntimeMcpDmlToolsAggregateRecordsEnabled { get; }
+ [Option("runtime.mcp.dml-tools.aggregate-records.query-timeout", Required = false, HelpText = "Set the execution timeout in seconds for the aggregate-records MCP tool. Default: 30 (integer). Range: 1-600.")]
+ public int? RuntimeMcpDmlToolsAggregateRecordsQueryTimeout { get; }
+
[Option("runtime.cache.enabled", Required = false, HelpText = "Enable DAB's cache globally. (You must also enable each entity's cache separately.). Default: false (boolean).")]
public bool? RuntimeCacheEnabled { get; }
diff --git a/src/Cli/Commands/InitOptions.cs b/src/Cli/Commands/InitOptions.cs
index e01ad1b774..008d9af87c 100644
--- a/src/Cli/Commands/InitOptions.cs
+++ b/src/Cli/Commands/InitOptions.cs
@@ -42,6 +42,7 @@ public InitOptions(
CliBool mcpEnabled = CliBool.None,
CliBool restRequestBodyStrict = CliBool.None,
CliBool multipleCreateOperationEnabled = CliBool.None,
+ int? mcpAggregateRecordsQueryTimeout = null,
string? config = null)
: base(config)
{
@@ -68,6 +69,7 @@ public InitOptions(
McpEnabled = mcpEnabled;
RestRequestBodyStrict = restRequestBodyStrict;
MultipleCreateOperationEnabled = multipleCreateOperationEnabled;
+ McpAggregateRecordsQueryTimeout = mcpAggregateRecordsQueryTimeout;
}
[Option("database-type", Required = true, HelpText = "Type of database to connect. Supported values: mssql, cosmosdb_nosql, cosmosdb_postgresql, mysql, postgresql, dwsql")]
@@ -141,6 +143,9 @@ public InitOptions(
[Option("graphql.multiple-create.enabled", Required = false, HelpText = "(Default: false) Enables multiple create operation for GraphQL. Supported values: true, false.")]
public CliBool MultipleCreateOperationEnabled { get; }
+ [Option("mcp.aggregate-records.query-timeout", Required = false, HelpText = "Set the execution timeout in seconds for the aggregate-records MCP tool. Default: 30 (integer). Range: 1-600.")]
+ public int? McpAggregateRecordsQueryTimeout { get; }
+
public int Handler(ILogger logger, FileSystemRuntimeConfigLoader loader, IFileSystem fileSystem)
{
logger.LogInformation("{productName} {version}", PRODUCT_NAME, ProductInfo.GetProductVersion());
diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs
index e9717a8911..d97971ecb5 100644
--- a/src/Cli/ConfigGenerator.cs
+++ b/src/Cli/ConfigGenerator.cs
@@ -268,7 +268,12 @@ public static bool TryCreateRuntimeConfig(InitOptions options, FileSystemRuntime
Runtime: new(
Rest: new(restEnabled, restPath ?? RestRuntimeOptions.DEFAULT_PATH, options.RestRequestBodyStrict is CliBool.False ? false : true),
GraphQL: new(Enabled: graphQLEnabled, Path: graphQLPath, MultipleMutationOptions: multipleMutationOptions),
- Mcp: new(mcpEnabled, mcpPath ?? McpRuntimeOptions.DEFAULT_PATH),
+ Mcp: new(
+ Enabled: mcpEnabled,
+ Path: mcpPath ?? McpRuntimeOptions.DEFAULT_PATH,
+ DmlTools: options.McpAggregateRecordsQueryTimeout is not null
+ ? new DmlToolsConfig(aggregateRecordsQueryTimeout: options.McpAggregateRecordsQueryTimeout)
+ : null),
Host: new(
                    Cors: new(options.CorsOrigin?.ToArray() ?? Array.Empty<string>()),
Authentication: new(
@@ -880,7 +885,6 @@ private static bool TryUpdateConfiguredRuntimeOptions(
if (options.RuntimeMcpEnabled != null ||
options.RuntimeMcpPath != null ||
options.RuntimeMcpDescription != null ||
- options.RuntimeMcpQueryTimeout != null ||
options.RuntimeMcpDmlToolsEnabled != null ||
options.RuntimeMcpDmlToolsDescribeEntitiesEnabled != null ||
options.RuntimeMcpDmlToolsCreateRecordEnabled != null ||
@@ -888,7 +892,8 @@ private static bool TryUpdateConfiguredRuntimeOptions(
options.RuntimeMcpDmlToolsUpdateRecordEnabled != null ||
options.RuntimeMcpDmlToolsDeleteRecordEnabled != null ||
options.RuntimeMcpDmlToolsExecuteEntityEnabled != null ||
- options.RuntimeMcpDmlToolsAggregateRecordsEnabled != null)
+ options.RuntimeMcpDmlToolsAggregateRecordsEnabled != null ||
+ options.RuntimeMcpDmlToolsAggregateRecordsQueryTimeout != null)
{
McpRuntimeOptions updatedMcpOptions = runtimeConfig?.Runtime?.Mcp ?? new();
bool status = TryUpdateConfiguredMcpValues(options, ref updatedMcpOptions);
@@ -1167,14 +1172,6 @@ private static bool TryUpdateConfiguredMcpValues(
_logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.Description as '{updatedValue}'", updatedValue);
}
- // Runtime.Mcp.QueryTimeout
- updatedValue = options?.RuntimeMcpQueryTimeout;
- if (updatedValue != null)
- {
- updatedMcpOptions = updatedMcpOptions! with { QueryTimeout = (int)updatedValue, UserProvidedQueryTimeout = true };
- _logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.QueryTimeout as '{updatedValue}'", updatedValue);
- }
-
// Handle DML tools configuration
bool hasToolUpdates = false;
DmlToolsConfig? currentDmlTools = updatedMcpOptions?.DmlTools;
@@ -1196,6 +1193,7 @@ private static bool TryUpdateConfiguredMcpValues(
bool? deleteRecord = currentDmlTools?.DeleteRecord;
bool? executeEntity = currentDmlTools?.ExecuteEntity;
bool? aggregateRecords = currentDmlTools?.AggregateRecords;
+ int? aggregateRecordsQueryTimeout = currentDmlTools?.AggregateRecordsQueryTimeout;
updatedValue = options?.RuntimeMcpDmlToolsDescribeEntitiesEnabled;
if (updatedValue != null)
@@ -1253,6 +1251,14 @@ private static bool TryUpdateConfiguredMcpValues(
_logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.aggregate-records as '{updatedValue}'", updatedValue);
}
+ updatedValue = options?.RuntimeMcpDmlToolsAggregateRecordsQueryTimeout;
+ if (updatedValue != null)
+ {
+ aggregateRecordsQueryTimeout = (int)updatedValue;
+ hasToolUpdates = true;
+ _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.aggregate-records.query-timeout as '{updatedValue}'", updatedValue);
+ }
+
if (hasToolUpdates)
{
updatedMcpOptions = updatedMcpOptions! with
@@ -1264,7 +1270,8 @@ private static bool TryUpdateConfiguredMcpValues(
updateRecord: updateRecord,
deleteRecord: deleteRecord,
executeEntity: executeEntity,
- aggregateRecords: aggregateRecords)
+ aggregateRecords: aggregateRecords,
+ aggregateRecordsQueryTimeout: aggregateRecordsQueryTimeout)
};
}
diff --git a/src/Config/Converters/DmlToolsConfigConverter.cs b/src/Config/Converters/DmlToolsConfigConverter.cs
index 7e049c7926..16bc0a81c9 100644
--- a/src/Config/Converters/DmlToolsConfigConverter.cs
+++ b/src/Config/Converters/DmlToolsConfigConverter.cs
@@ -45,6 +45,7 @@ internal class DmlToolsConfigConverter : JsonConverter<DmlToolsConfig?>
bool? deleteRecord = null;
bool? executeEntity = null;
bool? aggregateRecords = null;
+ int? aggregateRecordsQueryTimeout = null;
while (reader.Read())
{
@@ -58,8 +59,54 @@ internal class DmlToolsConfigConverter : JsonConverter
string? property = reader.GetString();
reader.Read();
- // Handle the property value
- if (reader.TokenType is JsonTokenType.True || reader.TokenType is JsonTokenType.False)
+ // aggregate-records supports both boolean and object formats
+ if (property?.ToLowerInvariant() == "aggregate-records")
+ {
+ if (reader.TokenType is JsonTokenType.True || reader.TokenType is JsonTokenType.False)
+ {
+ aggregateRecords = reader.GetBoolean();
+ }
+ else if (reader.TokenType is JsonTokenType.StartObject)
+ {
+ // Handle object format: { "enabled": true, "query-timeout": 60 }
+ while (reader.Read())
+ {
+ if (reader.TokenType is JsonTokenType.EndObject)
+ {
+ break;
+ }
+
+ if (reader.TokenType is JsonTokenType.PropertyName)
+ {
+ string? subProperty = reader.GetString();
+ reader.Read();
+
+ switch (subProperty?.ToLowerInvariant())
+ {
+ case "enabled":
+ aggregateRecords = reader.GetBoolean();
+ break;
+ case "query-timeout":
+ if (reader.TokenType is not JsonTokenType.Null)
+ {
+ aggregateRecordsQueryTimeout = reader.GetInt32();
+ }
+
+ break;
+ default:
+ reader.Skip();
+ break;
+ }
+ }
+ }
+ }
+ else
+ {
+ throw new JsonException("Property 'aggregate-records' must be a boolean or object value.");
+ }
+ }
+ // Handle other properties (must be boolean)
+ else if (reader.TokenType is JsonTokenType.True || reader.TokenType is JsonTokenType.False)
{
bool value = reader.GetBoolean();
@@ -83,9 +130,6 @@ internal class DmlToolsConfigConverter : JsonConverter
case "execute-entity":
executeEntity = value;
break;
- case "aggregate-records":
- aggregateRecords = value;
- break;
default:
// Skip unknown properties
break;
@@ -95,8 +139,7 @@ internal class DmlToolsConfigConverter : JsonConverter
{
// Error on non-boolean values for known properties
if (property?.ToLowerInvariant() is "describe-entities" or "create-record"
- or "read-records" or "update-record" or "delete-record" or "execute-entity"
- or "aggregate-records")
+ or "read-records" or "update-record" or "delete-record" or "execute-entity")
{
throw new JsonException($"Property '{property}' must be a boolean value.");
}
@@ -116,7 +159,8 @@ internal class DmlToolsConfigConverter : JsonConverter
updateRecord: updateRecord,
deleteRecord: deleteRecord,
executeEntity: executeEntity,
- aggregateRecords: aggregateRecords);
+ aggregateRecords: aggregateRecords,
+ aggregateRecordsQueryTimeout: aggregateRecordsQueryTimeout);
}
// For any other unexpected token type, return default (all enabled)
@@ -142,7 +186,8 @@ public override void Write(Utf8JsonWriter writer, DmlToolsConfig? value, JsonSer
value.UserProvidedUpdateRecord ||
value.UserProvidedDeleteRecord ||
value.UserProvidedExecuteEntity ||
- value.UserProvidedAggregateRecords;
+ value.UserProvidedAggregateRecords ||
+ value.UserProvidedAggregateRecordsQueryTimeout;
// Only write the boolean value if it's provided by user
// This prevents writing "dml-tools": true when it's the default
@@ -188,9 +233,30 @@ public override void Write(Utf8JsonWriter writer, DmlToolsConfig? value, JsonSer
writer.WriteBoolean("execute-entity", value.ExecuteEntity.Value);
}
- if (value.UserProvidedAggregateRecords && value.AggregateRecords.HasValue)
+ if (value.UserProvidedAggregateRecords || value.UserProvidedAggregateRecordsQueryTimeout)
{
- writer.WriteBoolean("aggregate-records", value.AggregateRecords.Value);
+ if (value.UserProvidedAggregateRecordsQueryTimeout)
+ {
+ // Write as object format: { "enabled": true, "query-timeout": 60 }
+ writer.WritePropertyName("aggregate-records");
+ writer.WriteStartObject();
+
+ if (value.AggregateRecords.HasValue)
+ {
+ writer.WriteBoolean("enabled", value.AggregateRecords.Value);
+ }
+
+ if (value.AggregateRecordsQueryTimeout.HasValue)
+ {
+ writer.WriteNumber("query-timeout", value.AggregateRecordsQueryTimeout.Value);
+ }
+
+ writer.WriteEndObject();
+ }
+ else if (value.AggregateRecords.HasValue)
+ {
+ writer.WriteBoolean("aggregate-records", value.AggregateRecords.Value);
+ }
}
writer.WriteEndObject();
diff --git a/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs b/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs
index 6329236aa8..8b3c640725 100644
--- a/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs
+++ b/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs
@@ -66,13 +66,12 @@ internal McpRuntimeOptionsConverter(DeserializationVariableReplacementSettings?
string? path = null;
DmlToolsConfig? dmlTools = null;
string? description = null;
- int? queryTimeout = null;
while (reader.Read())
{
if (reader.TokenType == JsonTokenType.EndObject)
{
- return new McpRuntimeOptions(enabled, path, dmlTools, description, queryTimeout);
+ return new McpRuntimeOptions(enabled, path, dmlTools, description);
}
string? propertyName = reader.GetString();
@@ -108,14 +107,6 @@ internal McpRuntimeOptionsConverter(DeserializationVariableReplacementSettings?
break;
- case "query-timeout":
- if (reader.TokenType is not JsonTokenType.Null)
- {
- queryTimeout = reader.GetInt32();
- }
-
- break;
-
default:
throw new JsonException($"Unexpected property {propertyName}");
}
@@ -159,13 +150,6 @@ public override void Write(Utf8JsonWriter writer, McpRuntimeOptions value, JsonS
JsonSerializer.Serialize(writer, value.Description, options);
}
- // Write query-timeout whenever a value is present (null = not specified = use default).
- // This covers both constructor-set (deserialization) and 'with' expression (CLI update) paths.
- if (value?.QueryTimeout.HasValue is true)
- {
- writer.WriteNumber("query-timeout", value.QueryTimeout.Value);
- }
-
writer.WriteEndObject();
}
}
diff --git a/src/Config/ObjectModel/DmlToolsConfig.cs b/src/Config/ObjectModel/DmlToolsConfig.cs
index c1f8b278cd..7d35b3018d 100644
--- a/src/Config/ObjectModel/DmlToolsConfig.cs
+++ b/src/Config/ObjectModel/DmlToolsConfig.cs
@@ -16,6 +16,16 @@ public record DmlToolsConfig
///
public const bool DEFAULT_ENABLED = true;
+ ///
+ /// Default query timeout in seconds for the aggregate-records tool.
+ ///
+ public const int DEFAULT_QUERY_TIMEOUT_SECONDS = 30;
+
+ ///
+ /// Maximum allowed query timeout in seconds for the aggregate-records tool.
+ ///
+ public const int MAX_QUERY_TIMEOUT_SECONDS = 600;
+
///
/// Indicates if all tools are enabled/disabled uniformly
///
@@ -56,6 +66,12 @@ public record DmlToolsConfig
///
public bool? AggregateRecords { get; init; }
+ ///
+ /// Execution timeout in seconds for aggregate-records tool operations.
+ /// Default: 30 seconds.
+ ///
+ public int? AggregateRecordsQueryTimeout { get; init; }
+
[JsonConstructor]
public DmlToolsConfig(
bool? allToolsEnabled = null,
@@ -65,7 +81,8 @@ public DmlToolsConfig(
bool? updateRecord = null,
bool? deleteRecord = null,
bool? executeEntity = null,
- bool? aggregateRecords = null)
+ bool? aggregateRecords = null,
+ int? aggregateRecordsQueryTimeout = null)
{
if (allToolsEnabled is not null)
{
@@ -105,6 +122,12 @@ public DmlToolsConfig(
UserProvidedDeleteRecord = deleteRecord is not null;
UserProvidedExecuteEntity = executeEntity is not null;
UserProvidedAggregateRecords = aggregateRecords is not null;
+
+ if (aggregateRecordsQueryTimeout is not null)
+ {
+ AggregateRecordsQueryTimeout = aggregateRecordsQueryTimeout;
+ UserProvidedAggregateRecordsQueryTimeout = true;
+ }
}
///
@@ -122,7 +145,8 @@ public static DmlToolsConfig FromBoolean(bool enabled)
updateRecord: null,
deleteRecord: null,
executeEntity: null,
- aggregateRecords: null
+ aggregateRecords: null,
+ aggregateRecordsQueryTimeout: null
);
}
@@ -138,7 +162,8 @@ public static DmlToolsConfig FromBoolean(bool enabled)
updateRecord: null,
deleteRecord: null,
executeEntity: null,
- aggregateRecords: null
+ aggregateRecords: null,
+ aggregateRecordsQueryTimeout: null
);
///
@@ -204,4 +229,18 @@ public static DmlToolsConfig FromBoolean(bool enabled)
[JsonIgnore(Condition = JsonIgnoreCondition.Always)]
[MemberNotNullWhen(true, nameof(AggregateRecords))]
public bool UserProvidedAggregateRecords { get; init; } = false;
+
+ ///
+ /// Flag which informs CLI and JSON serializer whether to write aggregate-records.query-timeout
+ /// property/value to the runtime config file.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
+ public bool UserProvidedAggregateRecordsQueryTimeout { get; init; } = false;
+
+ ///
+ /// Gets the effective query timeout in seconds for the aggregate-records tool,
+ /// using the default if not specified.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
+ public int EffectiveAggregateRecordsQueryTimeoutSeconds => AggregateRecordsQueryTimeout ?? DEFAULT_QUERY_TIMEOUT_SECONDS;
}
diff --git a/src/Config/ObjectModel/McpRuntimeOptions.cs b/src/Config/ObjectModel/McpRuntimeOptions.cs
index 5b48b2fcc3..e17d53fc8f 100644
--- a/src/Config/ObjectModel/McpRuntimeOptions.cs
+++ b/src/Config/ObjectModel/McpRuntimeOptions.cs
@@ -10,8 +10,6 @@ namespace Azure.DataApiBuilder.Config.ObjectModel;
public record McpRuntimeOptions
{
public const string DEFAULT_PATH = "/mcp";
- public const int DEFAULT_QUERY_TIMEOUT_SECONDS = 30;
- public const int MAX_QUERY_TIMEOUT_SECONDS = 600;
///
/// Whether MCP endpoints are enabled
@@ -38,22 +36,12 @@ public record McpRuntimeOptions
[JsonPropertyName("description")]
public string? Description { get; init; }
- ///
- /// Execution timeout in seconds for MCP tool operations.
- /// This timeout wraps the entire tool execution including database queries.
- /// It applies to all MCP tools, not just aggregation.
- /// Default: 30 seconds.
- ///
- [JsonPropertyName("query-timeout")]
- public int? QueryTimeout { get; init; }
-
[JsonConstructor]
public McpRuntimeOptions(
bool? Enabled = null,
string? Path = null,
DmlToolsConfig? DmlTools = null,
- string? Description = null,
- int? QueryTimeout = null)
+ string? Description = null)
{
this.Enabled = Enabled ?? true;
@@ -79,12 +67,6 @@ public McpRuntimeOptions(
}
this.Description = Description;
-
- if (QueryTimeout is not null)
- {
- this.QueryTimeout = QueryTimeout;
- UserProvidedQueryTimeout = true;
- }
}
///
@@ -96,17 +78,4 @@ public McpRuntimeOptions(
[JsonIgnore(Condition = JsonIgnoreCondition.Always)]
[MemberNotNullWhen(true, nameof(Enabled))]
public bool UserProvidedPath { get; init; } = false;
-
- ///
- /// Flag which informs CLI and JSON serializer whether to write query-timeout
- /// property and value to the runtime config file.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
- public bool UserProvidedQueryTimeout { get; init; } = false;
-
- ///
- /// Gets the effective query timeout in seconds, using the default if not specified.
- ///
- [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
- public int EffectiveQueryTimeoutSeconds => QueryTimeout ?? DEFAULT_QUERY_TIMEOUT_SECONDS;
}
diff --git a/src/Core/Configurations/RuntimeConfigValidator.cs b/src/Core/Configurations/RuntimeConfigValidator.cs
index 19cc3e4c62..de35bf8ed5 100644
--- a/src/Core/Configurations/RuntimeConfigValidator.cs
+++ b/src/Core/Configurations/RuntimeConfigValidator.cs
@@ -911,13 +911,13 @@ public void ValidateMcpUri(RuntimeConfig runtimeConfig)
subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError));
}
- // Validate query-timeout if provided
- if (runtimeConfig.Runtime.Mcp.QueryTimeout is not null &&
- (runtimeConfig.Runtime.Mcp.QueryTimeout < 1 || runtimeConfig.Runtime.Mcp.QueryTimeout > McpRuntimeOptions.MAX_QUERY_TIMEOUT_SECONDS))
+ // Validate aggregate-records query-timeout if provided
+ if (runtimeConfig.Runtime.Mcp.DmlTools?.AggregateRecordsQueryTimeout is not null &&
+ (runtimeConfig.Runtime.Mcp.DmlTools.AggregateRecordsQueryTimeout < 1 || runtimeConfig.Runtime.Mcp.DmlTools.AggregateRecordsQueryTimeout > DmlToolsConfig.MAX_QUERY_TIMEOUT_SECONDS))
{
HandleOrRecordException(new DataApiBuilderException(
- message: $"MCP query-timeout must be between 1 and {McpRuntimeOptions.MAX_QUERY_TIMEOUT_SECONDS} seconds. " +
- $"Provided value: {runtimeConfig.Runtime.Mcp.QueryTimeout}.",
+ message: $"Aggregate-records query-timeout must be between 1 and {DmlToolsConfig.MAX_QUERY_TIMEOUT_SECONDS} seconds. " +
+ $"Provided value: {runtimeConfig.Runtime.Mcp.DmlTools.AggregateRecordsQueryTimeout}.",
statusCode: HttpStatusCode.ServiceUnavailable,
subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError));
}
diff --git a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
index 31e9b498cd..2e94a7f3e1 100644
--- a/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
+++ b/src/Service.Tests/Mcp/AggregateRecordsToolTests.cs
@@ -305,20 +305,6 @@ public void BuildTimeoutErrorMessage_ContainsGuidance(string entityName)
("first", "Must suggest pagination."));
}
- [DataTestMethod]
- [DataRow("Product", DisplayName = "Product entity")]
- public void BuildTaskCanceledErrorMessage_ContainsGuidance(string entityName)
- {
- string message = AggregateRecordsTool.BuildTaskCanceledErrorMessage(entityName);
-
- AssertContainsAll(message,
- (entityName, "Must include entity name."),
- ("NOT a tool error", "Must state this is NOT a tool error."),
- ("timeout", "Must reference timeout."),
- ("filter", "Must suggest filter."),
- ("first", "Must suggest pagination."));
- }
-
[DataTestMethod]
[DataRow("LargeProductCatalog", DisplayName = "LargeProductCatalog entity")]
public void BuildOperationCanceledErrorMessage_ContainsGuidance(string entityName)
diff --git a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
index 158402c264..063fa0a1ba 100644
--- a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
+++ b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
@@ -19,14 +19,12 @@
namespace Azure.DataApiBuilder.Service.Tests.Mcp
{
///
- /// Tests for the MCP query-timeout configuration property.
+ /// Tests for the aggregate-records query-timeout configuration property.
/// Verifies:
/// - Default value of 30 seconds when not configured
/// - Custom value overrides default
- /// - Timeout wrapping applies to all MCP tools via ExecuteWithTelemetryAsync
- /// - Hot reload: changing config value updates behavior without restart
- /// - Timeout surfaces as TimeoutException, not generic cancellation
- /// - Telemetry maps timeout to TIMEOUT error code
+ /// - DmlToolsConfig properties reflect configured timeout
+ /// - JSON serialization/deserialization of aggregate-records with query-timeout
///
[TestClass]
public class McpQueryTimeoutTests
@@ -37,89 +35,73 @@ public class McpQueryTimeoutTests
[DataRow(1, DisplayName = "1 second")]
[DataRow(60, DisplayName = "60 seconds")]
[DataRow(120, DisplayName = "120 seconds")]
- public void McpRuntimeOptions_CustomTimeout_ReturnsConfiguredValue(int timeoutSeconds)
+ public void DmlToolsConfig_CustomTimeout_ReturnsConfiguredValue(int timeoutSeconds)
{
- McpRuntimeOptions options = new(QueryTimeout: timeoutSeconds);
- Assert.AreEqual(timeoutSeconds, options.EffectiveQueryTimeoutSeconds);
+ DmlToolsConfig config = new(aggregateRecordsQueryTimeout: timeoutSeconds);
+ Assert.AreEqual(timeoutSeconds, config.EffectiveAggregateRecordsQueryTimeoutSeconds);
+ Assert.IsTrue(config.UserProvidedAggregateRecordsQueryTimeout);
}
[TestMethod]
- public void RuntimeConfig_McpQueryTimeout_ExposedInConfig()
+ public void RuntimeConfig_AggregateRecordsQueryTimeout_ExposedInConfig()
{
RuntimeConfig config = CreateConfig(queryTimeout: 45);
- Assert.AreEqual(45, config.Runtime?.Mcp?.QueryTimeout);
- Assert.AreEqual(45, config.Runtime?.Mcp?.EffectiveQueryTimeoutSeconds);
+ Assert.AreEqual(45, config.Runtime?.Mcp?.DmlTools?.AggregateRecordsQueryTimeout);
+ Assert.AreEqual(45, config.Runtime?.Mcp?.DmlTools?.EffectiveAggregateRecordsQueryTimeoutSeconds);
}
[TestMethod]
- public void RuntimeConfig_McpQueryTimeout_DefaultWhenNotSet()
+ public void RuntimeConfig_AggregateRecordsQueryTimeout_DefaultWhenNotSet()
{
RuntimeConfig config = CreateConfig();
- Assert.IsNull(config.Runtime?.Mcp?.QueryTimeout);
- Assert.AreEqual(30, config.Runtime?.Mcp?.EffectiveQueryTimeoutSeconds);
+ Assert.IsNull(config.Runtime?.Mcp?.DmlTools?.AggregateRecordsQueryTimeout);
+ Assert.AreEqual(DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS, config.Runtime?.Mcp?.DmlTools?.EffectiveAggregateRecordsQueryTimeoutSeconds);
}
#endregion
- #region Timeout Wrapping Tests
+ #region Telemetry No-Timeout Tests
[TestMethod]
- public async Task ExecuteWithTelemetry_CompletesSuccessfully_WithinTimeout()
+ public async Task ExecuteWithTelemetry_CompletesSuccessfully_NoTimeout()
{
- // A tool that completes immediately should succeed
- RuntimeConfig config = CreateConfig(queryTimeout: 30);
+ // After moving timeout to AggregateRecordsTool, ExecuteWithTelemetryAsync should
+ // no longer apply any timeout wrapping. A fast tool should complete regardless of config.
+ RuntimeConfig config = CreateConfig(queryTimeout: 1);
IServiceProvider sp = CreateServiceProviderWithConfig(config);
IMcpTool tool = new ImmediateCompletionTool();
CallToolResult result = await McpTelemetryHelper.ExecuteWithTelemetryAsync(
tool, "test_tool", null, sp, CancellationToken.None);
- // Tool should complete without throwing TimeoutException
Assert.IsNotNull(result);
Assert.IsTrue(result.IsError != true, "Tool result should not be an error");
}
[TestMethod]
- public async Task ExecuteWithTelemetry_ThrowsTimeoutException_WhenToolExceedsTimeout()
+ public async Task ExecuteWithTelemetry_DoesNotApplyTimeout_AfterRefactor()
{
- // Configure a very short timeout (1 second) and a tool that takes longer
+ // Verify that McpTelemetryHelper no longer applies timeout wrapping.
+ // A slow tool should NOT timeout in the telemetry layer (timeout is now tool-specific).
RuntimeConfig config = CreateConfig(queryTimeout: 1);
IServiceProvider sp = CreateServiceProviderWithConfig(config);
- IMcpTool tool = new SlowTool(delaySeconds: 30);
- await Assert.ThrowsExceptionAsync(async () =>
- {
- await McpTelemetryHelper.ExecuteWithTelemetryAsync(
- tool, "slow_tool", null, sp, CancellationToken.None);
- });
- }
+ // Use a short-delay tool (2 seconds) with 1-second query-timeout.
+ // If McpTelemetryHelper still applied timeout, this would throw TimeoutException.
+ IMcpTool tool = new SlowTool(delaySeconds: 2);
- [TestMethod]
- public async Task ExecuteWithTelemetry_TimeoutMessage_ContainsToolName()
- {
- RuntimeConfig config = CreateConfig(queryTimeout: 1);
- IServiceProvider sp = CreateServiceProviderWithConfig(config);
- IMcpTool tool = new SlowTool(delaySeconds: 30);
+ // Should complete without timeout since McpTelemetryHelper no longer wraps with timeout
+ CallToolResult result = await McpTelemetryHelper.ExecuteWithTelemetryAsync(
+ tool, "test_tool", null, sp, CancellationToken.None);
- try
- {
- await McpTelemetryHelper.ExecuteWithTelemetryAsync(
- tool, "aggregate_records", null, sp, CancellationToken.None);
- Assert.Fail("Expected TimeoutException");
- }
- catch (TimeoutException ex)
- {
- Assert.IsTrue(ex.Message.Contains("aggregate_records"), "Message should contain tool name");
- Assert.IsTrue(ex.Message.Contains("1 second"), "Message should contain timeout value");
- Assert.IsTrue(ex.Message.Contains("NOT a tool error"), "Message should clarify it is not a tool error");
- }
+ Assert.IsNotNull(result);
+ Assert.IsTrue(result.IsError != true, "Tool should complete without timeout in telemetry layer");
}
[TestMethod]
public async Task ExecuteWithTelemetry_ClientCancellation_PropagatesAsCancellation()
{
- // Client cancellation (not timeout) should propagate as OperationCanceledException
- // rather than being converted to TimeoutException.
+ // Client cancellation should still propagate as OperationCanceledException.
RuntimeConfig config = CreateConfig(queryTimeout: 30);
IServiceProvider sp = CreateServiceProviderWithConfig(config);
IMcpTool tool = new SlowTool(delaySeconds: 30);
@@ -140,98 +122,60 @@ await McpTelemetryHelper.ExecuteWithTelemetryAsync(
catch (OperationCanceledException)
{
// Expected: client-initiated cancellation propagates as OperationCanceledException
- // (or subclass TaskCanceledException)
}
}
- [TestMethod]
- public async Task ExecuteWithTelemetry_AppliesTimeout_ToAllToolTypes()
- {
- // Verify timeout applies to both built-in and custom tool types
- RuntimeConfig config = CreateConfig(queryTimeout: 1);
- IServiceProvider sp = CreateServiceProviderWithConfig(config);
-
- // Test with built-in tool type
- IMcpTool builtInTool = new SlowTool(delaySeconds: 30, toolType: ToolType.BuiltIn);
- await Assert.ThrowsExceptionAsync(async () =>
- {
- await McpTelemetryHelper.ExecuteWithTelemetryAsync(
- builtInTool, "builtin_slow", null, sp, CancellationToken.None);
- });
-
- // Test with custom tool type
- IMcpTool customTool = new SlowTool(delaySeconds: 30, toolType: ToolType.Custom);
- await Assert.ThrowsExceptionAsync(async () =>
- {
- await McpTelemetryHelper.ExecuteWithTelemetryAsync(
- customTool, "custom_slow", null, sp, CancellationToken.None);
- });
- }
-
#endregion
- #region Hot Reload Tests
+ #region JSON Serialization Tests
[TestMethod]
- public async Task ExecuteWithTelemetry_ReadsConfigPerInvocation_HotReload()
+ public void DmlToolsConfig_Serialization_IncludesQueryTimeout_WhenUserProvided()
{
- // First invocation with long timeout should succeed
- RuntimeConfig config1 = CreateConfig(queryTimeout: 30);
- IServiceProvider sp1 = CreateServiceProviderWithConfig(config1);
-
- IMcpTool fastTool = new ImmediateCompletionTool();
- CallToolResult result1 = await McpTelemetryHelper.ExecuteWithTelemetryAsync(
- fastTool, "test_tool", null, sp1, CancellationToken.None);
- Assert.IsNotNull(result1);
-
- // Second invocation with very short timeout and a slow tool should timeout.
- // This demonstrates that each invocation reads the current config value.
- RuntimeConfig config2 = CreateConfig(queryTimeout: 1);
- IServiceProvider sp2 = CreateServiceProviderWithConfig(config2);
-
- IMcpTool slowTool = new SlowTool(delaySeconds: 30);
- await Assert.ThrowsExceptionAsync(async () =>
- {
- await McpTelemetryHelper.ExecuteWithTelemetryAsync(
- slowTool, "test_tool", null, sp2, CancellationToken.None);
- });
+ // When aggregate-records has a query-timeout, it should serialize as object format
+ DmlToolsConfig dmlTools = new(aggregateRecords: true, aggregateRecordsQueryTimeout: 45);
+ McpRuntimeOptions options = new(Enabled: true, DmlTools: dmlTools);
+ JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
+ string json = JsonSerializer.Serialize(options, serializerOptions);
+ Assert.IsTrue(json.Contains("\"query-timeout\""), $"Expected 'query-timeout' in JSON. Got: {json}");
+ Assert.IsTrue(json.Contains("45"), $"Expected timeout value 45 in JSON. Got: {json}");
}
- #endregion
-
- // Note: MapExceptionToErrorCode tests are in McpTelemetryTests (covers all exception types via DataRow).
-
- #region JSON Serialization Tests
-
[TestMethod]
- public void McpRuntimeOptions_Serialization_IncludesQueryTimeout_WhenUserProvided()
+ public void DmlToolsConfig_Deserialization_ReadsQueryTimeout_ObjectFormat()
{
- McpRuntimeOptions options = new(QueryTimeout: 45);
+ string json = @"{""enabled"": true, ""dml-tools"": { ""aggregate-records"": { ""enabled"": true, ""query-timeout"": 60 } }}";
JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
- string json = JsonSerializer.Serialize(options, serializerOptions);
- Assert.IsTrue(json.Contains("\"query-timeout\": 45") || json.Contains("\"query-timeout\":45"));
+ McpRuntimeOptions options = JsonSerializer.Deserialize(json, serializerOptions);
+ Assert.IsNotNull(options);
+ Assert.IsNotNull(options.DmlTools);
+ Assert.AreEqual(true, options.DmlTools.AggregateRecords);
+ Assert.AreEqual(60, options.DmlTools.AggregateRecordsQueryTimeout);
+ Assert.AreEqual(60, options.DmlTools.EffectiveAggregateRecordsQueryTimeoutSeconds);
}
[TestMethod]
- public void McpRuntimeOptions_Deserialization_ReadsQueryTimeout()
+ public void DmlToolsConfig_Deserialization_AggregateRecordsBoolean_NoQueryTimeout()
{
- string json = @"{""enabled"": true, ""query-timeout"": 60}";
+ string json = @"{""enabled"": true, ""dml-tools"": { ""aggregate-records"": true }}";
JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
McpRuntimeOptions options = JsonSerializer.Deserialize(json, serializerOptions);
Assert.IsNotNull(options);
- Assert.AreEqual(60, options.QueryTimeout);
- Assert.AreEqual(60, options.EffectiveQueryTimeoutSeconds);
+ Assert.IsNotNull(options.DmlTools);
+ Assert.AreEqual(true, options.DmlTools.AggregateRecords);
+ Assert.IsNull(options.DmlTools.AggregateRecordsQueryTimeout);
+ Assert.AreEqual(DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS, options.DmlTools.EffectiveAggregateRecordsQueryTimeoutSeconds);
}
[TestMethod]
- public void McpRuntimeOptions_Deserialization_DefaultsWhenOmitted()
+ public void DmlToolsConfig_Deserialization_DefaultsWhenOmitted()
{
string json = @"{""enabled"": true}";
JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
McpRuntimeOptions options = JsonSerializer.Deserialize(json, serializerOptions);
Assert.IsNotNull(options);
- Assert.IsNull(options.QueryTimeout);
- Assert.AreEqual(30, options.EffectiveQueryTimeoutSeconds);
+ Assert.IsNull(options.DmlTools?.AggregateRecordsQueryTimeout);
+ Assert.AreEqual(DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS, options.DmlTools?.EffectiveAggregateRecordsQueryTimeoutSeconds ?? DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS);
}
#endregion
@@ -249,7 +193,6 @@ private static RuntimeConfig CreateConfig(int? queryTimeout = null)
Mcp: new(
Enabled: true,
Path: "/mcp",
- QueryTimeout: queryTimeout,
DmlTools: new(
describeEntities: true,
readRecords: true,
@@ -257,7 +200,8 @@ private static RuntimeConfig CreateConfig(int? queryTimeout = null)
updateRecord: true,
deleteRecord: true,
executeEntity: true,
- aggregateRecords: true
+ aggregateRecords: true,
+ aggregateRecordsQueryTimeout: queryTimeout
)
),
Host: new(Cors: null, Authentication: null, Mode: HostMode.Development)
@@ -310,7 +254,7 @@ public Task ExecuteAsync(
///
/// A mock tool that delays for a specified duration, respecting cancellation.
- /// Used to test timeout behavior.
+ /// Used to test cancellation behavior.
///
private class SlowTool : IMcpTool
{
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
index 15f242605f..c9def099f9 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt
@@ -36,10 +36,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
index 966af2777f..9e698f2a0f 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
@@ -33,17 +33,17 @@
DeleteRecord: true,
ExecuteEntity: true,
AggregateRecords: true,
- UserProvidedAllTools: false,
+ UserProvidedAllTools: true,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
index 0779215cd0..e7f312ef48 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
index 75077c22fa..5dcef0dcdb 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt
@@ -32,10 +32,10 @@
UserProvidedUpdateRecord: false,
UserProvidedDeleteRecord: false,
UserProvidedExecuteEntity: false,
- UserProvidedAggregateRecords: false
- },
- UserProvidedQueryTimeout: false,
- EffectiveQueryTimeoutSeconds: 30
+ UserProvidedAggregateRecords: false,
+ UserProvidedAggregateRecordsQueryTimeout: false,
+ EffectiveAggregateRecordsQueryTimeoutSeconds: 30
+ }
},
Host: {
Cors: {
diff --git a/src/Service.Tests/UnitTests/McpTelemetryTests.cs b/src/Service.Tests/UnitTests/McpTelemetryTests.cs
index 1b7c66fbcc..d82e531519 100644
--- a/src/Service.Tests/UnitTests/McpTelemetryTests.cs
+++ b/src/Service.Tests/UnitTests/McpTelemetryTests.cs
@@ -336,42 +336,6 @@ public async Task ExecuteWithTelemetryAsync_RecordsExceptionAndRethrows_WhenTool
Assert.IsNotNull(exceptionEvent, "Exception event should be recorded");
}
- ///
- /// Test that ExecuteWithTelemetryAsync applies the configured query-timeout and throws TimeoutException
- /// when a tool exceeds the configured timeout.
- ///
- [TestMethod]
- public async Task ExecuteWithTelemetryAsync_ThrowsTimeoutException_WhenToolExceedsTimeout()
- {
- // Use a 1-second timeout with a tool that takes 10 seconds
- IServiceProvider serviceProvider = CreateServiceProviderWithTimeout(queryTimeoutSeconds: 1);
- IMcpTool tool = new SlowTool(delaySeconds: 10);
-
- TimeoutException thrownEx = await Assert.ThrowsExceptionAsync(
- () => McpTelemetryHelper.ExecuteWithTelemetryAsync(
- tool, "aggregate_records", arguments: null, serviceProvider, CancellationToken.None));
-
- Assert.IsTrue(thrownEx.Message.Contains("aggregate_records"), "Exception message should contain tool name");
- Assert.IsTrue(thrownEx.Message.Contains("1 second"), "Exception message should contain timeout duration");
- }
-
- ///
- /// Test that ExecuteWithTelemetryAsync succeeds when tool completes before the timeout.
- ///
- [TestMethod]
- public async Task ExecuteWithTelemetryAsync_Succeeds_WhenToolCompletesBeforeTimeout()
- {
- // Use a 30-second timeout with a tool that completes immediately
- IServiceProvider serviceProvider = CreateServiceProviderWithTimeout(queryTimeoutSeconds: 30);
- IMcpTool tool = new ImmediateCompletionTool();
-
- CallToolResult result = await McpTelemetryHelper.ExecuteWithTelemetryAsync(
- tool, "aggregate_records", arguments: null, serviceProvider, CancellationToken.None);
-
- Assert.IsNotNull(result);
- Assert.IsFalse(result.IsError == true);
- }
-
///
/// Test that aggregate_records tool name maps to "aggregate" operation.
///
@@ -388,48 +352,6 @@ public void InferOperationFromTool_AggregateRecords_ReturnsAggregate()
#endregion
- #region Helpers for timeout tests
-
- ///
- /// Creates a service provider with a RuntimeConfigProvider configured with the given timeout.
- ///
- private static IServiceProvider CreateServiceProviderWithTimeout(int queryTimeoutSeconds)
- {
- Azure.DataApiBuilder.Config.ObjectModel.RuntimeConfig config = CreateConfigWithQueryTimeout(queryTimeoutSeconds);
- ServiceCollection services = new();
- Azure.DataApiBuilder.Core.Configurations.RuntimeConfigProvider configProvider =
- TestHelper.GenerateInMemoryRuntimeConfigProvider(config);
- services.AddSingleton(configProvider);
- services.AddLogging();
- return services.BuildServiceProvider();
- }
-
- private static Azure.DataApiBuilder.Config.ObjectModel.RuntimeConfig CreateConfigWithQueryTimeout(int queryTimeoutSeconds)
- {
- return new Azure.DataApiBuilder.Config.ObjectModel.RuntimeConfig(
- Schema: "test-schema",
- DataSource: new Azure.DataApiBuilder.Config.ObjectModel.DataSource(
- DatabaseType: Azure.DataApiBuilder.Config.ObjectModel.DatabaseType.MSSQL,
- ConnectionString: "",
- Options: null),
- Runtime: new(
- Rest: new(),
- GraphQL: new(),
- Mcp: new(
- Enabled: true,
- Path: "/mcp",
- DmlTools: null,
- Description: null,
- QueryTimeout: queryTimeoutSeconds
- ),
- Host: new(Cors: null, Authentication: null, Mode: Azure.DataApiBuilder.Config.ObjectModel.HostMode.Development)
- ),
- Entities: new(new System.Collections.Generic.Dictionary())
- );
- }
-
- #endregion
-
#region Test Mocks
///
@@ -468,81 +390,6 @@ public Task ExecuteAsync(JsonDocument? arguments, IServiceProvid
}
}
- ///
- /// A mock tool that completes immediately with a success result.
- ///
- private class ImmediateCompletionTool : IMcpTool
- {
- public ToolType ToolType { get; } = ToolType.BuiltIn;
-
- public Tool GetToolMetadata()
- {
- using JsonDocument doc = JsonDocument.Parse("{\"type\": \"object\"}");
- return new Tool
- {
- Name = "test_tool",
- Description = "A test tool that completes immediately",
- InputSchema = doc.RootElement.Clone()
- };
- }
-
- public Task ExecuteAsync(
- JsonDocument? arguments,
- IServiceProvider serviceProvider,
- CancellationToken cancellationToken = default)
- {
- return Task.FromResult(new CallToolResult
- {
- Content = new List
- {
- new TextContentBlock { Text = "{\"result\": \"success\"}" }
- }
- });
- }
- }
-
- ///
- /// A mock tool that delays for a specified duration, respecting cancellation.
- /// Used to test timeout behavior.
- ///
- private class SlowTool : IMcpTool
- {
- private readonly int _delaySeconds;
-
- public SlowTool(int delaySeconds)
- {
- _delaySeconds = delaySeconds;
- }
-
- public ToolType ToolType { get; } = ToolType.BuiltIn;
-
- public Tool GetToolMetadata()
- {
- using JsonDocument doc = JsonDocument.Parse("{\"type\": \"object\"}");
- return new Tool
- {
- Name = "slow_tool",
- Description = "A test tool that takes a long time",
- InputSchema = doc.RootElement.Clone()
- };
- }
-
- public async Task ExecuteAsync(
- JsonDocument? arguments,
- IServiceProvider serviceProvider,
- CancellationToken cancellationToken = default)
- {
- await Task.Delay(TimeSpan.FromSeconds(_delaySeconds), cancellationToken);
- return new CallToolResult
- {
- Content = new List
- {
- new TextContentBlock { Text = "{\"result\": \"completed\"}" }
- }
- };
- }
- }
-
#endregion
}
}
From 751114660dcfacaf3b2b2e632185d4a8172c3865 Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Tue, 10 Mar 2026 15:26:55 +0530
Subject: [PATCH 41/43] Fix failing test MsSQL snapshot
---
...igurationTests.TestReadingRuntimeConfigForMsSql.verified.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
index 9e698f2a0f..d80506e102 100644
--- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
+++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt
@@ -33,7 +33,7 @@
DeleteRecord: true,
ExecuteEntity: true,
AggregateRecords: true,
- UserProvidedAllTools: true,
+ UserProvidedAllTools: false,
UserProvidedDescribeEntities: false,
UserProvidedCreateRecord: false,
UserProvidedReadRecords: false,
From 9e1b7355f929d73433a4d8f6fc28b7f9d800bede Mon Sep 17 00:00:00 2001
From: Aniruddh Munde
Date: Tue, 10 Mar 2026 09:55:51 -0700
Subject: [PATCH 42/43] Remove redundant test for default timeout
---
src/Service.Tests/Mcp/McpQueryTimeoutTests.cs | 11 -----------
1 file changed, 11 deletions(-)
diff --git a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
index 063fa0a1ba..835527865d 100644
--- a/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
+++ b/src/Service.Tests/Mcp/McpQueryTimeoutTests.cs
@@ -167,17 +167,6 @@ public void DmlToolsConfig_Deserialization_AggregateRecordsBoolean_NoQueryTimeou
Assert.AreEqual(DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS, options.DmlTools.EffectiveAggregateRecordsQueryTimeoutSeconds);
}
- [TestMethod]
- public void DmlToolsConfig_Deserialization_DefaultsWhenOmitted()
- {
- string json = @"{""enabled"": true}";
- JsonSerializerOptions serializerOptions = RuntimeConfigLoader.GetSerializationOptions();
- McpRuntimeOptions options = JsonSerializer.Deserialize(json, serializerOptions);
- Assert.IsNotNull(options);
- Assert.IsNull(options.DmlTools?.AggregateRecordsQueryTimeout);
- Assert.AreEqual(DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS, options.DmlTools?.EffectiveAggregateRecordsQueryTimeoutSeconds ?? DmlToolsConfig.DEFAULT_QUERY_TIMEOUT_SECONDS);
- }
-
#endregion
#region Helpers
From dfe3ae80eb37b614888d72f34ac17704d7da55b5 Mon Sep 17 00:00:00 2001
From: souvikghosh04
Date: Wed, 11 Mar 2026 13:30:51 +0530
Subject: [PATCH 43/43] Review comment and refactoring fixes
---
.../BuiltInTools/AggregateRecordsTool.cs | 22 ++++++++++++++-----
.../Utils/McpTelemetryErrorCodes.cs | 2 +-
.../Utils/McpTelemetryHelper.cs | 2 +-
3 files changed, 19 insertions(+), 7 deletions(-)
diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
index a08a54ae75..00875d2596 100644
--- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
+++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/AggregateRecordsTool.cs
@@ -661,6 +661,11 @@ public async Task ExecuteAsync(
IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService();
HttpContext? httpContext = httpContextAccessor.HttpContext;
+ if (httpContext is null)
+ {
+ return (null, McpErrorHelpers.PermissionDenied(toolName, entityName, "read", "No active HTTP request context.", logger));
+ }
+
if (!McpAuthorizationHelper.ValidateRoleContext(httpContext, authResolver, out string roleCtxError))
{
return (null, McpErrorHelpers.PermissionDenied(toolName, entityName, "read", roleCtxError, logger));
@@ -782,15 +787,22 @@ private static void BuildAggregationStructure(
// Add groupby columns as LabelledColumns and GroupByMetadata.Fields
foreach (string groupbyField in args.Groupby)
{
- sqlMetadataProvider.TryGetBackingColumn(entityName, groupbyField, out string? backingGroupbyColumn);
+ if (!sqlMetadataProvider.TryGetBackingColumn(entityName, groupbyField, out string? backingGroupbyColumn) || string.IsNullOrEmpty(backingGroupbyColumn))
+ {
+ throw new DataApiBuilderException(
+ message: $"GroupBy field '{groupbyField}' is not a valid field for entity '{entityName}'.",
+ statusCode: System.Net.HttpStatusCode.BadRequest,
+ subStatusCode: DataApiBuilderException.SubStatusCodes.BadRequest);
+ }
+
structure.Columns.Add(new LabelledColumn(
- dbObject.SchemaName, dbObject.Name, backingGroupbyColumn!, groupbyField, structure.SourceAlias));
- structure.GroupByMetadata.Fields[backingGroupbyColumn!] = new Column(
- dbObject.SchemaName, dbObject.Name, backingGroupbyColumn!, structure.SourceAlias);
+ dbObject.SchemaName, dbObject.Name, backingGroupbyColumn, groupbyField, structure.SourceAlias));
+ structure.GroupByMetadata.Fields[backingGroupbyColumn] = new Column(
+ dbObject.SchemaName, dbObject.Name, backingGroupbyColumn, structure.SourceAlias);
}
// Build aggregation column using engine's AggregationColumn type.
- AggregationType aggregationType = Enum.Parse(args.Function);
+ AggregationType aggregationType = Enum.Parse(args.Function, ignoreCase: true);
AggregationColumn aggregationColumn = new(
dbObject.SchemaName, dbObject.Name, backingField, aggregationType, alias, args.Distinct, structure.SourceAlias);
diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryErrorCodes.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryErrorCodes.cs
index 3ef3aa4d74..ecaaad54de 100644
--- a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryErrorCodes.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryErrorCodes.cs
@@ -41,6 +41,6 @@ internal static class McpTelemetryErrorCodes
///
/// Operation timed out error code.
///
- public const string TIMEOUT = "Timeout";
+ public const string OPERATION_TIMEOUT = "OperationTimeout";
}
}
diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
index faf8b1d434..c423534816 100644
--- a/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
+++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpTelemetryHelper.cs
@@ -188,7 +188,7 @@ public static string MapExceptionToErrorCode(Exception ex)
return ex switch
{
OperationCanceledException => McpTelemetryErrorCodes.OPERATION_CANCELLED,
- TimeoutException => McpTelemetryErrorCodes.TIMEOUT,
+ TimeoutException => McpTelemetryErrorCodes.OPERATION_TIMEOUT,
DataApiBuilderException dabEx when dabEx.SubStatusCode == DataApiBuilderException.SubStatusCodes.AuthenticationChallenge
=> McpTelemetryErrorCodes.AUTHENTICATION_FAILED,
DataApiBuilderException dabEx when dabEx.SubStatusCode == DataApiBuilderException.SubStatusCodes.AuthorizationCheckFailed