From f973900756c734468cc104b0c88589fb56c5b7a8 Mon Sep 17 00:00:00 2001 From: Roo Code Date: Mon, 12 Jan 2026 08:59:16 +0000 Subject: [PATCH 1/2] fix: add mergeToolResultText for Mistral/Devstral models in OpenRouter Fixes the "Unexpected role user after role tool" error for Mistral and Devstral models when using OpenRouter. Changes: - Extended model detection to include "devstral" (e.g. mistralai/devstral-2512) - Added mergeToolResultText: true to merge text content into tool messages instead of creating a separate user message that violates Mistral ordering Closes #10618 --- src/api/providers/openrouter.ts | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/api/providers/openrouter.ts b/src/api/providers/openrouter.ts index 7fcc24b15f6..1af59f64f8a 100644 --- a/src/api/providers/openrouter.ts +++ b/src/api/providers/openrouter.ts @@ -232,13 +232,17 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH } // Convert Anthropic messages to OpenAI format. - // Pass normalization function for Mistral compatibility (requires 9-char alphanumeric IDs) - const isMistral = modelId.toLowerCase().includes("mistral") + // Pass normalization function for Mistral/Devstral compatibility (requires 9-char alphanumeric IDs) + // Also merge tool result text to avoid "Unexpected role 'user' after role 'tool'" errors + const modelIdLower = modelId.toLowerCase() + const isMistralFamily = modelIdLower.includes("mistral") || modelIdLower.includes("devstral") let openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [ { role: "system", content: systemPrompt }, ...convertToOpenAiMessages( messages, - isMistral ? { normalizeToolCallId: normalizeMistralToolCallId } : undefined, + isMistralFamily + ? 
{ normalizeToolCallId: normalizeMistralToolCallId, mergeToolResultText: true } + : undefined, ), ] From 1ce23097048c93ba2c6efc3f3679536a22a2bdab Mon Sep 17 00:00:00 2001 From: Roo Code Date: Fri, 20 Feb 2026 19:10:08 +0000 Subject: [PATCH 2/2] test: add Mistral/Devstral model detection tests for mergeToolResultText and rebase on main --- .../providers/__tests__/openrouter.spec.ts | 118 ++++++++++++++++++ 1 file changed, 118 insertions(+) diff --git a/src/api/providers/__tests__/openrouter.spec.ts b/src/api/providers/__tests__/openrouter.spec.ts index e03abea6352..f2361b411a3 100644 --- a/src/api/providers/__tests__/openrouter.spec.ts +++ b/src/api/providers/__tests__/openrouter.spec.ts @@ -80,10 +80,41 @@ vitest.mock("../fetchers/modelCache", () => ({ excludedTools: ["existing_excluded"], includedTools: ["existing_included"], }, + "mistralai/mistral-large-latest": { + maxTokens: 8192, + contextWindow: 128000, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 2, + outputPrice: 6, + description: "Mistral Large", + }, + "mistralai/devstral-2512": { + maxTokens: 8192, + contextWindow: 128000, + supportsImages: false, + supportsPromptCache: false, + inputPrice: 1, + outputPrice: 3, + description: "Devstral", + }, }) }), })) +const mockConvertToOpenAiMessages = vitest.fn().mockReturnValue([]) + +vitest.mock("../../transform/openai-format", async (importOriginal) => { + const actual = (await importOriginal()) as Record<string, unknown> + return { + ...actual, + convertToOpenAiMessages: (...args: unknown[]) => { + mockConvertToOpenAiMessages(...args) + return (actual.convertToOpenAiMessages as (...a: unknown[]) => unknown)(...args) + }, + } +}) + describe("OpenRouterHandler", () => { const mockOptions: ApiHandlerOptions = { openRouterApiKey: "test-key", @@ -527,6 +558,93 @@ describe("OpenRouterHandler", () => { }) }) + describe("Mistral/Devstral model detection", () => { + const createMockStream = () => ({ + async *[Symbol.asyncIterator]() { + yield { + id: "test-id", 
choices: [{ delta: { content: "response" } }], + } + yield { + id: "test-id", + choices: [{ delta: {} }], + usage: { prompt_tokens: 10, completion_tokens: 5 }, + } + }, + }) + + const setupMockCreate = () => { + const mockCreate = vitest.fn().mockResolvedValue(createMockStream()) + ;(OpenAI as any).prototype.chat = { + completions: { create: mockCreate }, + } as any + return mockCreate + } + + const messages: Anthropic.Messages.MessageParam[] = [{ role: "user" as const, content: "test" }] + + beforeEach(() => { + mockConvertToOpenAiMessages.mockClear() + }) + + it("passes mergeToolResultText and normalizeToolCallId for Mistral models", async () => { + const handler = new OpenRouterHandler({ + openRouterApiKey: "test-key", + openRouterModelId: "mistralai/mistral-large-latest", + }) + setupMockCreate() + + const generator = handler.createMessage("system", messages) + for await (const _chunk of generator) { + // consume stream + } + + expect(mockConvertToOpenAiMessages).toHaveBeenCalledWith( + messages, + expect.objectContaining({ + normalizeToolCallId: expect.any(Function), + mergeToolResultText: true, + }), + ) + }) + + it("passes mergeToolResultText and normalizeToolCallId for Devstral models", async () => { + const handler = new OpenRouterHandler({ + openRouterApiKey: "test-key", + openRouterModelId: "mistralai/devstral-2512", + }) + setupMockCreate() + + const generator = handler.createMessage("system", messages) + for await (const _chunk of generator) { + // consume stream + } + + expect(mockConvertToOpenAiMessages).toHaveBeenCalledWith( + messages, + expect.objectContaining({ + normalizeToolCallId: expect.any(Function), + mergeToolResultText: true, + }), + ) + }) + + it("does not pass Mistral options for non-Mistral models", async () => { + const handler = new OpenRouterHandler({ + openRouterApiKey: "test-key", + openRouterModelId: "anthropic/claude-sonnet-4", + }) + setupMockCreate() + + const generator = handler.createMessage("system", messages) + for await 
(const _chunk of generator) { + // consume stream + } + + expect(mockConvertToOpenAiMessages).toHaveBeenCalledWith(messages, undefined) + }) + }) + describe("completePrompt", () => { it("returns correct response", async () => { const handler = new OpenRouterHandler(mockOptions)