diff --git a/packages/typescript/ai-anthropic/src/adapters/text.ts b/packages/typescript/ai-anthropic/src/adapters/text.ts index 359f2377a..448f8f330 100644 --- a/packages/typescript/ai-anthropic/src/adapters/text.ts +++ b/packages/typescript/ai-anthropic/src/adapters/text.ts @@ -290,10 +290,13 @@ export class AnthropicTextAdapter< 'service_tier', 'stop_sequences', 'system', + 'temperature', 'thinking', 'tool_choice', 'top_k', + 'top_p', ] + for (const key of validKeys) { if (key in modelOptions) { const value = modelOptions[key] @@ -312,7 +315,7 @@ export class AnthropicTextAdapter< validProviderOptions.thinking?.type === 'enabled' ? validProviderOptions.thinking.budget_tokens : undefined - const defaultMaxTokens = options.maxTokens || 1024 + const defaultMaxTokens = modelOptions?.max_tokens || 1024 const maxTokens = thinkingBudget && thinkingBudget >= defaultMaxTokens ? thinkingBudget + 1 @@ -321,8 +324,6 @@ export class AnthropicTextAdapter< const requestParams: InternalTextProviderOptions = { model: options.model, max_tokens: maxTokens, - temperature: options.temperature, - top_p: options.topP, messages: formattedMessages, system: options.systemPrompts?.join('\n'), tools: tools, diff --git a/packages/typescript/ai-gemini/src/adapters/text.ts b/packages/typescript/ai-gemini/src/adapters/text.ts index f4efcc466..9065980b9 100644 --- a/packages/typescript/ai-gemini/src/adapters/text.ts +++ b/packages/typescript/ai-gemini/src/adapters/text.ts @@ -812,9 +812,6 @@ export class GeminiTextAdapter< contents: this.formatMessages(options.messages), config: { ...modelOpts, - temperature: options.temperature, - topP: options.topP, - maxOutputTokens: options.maxTokens, thinkingConfig: thinkingConfig ? 
{ ...thinkingConfig, diff --git a/packages/typescript/ai-gemini/src/text/text-provider-options.ts b/packages/typescript/ai-gemini/src/text/text-provider-options.ts index 4c53ec2ad..fa128c14f 100644 --- a/packages/typescript/ai-gemini/src/text/text-provider-options.ts +++ b/packages/typescript/ai-gemini/src/text/text-provider-options.ts @@ -24,8 +24,13 @@ This will be enforced on the GenerateContentRequest.contents and GenerateContent export interface GeminiCommonConfigOptions { /** - * Configuration options for model generation and outputs. + * Controls the randomness of the output. Range: [0.0, 2.0]. Higher values produce more random output. */ + temperature?: number + /** + * Nucleus sampling probability threshold. Range: (0.0, 1.0). Alter this or temperature, not both. + */ + topP?: number /** * The set of character sequences (up to 5) that will stop output generation. If specified, the API will stop at the first appearance of a stop_sequence. The stop sequence will not be included as part of the response. 
*/ diff --git a/packages/typescript/ai-grok/src/adapters/text.ts b/packages/typescript/ai-grok/src/adapters/text.ts index e185c5ecf..8427b0ef3 100644 --- a/packages/typescript/ai-grok/src/adapters/text.ts +++ b/packages/typescript/ai-grok/src/adapters/text.ts @@ -450,10 +450,7 @@ export class GrokTextAdapter< options: TextOptions, ): OpenAI_SDK.Chat.Completions.ChatCompletionCreateParamsStreaming { const modelOptions = options.modelOptions as - | Omit< - InternalTextProviderOptions, - 'max_tokens' | 'tools' | 'temperature' | 'input' | 'top_p' - > + | Omit | undefined if (modelOptions) { @@ -487,9 +484,6 @@ export class GrokTextAdapter< return { model: options.model, messages, - temperature: options.temperature, - max_tokens: options.maxTokens, - top_p: options.topP, tools: tools as Array, stream: true, stream_options: { include_usage: true }, diff --git a/packages/typescript/ai-groq/src/adapters/text.ts b/packages/typescript/ai-groq/src/adapters/text.ts index 34f44ba81..38e0dc68b 100644 --- a/packages/typescript/ai-groq/src/adapters/text.ts +++ b/packages/typescript/ai-groq/src/adapters/text.ts @@ -442,10 +442,7 @@ export class GroqTextAdapter< options: TextOptions, ): ChatCompletionCreateParamsStreaming { const modelOptions = options.modelOptions as - | Omit< - InternalTextProviderOptions, - 'max_tokens' | 'tools' | 'temperature' | 'input' | 'top_p' - > + | Omit | undefined if (modelOptions) { @@ -475,9 +472,6 @@ export class GroqTextAdapter< return { model: options.model, messages, - temperature: options.temperature, - max_tokens: options.maxTokens, - top_p: options.topP, tools, stream: true, } diff --git a/packages/typescript/ai-ollama/src/adapters/text.ts b/packages/typescript/ai-ollama/src/adapters/text.ts index 07da8acab..b2a22736f 100644 --- a/packages/typescript/ai-ollama/src/adapters/text.ts +++ b/packages/typescript/ai-ollama/src/adapters/text.ts @@ -577,16 +577,9 @@ export class OllamaTextAdapter extends BaseTextAdapter< | OllamaTextProviderOptions | 
undefined - const ollamaOptions = { - temperature: options.temperature, - top_p: options.topP, - num_predict: options.maxTokens, - ...modelOptions, - } - return { model, - options: ollamaOptions, + options: modelOptions, messages: this.formatMessages(options.messages), tools: this.convertToolsToOllamaFormat(options.tools), ...(options.systemPrompts?.length diff --git a/packages/typescript/ai-ollama/src/meta/models-meta.ts b/packages/typescript/ai-ollama/src/meta/models-meta.ts index e343eff1e..55634428c 100644 --- a/packages/typescript/ai-ollama/src/meta/models-meta.ts +++ b/packages/typescript/ai-ollama/src/meta/models-meta.ts @@ -1,4 +1,4 @@ -import type { Tool, ToolCall } from 'ollama' +import type { Options, Tool, ToolCall } from 'ollama' export interface OllamaModelMeta { name: string @@ -12,37 +12,7 @@ export interface OllamaModelMeta { context?: number } -interface OllamaOptions { - numa: boolean - num_ctx: number - num_batch: number - num_gpu: number - main_gpu: number - low_vram: boolean - f16_kv: boolean - logits_all: boolean - vocab_only: boolean - use_mmap: boolean - use_mlock: boolean - embedding_only: boolean - num_thread: number - num_keep: number - seed: number - num_predict: number - top_k: number - tfs_z: number - typical_p: number - repeat_last_n: number - repeat_penalty: number - presence_penalty: number - frequency_penalty: number - mirostat: number - mirostat_tau: number - mirostat_eta: number - penalize_newline: boolean - stop: Array -} - +// ollama model for reference // interface ChatRequest { // model: string // messages?: Message[] @@ -55,18 +25,17 @@ interface OllamaOptions { // top_logprobs?: number // options?: Partial // } - export interface OllamaChatRequest { - // model: string - // messages?: Message[] + // model: string (extended later) + // messages?: Message[] (extended later) stream?: boolean format?: string | object keep_alive?: string | number - // tools?: Tool[] - // think?: boolean | 'high' | 'medium' | 'low' + // tools?: 
Tool[] (extended later) + // think?: boolean | 'high' | 'medium' | 'low' (extended later) logprobs?: boolean top_logprobs?: number - options?: Partial + options?: Partial } export interface OllamaChatRequestThinking { @@ -81,6 +50,7 @@ export interface OllamaChatRequestTools { tools?: Array } +// ollama model for reference // interface Message { // role: string // content: string @@ -89,7 +59,6 @@ export interface OllamaChatRequestTools { // tool_calls?: ToolCall[] // tool_name?: string // } - export interface OllamaChatRequestMessages< TMessageExtension extends OllamaMessageExtension = {}, > { @@ -97,10 +66,10 @@ export interface OllamaChatRequestMessages< { role: string content: string - // thinking?: string - // images?: Uint8Array[] | string[] - // tool_calls?: ToolCall[] - // tool_name?: string + // thinking?: string (extended later) + // images?: Uint8Array[] | string[] (extended later) + // tool_calls?: ToolCall[] (extended later) + // tool_name?: string (extended later) } & TMessageExtension > } diff --git a/packages/typescript/ai-openai/src/adapters/text.ts b/packages/typescript/ai-openai/src/adapters/text.ts index 97752d737..f3467e50e 100644 --- a/packages/typescript/ai-openai/src/adapters/text.ts +++ b/packages/typescript/ai-openai/src/adapters/text.ts @@ -846,15 +846,7 @@ export class OpenAITextAdapter< */ private mapTextOptionsToOpenAI(options: TextOptions) { const modelOptions = options.modelOptions as - | Omit< - InternalTextProviderOptions, - | 'max_output_tokens' - | 'tools' - | 'metadata' - | 'temperature' - | 'input' - | 'top_p' - > + | Omit | undefined const input = this.convertMessagesToInput(options.messages) if (modelOptions) { @@ -874,9 +866,6 @@ export class OpenAITextAdapter< 'stream' > = { model: options.model, - temperature: options.temperature, - max_output_tokens: options.maxTokens, - top_p: options.topP, metadata: options.metadata, instructions: options.systemPrompts?.join('\n'), ...modelOptions, diff --git 
a/packages/typescript/ai-openrouter/src/adapters/text.ts b/packages/typescript/ai-openrouter/src/adapters/text.ts index 29427171c..234462356 100644 --- a/packages/typescript/ai-openrouter/src/adapters/text.ts +++ b/packages/typescript/ai-openrouter/src/adapters/text.ts @@ -671,22 +671,12 @@ export class OpenRouterTextAdapter< }) } - // Spread modelOptions first, then conditionally override with explicit - // top-level options so undefined values don't clobber modelOptions. Fixes - // #310, where the reverse order silently dropped user-set values. + const { variant, ...restModelOptions } = modelOptions ?? {} + const request: ChatRequest = { - ...modelOptions, - model: - options.model + - (modelOptions?.variant ? `:${modelOptions.variant}` : ''), + ...restModelOptions, + model: options.model + (variant ? `:${variant}` : ''), messages, - ...(options.temperature !== undefined && { - temperature: options.temperature, - }), - ...(options.maxTokens !== undefined && { - maxCompletionTokens: options.maxTokens, - }), - ...(options.topP !== undefined && { topP: options.topP }), tools: options.tools ? convertToolsToProviderFormat(options.tools) : undefined, diff --git a/packages/typescript/smoke-tests/e2e/src/routeTree.gen.ts b/packages/typescript/smoke-tests/e2e/src/routeTree.gen.ts new file mode 100644 index 000000000..51773beae --- /dev/null +++ b/packages/typescript/smoke-tests/e2e/src/routeTree.gen.ts @@ -0,0 +1,177 @@ +/* eslint-disable */ + +// @ts-nocheck + +// noinspection JSUnusedGlobalSymbols + +// This file was automatically generated by TanStack Router. +// You should NOT make any changes in this file as it will be overwritten. +// Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified. 
+ +import { Route as rootRouteImport } from './routes/__root' +import { Route as ToolsTestRouteImport } from './routes/tools-test' +import { Route as MockRouteImport } from './routes/mock' +import { Route as IndexRouteImport } from './routes/index' +import { Route as ApiToolsTestRouteImport } from './routes/api.tools-test' +import { Route as ApiTanchatRouteImport } from './routes/api.tanchat' +import { Route as ApiMockChatRouteImport } from './routes/api.mock-chat' + +const ToolsTestRoute = ToolsTestRouteImport.update({ + id: '/tools-test', + path: '/tools-test', + getParentRoute: () => rootRouteImport, +} as any) +const MockRoute = MockRouteImport.update({ + id: '/mock', + path: '/mock', + getParentRoute: () => rootRouteImport, +} as any) +const IndexRoute = IndexRouteImport.update({ + id: '/', + path: '/', + getParentRoute: () => rootRouteImport, +} as any) +const ApiToolsTestRoute = ApiToolsTestRouteImport.update({ + id: '/api/tools-test', + path: '/api/tools-test', + getParentRoute: () => rootRouteImport, +} as any) +const ApiTanchatRoute = ApiTanchatRouteImport.update({ + id: '/api/tanchat', + path: '/api/tanchat', + getParentRoute: () => rootRouteImport, +} as any) +const ApiMockChatRoute = ApiMockChatRouteImport.update({ + id: '/api/mock-chat', + path: '/api/mock-chat', + getParentRoute: () => rootRouteImport, +} as any) + +export interface FileRoutesByFullPath { + '/': typeof IndexRoute + '/mock': typeof MockRoute + '/tools-test': typeof ToolsTestRoute + '/api/mock-chat': typeof ApiMockChatRoute + '/api/tanchat': typeof ApiTanchatRoute + '/api/tools-test': typeof ApiToolsTestRoute +} +export interface FileRoutesByTo { + '/': typeof IndexRoute + '/mock': typeof MockRoute + '/tools-test': typeof ToolsTestRoute + '/api/mock-chat': typeof ApiMockChatRoute + '/api/tanchat': typeof ApiTanchatRoute + '/api/tools-test': typeof ApiToolsTestRoute +} +export interface FileRoutesById { + __root__: typeof rootRouteImport + '/': typeof IndexRoute + '/mock': typeof 
MockRoute + '/tools-test': typeof ToolsTestRoute + '/api/mock-chat': typeof ApiMockChatRoute + '/api/tanchat': typeof ApiTanchatRoute + '/api/tools-test': typeof ApiToolsTestRoute +} +export interface FileRouteTypes { + fileRoutesByFullPath: FileRoutesByFullPath + fullPaths: + | '/' + | '/mock' + | '/tools-test' + | '/api/mock-chat' + | '/api/tanchat' + | '/api/tools-test' + fileRoutesByTo: FileRoutesByTo + to: + | '/' + | '/mock' + | '/tools-test' + | '/api/mock-chat' + | '/api/tanchat' + | '/api/tools-test' + id: + | '__root__' + | '/' + | '/mock' + | '/tools-test' + | '/api/mock-chat' + | '/api/tanchat' + | '/api/tools-test' + fileRoutesById: FileRoutesById +} +export interface RootRouteChildren { + IndexRoute: typeof IndexRoute + MockRoute: typeof MockRoute + ToolsTestRoute: typeof ToolsTestRoute + ApiMockChatRoute: typeof ApiMockChatRoute + ApiTanchatRoute: typeof ApiTanchatRoute + ApiToolsTestRoute: typeof ApiToolsTestRoute +} + +declare module '@tanstack/react-router' { + interface FileRoutesByPath { + '/tools-test': { + id: '/tools-test' + path: '/tools-test' + fullPath: '/tools-test' + preLoaderRoute: typeof ToolsTestRouteImport + parentRoute: typeof rootRouteImport + } + '/mock': { + id: '/mock' + path: '/mock' + fullPath: '/mock' + preLoaderRoute: typeof MockRouteImport + parentRoute: typeof rootRouteImport + } + '/': { + id: '/' + path: '/' + fullPath: '/' + preLoaderRoute: typeof IndexRouteImport + parentRoute: typeof rootRouteImport + } + '/api/tools-test': { + id: '/api/tools-test' + path: '/api/tools-test' + fullPath: '/api/tools-test' + preLoaderRoute: typeof ApiToolsTestRouteImport + parentRoute: typeof rootRouteImport + } + '/api/tanchat': { + id: '/api/tanchat' + path: '/api/tanchat' + fullPath: '/api/tanchat' + preLoaderRoute: typeof ApiTanchatRouteImport + parentRoute: typeof rootRouteImport + } + '/api/mock-chat': { + id: '/api/mock-chat' + path: '/api/mock-chat' + fullPath: '/api/mock-chat' + preLoaderRoute: typeof ApiMockChatRouteImport + 
 parentRoute: typeof rootRouteImport + } + } +} + +const rootRouteChildren: RootRouteChildren = { + IndexRoute: IndexRoute, + MockRoute: MockRoute, + ToolsTestRoute: ToolsTestRoute, + ApiMockChatRoute: ApiMockChatRoute, + ApiTanchatRoute: ApiTanchatRoute, + ApiToolsTestRoute: ApiToolsTestRoute, +} +export const routeTree = rootRouteImport + ._addFileChildren(rootRouteChildren) + ._addFileTypes() + +import type { getRouter } from './router.tsx' +import type { createStart } from '@tanstack/react-start' +declare module '@tanstack/react-start' { + interface Register { + ssr: true + router: Awaited<ReturnType<typeof getRouter>> + } +}