From 167c645921ffb84e397c5e1e26101ebab9d705b7 Mon Sep 17 00:00:00 2001
From: Joshua Barrington
Date: Fri, 10 Apr 2026 17:42:55 +0200
Subject: [PATCH 1/4] default the api_type as openai_responses for appropriate github copilot models

---
 pkg/model/provider/provider_test.go | 49 +++++++++++++++++++++++++++++
 1 file changed, 49 insertions(+)

diff --git a/pkg/model/provider/provider_test.go b/pkg/model/provider/provider_test.go
index 1e39d50f9..18564c86a 100644
--- a/pkg/model/provider/provider_test.go
+++ b/pkg/model/provider/provider_test.go
@@ -4,6 +4,8 @@ import (
 	"testing"
 
 	"github.com/stretchr/testify/assert"
+
+	"github.com/docker/docker-agent/pkg/config/latest"
 )
 
 func TestCatalogProviders(t *testing.T) {
@@ -88,3 +90,50 @@ func TestIsKnownProvider(t *testing.T) {
 	assert.False(t, IsKnownProvider("unknown"))
 	assert.False(t, IsKnownProvider(""))
 }
+
+func TestIsGithubCopilotProvider(t *testing.T) {
+	t.Parallel()
+
+	assert.True(t, isGithubCopilotProvider("github-copilot"))
+	assert.False(t, isGithubCopilotProvider("openai"))
+	assert.False(t, isGithubCopilotProvider(""))
+}
+
+func TestIsCopilotResponsesModel(t *testing.T) {
+	t.Parallel()
+
+	assert.True(t, isCopilotResponsesModel("gpt-5.3-codex"))
+	assert.True(t, isCopilotResponsesModel("gpt-5.2-codex"))
+	assert.False(t, isCopilotResponsesModel("gpt-4o"))
+	assert.False(t, isCopilotResponsesModel("claude-sonnet-4-5"))
+	assert.False(t, isCopilotResponsesModel(""))
+}
+
+func TestGithubCopilotApiType(t *testing.T) {
+	cfg := &latest.ModelConfig{
+		Provider: "github-copilot",
+		Model:    "gpt-5.3-codex",
+	}
+
+	enhancedCfg := applyProviderDefaults(cfg, nil)
+
+	apiType := resolveProviderType(enhancedCfg)
+
+	if apiType != "openai_responses" {
+		t.Errorf("Expected api_type to be 'openai_responses', got '%s'", apiType)
+	}
+
+	// test when it is a custom provider
+	customProviders := map[string]latest.ProviderConfig{
+		"github-copilot": {
+			Provider: "github-copilot",
+		},
+	}
+
+	enhancedCfg2 := 
applyProviderDefaults(cfg, customProviders) + apiType2 := resolveProviderType(enhancedCfg2) + + if apiType2 != "openai_responses" { + t.Errorf("Expected api_type to be 'openai_responses', got '%s'", apiType2) + } +} From 61c5b84f96dbd5754e1e2547cd6c45cc19d8aeb8 Mon Sep 17 00:00:00 2001 From: Joshua Barrington Date: Fri, 10 Apr 2026 18:57:02 +0200 Subject: [PATCH 2/4] apply copilot api type earlier in defaults setup --- pkg/model/provider/provider.go | 383 +++++++++++++++++++++++++++++++++ 1 file changed, 383 insertions(+) diff --git a/pkg/model/provider/provider.go b/pkg/model/provider/provider.go index 780144e0b..ba430b272 100644 --- a/pkg/model/provider/provider.go +++ b/pkg/model/provider/provider.go @@ -89,3 +89,386 @@ func NewWithModels(ctx context.Context, cfg *latest.ModelConfig, models map[stri return createDirectProvider(ctx, cfg, env, opts...) } + +// createRuleBasedRouter creates a rule-based routing provider. +func createRuleBasedRouter(ctx context.Context, cfg *latest.ModelConfig, models map[string]latest.ModelConfig, env environment.Provider, opts ...options.Opt) (Provider, error) { + // Create a provider factory that can resolve model references + factory := func(ctx context.Context, modelSpec string, models map[string]latest.ModelConfig, env environment.Provider, factoryOpts ...options.Opt) (rulebased.Provider, error) { + // Check if modelSpec is a reference to a model in the models map + if modelCfg, exists := models[modelSpec]; exists { + // Prevent infinite recursion - referenced models cannot have routing rules + if len(modelCfg.Routing) > 0 { + return nil, fmt.Errorf("model %q has routing rules and cannot be used as a routing target", modelSpec) + } + p, err := createDirectProvider(ctx, &modelCfg, env, factoryOpts...) 
+ if err != nil { + return nil, err + } + return p, nil + } + + // Otherwise, treat as an inline model spec (e.g., "openai/gpt-4o") + inlineCfg, parseErr := latest.ParseModelRef(modelSpec) + if parseErr != nil { + return nil, fmt.Errorf("invalid model spec %q: expected 'provider/model' format or a model reference", modelSpec) + } + p, err := createDirectProvider(ctx, &inlineCfg, env, factoryOpts...) + if err != nil { + return nil, err + } + return p, nil + } + + return rulebased.NewClient(ctx, cfg, models, env, factory, opts...) +} + +// createDirectProvider creates a provider without routing (direct model access). +func createDirectProvider(ctx context.Context, cfg *latest.ModelConfig, env environment.Provider, opts ...options.Opt) (Provider, error) { + var globalOptions options.ModelOptions + for _, opt := range opts { + opt(&globalOptions) + } + + // Apply defaults from custom providers (from config) or built-in aliases + enhancedCfg := applyProviderDefaults(cfg, globalOptions.Providers()) + + providerType := resolveProviderType(enhancedCfg) + + switch providerType { + case "openai", "openai_chatcompletions", "openai_responses": + return openai.NewClient(ctx, enhancedCfg, env, opts...) + case "anthropic": + return anthropic.NewClient(ctx, enhancedCfg, env, opts...) + case "google": + // Route non-Gemini models on Vertex AI (Model Garden) through the + // OpenAI-compatible endpoint instead of the Gemini SDK. + if vertexai.IsModelGardenConfig(enhancedCfg) { + return vertexai.NewClient(ctx, enhancedCfg, env, opts...) + } + return gemini.NewClient(ctx, enhancedCfg, env, opts...) + case "dmr": + return dmr.NewClient(ctx, enhancedCfg, opts...) + case "amazon-bedrock": + return bedrock.NewClient(ctx, enhancedCfg, env, opts...) 
+ default: + slog.Error("Unknown provider type", "type", providerType) + return nil, fmt.Errorf("unknown provider type: %s", providerType) + } +} + +// --------------------------------------------------------------------------- +// Provider-type resolution +// --------------------------------------------------------------------------- + +// resolveProviderType determines the effective API type for a config. +// Priority: ProviderOpts["api_type"] > built-in alias > provider name. +func resolveProviderType(cfg *latest.ModelConfig) string { + if cfg.ProviderOpts != nil { + if apiType, ok := cfg.ProviderOpts["api_type"].(string); ok && apiType != "" { + return apiType + } + } + if alias, exists := Aliases[cfg.Provider]; exists && alias.APIType != "" { + return alias.APIType + } + return cfg.Provider +} + +// --------------------------------------------------------------------------- +// Provider defaults +// --------------------------------------------------------------------------- + +// applyProviderDefaults applies default configuration from custom providers or built-in aliases. +// Custom providers (from config) take precedence over built-in aliases. +// This sets default base URLs, token keys, api_type, and model-specific defaults (like thinking budget). +// +// The returned config is a deep-enough copy: the caller's ModelConfig, ProviderOpts map, +// and ThinkingBudget pointer are never mutated. +func applyProviderDefaults(cfg *latest.ModelConfig, customProviders map[string]latest.ProviderConfig) *latest.ModelConfig { + // Create a copy to avoid modifying the original. + // cloneModelConfig also deep-copies ProviderOpts so writes are safe. 
+ enhancedCfg := cloneModelConfig(cfg) + + if customProviders != nil { + if providerCfg, exists := customProviders[cfg.Provider]; exists { + slog.Debug("Applying custom provider defaults", + "provider", cfg.Provider, + "model", cfg.Model, + "base_url", providerCfg.BaseURL, + ) + + // Apply the underlying provider type if set on the provider config. + // This allows the model to inherit the real provider type (e.g., "anthropic") + // so that the correct API client is selected. + if providerCfg.Provider != "" { + enhancedCfg.Provider = providerCfg.Provider + } + + if enhancedCfg.BaseURL == "" && providerCfg.BaseURL != "" { + enhancedCfg.BaseURL = providerCfg.BaseURL + } + if enhancedCfg.TokenKey == "" && providerCfg.TokenKey != "" { + enhancedCfg.TokenKey = providerCfg.TokenKey + } + if enhancedCfg.Temperature == nil && providerCfg.Temperature != nil { + enhancedCfg.Temperature = providerCfg.Temperature + } + if enhancedCfg.MaxTokens == nil && providerCfg.MaxTokens != nil { + enhancedCfg.MaxTokens = providerCfg.MaxTokens + } + if enhancedCfg.TopP == nil && providerCfg.TopP != nil { + enhancedCfg.TopP = providerCfg.TopP + } + if enhancedCfg.FrequencyPenalty == nil && providerCfg.FrequencyPenalty != nil { + enhancedCfg.FrequencyPenalty = providerCfg.FrequencyPenalty + } + if enhancedCfg.PresencePenalty == nil && providerCfg.PresencePenalty != nil { + enhancedCfg.PresencePenalty = providerCfg.PresencePenalty + } + if enhancedCfg.ParallelToolCalls == nil && providerCfg.ParallelToolCalls != nil { + enhancedCfg.ParallelToolCalls = providerCfg.ParallelToolCalls + } + if enhancedCfg.TrackUsage == nil && providerCfg.TrackUsage != nil { + enhancedCfg.TrackUsage = providerCfg.TrackUsage + } + if enhancedCfg.ThinkingBudget == nil && providerCfg.ThinkingBudget != nil { + enhancedCfg.ThinkingBudget = providerCfg.ThinkingBudget + } + + // Merge provider_opts from provider config (model opts take precedence) + if len(providerCfg.ProviderOpts) > 0 { + if enhancedCfg.ProviderOpts == 
nil { + enhancedCfg.ProviderOpts = make(map[string]any) + } + for k, v := range providerCfg.ProviderOpts { + if _, has := enhancedCfg.ProviderOpts[k]; !has { + enhancedCfg.ProviderOpts[k] = v + } + } + } + + // Set api_type in ProviderOpts if not already set. + // Only default to openai_chatcompletions for OpenAI-compatible providers. + if providerCfg.APIType != "" { + if enhancedCfg.ProviderOpts == nil { + enhancedCfg.ProviderOpts = make(map[string]any) + } + if _, has := enhancedCfg.ProviderOpts["api_type"]; !has { + enhancedCfg.ProviderOpts["api_type"] = providerCfg.APIType + } + } else if isOpenAICompatibleProvider(resolveEffectiveProvider(providerCfg)) { + if enhancedCfg.ProviderOpts == nil { + enhancedCfg.ProviderOpts = make(map[string]any) + } + if _, has := enhancedCfg.ProviderOpts["api_type"]; !has { + enhancedCfg.ProviderOpts["api_type"] = "openai_chatcompletions" + } + } + + applyModelDefaults(enhancedCfg) + return enhancedCfg + } + } + + if alias, exists := Aliases[cfg.Provider]; exists { + // Set default base URL if not already specified + if enhancedCfg.BaseURL == "" && alias.BaseURL != "" { + enhancedCfg.BaseURL = alias.BaseURL + } + + // Set default token key if not already specified + if enhancedCfg.TokenKey == "" && alias.TokenEnvVar != "" { + enhancedCfg.TokenKey = alias.TokenEnvVar + } + } + + // Apply model-specific defaults (e.g., thinking budget for Claude/GPT models) + applyModelDefaults(enhancedCfg) + return enhancedCfg +} + +// --------------------------------------------------------------------------- +// Thinking defaults and overrides +// --------------------------------------------------------------------------- + +// applyModelDefaults applies provider-specific default values for model configuration. +// +// Thinking defaults policy: +// - thinking_budget: 0 or thinking_budget: none → thinking is off (nil). 
+// - thinking_budget explicitly set to a real value → kept as-is; interleaved_thinking +// is auto-enabled for Anthropic/Bedrock-Claude. +// - thinking_budget NOT set: +// - Thinking-only models (OpenAI o-series) get "medium". +// - All other models get no thinking. +// +// NOTE: max_tokens is NOT set here; see teamloader and runtime/model_switcher. +func applyModelDefaults(cfg *latest.ModelConfig) { + // Set appropriate github copilot api_type. + applyGithubCopilotAPIType(cfg) + + // Explicitly disabled → normalise to nil so providers never see it. + if cfg.ThinkingBudget.IsDisabled() { + cfg.ThinkingBudget = nil + slog.Debug("Thinking explicitly disabled", + "provider", cfg.Provider, "model", cfg.Model) + return + } + + providerType := resolveProviderType(cfg) + + // User already set a real thinking_budget — just apply side-effects. + if cfg.ThinkingBudget != nil { + ensureInterleavedThinking(cfg, providerType) + return + } + + // No thinking_budget configured — only thinking-only models get a default. + switch providerType { + case "openai", "openai_chatcompletions", "openai_responses": + if isOpenAIThinkingOnlyModel(cfg.Model) { + cfg.ThinkingBudget = &latest.ThinkingBudget{Effort: "medium"} + slog.Debug("Applied default thinking for thinking-only OpenAI model", + "provider", cfg.Provider, "model", cfg.Model) + } + } +} + +// --------------------------------------------------------------------------- +// Shared helpers +// --------------------------------------------------------------------------- + +// cloneModelConfig returns a shallow copy of cfg with a deep copy of +// ProviderOpts so that callers can safely mutate the returned config's +// map and pointer fields without affecting the original. 
+func cloneModelConfig(cfg *latest.ModelConfig) *latest.ModelConfig { + c := *cfg + if cfg.ProviderOpts != nil { + c.ProviderOpts = make(map[string]any, len(cfg.ProviderOpts)) + maps.Copy(c.ProviderOpts, cfg.ProviderOpts) + } + return &c +} + +// ensureInterleavedThinking sets interleaved_thinking=true in ProviderOpts +// for Anthropic and Bedrock-Claude models, unless the user already set it. +func ensureInterleavedThinking(cfg *latest.ModelConfig, providerType string) { + needsInterleaved := providerType == "anthropic" || + (providerType == "amazon-bedrock" && isBedrockClaudeModel(cfg.Model)) + if !needsInterleaved { + return + } + if cfg.ProviderOpts == nil { + cfg.ProviderOpts = make(map[string]any) + } + if _, has := cfg.ProviderOpts["interleaved_thinking"]; !has { + cfg.ProviderOpts["interleaved_thinking"] = true + slog.Debug("Auto-enabled interleaved_thinking", + "provider", cfg.Provider, "model", cfg.Model) + } +} + +// applyGithubCopilotAPIType ensures api_type is set to openai_responses for appropriate models. +func applyGithubCopilotAPIType(cfg *latest.ModelConfig) { + if isGithubCopilotProvider(cfg.Provider) && isCopilotResponsesModel(cfg.Model) { + if cfg.ProviderOpts == nil { + cfg.ProviderOpts = make(map[string]any) + } + // If it's not set, or was set to openai_chatcompletions by the generic fallback, override it. + if apiType, ok := cfg.ProviderOpts["api_type"].(string); !ok || apiType == "" || apiType == "openai_chatcompletions" { + cfg.ProviderOpts["api_type"] = "openai_responses" + } + } +} + +// isOpenAIThinkingOnlyModel returns true for OpenAI models that require thinking +// to function properly (o-series reasoning models). +func isOpenAIThinkingOnlyModel(model string) bool { + m := strings.ToLower(model) + return strings.HasPrefix(m, "o1") || + strings.HasPrefix(m, "o3") || + strings.HasPrefix(m, "o4") +} + +// isBedrockClaudeModel returns true if the model ID is a Claude model on Bedrock. 
+// Claude model IDs on Bedrock start with "anthropic.claude-" or "global.anthropic.claude-". +func isBedrockClaudeModel(model string) bool { + m := strings.ToLower(model) + return strings.HasPrefix(m, "anthropic.claude-") || strings.HasPrefix(m, "global.anthropic.claude-") +} + +// gemini3Family extracts the model family (e.g. "pro", "flash") from a +// Gemini 3+ model name, or returns "" if the model is not Gemini 3+. +// It handles both "gemini-3-" and "gemini-3.X-" patterns. +// +// Examples: +// +// gemini3Family("gemini-3-pro") → "pro" +// gemini3Family("gemini-3.1-flash-preview") → "flash-preview" +// gemini3Family("gemini-2.5-flash") → "" +func gemini3Family(model string) string { + if !strings.HasPrefix(model, "gemini-3") { + return "" + } + rest := model[len("gemini-3"):] + if rest == "" { + return "" + } + // Accept "gemini-3-..." or "gemini-3.X-..." (e.g. gemini-3.1-pro-preview) + switch rest[0] { + case '-': + return rest[1:] // "gemini-3-pro" → "pro" + case '.': + if _, family, ok := strings.Cut(rest, "-"); ok { + return family // "gemini-3.1-pro-preview" → "pro-preview" + } + } + return "" +} + +func isGeminiProModel(model string) bool { + return strings.HasPrefix(gemini3Family(model), "pro") +} + +func isGeminiFlashModel(model string) bool { + return strings.HasPrefix(gemini3Family(model), "flash") +} + +// resolveEffectiveProvider returns the effective provider type for a ProviderConfig. +// If Provider is explicitly set, returns that. Otherwise returns "openai" (backward compat). 
+func resolveEffectiveProvider(cfg latest.ProviderConfig) string { + if cfg.Provider != "" { + return cfg.Provider + } + return "openai" +} + +func isGithubCopilotProvider(providerType string) bool { + switch providerType { + case "github-copilot": + return true + default: + return false + } +} + +func isCopilotResponsesModel(model string) bool { + codex := map[string]bool{ + "gpt-5.3-codex": true, + "gpt-5.2-codex": true, + } + return codex[model] +} + +// isOpenAICompatibleProvider returns true if the provider type uses the OpenAI API protocol. +func isOpenAICompatibleProvider(providerType string) bool { + switch providerType { + case "openai", "openai_chatcompletions", "openai_responses": + return true + default: + // Check if it's an alias that maps to openai + if alias, exists := Aliases[providerType]; exists { + return alias.APIType == "openai" + } + return false + } +} From 1b6483a62c7379009f1fbed864fc6fe34eb6bc0e Mon Sep 17 00:00:00 2001 From: David Gageot Date: Mon, 13 Apr 2026 11:51:07 +0200 Subject: [PATCH 3/4] Update pkg/model/provider/provider.go --- pkg/model/provider/provider.go | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/pkg/model/provider/provider.go b/pkg/model/provider/provider.go index ba430b272..f2a6032d6 100644 --- a/pkg/model/provider/provider.go +++ b/pkg/model/provider/provider.go @@ -443,12 +443,7 @@ func resolveEffectiveProvider(cfg latest.ProviderConfig) string { } func isGithubCopilotProvider(providerType string) bool { - switch providerType { - case "github-copilot": - return true - default: - return false - } + return providerType == "github-copilot" } func isCopilotResponsesModel(model string) bool { From 52a34cdf1fd4e2b03153962de648988fd8f78e2b Mon Sep 17 00:00:00 2001 From: Joshua Barrington Date: Wed, 22 Apr 2026 10:01:48 +0200 Subject: [PATCH 4/4] add gpt-5.4-mini, gpt-5.4-nano to copilot response models --- pkg/model/provider/defaults.go | 31 ++ pkg/model/provider/model_defaults_test.go | 10 + 
pkg/model/provider/provider.go | 378 -------
 pkg/model/provider/provider_defaults_test.go | 37 ++
 pkg/model/provider/provider_test.go | 49 ---
 5 files changed, 78 insertions(+), 427 deletions(-)

diff --git a/pkg/model/provider/defaults.go b/pkg/model/provider/defaults.go
index ef084083e..714765d6c 100644
--- a/pkg/model/provider/defaults.go
+++ b/pkg/model/provider/defaults.go
@@ -43,6 +43,11 @@ func isOpenAICompatibleProvider(providerType string) bool {
 	return exists && alias.APIType == "openai"
 }
 
+// isGithubCopilotProvider returns true if the provider type is "github-copilot".
+func isGithubCopilotProvider(providerType string) bool {
+	return providerType == "github-copilot"
+}
+
 // ---------------------------------------------------------------------------
 // Provider defaults
 // ---------------------------------------------------------------------------
@@ -180,6 +185,9 @@ func cloneModelConfig(cfg *latest.ModelConfig) *latest.ModelConfig {
 //
 // NOTE: max_tokens is NOT set here; see teamloader and runtime/model_switcher.
 func applyModelDefaults(cfg *latest.ModelConfig) {
+	// Set appropriate github copilot api_type.
+	applyGithubCopilotAPIType(cfg)
+
 	// Explicitly disabled → normalise to nil so providers never see it.
 	if cfg.ThinkingBudget.IsDisabled() {
 		cfg.ThinkingBudget = nil
@@ -225,6 +233,18 @@ func ensureInterleavedThinking(cfg *latest.ModelConfig, providerType string) {
 	}
 }
 
+func applyGithubCopilotAPIType(cfg *latest.ModelConfig) {
+	if isGithubCopilotProvider(cfg.Provider) && isCopilotResponsesModel(cfg.Model) {
+		if cfg.ProviderOpts == nil {
+			cfg.ProviderOpts = make(map[string]any)
+		}
+		// If it's not set, or was set to openai_chatcompletions by the generic fallback, override it.
+ if apiType, ok := cfg.ProviderOpts["api_type"].(string); !ok || apiType == "" || apiType == "openai_chatcompletions" { + cfg.ProviderOpts["api_type"] = "openai_responses" + } + } +} + // --------------------------------------------------------------------------- // Model-name predicates // --------------------------------------------------------------------------- @@ -281,3 +301,14 @@ func isGeminiProModel(model string) bool { func isGeminiFlashModel(model string) bool { return strings.HasPrefix(gemini3Family(model), "flash") } + +// isCopilotResponsesModel returns true if the model is a GitHub Copilot model that requires the openai_responses API type. +func isCopilotResponsesModel(model string) bool { + codex := map[string]bool{ + "gpt-5.3-codex": true, + "gpt-5.2-codex": true, + "gpt-5.4-mini": true, + "gpt-5.4-nano": true, + } + return codex[model] +} diff --git a/pkg/model/provider/model_defaults_test.go b/pkg/model/provider/model_defaults_test.go index fd2e5d268..303109b85 100644 --- a/pkg/model/provider/model_defaults_test.go +++ b/pkg/model/provider/model_defaults_test.go @@ -289,3 +289,13 @@ func TestApplyProviderDefaults_DoesNotModifyOriginal(t *testing.T) { // Original custom key must still be there. assert.Equal(t, "original_value", original.ProviderOpts["custom_key"]) } + +func TestIsCopilotResponsesModel(t *testing.T) { + t.Parallel() + + assert.True(t, isCopilotResponsesModel("gpt-5.3-codex")) + assert.True(t, isCopilotResponsesModel("gpt-5.2-codex")) + assert.False(t, isCopilotResponsesModel("gpt-4o")) + assert.False(t, isCopilotResponsesModel("claude-sonnet-4-5")) + assert.False(t, isCopilotResponsesModel("")) +} diff --git a/pkg/model/provider/provider.go b/pkg/model/provider/provider.go index f2a6032d6..780144e0b 100644 --- a/pkg/model/provider/provider.go +++ b/pkg/model/provider/provider.go @@ -89,381 +89,3 @@ func NewWithModels(ctx context.Context, cfg *latest.ModelConfig, models map[stri return createDirectProvider(ctx, cfg, env, opts...) 
} - -// createRuleBasedRouter creates a rule-based routing provider. -func createRuleBasedRouter(ctx context.Context, cfg *latest.ModelConfig, models map[string]latest.ModelConfig, env environment.Provider, opts ...options.Opt) (Provider, error) { - // Create a provider factory that can resolve model references - factory := func(ctx context.Context, modelSpec string, models map[string]latest.ModelConfig, env environment.Provider, factoryOpts ...options.Opt) (rulebased.Provider, error) { - // Check if modelSpec is a reference to a model in the models map - if modelCfg, exists := models[modelSpec]; exists { - // Prevent infinite recursion - referenced models cannot have routing rules - if len(modelCfg.Routing) > 0 { - return nil, fmt.Errorf("model %q has routing rules and cannot be used as a routing target", modelSpec) - } - p, err := createDirectProvider(ctx, &modelCfg, env, factoryOpts...) - if err != nil { - return nil, err - } - return p, nil - } - - // Otherwise, treat as an inline model spec (e.g., "openai/gpt-4o") - inlineCfg, parseErr := latest.ParseModelRef(modelSpec) - if parseErr != nil { - return nil, fmt.Errorf("invalid model spec %q: expected 'provider/model' format or a model reference", modelSpec) - } - p, err := createDirectProvider(ctx, &inlineCfg, env, factoryOpts...) - if err != nil { - return nil, err - } - return p, nil - } - - return rulebased.NewClient(ctx, cfg, models, env, factory, opts...) -} - -// createDirectProvider creates a provider without routing (direct model access). 
-func createDirectProvider(ctx context.Context, cfg *latest.ModelConfig, env environment.Provider, opts ...options.Opt) (Provider, error) { - var globalOptions options.ModelOptions - for _, opt := range opts { - opt(&globalOptions) - } - - // Apply defaults from custom providers (from config) or built-in aliases - enhancedCfg := applyProviderDefaults(cfg, globalOptions.Providers()) - - providerType := resolveProviderType(enhancedCfg) - - switch providerType { - case "openai", "openai_chatcompletions", "openai_responses": - return openai.NewClient(ctx, enhancedCfg, env, opts...) - case "anthropic": - return anthropic.NewClient(ctx, enhancedCfg, env, opts...) - case "google": - // Route non-Gemini models on Vertex AI (Model Garden) through the - // OpenAI-compatible endpoint instead of the Gemini SDK. - if vertexai.IsModelGardenConfig(enhancedCfg) { - return vertexai.NewClient(ctx, enhancedCfg, env, opts...) - } - return gemini.NewClient(ctx, enhancedCfg, env, opts...) - case "dmr": - return dmr.NewClient(ctx, enhancedCfg, opts...) - case "amazon-bedrock": - return bedrock.NewClient(ctx, enhancedCfg, env, opts...) - default: - slog.Error("Unknown provider type", "type", providerType) - return nil, fmt.Errorf("unknown provider type: %s", providerType) - } -} - -// --------------------------------------------------------------------------- -// Provider-type resolution -// --------------------------------------------------------------------------- - -// resolveProviderType determines the effective API type for a config. -// Priority: ProviderOpts["api_type"] > built-in alias > provider name. 
-func resolveProviderType(cfg *latest.ModelConfig) string { - if cfg.ProviderOpts != nil { - if apiType, ok := cfg.ProviderOpts["api_type"].(string); ok && apiType != "" { - return apiType - } - } - if alias, exists := Aliases[cfg.Provider]; exists && alias.APIType != "" { - return alias.APIType - } - return cfg.Provider -} - -// --------------------------------------------------------------------------- -// Provider defaults -// --------------------------------------------------------------------------- - -// applyProviderDefaults applies default configuration from custom providers or built-in aliases. -// Custom providers (from config) take precedence over built-in aliases. -// This sets default base URLs, token keys, api_type, and model-specific defaults (like thinking budget). -// -// The returned config is a deep-enough copy: the caller's ModelConfig, ProviderOpts map, -// and ThinkingBudget pointer are never mutated. -func applyProviderDefaults(cfg *latest.ModelConfig, customProviders map[string]latest.ProviderConfig) *latest.ModelConfig { - // Create a copy to avoid modifying the original. - // cloneModelConfig also deep-copies ProviderOpts so writes are safe. - enhancedCfg := cloneModelConfig(cfg) - - if customProviders != nil { - if providerCfg, exists := customProviders[cfg.Provider]; exists { - slog.Debug("Applying custom provider defaults", - "provider", cfg.Provider, - "model", cfg.Model, - "base_url", providerCfg.BaseURL, - ) - - // Apply the underlying provider type if set on the provider config. - // This allows the model to inherit the real provider type (e.g., "anthropic") - // so that the correct API client is selected. 
- if providerCfg.Provider != "" { - enhancedCfg.Provider = providerCfg.Provider - } - - if enhancedCfg.BaseURL == "" && providerCfg.BaseURL != "" { - enhancedCfg.BaseURL = providerCfg.BaseURL - } - if enhancedCfg.TokenKey == "" && providerCfg.TokenKey != "" { - enhancedCfg.TokenKey = providerCfg.TokenKey - } - if enhancedCfg.Temperature == nil && providerCfg.Temperature != nil { - enhancedCfg.Temperature = providerCfg.Temperature - } - if enhancedCfg.MaxTokens == nil && providerCfg.MaxTokens != nil { - enhancedCfg.MaxTokens = providerCfg.MaxTokens - } - if enhancedCfg.TopP == nil && providerCfg.TopP != nil { - enhancedCfg.TopP = providerCfg.TopP - } - if enhancedCfg.FrequencyPenalty == nil && providerCfg.FrequencyPenalty != nil { - enhancedCfg.FrequencyPenalty = providerCfg.FrequencyPenalty - } - if enhancedCfg.PresencePenalty == nil && providerCfg.PresencePenalty != nil { - enhancedCfg.PresencePenalty = providerCfg.PresencePenalty - } - if enhancedCfg.ParallelToolCalls == nil && providerCfg.ParallelToolCalls != nil { - enhancedCfg.ParallelToolCalls = providerCfg.ParallelToolCalls - } - if enhancedCfg.TrackUsage == nil && providerCfg.TrackUsage != nil { - enhancedCfg.TrackUsage = providerCfg.TrackUsage - } - if enhancedCfg.ThinkingBudget == nil && providerCfg.ThinkingBudget != nil { - enhancedCfg.ThinkingBudget = providerCfg.ThinkingBudget - } - - // Merge provider_opts from provider config (model opts take precedence) - if len(providerCfg.ProviderOpts) > 0 { - if enhancedCfg.ProviderOpts == nil { - enhancedCfg.ProviderOpts = make(map[string]any) - } - for k, v := range providerCfg.ProviderOpts { - if _, has := enhancedCfg.ProviderOpts[k]; !has { - enhancedCfg.ProviderOpts[k] = v - } - } - } - - // Set api_type in ProviderOpts if not already set. - // Only default to openai_chatcompletions for OpenAI-compatible providers. 
- if providerCfg.APIType != "" { - if enhancedCfg.ProviderOpts == nil { - enhancedCfg.ProviderOpts = make(map[string]any) - } - if _, has := enhancedCfg.ProviderOpts["api_type"]; !has { - enhancedCfg.ProviderOpts["api_type"] = providerCfg.APIType - } - } else if isOpenAICompatibleProvider(resolveEffectiveProvider(providerCfg)) { - if enhancedCfg.ProviderOpts == nil { - enhancedCfg.ProviderOpts = make(map[string]any) - } - if _, has := enhancedCfg.ProviderOpts["api_type"]; !has { - enhancedCfg.ProviderOpts["api_type"] = "openai_chatcompletions" - } - } - - applyModelDefaults(enhancedCfg) - return enhancedCfg - } - } - - if alias, exists := Aliases[cfg.Provider]; exists { - // Set default base URL if not already specified - if enhancedCfg.BaseURL == "" && alias.BaseURL != "" { - enhancedCfg.BaseURL = alias.BaseURL - } - - // Set default token key if not already specified - if enhancedCfg.TokenKey == "" && alias.TokenEnvVar != "" { - enhancedCfg.TokenKey = alias.TokenEnvVar - } - } - - // Apply model-specific defaults (e.g., thinking budget for Claude/GPT models) - applyModelDefaults(enhancedCfg) - return enhancedCfg -} - -// --------------------------------------------------------------------------- -// Thinking defaults and overrides -// --------------------------------------------------------------------------- - -// applyModelDefaults applies provider-specific default values for model configuration. -// -// Thinking defaults policy: -// - thinking_budget: 0 or thinking_budget: none → thinking is off (nil). -// - thinking_budget explicitly set to a real value → kept as-is; interleaved_thinking -// is auto-enabled for Anthropic/Bedrock-Claude. -// - thinking_budget NOT set: -// - Thinking-only models (OpenAI o-series) get "medium". -// - All other models get no thinking. -// -// NOTE: max_tokens is NOT set here; see teamloader and runtime/model_switcher. -func applyModelDefaults(cfg *latest.ModelConfig) { - // Set appropriate github copilot api_type. 
- applyGithubCopilotAPIType(cfg) - - // Explicitly disabled → normalise to nil so providers never see it. - if cfg.ThinkingBudget.IsDisabled() { - cfg.ThinkingBudget = nil - slog.Debug("Thinking explicitly disabled", - "provider", cfg.Provider, "model", cfg.Model) - return - } - - providerType := resolveProviderType(cfg) - - // User already set a real thinking_budget — just apply side-effects. - if cfg.ThinkingBudget != nil { - ensureInterleavedThinking(cfg, providerType) - return - } - - // No thinking_budget configured — only thinking-only models get a default. - switch providerType { - case "openai", "openai_chatcompletions", "openai_responses": - if isOpenAIThinkingOnlyModel(cfg.Model) { - cfg.ThinkingBudget = &latest.ThinkingBudget{Effort: "medium"} - slog.Debug("Applied default thinking for thinking-only OpenAI model", - "provider", cfg.Provider, "model", cfg.Model) - } - } -} - -// --------------------------------------------------------------------------- -// Shared helpers -// --------------------------------------------------------------------------- - -// cloneModelConfig returns a shallow copy of cfg with a deep copy of -// ProviderOpts so that callers can safely mutate the returned config's -// map and pointer fields without affecting the original. -func cloneModelConfig(cfg *latest.ModelConfig) *latest.ModelConfig { - c := *cfg - if cfg.ProviderOpts != nil { - c.ProviderOpts = make(map[string]any, len(cfg.ProviderOpts)) - maps.Copy(c.ProviderOpts, cfg.ProviderOpts) - } - return &c -} - -// ensureInterleavedThinking sets interleaved_thinking=true in ProviderOpts -// for Anthropic and Bedrock-Claude models, unless the user already set it. 
-func ensureInterleavedThinking(cfg *latest.ModelConfig, providerType string) { - needsInterleaved := providerType == "anthropic" || - (providerType == "amazon-bedrock" && isBedrockClaudeModel(cfg.Model)) - if !needsInterleaved { - return - } - if cfg.ProviderOpts == nil { - cfg.ProviderOpts = make(map[string]any) - } - if _, has := cfg.ProviderOpts["interleaved_thinking"]; !has { - cfg.ProviderOpts["interleaved_thinking"] = true - slog.Debug("Auto-enabled interleaved_thinking", - "provider", cfg.Provider, "model", cfg.Model) - } -} - -// applyGithubCopilotAPIType ensures api_type is set to openai_responses for appropriate models. -func applyGithubCopilotAPIType(cfg *latest.ModelConfig) { - if isGithubCopilotProvider(cfg.Provider) && isCopilotResponsesModel(cfg.Model) { - if cfg.ProviderOpts == nil { - cfg.ProviderOpts = make(map[string]any) - } - // If it's not set, or was set to openai_chatcompletions by the generic fallback, override it. - if apiType, ok := cfg.ProviderOpts["api_type"].(string); !ok || apiType == "" || apiType == "openai_chatcompletions" { - cfg.ProviderOpts["api_type"] = "openai_responses" - } - } -} - -// isOpenAIThinkingOnlyModel returns true for OpenAI models that require thinking -// to function properly (o-series reasoning models). -func isOpenAIThinkingOnlyModel(model string) bool { - m := strings.ToLower(model) - return strings.HasPrefix(m, "o1") || - strings.HasPrefix(m, "o3") || - strings.HasPrefix(m, "o4") -} - -// isBedrockClaudeModel returns true if the model ID is a Claude model on Bedrock. -// Claude model IDs on Bedrock start with "anthropic.claude-" or "global.anthropic.claude-". -func isBedrockClaudeModel(model string) bool { - m := strings.ToLower(model) - return strings.HasPrefix(m, "anthropic.claude-") || strings.HasPrefix(m, "global.anthropic.claude-") -} - -// gemini3Family extracts the model family (e.g. "pro", "flash") from a -// Gemini 3+ model name, or returns "" if the model is not Gemini 3+. 
-// It handles both "gemini-3-" and "gemini-3.X-" patterns. -// -// Examples: -// -// gemini3Family("gemini-3-pro") → "pro" -// gemini3Family("gemini-3.1-flash-preview") → "flash-preview" -// gemini3Family("gemini-2.5-flash") → "" -func gemini3Family(model string) string { - if !strings.HasPrefix(model, "gemini-3") { - return "" - } - rest := model[len("gemini-3"):] - if rest == "" { - return "" - } - // Accept "gemini-3-..." or "gemini-3.X-..." (e.g. gemini-3.1-pro-preview) - switch rest[0] { - case '-': - return rest[1:] // "gemini-3-pro" → "pro" - case '.': - if _, family, ok := strings.Cut(rest, "-"); ok { - return family // "gemini-3.1-pro-preview" → "pro-preview" - } - } - return "" -} - -func isGeminiProModel(model string) bool { - return strings.HasPrefix(gemini3Family(model), "pro") -} - -func isGeminiFlashModel(model string) bool { - return strings.HasPrefix(gemini3Family(model), "flash") -} - -// resolveEffectiveProvider returns the effective provider type for a ProviderConfig. -// If Provider is explicitly set, returns that. Otherwise returns "openai" (backward compat). -func resolveEffectiveProvider(cfg latest.ProviderConfig) string { - if cfg.Provider != "" { - return cfg.Provider - } - return "openai" -} - -func isGithubCopilotProvider(providerType string) bool { - return providerType == "github-copilot" -} - -func isCopilotResponsesModel(model string) bool { - codex := map[string]bool{ - "gpt-5.3-codex": true, - "gpt-5.2-codex": true, - } - return codex[model] -} - -// isOpenAICompatibleProvider returns true if the provider type uses the OpenAI API protocol. 
-func isOpenAICompatibleProvider(providerType string) bool { - switch providerType { - case "openai", "openai_chatcompletions", "openai_responses": - return true - default: - // Check if it's an alias that maps to openai - if alias, exists := Aliases[providerType]; exists { - return alias.APIType == "openai" - } - return false - } -} diff --git a/pkg/model/provider/provider_defaults_test.go b/pkg/model/provider/provider_defaults_test.go index fd264e0b6..9b51cb42f 100644 --- a/pkg/model/provider/provider_defaults_test.go +++ b/pkg/model/provider/provider_defaults_test.go @@ -383,3 +383,40 @@ func TestApplyProviderDefaults_AliasFallback(t *testing.T) { assert.Empty(t, cfg.BaseURL) assert.Empty(t, cfg.TokenKey) } + +func TestIsGithubCopilotProvider(t *testing.T) { + t.Parallel() + + assert.True(t, isGithubCopilotProvider("github-copilot")) + assert.False(t, isGithubCopilotProvider("openai")) + assert.False(t, isGithubCopilotProvider("")) +} + +func TestGithubCopilotApiType(t *testing.T) { + cfg := &latest.ModelConfig{ + Provider: "github-copilot", + Model: "gpt-5.3-codex", + } + + enhancedCfg := applyProviderDefaults(cfg, nil) + + apiType := resolveProviderType(enhancedCfg) + + if apiType != "openai_responses" { + t.Errorf("Expected api_type to be 'openai_responses', got '%s'", apiType) + } + + // test when it is a custom provider + customProviders := map[string]latest.ProviderConfig{ + "github-copilot": { + Provider: "github-copilot", + }, + } + + enhancedCfg2 := applyProviderDefaults(cfg, customProviders) + apiType2 := resolveProviderType(enhancedCfg2) + + if apiType2 != "openai_responses" { + t.Errorf("Expected api_type to be 'openai_responses', got '%s'", apiType2) + } +} diff --git a/pkg/model/provider/provider_test.go b/pkg/model/provider/provider_test.go index 18564c86a..1e39d50f9 100644 --- a/pkg/model/provider/provider_test.go +++ b/pkg/model/provider/provider_test.go @@ -4,8 +4,6 @@ import ( "testing" "github.com/stretchr/testify/assert" - - 
"github.com/docker/docker-agent/pkg/config/latest" ) func TestCatalogProviders(t *testing.T) { @@ -90,50 +88,3 @@ func TestIsKnownProvider(t *testing.T) { assert.False(t, IsKnownProvider("unknown")) assert.False(t, IsKnownProvider("")) } - -func TestIsGithubCopilotProvider(t *testing.T) { - t.Parallel() - - assert.True(t, isGithubCopilotProvider("github-copilot")) - assert.False(t, isGithubCopilotProvider("openai")) - assert.False(t, isGithubCopilotProvider("")) -} - -func TestIsCopilotResponsesModel(t *testing.T) { - t.Parallel() - - assert.True(t, isCopilotResponsesModel("gpt-5.3-codex")) - assert.True(t, isCopilotResponsesModel("gpt-5.2-codex")) - assert.False(t, isCopilotResponsesModel("gpt-4o")) - assert.False(t, isCopilotResponsesModel("claude-sonnet-4-5")) - assert.False(t, isCopilotResponsesModel("")) -} - -func TestGithubCopilotApiType(t *testing.T) { - cfg := &latest.ModelConfig{ - Provider: "github-copilot", - Model: "gpt-5.3-codex", - } - - enhancedCfg := applyProviderDefaults(cfg, nil) - - apiType := resolveProviderType(enhancedCfg) - - if apiType != "openai_responses" { - t.Errorf("Expected api_type to be 'openai_responses', got '%s'", apiType) - } - - // test when it is a custom provider - customProviders := map[string]latest.ProviderConfig{ - "github-copilot": { - Provider: "github-copilot", - }, - } - - enhancedCfg2 := applyProviderDefaults(cfg, customProviders) - apiType2 := resolveProviderType(enhancedCfg2) - - if apiType2 != "openai_responses" { - t.Errorf("Expected api_type to be 'openai_responses', got '%s'", apiType2) - } -}