fix(openai): route copilot Gemini preview models to chat endpoint

This commit is contained in:
Ben Vargas
2026-02-19 23:25:34 -07:00
parent f8d1bc06ea
commit 08e078fc25
4 changed files with 70 additions and 1 deletion

View File

@@ -571,6 +571,7 @@ func GetGitHubCopilotModels() []*ModelInfo {
Description: "Google Gemini 2.5 Pro via GitHub Copilot",
ContextLength: 1048576,
MaxCompletionTokens: 65536,
SupportedEndpoints: []string{"/chat/completions"},
},
{
ID: "gemini-3-pro-preview",
@@ -582,6 +583,7 @@ func GetGitHubCopilotModels() []*ModelInfo {
Description: "Google Gemini 3 Pro Preview via GitHub Copilot",
ContextLength: 1048576,
MaxCompletionTokens: 65536,
SupportedEndpoints: []string{"/chat/completions"},
},
{
ID: "gemini-3.1-pro-preview",
@@ -604,6 +606,7 @@ func GetGitHubCopilotModels() []*ModelInfo {
Description: "Google Gemini 3 Flash Preview via GitHub Copilot",
ContextLength: 1048576,
MaxCompletionTokens: 65536,
SupportedEndpoints: []string{"/chat/completions"},
},
{
ID: "grok-code-fast-1",

View File

@@ -0,0 +1,28 @@
package registry
import "testing"
// TestGitHubCopilotGeminiModelsAreChatOnly verifies that each of the Gemini
// models exposed through GitHub Copilot is present in the model definitions
// and advertises exactly one supported endpoint: "/chat/completions".
func TestGitHubCopilotGeminiModelsAreChatOnly(t *testing.T) {
	targets := []string{
		"gemini-2.5-pro",
		"gemini-3-pro-preview",
		"gemini-3-flash-preview",
	}
	seen := make(map[string]bool, len(targets))
	for _, info := range GetGitHubCopilotModels() {
		isTarget := false
		for _, id := range targets {
			if info.ID == id {
				isTarget = true
				break
			}
		}
		if !isTarget {
			continue
		}
		seen[info.ID] = true
		endpoints := info.SupportedEndpoints
		if len(endpoints) != 1 || endpoints[0] != "/chat/completions" {
			t.Fatalf("model %q supported endpoints = %v, want [/chat/completions]", info.ID, endpoints)
		}
	}
	for _, id := range targets {
		if !seen[id] {
			t.Fatalf("expected GitHub Copilot model %q in definitions", id)
		}
	}
}

View File

@@ -1,6 +1,9 @@
package openai
import "github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
import (
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
)
const (
openAIChatEndpoint = "/chat/completions"
@@ -12,6 +15,12 @@ func resolveEndpointOverride(modelName, requestedEndpoint string) (string, bool)
return "", false
}
info := registry.GetGlobalRegistry().GetModelInfo(modelName, "")
if info == nil {
baseModel := thinking.ParseSuffix(modelName).ModelName
if baseModel != "" && baseModel != modelName {
info = registry.GetGlobalRegistry().GetModelInfo(baseModel, "")
}
}
if info == nil || len(info.SupportedEndpoints) == 0 {
return "", false
}

View File

@@ -0,0 +1,29 @@
package openai
import (
"testing"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
)
// TestResolveEndpointOverride_StripsThinkingSuffix registers a chat-only
// model in the global registry and checks that a request for that model
// carrying a thinking suffix (e.g. "(high)") still resolves an endpoint
// override pointing at the chat-completions endpoint.
func TestResolveEndpointOverride_StripsThinkingSuffix(t *testing.T) {
	const clientID = "test-endpoint-compat-suffix"
	globalReg := registry.GetGlobalRegistry()
	chatOnly := &registry.ModelInfo{
		ID:                 "test-gemini-chat-only",
		SupportedEndpoints: []string{openAIChatEndpoint},
	}
	globalReg.RegisterClient(clientID, "github-copilot", []*registry.ModelInfo{chatOnly})
	t.Cleanup(func() { globalReg.UnregisterClient(clientID) })

	got, resolved := resolveEndpointOverride("test-gemini-chat-only(high)", openAIResponsesEndpoint)
	if !resolved {
		t.Fatalf("expected endpoint override to be resolved")
	}
	if got != openAIChatEndpoint {
		t.Fatalf("override endpoint = %q, want %q", got, openAIChatEndpoint)
	}
}