From 35907416b8c642b8bd33ab24fcebf0c2ddc93b1b Mon Sep 17 00:00:00 2001 From: hechieh Date: Sun, 22 Mar 2026 19:05:44 +0800 Subject: [PATCH] Fix GitHub Copilot gpt-5.4 endpoint routing Amp-Thread-ID: https://ampcode.com/threads/T-019d14cd-bc90-70ce-b1ae-87bc97332650 Co-authored-by: Amp --- internal/registry/model_definitions.go | 13 +++++++++++ .../executor/github_copilot_executor.go | 23 +++++++++++++++++++ .../executor/github_copilot_executor_test.go | 7 ++++++ 3 files changed, 43 insertions(+) diff --git a/internal/registry/model_definitions.go b/internal/registry/model_definitions.go index 8896a9df..06921a02 100644 --- a/internal/registry/model_definitions.go +++ b/internal/registry/model_definitions.go @@ -365,6 +365,19 @@ func GetGitHubCopilotModels() []*ModelInfo { SupportedEndpoints: []string{"/responses"}, Thinking: &ThinkingSupport{Levels: []string{"none", "low", "medium", "high", "xhigh"}}, }, + { + ID: "gpt-5.4", + Object: "model", + Created: now, + OwnedBy: "github-copilot", + Type: "github-copilot", + DisplayName: "GPT-5.4", + Description: "OpenAI GPT-5.4 via GitHub Copilot", + ContextLength: 200000, + MaxCompletionTokens: 32768, + SupportedEndpoints: []string{"/responses"}, + Thinking: &ThinkingSupport{Levels: []string{"low", "medium", "high", "xhigh"}}, + }, { ID: "claude-haiku-4.5", Object: "model", diff --git a/internal/runtime/executor/github_copilot_executor.go b/internal/runtime/executor/github_copilot_executor.go index b86146a3..abdb3006 100644 --- a/internal/runtime/executor/github_copilot_executor.go +++ b/internal/runtime/executor/github_copilot_executor.go @@ -577,9 +577,32 @@ func useGitHubCopilotResponsesEndpoint(sourceFormat sdktranslator.Format, model return true } baseModel := strings.ToLower(thinking.ParseSuffix(model).ModelName) + if info := registry.GetGlobalRegistry().GetModelInfo(baseModel, ""); info != nil { + if len(info.SupportedEndpoints) > 0 && !containsEndpoint(info.SupportedEndpoints, githubCopilotChatPath) && containsEndpoint(info.SupportedEndpoints, githubCopilotResponsesPath) { + return true + } + } + for _, info := range registry.GetGitHubCopilotModels() { + if info == nil || !strings.EqualFold(info.ID, baseModel) { + continue + } + if len(info.SupportedEndpoints) > 0 && !containsEndpoint(info.SupportedEndpoints, githubCopilotChatPath) && containsEndpoint(info.SupportedEndpoints, githubCopilotResponsesPath) { + return true + } + break + } return strings.Contains(baseModel, "codex") } +func containsEndpoint(endpoints []string, endpoint string) bool { + for _, item := range endpoints { + if item == endpoint { + return true + } + } + return false +} + // flattenAssistantContent converts assistant message content from array format // to a joined string. GitHub Copilot requires assistant content as a string; // sending it as an array causes Claude models to re-answer all previous prompts. diff --git a/internal/runtime/executor/github_copilot_executor_test.go b/internal/runtime/executor/github_copilot_executor_test.go index 66d5ce92..d3ce194a 100644 --- a/internal/runtime/executor/github_copilot_executor_test.go +++ b/internal/runtime/executor/github_copilot_executor_test.go @@ -70,6 +70,13 @@ func TestUseGitHubCopilotResponsesEndpoint_CodexModel(t *testing.T) { } } +func TestUseGitHubCopilotResponsesEndpoint_RegistryResponsesOnlyModel(t *testing.T) { + t.Parallel() + if !useGitHubCopilotResponsesEndpoint(sdktranslator.FromString("openai"), "gpt-5.4") { + t.Fatal("expected responses-only registry model to use /responses") + } +} + func TestUseGitHubCopilotResponsesEndpoint_DefaultChat(t *testing.T) { t.Parallel() if useGitHubCopilotResponsesEndpoint(sdktranslator.FromString("openai"), "claude-3-5-sonnet") {