diff --git a/internal/translator/codex/openai/chat-completions/codex_openai_response.go b/internal/translator/codex/openai/chat-completions/codex_openai_response.go
index f0e264c8..0054d995 100644
--- a/internal/translator/codex/openai/chat-completions/codex_openai_response.go
+++ b/internal/translator/codex/openai/chat-completions/codex_openai_response.go
@@ -74,8 +74,13 @@ func ConvertCodexResponseToOpenAI(_ context.Context, modelName string, originalR
 	}
 
 	// Extract and set the model version.
+	cachedModel := (*param).(*ConvertCliToOpenAIParams).Model
 	if modelResult := gjson.GetBytes(rawJSON, "model"); modelResult.Exists() {
 		template, _ = sjson.Set(template, "model", modelResult.String())
+	} else if cachedModel != "" {
+		template, _ = sjson.Set(template, "model", cachedModel)
+	} else if modelName != "" {
+		template, _ = sjson.Set(template, "model", modelName)
 	}
 
 	template, _ = sjson.Set(template, "created", (*param).(*ConvertCliToOpenAIParams).CreatedAt)
diff --git a/internal/translator/codex/openai/chat-completions/codex_openai_response_test.go b/internal/translator/codex/openai/chat-completions/codex_openai_response_test.go
new file mode 100644
index 00000000..70aaea06
--- /dev/null
+++ b/internal/translator/codex/openai/chat-completions/codex_openai_response_test.go
@@ -0,0 +1,47 @@
+package chat_completions
+
+import (
+	"context"
+	"testing"
+
+	"github.com/tidwall/gjson"
+)
+
+func TestConvertCodexResponseToOpenAI_StreamSetsModelFromResponseCreated(t *testing.T) {
+	ctx := context.Background()
+	var param any
+
+	modelName := "gpt-5.3-codex"
+
+	out := ConvertCodexResponseToOpenAI(ctx, modelName, nil, nil, []byte(`data: {"type":"response.created","response":{"id":"resp_123","created_at":1700000000,"model":"gpt-5.3-codex"}}`), &param)
+	if len(out) != 0 {
+		t.Fatalf("expected no output for response.created, got %d chunks", len(out))
+	}
+
+	out = ConvertCodexResponseToOpenAI(ctx, modelName, nil, nil, []byte(`data: {"type":"response.output_text.delta","delta":"hello"}`), &param)
+	if len(out) != 1 {
+		t.Fatalf("expected 1 chunk, got %d", len(out))
+	}
+
+	gotModel := gjson.Get(out[0], "model").String()
+	if gotModel != modelName {
+		t.Fatalf("expected model %q, got %q", modelName, gotModel)
+	}
+}
+
+func TestConvertCodexResponseToOpenAI_FirstChunkUsesRequestModelName(t *testing.T) {
+	ctx := context.Background()
+	var param any
+
+	modelName := "gpt-5.3-codex"
+
+	out := ConvertCodexResponseToOpenAI(ctx, modelName, nil, nil, []byte(`data: {"type":"response.output_text.delta","delta":"hello"}`), &param)
+	if len(out) != 1 {
+		t.Fatalf("expected 1 chunk, got %d", len(out))
+	}
+
+	gotModel := gjson.Get(out[0], "model").String()
+	if gotModel != modelName {
+		t.Fatalf("expected model %q, got %q", modelName, gotModel)
+	}
+}
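
For reviewers, a minimal self-contained sketch of the fallback order the first hunk introduces. This is not part of the diff; the function name resolveModel is hypothetical, and only gjson.GetBytes is taken from the real code:

package chat_completions

import "github.com/tidwall/gjson"

// resolveModel illustrates the precedence the patch applies when filling the
// "model" field of each emitted chunk: the model reported in the upstream
// payload wins, then the value cached on the stream params (captured from an
// earlier response.created event), then the model name from the original
// request. An empty return corresponds to the real code leaving the
// template's model field untouched.
func resolveModel(rawJSON []byte, cachedModel, requestModel string) string {
	if m := gjson.GetBytes(rawJSON, "model"); m.Exists() {
		return m.String() // upstream payload takes precedence
	}
	if cachedModel != "" {
		return cachedModel // cached from a prior event in the same stream
	}
	return requestModel // last resort: the model named in the request
}

The two tests cover both fallback branches: the first caches the model from response.created and checks that a later delta chunk carries it; the second sends a delta with no prior response.created, so the chunk must fall back to the request's model name.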