diff --git a/README.md b/README.md index 7d92bac3..f4d8ed82 100644 --- a/README.md +++ b/README.md @@ -229,6 +229,7 @@ console.log(await claudeResponse.json()); - gemini-2.5-flash - gemini-2.5-flash-lite - gpt-5 +- gpt-5-codex - claude-opus-4-1-20250805 - claude-opus-4-20250514 - claude-sonnet-4-20250514 @@ -262,6 +263,7 @@ The server uses a YAML configuration file (`config.yaml`) located in the project | `debug` | boolean | false | Enable debug mode for verbose logging. | | `api-keys` | string[] | [] | List of API keys that can be used to authenticate requests. | | `generative-language-api-key` | string[] | [] | List of Generative Language API keys. | +| `force-gpt-5-codex` | bool | false | Force the conversion of GPT-5 calls to GPT-5 Codex. | | `codex-api-key` | object | {} | List of Codex API keys. | | `codex-api-key.api-key` | string | "" | Codex API key. | | `codex-api-key.base-url` | string | "" | Custom Codex API endpoint, if you use a third-party API endpoint. | @@ -322,6 +324,9 @@ generative-language-api-key: - "AIzaSy...03" - "AIzaSy...04" +# Force the conversion of GPT-5 calls to GPT-5 Codex. +force-gpt-5-codex: true + # Codex API keys codex-api-key: - api-key: "sk-atSM..." 
@@ -423,7 +428,7 @@ export ANTHROPIC_MODEL=gemini-2.5-pro export ANTHROPIC_SMALL_FAST_MODEL=gemini-2.5-flash ``` -Using OpenAI models: +Using OpenAI GPT 5 models: ```bash export ANTHROPIC_BASE_URL=http://127.0.0.1:8317 export ANTHROPIC_AUTH_TOKEN=sk-dummy @@ -431,6 +436,14 @@ export ANTHROPIC_MODEL=gpt-5 export ANTHROPIC_SMALL_FAST_MODEL=gpt-5-minimal ``` +Using OpenAI GPT 5 Codex models: +```bash +export ANTHROPIC_BASE_URL=http://127.0.0.1:8317 +export ANTHROPIC_AUTH_TOKEN=sk-dummy +export ANTHROPIC_MODEL=gpt-5-codex +export ANTHROPIC_SMALL_FAST_MODEL=gpt-5-codex-low +``` + Using Claude models: ```bash export ANTHROPIC_BASE_URL=http://127.0.0.1:8317 @@ -454,7 +467,7 @@ Start CLI Proxy API server, and then edit the `~/.codex/config.toml` and `~/.cod config.toml: ```toml model_provider = "cliproxyapi" -model = "gpt-5" # You can use any of the models that we support. +model = "gpt-5-codex" # Or gpt-5, you can also use any of the models that we support. model_reasoning_effort = "high" [model_providers.cliproxyapi] diff --git a/README_CN.md b/README_CN.md index 1b89a51e..bd0395a9 100644 --- a/README_CN.md +++ b/README_CN.md @@ -241,6 +241,7 @@ console.log(await claudeResponse.json()); - gemini-2.5-flash - gemini-2.5-flash-lite - gpt-5 +- gpt-5-codex - claude-opus-4-1-20250805 - claude-opus-4-20250514 - claude-sonnet-4-20250514 @@ -274,6 +275,7 @@ console.log(await claudeResponse.json()); | `debug` | boolean | false | 启用调试模式以获取详细日志。 | | `api-keys` | string[] | [] | 可用于验证请求的API密钥列表。 | | `generative-language-api-key` | string[] | [] | 生成式语言API密钥列表。 | +| `force-gpt-5-codex` | bool | false | 强制将 GPT-5 调用转换成 GPT-5 Codex. | | `codex-api-key` | object | {} | Codex API密钥列表。 | | `codex-api-key.api-key` | string | "" | Codex API密钥。 | | `codex-api-key.base-url` | string | "" | 自定义的Codex API端点 | @@ -334,6 +336,9 @@ generative-language-api-key: - "AIzaSy...03" - "AIzaSy...04" +# 强制将 GPT-5 调用转换成 GPT-5 Codex. 
+force-gpt-5-codex: true + # Codex API 密钥 codex-api-key: - api-key: "sk-atSM..." @@ -430,7 +435,7 @@ export ANTHROPIC_MODEL=gemini-2.5-pro export ANTHROPIC_SMALL_FAST_MODEL=gemini-2.5-flash ``` -使用 OpenAI 模型: +使用 OpenAI GPT 5 模型: ```bash export ANTHROPIC_BASE_URL=http://127.0.0.1:8317 export ANTHROPIC_AUTH_TOKEN=sk-dummy @@ -438,6 +443,15 @@ export ANTHROPIC_MODEL=gpt-5 export ANTHROPIC_SMALL_FAST_MODEL=gpt-5-minimal ``` +使用 OpenAI GPT 5 Codex 模型: +```bash +export ANTHROPIC_BASE_URL=http://127.0.0.1:8317 +export ANTHROPIC_AUTH_TOKEN=sk-dummy +export ANTHROPIC_MODEL=gpt-5-codex +export ANTHROPIC_SMALL_FAST_MODEL=gpt-5-codex-low +``` + + 使用 Claude 模型: ```bash export ANTHROPIC_BASE_URL=http://127.0.0.1:8317 @@ -461,7 +475,7 @@ export ANTHROPIC_SMALL_FAST_MODEL=qwen3-coder-flash config.toml: ```toml model_provider = "cliproxyapi" -model = "gpt-5" # 你可以使用任何我们支持的模型 +model = "gpt-5-codex" # 或者是gpt-5,你也可以使用任何我们支持的模型 model_reasoning_effort = "high" [model_providers.cliproxyapi] diff --git a/config.example.yaml b/config.example.yaml index 4281e24e..855b9ad6 100644 --- a/config.example.yaml +++ b/config.example.yaml @@ -41,6 +41,9 @@ generative-language-api-key: - "AIzaSy...03" - "AIzaSy...04" +# Force the conversion of GPT-5 calls to GPT-5 Codex. +force-gpt-5-codex: true + # Codex API keys codex-api-key: - api-key: "sk-atSM..." 
diff --git a/internal/api/handlers/management/config_basic.go b/internal/api/handlers/management/config_basic.go index 57af692c..346e24ff 100644 --- a/internal/api/handlers/management/config_basic.go +++ b/internal/api/handlers/management/config_basic.go @@ -12,6 +12,14 @@ func (h *Handler) GetConfig(c *gin.Context) { func (h *Handler) GetDebug(c *gin.Context) { c.JSON(200, gin.H{"debug": h.cfg.Debug}) } func (h *Handler) PutDebug(c *gin.Context) { h.updateBoolField(c, func(v bool) { h.cfg.Debug = v }) } +// ForceGPT5Codex +func (h *Handler) GetForceGPT5Codex(c *gin.Context) { + c.JSON(200, gin.H{"force-gpt-5-codex": h.cfg.ForceGPT5Codex}) +} +func (h *Handler) PutForceGPT5Codex(c *gin.Context) { + h.updateBoolField(c, func(v bool) { h.cfg.ForceGPT5Codex = v }) +} + // Request log func (h *Handler) GetRequestLog(c *gin.Context) { c.JSON(200, gin.H{"request-log": h.cfg.RequestLog}) } func (h *Handler) PutRequestLog(c *gin.Context) { diff --git a/internal/api/server.go b/internal/api/server.go index 5842801b..e8e56fe2 100644 --- a/internal/api/server.go +++ b/internal/api/server.go @@ -200,6 +200,10 @@ func (s *Server) setupRoutes() { mgmt.PUT("/debug", s.mgmt.PutDebug) mgmt.PATCH("/debug", s.mgmt.PutDebug) + mgmt.GET("/force-gpt-5-codex", s.mgmt.GetForceGPT5Codex) + mgmt.PUT("/force-gpt-5-codex", s.mgmt.PutForceGPT5Codex) + mgmt.PATCH("/force-gpt-5-codex", s.mgmt.PutForceGPT5Codex) + mgmt.GET("/proxy-url", s.mgmt.GetProxyURL) mgmt.PUT("/proxy-url", s.mgmt.PutProxyURL) mgmt.PATCH("/proxy-url", s.mgmt.PutProxyURL) diff --git a/internal/client/codex_client.go b/internal/client/codex_client.go index 37fb99de..87bb8105 100644 --- a/internal/client/codex_client.go +++ b/internal/client/codex_client.go @@ -136,6 +136,10 @@ func (c *CodexClient) CanProvideModel(modelName string) bool { "gpt-5-low", "gpt-5-medium", "gpt-5-high", + "gpt-5-codex", + "gpt-5-codex-low", + "gpt-5-codex-medium", + "gpt-5-codex-high", "codex-mini-latest", } return util.InArray(models, modelName) @@ 
-415,6 +419,25 @@ func (c *CodexClient) APIRequest(ctx context.Context, modelName, endpoint string case "gpt-5-high": jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "high") } + } else if util.InArray([]string{"gpt-5-codex", "gpt-5-codex-low", "gpt-5-codex-medium", "gpt-5-codex-high"}, modelName) { + jsonBody, _ = sjson.SetBytes(jsonBody, "model", "gpt-5-codex") + switch modelName { + case "gpt-5-codex": + jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "medium") + case "gpt-5-codex-low": + jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "low") + case "gpt-5-codex-medium": + jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "medium") + case "gpt-5-codex-high": + jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "high") + } + } else if c.cfg.ForceGPT5Codex { + if gjson.GetBytes(jsonBody, "model").String() == "gpt-5" { + if gjson.GetBytes(jsonBody, "reasoning.effort").String() == "minimal" { + jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "low") + } + jsonBody, _ = sjson.SetBytes(jsonBody, "model", "gpt-5-codex") + } } url := fmt.Sprintf("%s%s", chatGPTEndpoint, endpoint) diff --git a/internal/config/config.go b/internal/config/config.go index a3a87a56..ce4fb257 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -44,6 +44,9 @@ type Config struct { // ClaudeKey defines a list of Claude API key configurations as specified in the YAML configuration file. ClaudeKey []ClaudeKey `yaml:"claude-api-key" json:"claude-api-key"` + // ForceGPT5Codex forces the use of GPT-5 Codex model. + ForceGPT5Codex bool `yaml:"force-gpt-5-codex" json:"force-gpt-5-codex"` + // Codex defines a list of Codex API key configurations as specified in the YAML configuration file. 
CodexKey []CodexKey `yaml:"codex-api-key" json:"codex-api-key"` diff --git a/internal/registry/model_definitions.go b/internal/registry/model_definitions.go index 83456f30..aab7e973 100644 --- a/internal/registry/model_definitions.go +++ b/internal/registry/model_definitions.go @@ -215,6 +215,58 @@ func GetOpenAIModels() []*ModelInfo { MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, }, + { + ID: "gpt-5-codex", + Object: "model", + Created: time.Now().Unix(), + OwnedBy: "openai", + Type: "openai", + Version: "gpt-5-2025-09-15", + DisplayName: "GPT 5 Codex", + Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.", + ContextLength: 400000, + MaxCompletionTokens: 128000, + SupportedParameters: []string{"tools"}, + }, + { + ID: "gpt-5-codex-low", + Object: "model", + Created: time.Now().Unix(), + OwnedBy: "openai", + Type: "openai", + Version: "gpt-5-2025-09-15", + DisplayName: "GPT 5 Codex Low", + Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.", + ContextLength: 400000, + MaxCompletionTokens: 128000, + SupportedParameters: []string{"tools"}, + }, + { + ID: "gpt-5-codex-medium", + Object: "model", + Created: time.Now().Unix(), + OwnedBy: "openai", + Type: "openai", + Version: "gpt-5-2025-09-15", + DisplayName: "GPT 5 Codex Medium", + Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.", + ContextLength: 400000, + MaxCompletionTokens: 128000, + SupportedParameters: []string{"tools"}, + }, + { + ID: "gpt-5-codex-high", + Object: "model", + Created: time.Now().Unix(), + OwnedBy: "openai", + Type: "openai", + Version: "gpt-5-2025-09-15", + DisplayName: "GPT 5 Codex High", + Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.", + ContextLength: 400000, + MaxCompletionTokens: 128000, + SupportedParameters: []string{"tools"}, + }, { ID: 
"codex-mini-latest", Object: "model", diff --git a/internal/watcher/watcher.go b/internal/watcher/watcher.go index 0214b086..1057763a 100644 --- a/internal/watcher/watcher.go +++ b/internal/watcher/watcher.go @@ -240,6 +240,9 @@ func (w *Watcher) reloadConfig() bool { if oldConfig.RemoteManagement.AllowRemote != newConfig.RemoteManagement.AllowRemote { log.Debugf(" remote-management.allow-remote: %t -> %t", oldConfig.RemoteManagement.AllowRemote, newConfig.RemoteManagement.AllowRemote) } + if oldConfig.ForceGPT5Codex != newConfig.ForceGPT5Codex { + log.Debugf(" force-gpt-5-codex: %t -> %t", oldConfig.ForceGPT5Codex, newConfig.ForceGPT5Codex) + } } log.Infof("config successfully reloaded, triggering client reload")