From 1ef4f09df516c7a28ed1070185ee16ebac4f737f Mon Sep 17 00:00:00 2001
From: yangjianbo
Date: Wed, 31 Dec 2025 16:17:45 +0800
Subject: [PATCH] fix(gateway): add required validation for the model parameter
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add required validation for the model parameter on the following
endpoints, returning a 400 error directly when it is missing:

- /v1/messages
- /v1/messages/count_tokens
- /openai/v1/responses

Before the fix: an empty model would go through the account-selection
flow, and the error was ultimately returned by the upstream API.
After the fix: the request is rejected at the entry point, avoiding
wasted resources and unclear error messages.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5
---
 backend/internal/handler/gateway_handler.go        | 12 ++++++++++++
 backend/internal/handler/openai_gateway_handler.go |  6 ++++++
 2 files changed, 18 insertions(+)

diff --git a/backend/internal/handler/gateway_handler.go b/backend/internal/handler/gateway_handler.go
index fc92b2d8..a2f833ff 100644
--- a/backend/internal/handler/gateway_handler.go
+++ b/backend/internal/handler/gateway_handler.go
@@ -88,6 +88,12 @@ func (h *GatewayHandler) Messages(c *gin.Context) {
 	reqModel := parsedReq.Model
 	reqStream := parsedReq.Stream
 
+	// Validate that model is provided
+	if reqModel == "" {
+		h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", "model is required")
+		return
+	}
+
 	// Track if we've started streaming (for error handling)
 	streamStarted := false
 
@@ -517,6 +523,12 @@ func (h *GatewayHandler) CountTokens(c *gin.Context) {
 		return
 	}
 
+	// Validate that model is provided
+	if parsedReq.Model == "" {
+		h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", "model is required")
+		return
+	}
+
 	// Get subscription info (may be nil)
 	subscription, _ := middleware2.GetSubscriptionFromContext(c)
 
diff --git a/backend/internal/handler/openai_gateway_handler.go b/backend/internal/handler/openai_gateway_handler.go
index 7fcb329d..7c9934c6 100644
--- a/backend/internal/handler/openai_gateway_handler.go
+++ b/backend/internal/handler/openai_gateway_handler.go
@@ -80,6 +80,12 @@ func (h *OpenAIGatewayHandler) Responses(c *gin.Context) {
 	reqModel, _ := reqBody["model"].(string)
 	reqStream, _ := reqBody["stream"].(bool)
 
+	// Validate that model is provided
+	if reqModel == "" {
+		h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", "model is required")
+		return
+	}
+
 	// For non-Codex CLI requests, set default instructions
 	userAgent := c.GetHeader("User-Agent")
 	if !openai.IsCodexCLIRequest(userAgent) {
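
Note: the sketch below is a minimal standalone illustration of the same entry-point guard, assuming gin for routing and an Anthropic-style error envelope; the route, newRouter, and errorResponse helper here are illustrative stand-ins and not the repository's actual GatewayHandler or its wiring.

// Sketch of the "model is required" guard rejected at the entry point.
// Assumption: error body shape {"type":"error","error":{"type":...,"message":...}}.
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"strings"

	"github.com/gin-gonic/gin"
)

// errorResponse writes an Anthropic-style error envelope (assumed shape).
func errorResponse(c *gin.Context, status int, errType, message string) {
	c.JSON(status, gin.H{
		"type":  "error",
		"error": gin.H{"type": errType, "message": message},
	})
}

func newRouter() *gin.Engine {
	r := gin.New()
	r.POST("/v1/messages", func(c *gin.Context) {
		var req struct {
			Model  string `json:"model"`
			Stream bool   `json:"stream"`
		}
		if err := c.ShouldBindJSON(&req); err != nil {
			errorResponse(c, http.StatusBadRequest, "invalid_request_error", "invalid JSON body")
			return
		}
		// Reject a missing model here instead of letting the request enter
		// account selection and fail later at the upstream API.
		if req.Model == "" {
			errorResponse(c, http.StatusBadRequest, "invalid_request_error", "model is required")
			return
		}
		c.JSON(http.StatusOK, gin.H{"ok": true})
	})
	return r
}

func main() {
	r := newRouter()

	// Request without a model: expect a 400 with the error envelope.
	req := httptest.NewRequest(http.MethodPost, "/v1/messages", strings.NewReader(`{"stream":false}`))
	req.Header.Set("Content-Type", "application/json")
	rec := httptest.NewRecorder()
	r.ServeHTTP(rec, req)
	fmt.Println(rec.Code, rec.Body.String()) // 400 {"error":{...},"type":"error"}
}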