fix(test): 优化账户测试逻辑和默认模型配置

- 更新默认模型列表顺序,gemini-2.0-flash 作为首选
- OpenAI API Key 账户优先使用 Chat Completions API,兼容第三方代理
- 重构 OAuth 和 API Key 测试逻辑为独立方法
- 修复 Gemini 流处理中 finishReason 检查顺序
This commit is contained in:
ianshaw
2026-01-03 17:31:05 -08:00
parent d505c5b2f2
commit cc86f94474
2 changed files with 200 additions and 78 deletions

View File

@@ -11,11 +11,12 @@ type Model struct {
// DefaultModels is the curated Gemini model list used by the admin UI "test account" flow. // DefaultModels is the curated Gemini model list used by the admin UI "test account" flow.
var DefaultModels = []Model{ var DefaultModels = []Model{
{ID: "gemini-3-pro-preview", Type: "model", DisplayName: "Gemini 3 Pro Preview", CreatedAt: ""}, {ID: "gemini-2.0-flash", Type: "model", DisplayName: "Gemini 2.0 Flash", CreatedAt: ""},
{ID: "gemini-3-flash-preview", Type: "model", DisplayName: "Gemini 3 Flash Preview", CreatedAt: ""},
{ID: "gemini-2.5-pro", Type: "model", DisplayName: "Gemini 2.5 Pro", CreatedAt: ""}, {ID: "gemini-2.5-pro", Type: "model", DisplayName: "Gemini 2.5 Pro", CreatedAt: ""},
{ID: "gemini-2.5-flash", Type: "model", DisplayName: "Gemini 2.5 Flash", CreatedAt: ""}, {ID: "gemini-2.5-flash", Type: "model", DisplayName: "Gemini 2.5 Flash", CreatedAt: ""},
{ID: "gemini-3-pro-preview", Type: "model", DisplayName: "Gemini 3 Pro Preview", CreatedAt: ""},
{ID: "gemini-3-flash-preview", Type: "model", DisplayName: "Gemini 3 Flash Preview", CreatedAt: ""},
} }
// DefaultTestModel is the default model to preselect in test flows. // DefaultTestModel is the default model to preselect in test flows.
const DefaultTestModel = "gemini-3-pro-preview" const DefaultTestModel = "gemini-2.0-flash"

View File

@@ -296,16 +296,34 @@ func (s *AccountTestService) testOpenAIAccountConnection(c *gin.Context, account
} }
} }
// Determine authentication method and API URL // Set SSE headers early
var authToken string c.Writer.Header().Set("Content-Type", "text/event-stream")
var apiURL string c.Writer.Header().Set("Cache-Control", "no-cache")
var isOAuth bool c.Writer.Header().Set("Connection", "keep-alive")
var chatgptAccountID string c.Writer.Header().Set("X-Accel-Buffering", "no")
c.Writer.Flush()
// Send test_start event
s.sendEvent(c, TestEvent{Type: "test_start", Model: testModelID})
// Get proxy URL
proxyURL := ""
if account.ProxyID != nil && account.Proxy != nil {
proxyURL = account.Proxy.URL()
}
if account.IsOAuth() { if account.IsOAuth() {
isOAuth = true // OAuth - use ChatGPT internal API (Responses API)
// OAuth - use Bearer token with ChatGPT internal API return s.testOpenAIOAuthAccount(c, ctx, account, testModelID, proxyURL)
authToken = account.GetOpenAIAccessToken() }
// API Key - try Chat Completions API first, fallback to Responses API
return s.testOpenAIApiKeyAccount(c, ctx, account, testModelID, proxyURL)
}
// testOpenAIOAuthAccount tests OAuth account using ChatGPT internal API
func (s *AccountTestService) testOpenAIOAuthAccount(c *gin.Context, ctx context.Context, account *Account, testModelID, proxyURL string) error {
authToken := account.GetOpenAIAccessToken()
if authToken == "" { if authToken == "" {
return s.sendErrorAndEnd(c, "No access token available") return s.sendErrorAndEnd(c, "No access token available")
} }
@@ -319,62 +337,22 @@ func (s *AccountTestService) testOpenAIAccountConnection(c *gin.Context, account
authToken = tokenInfo.AccessToken authToken = tokenInfo.AccessToken
} }
// OAuth uses ChatGPT internal API // Create Responses API payload
apiURL = chatgptCodexAPIURL payload := createOpenAITestPayload(testModelID, true)
chatgptAccountID = account.GetChatGPTAccountID()
} else if account.Type == "apikey" {
// API Key - use Platform API
authToken = account.GetOpenAIApiKey()
if authToken == "" {
return s.sendErrorAndEnd(c, "No API key available")
}
baseURL := account.GetOpenAIBaseURL()
if baseURL == "" {
baseURL = "https://api.openai.com"
}
apiURL = strings.TrimSuffix(baseURL, "/") + "/responses"
} else {
return s.sendErrorAndEnd(c, fmt.Sprintf("Unsupported account type: %s", account.Type))
}
// Set SSE headers
c.Writer.Header().Set("Content-Type", "text/event-stream")
c.Writer.Header().Set("Cache-Control", "no-cache")
c.Writer.Header().Set("Connection", "keep-alive")
c.Writer.Header().Set("X-Accel-Buffering", "no")
c.Writer.Flush()
// Create OpenAI Responses API payload
payload := createOpenAITestPayload(testModelID, isOAuth)
payloadBytes, _ := json.Marshal(payload) payloadBytes, _ := json.Marshal(payload)
// Send test_start event req, err := http.NewRequestWithContext(ctx, "POST", chatgptCodexAPIURL, bytes.NewReader(payloadBytes))
s.sendEvent(c, TestEvent{Type: "test_start", Model: testModelID})
req, err := http.NewRequestWithContext(ctx, "POST", apiURL, bytes.NewReader(payloadBytes))
if err != nil { if err != nil {
return s.sendErrorAndEnd(c, "Failed to create request") return s.sendErrorAndEnd(c, "Failed to create request")
} }
// Set common headers
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", "Bearer "+authToken) req.Header.Set("Authorization", "Bearer "+authToken)
// Set OAuth-specific headers for ChatGPT internal API
if isOAuth {
req.Host = "chatgpt.com" req.Host = "chatgpt.com"
req.Header.Set("accept", "text/event-stream") req.Header.Set("accept", "text/event-stream")
if chatgptAccountID != "" { if chatgptAccountID := account.GetChatGPTAccountID(); chatgptAccountID != "" {
req.Header.Set("chatgpt-account-id", chatgptAccountID) req.Header.Set("chatgpt-account-id", chatgptAccountID)
} }
}
// Get proxy URL
proxyURL := ""
if account.ProxyID != nil && account.Proxy != nil {
proxyURL = account.Proxy.URL()
}
resp, err := s.httpUpstream.Do(req, proxyURL, account.ID, account.Concurrency) resp, err := s.httpUpstream.Do(req, proxyURL, account.ID, account.Concurrency)
if err != nil { if err != nil {
@@ -387,10 +365,153 @@ func (s *AccountTestService) testOpenAIAccountConnection(c *gin.Context, account
return s.sendErrorAndEnd(c, fmt.Sprintf("API returned %d: %s", resp.StatusCode, string(body))) return s.sendErrorAndEnd(c, fmt.Sprintf("API returned %d: %s", resp.StatusCode, string(body)))
} }
// Process SSE stream
return s.processOpenAIStream(c, resp.Body) return s.processOpenAIStream(c, resp.Body)
} }
// testOpenAIApiKeyAccount tests an API Key account's connectivity.
// It tries the Chat Completions API first (more compatible with third-party
// proxies) and falls back to the Responses API on a network error or a
// non-200 response. Results are streamed to the client as SSE test events.
func (s *AccountTestService) testOpenAIApiKeyAccount(c *gin.Context, ctx context.Context, account *Account, testModelID, proxyURL string) error {
	authToken := account.GetOpenAIApiKey()
	if authToken == "" {
		return s.sendErrorAndEnd(c, "No API key available")
	}
	baseURL := account.GetOpenAIBaseURL()
	if baseURL == "" {
		baseURL = "https://api.openai.com"
	}
	baseURL = strings.TrimSuffix(baseURL, "/")

	// Try Chat Completions API first (more compatible with third-party proxies)
	chatCompletionsURL := baseURL + "/v1/chat/completions"
	chatPayload := createOpenAIChatCompletionsPayload(testModelID)
	chatPayloadBytes, _ := json.Marshal(chatPayload)

	req, err := http.NewRequestWithContext(ctx, "POST", chatCompletionsURL, bytes.NewReader(chatPayloadBytes))
	if err != nil {
		return s.sendErrorAndEnd(c, "Failed to create request")
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+authToken)

	resp, err := s.httpUpstream.Do(req, proxyURL, account.ID, account.Concurrency)
	if err != nil {
		// Network error, try Responses API
		s.sendEvent(c, TestEvent{Type: "info", Text: "Chat Completions API failed, trying Responses API..."})
		return s.tryOpenAIResponsesAPI(c, ctx, account, testModelID, baseURL, authToken, proxyURL)
	}
	if resp.StatusCode == http.StatusOK {
		// Chat Completions API succeeded; stream the result and close when done.
		defer func() { _ = resp.Body.Close() }()
		return s.processOpenAIChatCompletionsStream(c, resp.Body)
	}
	// Chat Completions API failed (non-200). Drain and close the body exactly
	// once before the fallback request so the transport can reuse the
	// connection (the previous version closed the body twice: explicitly and
	// again via a function-scoped defer).
	_, _ = io.Copy(io.Discard, resp.Body)
	_ = resp.Body.Close()
	s.sendEvent(c, TestEvent{Type: "info", Text: "Chat Completions API failed, trying Responses API..."})
	return s.tryOpenAIResponsesAPI(c, ctx, account, testModelID, baseURL, authToken, proxyURL)
}
// tryOpenAIResponsesAPI tries the OpenAI Responses API as fallback.
// It posts a non-OAuth test payload to {baseURL}/v1/responses and streams the
// result back to the client; any non-200 status is reported as a test error.
func (s *AccountTestService) tryOpenAIResponsesAPI(c *gin.Context, ctx context.Context, account *Account, testModelID, baseURL, authToken, proxyURL string) error {
	// Build the request body for the Responses API (isOAuth=false).
	body, _ := json.Marshal(createOpenAITestPayload(testModelID, false))

	req, err := http.NewRequestWithContext(ctx, "POST", baseURL+"/v1/responses", bytes.NewReader(body))
	if err != nil {
		return s.sendErrorAndEnd(c, "Failed to create request")
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+authToken)

	resp, err := s.httpUpstream.Do(req, proxyURL, account.ID, account.Concurrency)
	if err != nil {
		return s.sendErrorAndEnd(c, fmt.Sprintf("Request failed: %s", err.Error()))
	}
	defer func() { _ = resp.Body.Close() }()

	if resp.StatusCode != http.StatusOK {
		// Include the upstream error body in the report for diagnosis.
		errBody, _ := io.ReadAll(resp.Body)
		return s.sendErrorAndEnd(c, fmt.Sprintf("API returned %d: %s", resp.StatusCode, string(errBody)))
	}
	return s.processOpenAIStream(c, resp.Body)
}
// createOpenAIChatCompletionsPayload creates a minimal streaming test payload
// for the OpenAI Chat Completions API: a single short user message with a
// small max_tokens cap so the test completes quickly.
func createOpenAIChatCompletionsPayload(modelID string) map[string]any {
	userMessage := map[string]any{"role": "user", "content": "hi"}
	return map[string]any{
		"model":      modelID,
		"messages":   []map[string]any{userMessage},
		"stream":     true,
		"max_tokens": 100,
	}
}
// processOpenAIChatCompletionsStream processes the SSE stream from the OpenAI
// Chat Completions API, forwarding content deltas to the client as test
// events. The stream ends successfully on a "[DONE]" sentinel, a chunk with a
// non-empty finish_reason, or EOF; an upstream error object ends it with an
// error event.
func (s *AccountTestService) processOpenAIChatCompletionsStream(c *gin.Context, body io.Reader) error {
	reader := bufio.NewReader(body)
	for {
		line, err := reader.ReadString('\n')
		if err != nil {
			if err == io.EOF {
				// Upstream closed without [DONE]; treat as a successful run.
				s.sendEvent(c, TestEvent{Type: "test_complete", Success: true})
				return nil
			}
			return s.sendErrorAndEnd(c, fmt.Sprintf("Stream read error: %s", err.Error()))
		}
		line = strings.TrimSpace(line)
		if line == "" || !sseDataPrefix.MatchString(line) {
			continue
		}
		jsonStr := sseDataPrefix.ReplaceAllString(line, "")
		if jsonStr == "[DONE]" {
			s.sendEvent(c, TestEvent{Type: "test_complete", Success: true})
			return nil
		}
		var data map[string]any
		if err := json.Unmarshal([]byte(jsonStr), &data); err != nil {
			// Skip malformed or keep-alive chunks rather than failing the test.
			continue
		}
		// Handle Chat Completions format: choices[0].delta.content
		if choices, ok := data["choices"].([]any); ok && len(choices) > 0 {
			if choice, ok := choices[0].(map[string]any); ok {
				// Extract content BEFORE checking finish_reason: some providers
				// send the final content delta in the same chunk as
				// finish_reason, and returning early would drop it. This
				// mirrors the finishReason ordering fix in processGeminiStream.
				if delta, ok := choice["delta"].(map[string]any); ok {
					if content, ok := delta["content"].(string); ok && content != "" {
						s.sendEvent(c, TestEvent{Type: "content", Text: content})
					}
				}
				// Check finish_reason after extracting any content.
				if finishReason, ok := choice["finish_reason"].(string); ok && finishReason != "" {
					s.sendEvent(c, TestEvent{Type: "test_complete", Success: true})
					return nil
				}
			}
		}
		// Surface upstream error objects as a test error.
		if errData, ok := data["error"].(map[string]any); ok {
			errorMsg := "Unknown error"
			if msg, ok := errData["message"].(string); ok {
				errorMsg = msg
			}
			return s.sendErrorAndEnd(c, errorMsg)
		}
	}
}
// testGeminiAccountConnection tests a Gemini account's connection // testGeminiAccountConnection tests a Gemini account's connection
func (s *AccountTestService) testGeminiAccountConnection(c *gin.Context, account *Account, modelID string) error { func (s *AccountTestService) testGeminiAccountConnection(c *gin.Context, account *Account, modelID string) error {
ctx := c.Request.Context() ctx := c.Request.Context()
@@ -627,11 +748,11 @@ func (s *AccountTestService) processGeminiStream(c *gin.Context, body io.Reader)
} }
line = strings.TrimSpace(line) line = strings.TrimSpace(line)
if line == "" || !strings.HasPrefix(line, "data: ") { if line == "" || !sseDataPrefix.MatchString(line) {
continue continue
} }
jsonStr := strings.TrimPrefix(line, "data: ") jsonStr := sseDataPrefix.ReplaceAllString(line, "")
if jsonStr == "[DONE]" { if jsonStr == "[DONE]" {
s.sendEvent(c, TestEvent{Type: "test_complete", Success: true}) s.sendEvent(c, TestEvent{Type: "test_complete", Success: true})
return nil return nil
@@ -650,13 +771,7 @@ func (s *AccountTestService) processGeminiStream(c *gin.Context, body io.Reader)
} }
if candidates, ok := data["candidates"].([]any); ok && len(candidates) > 0 { if candidates, ok := data["candidates"].([]any); ok && len(candidates) > 0 {
if candidate, ok := candidates[0].(map[string]any); ok { if candidate, ok := candidates[0].(map[string]any); ok {
// Check for completion // Extract content first (before checking finishReason)
if finishReason, ok := candidate["finishReason"].(string); ok && finishReason != "" {
s.sendEvent(c, TestEvent{Type: "test_complete", Success: true})
return nil
}
// Extract content
if content, ok := candidate["content"].(map[string]any); ok { if content, ok := candidate["content"].(map[string]any); ok {
if parts, ok := content["parts"].([]any); ok { if parts, ok := content["parts"].([]any); ok {
for _, part := range parts { for _, part := range parts {
@@ -668,6 +783,12 @@ func (s *AccountTestService) processGeminiStream(c *gin.Context, body io.Reader)
} }
} }
} }
// Check for completion after extracting content
if finishReason, ok := candidate["finishReason"].(string); ok && finishReason != "" {
s.sendEvent(c, TestEvent{Type: "test_complete", Success: true})
return nil
}
} }
} }