fix(openai): tighten responses stream account tests

This commit is contained in:
hungryboy1025
2026-04-25 16:56:50 +08:00
parent 5d1c12e60e
commit 8987e0ba67
5 changed files with 87 additions and 8 deletions

View File

@@ -1145,13 +1145,17 @@ func (s *AccountTestService) processClaudeStream(c *gin.Context, body io.Reader)
// processOpenAIStream processes the SSE stream from OpenAI Responses API // processOpenAIStream processes the SSE stream from OpenAI Responses API
func (s *AccountTestService) processOpenAIStream(c *gin.Context, body io.Reader) error { func (s *AccountTestService) processOpenAIStream(c *gin.Context, body io.Reader) error {
reader := bufio.NewReader(body) reader := bufio.NewReader(body)
seenCompleted := false
for { for {
line, err := reader.ReadString('\n') line, err := reader.ReadString('\n')
if err != nil { if err != nil {
if err == io.EOF { if err == io.EOF {
s.sendEvent(c, TestEvent{Type: "test_complete", Success: true}) if seenCompleted {
return nil s.sendEvent(c, TestEvent{Type: "test_complete", Success: true})
return nil
}
return s.sendErrorAndEnd(c, "Stream ended before response.completed")
} }
return s.sendErrorAndEnd(c, fmt.Sprintf("Stream read error: %s", err.Error())) return s.sendErrorAndEnd(c, fmt.Sprintf("Stream read error: %s", err.Error()))
} }
@@ -1163,8 +1167,11 @@ func (s *AccountTestService) processOpenAIStream(c *gin.Context, body io.Reader)
jsonStr := sseDataPrefix.ReplaceAllString(line, "") jsonStr := sseDataPrefix.ReplaceAllString(line, "")
if jsonStr == "[DONE]" { if jsonStr == "[DONE]" {
s.sendEvent(c, TestEvent{Type: "test_complete", Success: true}) if seenCompleted {
return nil s.sendEvent(c, TestEvent{Type: "test_complete", Success: true})
return nil
}
return s.sendErrorAndEnd(c, "Stream ended before response.completed")
} }
var data map[string]any var data map[string]any
@@ -1180,9 +1187,20 @@ func (s *AccountTestService) processOpenAIStream(c *gin.Context, body io.Reader)
if delta, ok := data["delta"].(string); ok && delta != "" { if delta, ok := data["delta"].(string); ok && delta != "" {
s.sendEvent(c, TestEvent{Type: "content", Text: delta}) s.sendEvent(c, TestEvent{Type: "content", Text: delta})
} }
case "response.completed": case "response.completed", "response.done":
seenCompleted = true
s.sendEvent(c, TestEvent{Type: "test_complete", Success: true}) s.sendEvent(c, TestEvent{Type: "test_complete", Success: true})
return nil return nil
case "response.failed":
errorMsg := "OpenAI response failed"
if responseData, ok := data["response"].(map[string]any); ok {
if errData, ok := responseData["error"].(map[string]any); ok {
if msg, ok := errData["message"].(string); ok && msg != "" {
errorMsg = msg
}
}
}
return s.sendErrorAndEnd(c, errorMsg)
case "error": case "error":
errorMsg := "Unknown error" errorMsg := "Unknown error"
if errData, ok := data["error"].(map[string]any); ok { if errData, ok := data["error"].(map[string]any); ok {

View File

@@ -125,6 +125,31 @@ func TestAccountTestService_OpenAISuccessPersistsSnapshotFromHeaders(t *testing.
require.Contains(t, recorder.Body.String(), "test_complete") require.Contains(t, recorder.Body.String(), "test_complete")
} }
// A stream that delivers only a delta event and then hits EOF must be
// reported as a failure: the tester requires a response.completed event
// before it will emit a successful test_complete.
func TestAccountTestService_OpenAIStreamEOFBeforeCompletedFails(t *testing.T) {
	gin.SetMode(gin.TestMode)

	testCtx, rec := newTestContext()

	// Upstream answers with a single delta SSE frame and no terminal event.
	streamOnly := newJSONResponse(http.StatusOK, "")
	streamOnly.Body = io.NopCloser(strings.NewReader("data: {\"type\":\"response.output_text.delta\",\"delta\":\"hi\"}\n"))

	svc := &AccountTestService{
		httpUpstream: &queuedHTTPUpstream{responses: []*http.Response{streamOnly}},
	}
	acct := &Account{
		ID:          90,
		Platform:    PlatformOpenAI,
		Type:        AccountTypeOAuth,
		Concurrency: 1,
		Credentials: map[string]any{"access_token": "test-token"},
	}

	err := svc.testOpenAIAccountConnection(testCtx, acct, "gpt-5.4", "", "")
	require.Error(t, err)

	// The failure message should name the missing terminal event, and no
	// success event may have been streamed to the client.
	sse := rec.Body.String()
	require.Contains(t, sse, "response.completed")
	require.NotContains(t, sse, `"success":true`)
}
func TestAccountTestService_OpenAI429PersistsSnapshotAndRateLimitState(t *testing.T) { func TestAccountTestService_OpenAI429PersistsSnapshotAndRateLimitState(t *testing.T) {
gin.SetMode(gin.TestMode) gin.SetMode(gin.TestMode)
ctx, _ := newTestContext() ctx, _ := newTestContext()

View File

@@ -119,7 +119,7 @@ func openAIStreamEventIsTerminal(data string) bool {
return true return true
} }
switch gjson.Get(trimmed, "type").String() { switch gjson.Get(trimmed, "type").String() {
case "response.completed", "response.done", "response.failed": case "response.completed", "response.done", "response.failed", "response.incomplete", "response.cancelled", "response.canceled":
return true return true
default: default:
return false return false

View File

@@ -4372,7 +4372,8 @@ func (s *OpenAIGatewayService) parseSSEUsageBytes(data []byte, usage *OpenAIUsag
return return
} }
eventType := gjson.GetBytes(data, "type").String() eventType := gjson.GetBytes(data, "type").String()
if eventType != "response.completed" && eventType != "response.done" { if eventType != "response.completed" && eventType != "response.done" &&
eventType != "response.incomplete" && eventType != "response.cancelled" && eventType != "response.canceled" {
return return
} }
@@ -4519,7 +4520,7 @@ func extractOpenAISSETerminalEvent(body string) (string, []byte, bool) {
} }
eventType := strings.TrimSpace(gjson.Get(data, "type").String()) eventType := strings.TrimSpace(gjson.Get(data, "type").String())
switch eventType { switch eventType {
case "response.completed", "response.done", "response.failed": case "response.completed", "response.done", "response.failed", "response.incomplete", "response.cancelled", "response.canceled":
return eventType, []byte(data), true return eventType, []byte(data), true
} }
} }

View File

@@ -1336,6 +1336,41 @@ func TestOpenAIStreamingPassthroughResponseDoneWithoutDoneMarkerStillSucceeds(t
require.Equal(t, 1, result.usage.CacheReadInputTokens) require.Equal(t, 1, result.usage.CacheReadInputTokens)
} }
// An SSE stream whose terminal event is response.incomplete — with no
// trailing [DONE] marker — must still be treated as a successful
// passthrough, and the usage payload on that event must be extracted.
func TestOpenAIStreamingPassthroughResponseIncompleteWithoutDoneMarkerStillSucceeds(t *testing.T) {
	gin.SetMode(gin.TestMode)

	svc := &OpenAIGatewayService{
		cfg: &config.Config{
			Gateway: config.GatewayConfig{
				MaxLineSize: defaultMaxLineSize,
			},
		},
	}

	rec := httptest.NewRecorder()
	ginCtx, _ := gin.CreateTestContext(rec)
	ginCtx.Request = httptest.NewRequest(http.MethodPost, "/", nil)

	bodyReader, bodyWriter := io.Pipe()
	upstream := &http.Response{
		StatusCode: http.StatusOK,
		Body:       bodyReader,
		Header:     http.Header{},
	}

	// Feed a single response.incomplete event carrying usage, then close
	// the pipe without ever emitting a [DONE] marker.
	go func() {
		defer func() { _ = bodyWriter.Close() }()
		_, _ = bodyWriter.Write([]byte("data: {\"type\":\"response.incomplete\",\"response\":{\"usage\":{\"input_tokens\":2,\"output_tokens\":3,\"input_tokens_details\":{\"cached_tokens\":1}}}}\n\n"))
	}()

	result, err := svc.handleStreamingResponsePassthrough(ginCtx.Request.Context(), upstream, ginCtx, &Account{ID: 1}, time.Now(), "", "")
	_ = bodyReader.Close()

	require.NoError(t, err)
	require.NotNil(t, result)
	require.NotNil(t, result.usage)
	require.Equal(t, 2, result.usage.InputTokens)
	require.Equal(t, 3, result.usage.OutputTokens)
	require.Equal(t, 1, result.usage.CacheReadInputTokens)
}
func TestOpenAIStreamingTooLong(t *testing.T) { func TestOpenAIStreamingTooLong(t *testing.T) {
gin.SetMode(gin.TestMode) gin.SetMode(gin.TestMode)
cfg := &config.Config{ cfg := &config.Config{