fix(ops): 修复日志级别过滤并增强OpenAI错误诊断日志

- 移除 warn 级别下 access info 的强制入库补写,确保运行时日志级别真实生效

- 将 OpenAI fallback matched 与 passthrough 断流提示按需求降级为 info

- 为 codex_cli_only 与 instructions required 场景补充请求诊断字段(含 User-Agent)

- 出于安全考虑移除请求体预览,仅保留 request_body_size 与白名单头信息

- 新增/更新回归测试,覆盖 Forward 入口到日志落库链路
This commit is contained in:
yangjianbo
2026-02-13 19:27:07 +08:00
parent 2459eafb71
commit f96acf6e27
8 changed files with 410 additions and 75 deletions

View File

@@ -58,36 +58,6 @@ func Logger() gin.HandlerFunc {
l := logger.FromContext(c.Request.Context()).With(fields...)
l.Info("http request completed", zap.Time("completed_at", endTime))
// 当全局日志级别高于 info如 warn/erroraccess info 不会进入 zap core
// 这里补写一次 sink保证 ops 系统日志仍可索引关键访问轨迹。
if !logger.L().Core().Enabled(logger.LevelInfo) {
sinkFields := map[string]any{
"component": "http.access",
"status_code": statusCode,
"latency_ms": latency.Milliseconds(),
"client_ip": clientIP,
"protocol": protocol,
"method": method,
"path": path,
"completed_at": endTime,
}
if requestID, ok := c.Request.Context().Value(ctxkey.RequestID).(string); ok && requestID != "" {
sinkFields["request_id"] = requestID
}
if clientRequestID, ok := c.Request.Context().Value(ctxkey.ClientRequestID).(string); ok && clientRequestID != "" {
sinkFields["client_request_id"] = clientRequestID
}
if hasAccountID && accountID > 0 {
sinkFields["account_id"] = accountID
}
if platform != "" {
sinkFields["platform"] = platform
}
if model != "" {
sinkFields["model"] = model
}
logger.WriteSinkEvent("info", "http.access", "http request completed", sinkFields)
}
if len(c.Errors) > 0 {
l.Warn("http request contains gin errors", zap.String("errors", c.Errors.String()))

View File

@@ -201,7 +201,7 @@ func TestLogger_HealthPathSkipped(t *testing.T) {
}
}
func TestLogger_AccessLogStillIndexedWhenLevelWarn(t *testing.T) {
func TestLogger_AccessLogDroppedWhenLevelWarn(t *testing.T) {
gin.SetMode(gin.TestMode)
sink := initMiddlewareTestLoggerWithLevel(t, "warn")
@@ -220,30 +220,9 @@ func TestLogger_AccessLogStillIndexedWhenLevelWarn(t *testing.T) {
}
events := sink.list()
if len(events) == 0 {
t.Fatalf("expected access log event to be indexed when level=warn")
}
found := false
for _, event := range events {
if event == nil || event.Message != "http request completed" {
continue
if event != nil && event.Message == "http request completed" {
t.Fatalf("access log should not be indexed when level=warn: %+v", event)
}
found = true
if event.Level != "info" {
t.Fatalf("event level=%q, want info", event.Level)
}
if event.Component != "http.access" && event.Fields["component"] != "http.access" {
t.Fatalf("event component mismatch: component=%q fields=%v", event.Component, event.Fields["component"])
}
if _, ok := event.Fields["status_code"]; !ok {
t.Fatalf("status_code field missing: %+v", event.Fields)
}
if _, ok := event.Fields["request_id"]; !ok {
t.Fatalf("request_id field missing: %+v", event.Fields)
}
}
if !found {
t.Fatalf("access log event not found")
}
}

View File

@@ -76,7 +76,7 @@ func TestOpenAIHandleErrorResponse_NoRuleKeepsDefault(t *testing.T) {
}
account := &Account{ID: 12, Platform: PlatformOpenAI, Type: AccountTypeAPIKey}
_, err := svc.handleErrorResponse(context.Background(), resp, c, account)
_, err := svc.handleErrorResponse(context.Background(), resp, c, account, nil)
require.Error(t, err)
assert.Equal(t, http.StatusBadGateway, rec.Code)
@@ -157,7 +157,7 @@ func TestOpenAIHandleErrorResponse_AppliesRuleFor422(t *testing.T) {
}
account := &Account{ID: 2, Platform: PlatformOpenAI, Type: AccountTypeAPIKey}
_, err := svc.handleErrorResponse(context.Background(), resp, c, account)
_, err := svc.handleErrorResponse(context.Background(), resp, c, account, nil)
require.Error(t, err)
assert.Equal(t, http.StatusTeapot, rec.Code)

View File

@@ -35,6 +35,8 @@ const (
openaiPlatformAPIURL = "https://api.openai.com/v1/responses"
openaiStickySessionTTL = time.Hour // 粘性会话TTL
codexCLIUserAgent = "codex_cli_rs/0.98.0"
// codex_cli_only 拒绝时单个请求头日志长度上限(字符)
codexCLIOnlyHeaderValueMaxBytes = 256
// OpenAIParsedRequestBodyKey 缓存 handler 侧已解析的请求体,避免重复解析。
OpenAIParsedRequestBodyKey = "openai_parsed_request_body"
@@ -63,6 +65,22 @@ var openaiPassthroughAllowedHeaders = map[string]bool{
"session_id": true,
}
// Whitelist of request headers recorded when a codex_cli_only request is
// rejected. Used for diagnostic logging only; these headers are NOT forwarded
// to the upstream. Values are truncated before logging (see
// codexCLIOnlyHeaderValueMaxBytes).
var codexCLIOnlyDebugHeaderWhitelist = []string{
	"User-Agent",
	"Content-Type",
	"Accept",
	"Accept-Language",
	"OpenAI-Beta",
	"Originator",
	"Session_ID",
	"Conversation_ID",
	"X-Request-ID",
	"X-Client-Request-ID",
	"X-Forwarded-For",
	"X-Real-IP",
}
// OpenAICodexUsageSnapshot represents Codex API usage limits from response headers
type OpenAICodexUsageSnapshot struct {
PrimaryUsedPercent *float64 `json:"primary_used_percent,omitempty"`
@@ -269,7 +287,7 @@ func getAPIKeyIDFromContext(c *gin.Context) int64 {
return apiKey.ID
}
func logCodexCLIOnlyDetection(ctx context.Context, account *Account, apiKeyID int64, result CodexClientRestrictionDetectionResult) {
func logCodexCLIOnlyDetection(ctx context.Context, c *gin.Context, account *Account, apiKeyID int64, result CodexClientRestrictionDetectionResult, body []byte) {
if !result.Enabled {
return
}
@@ -290,6 +308,9 @@ func logCodexCLIOnlyDetection(ctx context.Context, account *Account, apiKeyID in
if apiKeyID > 0 {
fields = append(fields, zap.Int64("api_key_id", apiKeyID))
}
if !result.Matched {
fields = appendCodexCLIOnlyRejectedRequestFields(fields, c, body)
}
log := logger.FromContext(ctx).With(fields...)
if result.Matched {
log.Warn("OpenAI codex_cli_only 允许官方客户端请求")
@@ -298,6 +319,160 @@ func logCodexCLIOnlyDetection(ctx context.Context, account *Account, apiKeyID in
log.Warn("OpenAI codex_cli_only 拒绝非官方客户端请求")
}
// appendCodexCLIOnlyRejectedRequestFields attaches request-level diagnostic
// fields (method, path, client address, UA, whitelisted headers, body size)
// used when a codex_cli_only request is rejected. It is a no-op when no HTTP
// request is attached to the context. For safety the raw body is never logged
// — only its byte size — and prompt_cache_key is reduced to a truncated
// SHA-256 digest.
func appendCodexCLIOnlyRejectedRequestFields(fields []zap.Field, c *gin.Context, body []byte) []zap.Field {
	if c == nil || c.Request == nil {
		return fields
	}
	req := c.Request
	trimmedHeader := func(name string) string {
		return strings.TrimSpace(req.Header.Get(name))
	}
	model, stream, cacheKey := extractOpenAIRequestMetaFromBody(body)
	fields = append(fields,
		zap.String("request_method", strings.TrimSpace(req.Method)),
		zap.String("request_path", strings.TrimSpace(req.URL.Path)),
		zap.String("request_query", strings.TrimSpace(req.URL.RawQuery)),
		zap.String("request_host", strings.TrimSpace(req.Host)),
		zap.String("request_client_ip", strings.TrimSpace(c.ClientIP())),
		zap.String("request_remote_addr", strings.TrimSpace(req.RemoteAddr)),
		zap.String("request_user_agent", trimmedHeader("User-Agent")),
		zap.String("request_content_type", trimmedHeader("Content-Type")),
		zap.Int64("request_content_length", req.ContentLength),
		zap.Bool("request_stream", stream),
	)
	if model != "" {
		fields = append(fields, zap.String("request_model", model))
	}
	if cacheKey != "" {
		// Never log the raw cache key; hash it so operators can still correlate.
		fields = append(fields, zap.String("request_prompt_cache_key_sha256", hashSensitiveValueForLog(cacheKey)))
	}
	if snapshot := snapshotCodexCLIOnlyHeaders(req.Header); len(snapshot) > 0 {
		fields = append(fields, zap.Any("request_headers", snapshot))
	}
	return append(fields, zap.Int("request_body_size", len(body)))
}
// snapshotCodexCLIOnlyHeaders captures the whitelisted request headers for
// diagnostic logging. Keys are lowercased and values are truncated to
// codexCLIOnlyHeaderValueMaxBytes. Returns nil when the header set is empty
// or no whitelisted header carries a non-blank value.
func snapshotCodexCLIOnlyHeaders(header http.Header) map[string]string {
	if len(header) == 0 {
		return nil
	}
	snapshot := make(map[string]string, len(codexCLIOnlyDebugHeaderWhitelist))
	for _, name := range codexCLIOnlyDebugHeaderWhitelist {
		if v := strings.TrimSpace(header.Get(name)); v != "" {
			snapshot[strings.ToLower(name)] = truncateString(v, codexCLIOnlyHeaderValueMaxBytes)
		}
	}
	return snapshot
}
func hashSensitiveValueForLog(raw string) string {
value := strings.TrimSpace(raw)
if value == "" {
return ""
}
sum := sha256.Sum256([]byte(value))
return hex.EncodeToString(sum[:8])
}
// logOpenAIInstructionsRequiredDebug emits a warn-level diagnostic entry when
// the upstream response looks like OpenAI's "Instructions are required" 400
// rejection, attaching account identity and sanitized request metadata so the
// failing client can be identified. Non-matching errors are ignored.
//
// The raw request body is never logged; appendCodexCLIOnlyRejectedRequestFields
// records only its size plus whitelisted headers.
func logOpenAIInstructionsRequiredDebug(
	ctx context.Context,
	c *gin.Context,
	account *Account,
	upstreamStatusCode int,
	upstreamMsg string,
	requestBody []byte,
	upstreamBody []byte,
) {
	msg := strings.TrimSpace(upstreamMsg)
	if !isOpenAIInstructionsRequiredError(upstreamStatusCode, msg, upstreamBody) {
		return
	}
	if ctx == nil {
		ctx = context.Background()
	}
	accountID := int64(0)
	accountName := ""
	if account != nil {
		accountID = account.ID
		accountName = strings.TrimSpace(account.Name)
	}
	// Guard c.Request as well: gin's GetHeader dereferences c.Request, which
	// may be nil for contexts constructed outside the HTTP pipeline.
	userAgent := ""
	if c != nil && c.Request != nil {
		userAgent = strings.TrimSpace(c.Request.Header.Get("User-Agent"))
	}
	fields := []zap.Field{
		zap.String("component", "service.openai_gateway"),
		zap.Int64("account_id", accountID),
		zap.String("account_name", accountName),
		zap.Int("upstream_status_code", upstreamStatusCode),
		zap.String("upstream_error_message", msg),
		zap.Bool("codex_official_client_match", openai.IsCodexCLIRequest(userAgent)),
	}
	// appendCodexCLIOnlyRejectedRequestFields already records request_user_agent
	// when a request is present; only add it here for the no-request path so the
	// field is never duplicated in the structured log output.
	if c == nil || c.Request == nil {
		fields = append(fields, zap.String("request_user_agent", userAgent))
	}
	fields = appendCodexCLIOnlyRejectedRequestFields(fields, c, requestBody)
	logger.FromContext(ctx).With(fields...).Warn("OpenAI 上游返回 Instructions are required已记录请求详情用于排查")
}
// isOpenAIInstructionsRequiredError reports whether an upstream failure is the
// OpenAI "Instructions are required" 400 rejection. It checks the already
// extracted message first, then falls back to the structured error object
// (message/code/param/type) inside the raw response body. Only status 400 can
// ever match.
func isOpenAIInstructionsRequiredError(upstreamStatusCode int, upstreamMsg string, upstreamBody []byte) bool {
	if upstreamStatusCode != http.StatusBadRequest {
		return false
	}
	mentionsInstructionsRequired := func(text string) bool {
		lower := strings.ToLower(strings.TrimSpace(text))
		switch {
		case lower == "":
			return false
		case strings.Contains(lower, "instructions are required"),
			strings.Contains(lower, "required parameter: 'instructions'"),
			strings.Contains(lower, "required parameter: instructions"):
			return true
		case strings.Contains(lower, "missing required parameter") && strings.Contains(lower, "instructions"):
			return true
		}
		// Loose catch-all for message variants we have not seen yet.
		return strings.Contains(lower, "instruction") && strings.Contains(lower, "required")
	}
	if mentionsInstructionsRequired(upstreamMsg) {
		return true
	}
	if len(upstreamBody) == 0 {
		return false
	}
	normalized := func(path string) string {
		return strings.ToLower(strings.TrimSpace(gjson.GetBytes(upstreamBody, path).String()))
	}
	errMsg := gjson.GetBytes(upstreamBody, "error.message").String()
	errMsgLower := strings.ToLower(strings.TrimSpace(errMsg))
	errCode := normalized("error.code")
	errParam := normalized("error.param")
	errType := normalized("error.type")
	switch {
	case errParam == "instructions":
		return true
	case mentionsInstructionsRequired(errMsg):
		return true
	case strings.Contains(errCode, "missing_required_parameter") && strings.Contains(errMsgLower, "instructions"):
		return true
	case strings.Contains(errType, "invalid_request") && strings.Contains(errMsgLower, "instructions") && strings.Contains(errMsgLower, "required"):
		return true
	}
	return false
}
// GenerateSessionHash generates a sticky-session hash for OpenAI requests.
//
// Priority:
@@ -820,7 +995,7 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
restrictionResult := s.detectCodexClientRestriction(c, account)
apiKeyID := getAPIKeyIDFromContext(c)
logCodexCLIOnlyDetection(ctx, account, apiKeyID, restrictionResult)
logCodexCLIOnlyDetection(ctx, c, account, apiKeyID, restrictionResult, body)
if restrictionResult.Enabled && !restrictionResult.Matched {
c.JSON(http.StatusForbidden, gin.H{
"error": gin.H{
@@ -1047,7 +1222,7 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
s.handleFailoverSideEffects(ctx, resp, account)
return nil, &UpstreamFailoverError{StatusCode: resp.StatusCode, ResponseBody: respBody}
}
return s.handleErrorResponse(ctx, resp, c, account)
return s.handleErrorResponse(ctx, resp, c, account, body)
}
// Handle normal response
@@ -1183,7 +1358,7 @@ func (s *OpenAIGatewayService) forwardOpenAIPassthrough(
if resp.StatusCode >= 400 {
// 透传模式不做 failover避免改变原始上游语义按上游原样返回错误响应。
return nil, s.handleErrorResponsePassthrough(ctx, resp, c, account)
return nil, s.handleErrorResponsePassthrough(ctx, resp, c, account, body)
}
var usage *OpenAIUsage
@@ -1314,7 +1489,13 @@ func (s *OpenAIGatewayService) buildUpstreamRequestOpenAIPassthrough(
return req, nil
}
func (s *OpenAIGatewayService) handleErrorResponsePassthrough(ctx context.Context, resp *http.Response, c *gin.Context, account *Account) error {
func (s *OpenAIGatewayService) handleErrorResponsePassthrough(
ctx context.Context,
resp *http.Response,
c *gin.Context,
account *Account,
requestBody []byte,
) error {
body, _ := io.ReadAll(io.LimitReader(resp.Body, 2<<20))
upstreamMsg := strings.TrimSpace(extractUpstreamErrorMessage(body))
@@ -1328,6 +1509,7 @@ func (s *OpenAIGatewayService) handleErrorResponsePassthrough(ctx context.Contex
upstreamDetail = truncateString(string(body), maxBytes)
}
setOpsUpstreamError(c, resp.StatusCode, upstreamMsg, upstreamDetail)
logOpenAIInstructionsRequiredDebug(ctx, c, account, resp.StatusCode, upstreamMsg, requestBody, body)
appendOpsUpstreamError(c, OpsUpstreamErrorEvent{
Platform: account.Platform,
AccountID: account.ID,
@@ -1470,7 +1652,7 @@ func (s *OpenAIGatewayService) handleStreamingResponsePassthrough(
}
if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
logger.LegacyPrintf("service.openai_gateway",
"[WARN] [OpenAI passthrough] 流读取被取消,可能发生断流: account=%d request_id=%s err=%v ctx_err=%v",
"[OpenAI passthrough] 流读取被取消,可能发生断流: account=%d request_id=%s err=%v ctx_err=%v",
account.ID,
upstreamRequestID,
err,
@@ -1483,7 +1665,7 @@ func (s *OpenAIGatewayService) handleStreamingResponsePassthrough(
return &openaiStreamingResultPassthrough{usage: usage, firstTokenMs: firstTokenMs}, err
}
logger.LegacyPrintf("service.openai_gateway",
"[WARN] [OpenAI passthrough] 流读取异常中断: account=%d request_id=%s err=%v",
"[OpenAI passthrough] 流读取异常中断: account=%d request_id=%s err=%v",
account.ID,
upstreamRequestID,
err,
@@ -1495,7 +1677,7 @@ func (s *OpenAIGatewayService) handleStreamingResponsePassthrough(
zap.String("component", "service.openai_gateway"),
zap.Int64("account_id", account.ID),
zap.String("upstream_request_id", upstreamRequestID),
).Warn("OpenAI passthrough 上游流在未收到 [DONE] 时结束,疑似断流")
).Info("OpenAI passthrough 上游流在未收到 [DONE] 时结束,疑似断流")
}
return &openaiStreamingResultPassthrough{usage: usage, firstTokenMs: firstTokenMs}, nil
@@ -1678,7 +1860,13 @@ func (s *OpenAIGatewayService) buildUpstreamRequest(ctx context.Context, c *gin.
return req, nil
}
func (s *OpenAIGatewayService) handleErrorResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account) (*OpenAIForwardResult, error) {
func (s *OpenAIGatewayService) handleErrorResponse(
ctx context.Context,
resp *http.Response,
c *gin.Context,
account *Account,
requestBody []byte,
) (*OpenAIForwardResult, error) {
body, _ := io.ReadAll(io.LimitReader(resp.Body, 2<<20))
upstreamMsg := strings.TrimSpace(extractUpstreamErrorMessage(body))
@@ -1692,6 +1880,7 @@ func (s *OpenAIGatewayService) handleErrorResponse(ctx context.Context, resp *ht
upstreamDetail = truncateString(string(body), maxBytes)
}
setOpsUpstreamError(c, resp.StatusCode, upstreamMsg, upstreamDetail)
logOpenAIInstructionsRequiredDebug(ctx, c, account, resp.StatusCode, upstreamMsg, requestBody, body)
if s.cfg != nil && s.cfg.Gateway.LogUpstreamErrorBody {
logger.LegacyPrintf("service.openai_gateway",

View File

@@ -1,9 +1,12 @@
package service
import (
"bytes"
"context"
"io"
"net/http"
"net/http/httptest"
"strings"
"testing"
"github.com/Wei-Shaw/sub2api/internal/config"
@@ -95,8 +98,8 @@ func TestGetAPIKeyIDFromContext(t *testing.T) {
func TestLogCodexCLIOnlyDetection_NilSafety(t *testing.T) {
// 不校验日志内容,仅保证在 nil 入参下不会 panic。
require.NotPanics(t, func() {
logCodexCLIOnlyDetection(context.TODO(), nil, 0, CodexClientRestrictionDetectionResult{Enabled: true, Matched: false, Reason: "test"})
logCodexCLIOnlyDetection(context.Background(), nil, 0, CodexClientRestrictionDetectionResult{Enabled: false, Matched: false, Reason: "disabled"})
logCodexCLIOnlyDetection(context.TODO(), nil, nil, 0, CodexClientRestrictionDetectionResult{Enabled: true, Matched: false, Reason: "test"}, nil)
logCodexCLIOnlyDetection(context.Background(), nil, nil, 0, CodexClientRestrictionDetectionResult{Enabled: false, Matched: false, Reason: "disabled"}, nil)
})
}
@@ -105,17 +108,159 @@ func TestLogCodexCLIOnlyDetection_LogsBothMatchedAndRejected(t *testing.T) {
defer restore()
account := &Account{ID: 1001}
logCodexCLIOnlyDetection(context.Background(), account, 2002, CodexClientRestrictionDetectionResult{
logCodexCLIOnlyDetection(context.Background(), nil, account, 2002, CodexClientRestrictionDetectionResult{
Enabled: true,
Matched: true,
Reason: CodexClientRestrictionReasonMatchedUA,
})
logCodexCLIOnlyDetection(context.Background(), account, 2002, CodexClientRestrictionDetectionResult{
}, nil)
logCodexCLIOnlyDetection(context.Background(), nil, account, 2002, CodexClientRestrictionDetectionResult{
Enabled: true,
Matched: false,
Reason: CodexClientRestrictionReasonNotMatchedUA,
})
}, nil)
require.True(t, logSink.ContainsMessage("OpenAI codex_cli_only 允许官方客户端请求"))
require.True(t, logSink.ContainsMessage("OpenAI codex_cli_only 拒绝非官方客户端请求"))
}
// TestLogCodexCLIOnlyDetection_RejectedIncludesRequestDetails verifies that a
// rejected codex_cli_only request logs diagnostic metadata (UA, model, query,
// hashed prompt_cache_key, whitelisted headers, body size) and that the raw
// request body / any body preview never reaches the log.
func TestLogCodexCLIOnlyDetection_RejectedIncludesRequestDetails(t *testing.T) {
	gin.SetMode(gin.TestMode)
	logSink, restore := captureStructuredLog(t)
	defer restore()
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/responses?trace=1", bytes.NewReader(nil))
	c.Request.Header.Set("User-Agent", "curl/8.0")
	c.Request.Header.Set("Content-Type", "application/json")
	c.Request.Header.Set("OpenAI-Beta", "assistants=v2")
	// access_token in the body must never surface in any logged field.
	body := []byte(`{"model":"gpt-5.2","stream":false,"prompt_cache_key":"pc-123","access_token":"secret-token","input":[{"type":"text","text":"hello"}]}`)
	account := &Account{ID: 1001}
	logCodexCLIOnlyDetection(context.Background(), c, account, 2002, CodexClientRestrictionDetectionResult{
		Enabled: true,
		Matched: false,
		Reason:  CodexClientRestrictionReasonNotMatchedUA,
	}, body)
	require.True(t, logSink.ContainsFieldValue("request_user_agent", "curl/8.0"))
	require.True(t, logSink.ContainsFieldValue("request_model", "gpt-5.2"))
	require.True(t, logSink.ContainsFieldValue("request_query", "trace=1"))
	// prompt_cache_key is logged only as a truncated SHA-256 digest.
	require.True(t, logSink.ContainsFieldValue("request_prompt_cache_key_sha256", hashSensitiveValueForLog("pc-123")))
	require.True(t, logSink.ContainsFieldValue("request_headers", "openai-beta"))
	require.True(t, logSink.ContainsField("request_body_size"))
	// Body preview was removed for security; only the size is retained.
	require.False(t, logSink.ContainsField("request_body_preview"))
}
// TestLogOpenAIInstructionsRequiredDebug_LogsRequestDetails verifies that a
// matching "Instructions are required" upstream 400 produces a warn-level
// diagnostic entry carrying account identity and sanitized request metadata,
// while the raw body / body preview stays out of the log.
func TestLogOpenAIInstructionsRequiredDebug_LogsRequestDetails(t *testing.T) {
	gin.SetMode(gin.TestMode)
	logSink, restore := captureStructuredLog(t)
	defer restore()
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/responses?trace=1", bytes.NewReader(nil))
	c.Request.Header.Set("User-Agent", "curl/8.0")
	c.Request.Header.Set("Content-Type", "application/json")
	c.Request.Header.Set("OpenAI-Beta", "assistants=v2")
	// access_token in the body must never surface in any logged field.
	body := []byte(`{"model":"gpt-5.1-codex","stream":false,"prompt_cache_key":"pc-abc","access_token":"secret-token","input":[{"type":"text","text":"hello"}]}`)
	account := &Account{ID: 1001, Name: "codex max套餐"}
	logOpenAIInstructionsRequiredDebug(
		context.Background(),
		c,
		account,
		http.StatusBadRequest,
		"Instructions are required",
		body,
		[]byte(`{"error":{"message":"Instructions are required","type":"invalid_request_error","param":"instructions","code":"missing_required_parameter"}}`),
	)
	require.True(t, logSink.ContainsMessageAtLevel("OpenAI 上游返回 Instructions are required已记录请求详情用于排查", "warn"))
	require.True(t, logSink.ContainsFieldValue("request_user_agent", "curl/8.0"))
	require.True(t, logSink.ContainsFieldValue("request_model", "gpt-5.1-codex"))
	require.True(t, logSink.ContainsFieldValue("request_query", "trace=1"))
	require.True(t, logSink.ContainsFieldValue("account_name", "codex max套餐"))
	require.True(t, logSink.ContainsFieldValue("request_headers", "openai-beta"))
	require.True(t, logSink.ContainsField("request_body_size"))
	// Body preview was removed for security; only the size is retained.
	require.False(t, logSink.ContainsField("request_body_preview"))
}
// TestLogOpenAIInstructionsRequiredDebug_NonTargetErrorSkipped verifies that
// an unrelated upstream error (403 here) does not trigger the
// instructions-required diagnostic log entry.
func TestLogOpenAIInstructionsRequiredDebug_NonTargetErrorSkipped(t *testing.T) {
	gin.SetMode(gin.TestMode)
	logSink, restore := captureStructuredLog(t)
	defer restore()
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/responses", bytes.NewReader(nil))
	c.Request.Header.Set("User-Agent", "curl/8.0")
	body := []byte(`{"model":"gpt-5.1-codex","stream":false}`)
	logOpenAIInstructionsRequiredDebug(
		context.Background(),
		c,
		&Account{ID: 1001},
		http.StatusForbidden,
		"forbidden",
		body,
		[]byte(`{"error":{"message":"forbidden"}}`),
	)
	require.False(t, logSink.ContainsMessage("OpenAI 上游返回 Instructions are required已记录请求详情用于排查"))
}
// TestOpenAIGatewayService_Forward_LogsInstructionsRequiredDetails is an
// end-to-end regression covering the Forward entry point: a stubbed upstream
// returns the "Missing required parameter: 'instructions'" 400, and the test
// asserts the gateway surfaces a 502 to the caller while logging the
// warn-level diagnostic with sanitized request details (and no body preview).
func TestOpenAIGatewayService_Forward_LogsInstructionsRequiredDetails(t *testing.T) {
	gin.SetMode(gin.TestMode)
	logSink, restore := captureStructuredLog(t)
	defer restore()
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/responses?trace=1", bytes.NewReader(nil))
	c.Request.Header.Set("User-Agent", "codex_cli_rs/0.1.0")
	c.Request.Header.Set("Content-Type", "application/json")
	c.Request.Header.Set("OpenAI-Beta", "assistants=v2")
	// Stub upstream that always answers with the target 400 error payload.
	upstream := &httpUpstreamRecorder{
		resp: &http.Response{
			StatusCode: http.StatusBadRequest,
			Header: http.Header{
				"Content-Type": []string{"application/json"},
				"x-request-id": []string{"rid-upstream"},
			},
			Body: io.NopCloser(strings.NewReader(`{"error":{"message":"Missing required parameter: 'instructions'","type":"invalid_request_error","param":"instructions","code":"missing_required_parameter"}}`)),
		},
	}
	svc := &OpenAIGatewayService{
		cfg: &config.Config{
			Gateway: config.GatewayConfig{ForceCodexCLI: false},
		},
		httpUpstream: upstream,
	}
	account := &Account{
		ID:             1001,
		Name:           "codex max套餐",
		Platform:       PlatformOpenAI,
		Type:           AccountTypeAPIKey,
		Concurrency:    1,
		Credentials:    map[string]any{"api_key": "sk-test"},
		Status:         StatusActive,
		Schedulable:    true,
		RateMultiplier: f64p(1),
	}
	body := []byte(`{"model":"gpt-5.1-codex","stream":false,"input":[{"type":"text","text":"hello"}],"prompt_cache_key":"pc-forward","access_token":"secret-token"}`)
	_, err := svc.Forward(context.Background(), c, account, body)
	require.Error(t, err)
	// The upstream 400 is reported to the client as 502 Bad Gateway.
	require.Equal(t, http.StatusBadGateway, rec.Code)
	require.Contains(t, err.Error(), "upstream error: 400")
	require.True(t, logSink.ContainsMessageAtLevel("OpenAI 上游返回 Instructions are required已记录请求详情用于排查", "warn"))
	require.True(t, logSink.ContainsFieldValue("request_user_agent", "codex_cli_rs/0.1.0"))
	require.True(t, logSink.ContainsFieldValue("request_model", "gpt-5.1-codex"))
	require.True(t, logSink.ContainsFieldValue("request_headers", "openai-beta"))
	require.True(t, logSink.ContainsField("request_body_size"))
	// Body preview was removed for security; only the size is retained.
	require.False(t, logSink.ContainsField("request_body_preview"))
}

View File

@@ -81,6 +81,21 @@ func (s *inMemoryLogSink) ContainsMessage(substr string) bool {
return false
}
// ContainsMessageAtLevel reports whether any captured event contains substr in
// its message AND was logged at the given level. The level comparison is
// case-insensitive and ignores surrounding whitespace.
func (s *inMemoryLogSink) ContainsMessageAtLevel(substr, level string) bool {
	s.mu.Lock()
	defer s.mu.Unlock()
	want := strings.ToLower(strings.TrimSpace(level))
	for _, ev := range s.events {
		if ev == nil {
			continue
		}
		if strings.ToLower(strings.TrimSpace(ev.Level)) != want {
			continue
		}
		if strings.Contains(ev.Message, substr) {
			return true
		}
	}
	return false
}
func (s *inMemoryLogSink) ContainsFieldValue(field, substr string) bool {
s.mu.Lock()
defer s.mu.Unlock()
@@ -95,6 +110,20 @@ func (s *inMemoryLogSink) ContainsFieldValue(field, substr string) bool {
return false
}
// ContainsField reports whether any captured event carries the named field,
// regardless of its value.
func (s *inMemoryLogSink) ContainsField(field string) bool {
	s.mu.Lock()
	defer s.mu.Unlock()
	for _, ev := range s.events {
		if ev == nil {
			continue
		}
		// Indexing a nil map is safe in Go, so no explicit Fields nil-check is needed.
		if _, ok := ev.Fields[field]; ok {
			return true
		}
	}
	return false
}
func captureStructuredLog(t *testing.T) (*inMemoryLogSink, func()) {
t.Helper()
structuredLogCaptureMu.Lock()
@@ -712,7 +741,7 @@ func TestOpenAIGatewayService_OAuthPassthrough_WarnOnTimeoutHeadersForStream(t *
require.True(t, logSink.ContainsFieldValue("timeout_headers", "x-stainless-timeout=10000"))
}
func TestOpenAIGatewayService_OAuthPassthrough_WarnWhenStreamEndsWithoutDone(t *testing.T) {
func TestOpenAIGatewayService_OAuthPassthrough_InfoWhenStreamEndsWithoutDone(t *testing.T) {
gin.SetMode(gin.TestMode)
logSink, restore := captureStructuredLog(t)
defer restore()
@@ -750,6 +779,7 @@ func TestOpenAIGatewayService_OAuthPassthrough_WarnWhenStreamEndsWithoutDone(t *
_, err := svc.Forward(context.Background(), c, account, originalBody)
require.NoError(t, err)
require.True(t, logSink.ContainsMessage("上游流在未收到 [DONE] 时结束,疑似断流"))
require.True(t, logSink.ContainsMessageAtLevel("上游流在未收到 [DONE] 时结束,疑似断流", "info"))
require.True(t, logSink.ContainsFieldValue("upstream_request_id", "rid-truncate"))
}

View File

@@ -17,6 +17,7 @@ import (
"github.com/Wei-Shaw/sub2api/internal/pkg/logger"
"github.com/Wei-Shaw/sub2api/internal/pkg/openai"
"github.com/Wei-Shaw/sub2api/internal/util/urlvalidator"
"go.uber.org/zap"
)
var (
@@ -659,7 +660,8 @@ func (s *PricingService) matchOpenAIModel(model string) *LiteLLMModelPricing {
if strings.HasPrefix(model, "gpt-5.3-codex-spark") {
if pricing, ok := s.pricingData["gpt-5.1-codex"]; ok {
logger.LegacyPrintf("service.pricing", "[Pricing][SparkBilling] %s -> %s billing", model, "gpt-5.1-codex")
logger.LegacyPrintf("service.pricing", "[Pricing] OpenAI fallback matched %s -> %s", model, "gpt-5.1-codex")
logger.With(zap.String("component", "service.pricing")).
Info(fmt.Sprintf("[Pricing] OpenAI fallback matched %s -> %s", model, "gpt-5.1-codex"))
return pricing
}
}
@@ -669,14 +671,16 @@ func (s *PricingService) matchOpenAIModel(model string) *LiteLLMModelPricing {
for _, variant := range variants {
if pricing, ok := s.pricingData[variant]; ok {
logger.LegacyPrintf("service.pricing", "[Pricing] OpenAI fallback matched %s -> %s", model, variant)
logger.With(zap.String("component", "service.pricing")).
Info(fmt.Sprintf("[Pricing] OpenAI fallback matched %s -> %s", model, variant))
return pricing
}
}
if strings.HasPrefix(model, "gpt-5.3-codex") {
if pricing, ok := s.pricingData["gpt-5.2-codex"]; ok {
logger.LegacyPrintf("service.pricing", "[Pricing] OpenAI fallback matched %s -> %s", model, "gpt-5.2-codex")
logger.With(zap.String("component", "service.pricing")).
Info(fmt.Sprintf("[Pricing] OpenAI fallback matched %s -> %s", model, "gpt-5.2-codex"))
return pricing
}
}

View File

@@ -33,3 +33,21 @@ func TestGetModelPricing_Gpt53CodexFallbackStillUsesGpt52Codex(t *testing.T) {
got := svc.GetModelPricing("gpt-5.3-codex")
require.Same(t, gpt52CodexPricing, got)
}
// TestGetModelPricing_OpenAIFallbackMatchedLoggedAsInfo verifies that the
// gpt-5.3-codex -> gpt-5.2-codex pricing fallback still resolves to the same
// pricing entry AND that the fallback notice is emitted at info level (not
// warn), per the log-level cleanup in this change.
func TestGetModelPricing_OpenAIFallbackMatchedLoggedAsInfo(t *testing.T) {
	logSink, restore := captureStructuredLog(t)
	defer restore()
	gpt52CodexPricing := &LiteLLMModelPricing{InputCostPerToken: 2}
	svc := &PricingService{
		pricingData: map[string]*LiteLLMModelPricing{
			"gpt-5.2-codex": gpt52CodexPricing,
		},
	}
	got := svc.GetModelPricing("gpt-5.3-codex")
	require.Same(t, gpt52CodexPricing, got)
	require.True(t, logSink.ContainsMessageAtLevel("[Pricing] OpenAI fallback matched gpt-5.3-codex -> gpt-5.2-codex", "info"))
	require.False(t, logSink.ContainsMessageAtLevel("[Pricing] OpenAI fallback matched gpt-5.3-codex -> gpt-5.2-codex", "warn"))
}