feat(openai): 极致优化 OAuth 链路并补齐性能守护
- 优化 /v1/responses 热路径,减少重复解析与不必要拷贝
- 优化并发与 token 竞争路径并补齐运行指标
- 补充 OpenAI/Ops 相关单元测试与回归用例
- 新增灰度阈值守护与压测脚本,支撑发布验收
This commit is contained in:
125
backend/internal/service/openai_gateway_service_hotpath_test.go
Normal file
@@ -0,0 +1,125 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestExtractOpenAIRequestMetaFromBody(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
body []byte
|
||||
wantModel string
|
||||
wantStream bool
|
||||
wantPromptKey string
|
||||
}{
|
||||
{
|
||||
name: "完整字段",
|
||||
body: []byte(`{"model":"gpt-5","stream":true,"prompt_cache_key":" ses-1 "}`),
|
||||
wantModel: "gpt-5",
|
||||
wantStream: true,
|
||||
wantPromptKey: "ses-1",
|
||||
},
|
||||
{
|
||||
name: "缺失可选字段",
|
||||
body: []byte(`{"model":"gpt-4"}`),
|
||||
wantModel: "gpt-4",
|
||||
wantStream: false,
|
||||
wantPromptKey: "",
|
||||
},
|
||||
{
|
||||
name: "空请求体",
|
||||
body: nil,
|
||||
wantModel: "",
|
||||
wantStream: false,
|
||||
wantPromptKey: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
model, stream, promptKey := extractOpenAIRequestMetaFromBody(tt.body)
|
||||
require.Equal(t, tt.wantModel, model)
|
||||
require.Equal(t, tt.wantStream, stream)
|
||||
require.Equal(t, tt.wantPromptKey, promptKey)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestExtractOpenAIReasoningEffortFromBody(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
body []byte
|
||||
model string
|
||||
wantNil bool
|
||||
wantValue string
|
||||
}{
|
||||
{
|
||||
name: "优先读取 reasoning.effort",
|
||||
body: []byte(`{"reasoning":{"effort":"medium"}}`),
|
||||
model: "gpt-5-high",
|
||||
wantNil: false,
|
||||
wantValue: "medium",
|
||||
},
|
||||
{
|
||||
name: "兼容 reasoning_effort",
|
||||
body: []byte(`{"reasoning_effort":"x-high"}`),
|
||||
model: "",
|
||||
wantNil: false,
|
||||
wantValue: "xhigh",
|
||||
},
|
||||
{
|
||||
name: "minimal 归一化为空",
|
||||
body: []byte(`{"reasoning":{"effort":"minimal"}}`),
|
||||
model: "gpt-5-high",
|
||||
wantNil: true,
|
||||
},
|
||||
{
|
||||
name: "缺失字段时从模型后缀推导",
|
||||
body: []byte(`{"input":"hi"}`),
|
||||
model: "gpt-5-high",
|
||||
wantNil: false,
|
||||
wantValue: "high",
|
||||
},
|
||||
{
|
||||
name: "未知后缀不返回",
|
||||
body: []byte(`{"input":"hi"}`),
|
||||
model: "gpt-5-unknown",
|
||||
wantNil: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := extractOpenAIReasoningEffortFromBody(tt.body, tt.model)
|
||||
if tt.wantNil {
|
||||
require.Nil(t, got)
|
||||
return
|
||||
}
|
||||
require.NotNil(t, got)
|
||||
require.Equal(t, tt.wantValue, *got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetOpenAIRequestBodyMap_UsesContextCache(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
rec := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(rec)
|
||||
|
||||
cached := map[string]any{"model": "cached-model", "stream": true}
|
||||
c.Set(OpenAIParsedRequestBodyKey, cached)
|
||||
|
||||
got, err := getOpenAIRequestBodyMap(c, []byte(`{invalid-json`))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, cached, got)
|
||||
}
|
||||
|
||||
// TestGetOpenAIRequestBodyMap_ParseErrorWithoutCache verifies that with a
// nil context (so no cached body is available) malformed JSON surfaces an
// error whose message mentions "parse request".
func TestGetOpenAIRequestBodyMap_ParseErrorWithoutCache(t *testing.T) {
	_, err := getOpenAIRequestBodyMap(nil, []byte(`{invalid-json`))
	// Order matters: require.Error stops the test before err.Error() below
	// could be called on a nil error.
	require.Error(t, err)
	require.Contains(t, err.Error(), "parse request")
}
|
||||
Reference in New Issue
Block a user