fix(openai): fall back to prompt_cache_key for sticky sessions
opencode requests carry no session_id/conversation_id, which broke sticky sessions. The session hash is now derived from the header first, falling back to prompt_cache_key, and unit tests were added to verify the priority order.
@@ -186,8 +186,8 @@ func (h *OpenAIGatewayHandler) Responses(c *gin.Context) {
 		return
 	}
 
-	// Generate session hash (from header for OpenAI)
-	sessionHash := h.gatewayService.GenerateSessionHash(c)
+	// Generate session hash (header first; fallback to prompt_cache_key)
+	sessionHash := h.gatewayService.GenerateSessionHash(c, reqBody)
 
 	const maxAccountSwitches = 3
 	switchCount := 0
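
A minimal sketch of the fallback logic implied by the new GenerateSessionHash(c, reqBody) signature. Everything not visible in the diff is an assumption: the openAIRequest body type, the session_id/conversation_id header names, and the hashKey helper are hypothetical stand-ins, and the real implementation may differ.

package service

import (
	"crypto/sha256"
	"encoding/hex"

	"github.com/gin-gonic/gin"
)

// GatewayService is a placeholder receiver type for this sketch.
type GatewayService struct{}

// openAIRequest is a hypothetical stand-in for the parsed request body;
// only the field used by the fallback is shown.
type openAIRequest struct {
	PromptCacheKey string `json:"prompt_cache_key"`
}

// GenerateSessionHash prefers an explicit session header; when none is
// present (e.g. opencode requests), it falls back to prompt_cache_key.
func (s *GatewayService) GenerateSessionHash(c *gin.Context, req *openAIRequest) string {
	// Header first (assumed header names, not taken from the diff).
	for _, name := range []string{"session_id", "conversation_id"} {
		if v := c.GetHeader(name); v != "" {
			return hashKey(v)
		}
	}
	// Fallback: prompt_cache_key from the request body.
	if req != nil && req.PromptCacheKey != "" {
		return hashKey(req.PromptCacheKey)
	}
	// No stable key available; the caller skips sticky routing.
	return ""
}

// hashKey reduces an arbitrary key to a fixed-length hex digest.
func hashKey(v string) string {
	sum := sha256.Sum256([]byte(v))
	return hex.EncodeToString(sum[:])
}

Hashing the key rather than using it directly keeps sticky-session lookups uniform regardless of the key's length or content.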