From 8518ca65e283430402c61a5c12dd958a4f93c253 Mon Sep 17 00:00:00 2001
From: "1808837298@qq.com" <1808837298@qq.com>
Date: Mon, 6 Jan 2025 17:52:33 +0800
Subject: [PATCH] Adjust streaming timeout for OpenAI models in
 OaiStreamHandler

- Implemented conditional logic to double the streaming timeout for models
  whose names start with "o1" or "o3".
- Reworked the streaming timeout setup so the timeout is derived per model
  rather than taken directly from constant.StreamingTimeout, preventing
  premature timeouts for slower reasoning models.

---
 relay/channel/openai/relay-openai.go | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/relay/channel/openai/relay-openai.go b/relay/channel/openai/relay-openai.go
index bd39b904..d8b1aef3 100644
--- a/relay/channel/openai/relay-openai.go
+++ b/relay/channel/openai/relay-openai.go
@@ -65,8 +65,12 @@ func OaiStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.Rel
 	scanner.Split(bufio.ScanLines)
 	service.SetEventStreamHeaders(c)
-
-	ticker := time.NewTicker(time.Duration(constant.StreamingTimeout) * time.Second)
+	streamingTimeout := time.Duration(constant.StreamingTimeout) * time.Second
+	if strings.HasPrefix(info.UpstreamModelName, "o1") || strings.HasPrefix(info.UpstreamModelName, "o3") {
+		// double the timeout for o1/o3 models
+		streamingTimeout *= 2
+	}
+	ticker := time.NewTicker(streamingTimeout)
 	defer ticker.Stop()
 	stopChan := make(chan bool)
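
Not shown in the patch is how the ticker built from the adjusted timeout actually guards the stream. The Go sketch below is illustrative only, not the repository's code: baseStreamingTimeout, streamTimeoutFor, and relayStream are hypothetical stand-ins for constant.StreamingTimeout and the body of OaiStreamHandler. It shows one way a per-model timeout, doubled for "o1"/"o3" prefixes, can drive a stall detector around a streaming read loop.

package main

import (
	"bufio"
	"fmt"
	"strings"
	"time"
)

// Stand-in for constant.StreamingTimeout; the real value comes from configuration.
const baseStreamingTimeout = 60 * time.Second

// streamTimeoutFor doubles the base timeout for "o1"/"o3" models, which may
// pause longer between streamed chunks while reasoning.
func streamTimeoutFor(model string) time.Duration {
	t := baseStreamingTimeout
	if strings.HasPrefix(model, "o1") || strings.HasPrefix(model, "o3") {
		t *= 2
	}
	return t
}

// relayStream forwards lines from scanner to out, returning an error if no
// data arrives within the model's streaming timeout. Cancellation of the
// reader goroutine on timeout is omitted to keep the sketch short.
func relayStream(model string, scanner *bufio.Scanner, out chan<- string) error {
	timeout := streamTimeoutFor(model)
	ticker := time.NewTicker(timeout)
	defer ticker.Stop()

	lines := make(chan string)
	go func() {
		for scanner.Scan() {
			lines <- scanner.Text()
		}
		close(lines)
	}()

	for {
		select {
		case <-ticker.C:
			return fmt.Errorf("stream stalled: no data received for %s", timeout)
		case line, ok := <-lines:
			if !ok {
				return nil // upstream closed the stream
			}
			ticker.Reset(timeout) // each chunk resets the stall timer
			out <- line
		}
	}
}

func main() {
	scanner := bufio.NewScanner(strings.NewReader("data: hello\ndata: [DONE]\n"))
	out := make(chan string, 8)
	if err := relayStream("o1-preview", scanner, out); err != nil {
		fmt.Println("stream error:", err)
	}
	close(out)
	for line := range out {
		fmt.Println(line)
	}
}

If the real handler resets its ticker on each received chunk, as the sketch does, the timeout bounds the gap between chunks rather than the total stream duration, which is why simply doubling it for reasoning models that pause longer before emitting tokens is enough.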