From a9bfcb0daf19515a9b012114a25cb5ea5f748b91 Mon Sep 17 00:00:00 2001
From: "1808837298@qq.com" <1808837298@qq.com>
Date: Sat, 8 Mar 2025 16:50:24 +0800
Subject: [PATCH] feat: Add prompt cache hit tokens support for DeepSeek
 channel #406

---
 dto/openai_response.go               | 1 +
 relay/channel/openai/relay-openai.go | 6 ++++++
 2 files changed, 7 insertions(+)

diff --git a/dto/openai_response.go b/dto/openai_response.go
index a1d728fe..9188fad7 100644
--- a/dto/openai_response.go
+++ b/dto/openai_response.go
@@ -166,6 +166,7 @@ type Usage struct {
 	PromptTokens         int               `json:"prompt_tokens"`
 	CompletionTokens     int               `json:"completion_tokens"`
 	TotalTokens          int               `json:"total_tokens"`
+	PromptCacheHitTokens int               `json:"prompt_cache_hit_tokens,omitempty"`
 	PromptTokensDetails  InputTokenDetails `json:"prompt_tokens_details"`
 	CompletionTokenDetails OutputTokenDetails `json:"completion_tokens_details"`
 }
diff --git a/relay/channel/openai/relay-openai.go b/relay/channel/openai/relay-openai.go
index 223ddd3d..ffd36d3c 100644
--- a/relay/channel/openai/relay-openai.go
+++ b/relay/channel/openai/relay-openai.go
@@ -254,6 +254,12 @@ func OaiStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.Rel
 	if !containStreamUsage {
 		usage, _ = service.ResponseText2Usage(responseTextBuilder.String(), info.UpstreamModelName, info.PromptTokens)
 		usage.CompletionTokens += toolCount * 7
+	} else {
+		if info.ChannelType == common.ChannelTypeDeepSeek {
+			if usage.PromptCacheHitTokens != 0 {
+				usage.PromptTokensDetails.CachedTokens = usage.PromptCacheHitTokens
+			}
+		}
 	}
 
 	if info.ShouldIncludeUsage && !containStreamUsage {
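
Note for reviewers (not part of the patch): DeepSeek reports cached prompt
tokens in a proprietary top-level prompt_cache_hit_tokens field, while
OpenAI-compatible accounting reads prompt_tokens_details.cached_tokens; the
second hunk above copies the former into the latter so downstream billing
sees one field regardless of channel. Below is a minimal, self-contained Go
sketch of that mapping — the trimmed structs, the normalizeDeepSeekUsage
helper, and the sample payload are illustrative assumptions, not code taken
from this repository.

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // Trimmed stand-ins for dto.InputTokenDetails and dto.Usage, kept to
    // the fields this patch touches.
    type InputTokenDetails struct {
    	CachedTokens int `json:"cached_tokens"`
    }

    type Usage struct {
    	PromptTokens         int               `json:"prompt_tokens"`
    	CompletionTokens     int               `json:"completion_tokens"`
    	TotalTokens          int               `json:"total_tokens"`
    	PromptCacheHitTokens int               `json:"prompt_cache_hit_tokens,omitempty"`
    	PromptTokensDetails  InputTokenDetails `json:"prompt_tokens_details"`
    }

    // normalizeDeepSeekUsage mirrors the new else-branch: copy DeepSeek's
    // prompt_cache_hit_tokens into the OpenAI-style cached_tokens slot,
    // leaving the details untouched when the field is absent or zero.
    func normalizeDeepSeekUsage(u *Usage) {
    	if u.PromptCacheHitTokens != 0 {
    		u.PromptTokensDetails.CachedTokens = u.PromptCacheHitTokens
    	}
    }

    func main() {
    	// Hypothetical usage object from a DeepSeek final stream chunk.
    	payload := []byte(`{"prompt_tokens":320,"completion_tokens":40,"total_tokens":360,"prompt_cache_hit_tokens":256}`)

    	var u Usage
    	if err := json.Unmarshal(payload, &u); err != nil {
    		panic(err)
    	}
    	normalizeDeepSeekUsage(&u)
    	fmt.Println(u.PromptTokensDetails.CachedTokens) // prints 256
    }

Guarding on info.ChannelType in the patch keeps the copy from firing for
other OpenAI-compatible upstreams, which either omit the field or populate
cached_tokens themselves.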