feat: add gemini tool_calls finish reason

Author: CalciumIon
Date: 2024-12-06 14:31:27 +08:00
Parent: 8c42ea19b9
Commit: 195ab1fdd5
8 changed files with 21 additions and 15 deletions

View File

@@ -0,0 +1,6 @@
+package constant
+
+var (
+	FinishReasonStop      = "stop"
+	FinishReasonToolCalls = "tool_calls"
+)
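
Note that the new finish reasons are declared with var rather than const: the streaming choices below assign their address (&constant.FinishReasonStop), and Go cannot take the address of a constant. The standalone sketch below illustrates the two usage patterns in this commit; the choice field types are assumptions inferred from how the diff uses them (an optional *string in streaming chunks, a plain string in non-streaming responses), not the repository's actual dto definitions.

package main

import "fmt"

// Stand-ins for the new constant package; declared with var so their
// address can be taken, which a const would not allow.
var (
	FinishReasonStop      = "stop"
	FinishReasonToolCalls = "tool_calls"
)

// Assumed shapes: streaming choices carry finish_reason as an optional
// *string, non-streaming choices as a plain string.
type streamChoice struct {
	FinishReason *string
}

type textChoice struct {
	FinishReason string
}

func main() {
	// Streaming: only the final chunk sets a finish reason, hence the pointer.
	sc := streamChoice{FinishReason: &FinishReasonStop}

	// Non-streaming: the value is assigned directly, as in responseGeminiChat2OpenAI.
	tc := textChoice{FinishReason: FinishReasonToolCalls}

	fmt.Println(*sc.FinishReason, tc.FinishReason) // prints: stop tool_calls
}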

View File

@@ -9,8 +9,8 @@ import (
 	"io"
 	"net/http"
 	"one-api/common"
+	"one-api/constant"
 	"one-api/dto"
-	relaycommon "one-api/relay/common"
 	"one-api/service"
 	"strings"
 	"sync"
@@ -75,7 +75,7 @@ func streamResponseBaidu2OpenAI(baiduResponse *BaiduChatStreamResponse) *dto.Cha
 	var choice dto.ChatCompletionsStreamResponseChoice
 	choice.Delta.SetContentString(baiduResponse.Result)
 	if baiduResponse.IsEnd {
-		choice.FinishReason = &relaycommon.StopFinishReason
+		choice.FinishReason = &constant.FinishReasonStop
 	}
 	response := dto.ChatCompletionsStreamResponse{
 		Id: baiduResponse.Id,

View File

@@ -8,6 +8,7 @@ import (
 	"io"
 	"net/http"
 	"one-api/common"
+	"one-api/constant"
 	"one-api/dto"
 	relaycommon "one-api/relay/common"
 	"one-api/service"
@@ -186,10 +187,11 @@ func responseGeminiChat2OpenAI(response *GeminiChatResponse) *dto.OpenAITextResp
 				Role: "assistant",
 				Content: content,
 			},
-			FinishReason: relaycommon.StopFinishReason,
+			FinishReason: constant.FinishReasonStop,
 		}
 		if len(candidate.Content.Parts) > 0 {
 			if candidate.Content.Parts[0].FunctionCall != nil {
+				choice.FinishReason = constant.FinishReasonToolCalls
 				choice.Message.ToolCalls = getToolCalls(&candidate)
 			} else {
 				choice.Message.SetStringContent(candidate.Content.Parts[0].Text)
@@ -262,7 +264,7 @@ func GeminiChatStreamHandler(c *gin.Context, resp *http.Response, info *relaycom
 		}
 	}
-	response := service.GenerateStopResponse(id, createAt, info.UpstreamModelName, relaycommon.StopFinishReason)
+	response := service.GenerateStopResponse(id, createAt, info.UpstreamModelName, constant.FinishReasonStop)
 	service.ObjectData(c, response)
 	usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
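
The new tool_calls branch in responseGeminiChat2OpenAI defers to the repository's getToolCalls(&candidate) helper, whose body is not part of this diff. As a rough illustration only, the standalone sketch below shows the kind of mapping such a helper performs, converting Gemini functionCall parts into OpenAI-style tool_calls entries; every type name in it is a hypothetical stand-in, not the project's dto or GeminiChatResponse definitions.

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical Gemini-side shapes.
type geminiFunctionCall struct {
	Name string                 `json:"name"`
	Args map[string]interface{} `json:"args"`
}

type geminiPart struct {
	Text         string              `json:"text,omitempty"`
	FunctionCall *geminiFunctionCall `json:"functionCall,omitempty"`
}

// Hypothetical OpenAI-compatible tool_calls shapes.
type toolCallFunction struct {
	Name      string `json:"name"`
	Arguments string `json:"arguments"` // JSON-encoded arguments, as OpenAI clients expect
}

type toolCall struct {
	ID       string           `json:"id"`
	Type     string           `json:"type"` // always "function"
	Function toolCallFunction `json:"function"`
}

// sketchToolCalls converts Gemini functionCall parts into OpenAI-style
// tool_calls entries, roughly the job a helper like getToolCalls does.
func sketchToolCalls(parts []geminiPart) []toolCall {
	var calls []toolCall
	for i, p := range parts {
		if p.FunctionCall == nil {
			continue
		}
		args, err := json.Marshal(p.FunctionCall.Args)
		if err != nil {
			args = []byte("{}")
		}
		calls = append(calls, toolCall{
			ID:   fmt.Sprintf("call_%d", i), // placeholder ID scheme
			Type: "function",
			Function: toolCallFunction{
				Name:      p.FunctionCall.Name,
				Arguments: string(args),
			},
		})
	}
	return calls
}

func main() {
	parts := []geminiPart{{FunctionCall: &geminiFunctionCall{
		Name: "get_weather",
		Args: map[string]interface{}{"city": "Paris"},
	}}}
	out, _ := json.MarshalIndent(sketchToolCalls(parts), "", "  ")
	fmt.Println(string(out))
}

Reporting finish_reason as tool_calls instead of stop is what tells an OpenAI-compatible client that the model expects it to execute the listed tools and send their results back in a follow-up request.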

View File

@@ -7,8 +7,8 @@ import (
 	"io"
 	"net/http"
 	"one-api/common"
+	"one-api/constant"
 	"one-api/dto"
-	relaycommon "one-api/relay/common"
 	"one-api/service"
 )
@@ -63,7 +63,7 @@ func streamResponsePaLM2OpenAI(palmResponse *PaLMChatResponse) *dto.ChatCompleti
 	if len(palmResponse.Candidates) > 0 {
 		choice.Delta.SetContentString(palmResponse.Candidates[0].Content)
 	}
-	choice.FinishReason = &relaycommon.StopFinishReason
+	choice.FinishReason = &constant.FinishReasonStop
 	var response dto.ChatCompletionsStreamResponse
 	response.Object = "chat.completion.chunk"
 	response.Model = "palm2"

View File

@@ -12,8 +12,8 @@ import (
 	"io"
 	"net/http"
 	"one-api/common"
+	"one-api/constant"
 	"one-api/dto"
-	relaycommon "one-api/relay/common"
 	"one-api/service"
 	"strconv"
 	"strings"
@@ -81,7 +81,7 @@ func streamResponseTencent2OpenAI(TencentResponse *TencentChatResponse) *dto.Cha
 		var choice dto.ChatCompletionsStreamResponseChoice
 		choice.Delta.SetContentString(TencentResponse.Choices[0].Delta.Content)
 		if TencentResponse.Choices[0].FinishReason == "stop" {
-			choice.FinishReason = &relaycommon.StopFinishReason
+			choice.FinishReason = &constant.FinishReasonStop
 		}
 		response.Choices = append(response.Choices, choice)
 	}

View File

@@ -12,8 +12,8 @@ import (
 	"net/http"
 	"net/url"
 	"one-api/common"
+	"one-api/constant"
 	"one-api/dto"
-	relaycommon "one-api/relay/common"
 	"one-api/service"
 	"strings"
 	"time"
@@ -67,7 +67,7 @@ func responseXunfei2OpenAI(response *XunfeiChatResponse) *dto.OpenAITextResponse
 			Role: "assistant",
 			Content: content,
 		},
-		FinishReason: relaycommon.StopFinishReason,
+		FinishReason: constant.FinishReasonStop,
 	}
 	fullTextResponse := dto.OpenAITextResponse{
 		Object: "chat.completion",
@@ -89,7 +89,7 @@ func streamResponseXunfei2OpenAI(xunfeiResponse *XunfeiChatResponse) *dto.ChatCo
 	var choice dto.ChatCompletionsStreamResponseChoice
 	choice.Delta.SetContentString(xunfeiResponse.Payload.Choices.Text[0].Content)
 	if xunfeiResponse.Payload.Choices.Status == 2 {
-		choice.FinishReason = &relaycommon.StopFinishReason
+		choice.FinishReason = &constant.FinishReasonStop
 	}
 	response := dto.ChatCompletionsStreamResponse{
 		Object: "chat.completion.chunk",

View File

@@ -8,8 +8,8 @@ import (
 	"io"
 	"net/http"
 	"one-api/common"
+	"one-api/constant"
 	"one-api/dto"
-	relaycommon "one-api/relay/common"
 	"one-api/service"
 	"strings"
 	"sync"
@@ -139,7 +139,7 @@ func streamResponseZhipu2OpenAI(zhipuResponse string) *dto.ChatCompletionsStream
 func streamMetaResponseZhipu2OpenAI(zhipuResponse *ZhipuStreamMetaResponse) (*dto.ChatCompletionsStreamResponse, *dto.Usage) {
 	var choice dto.ChatCompletionsStreamResponseChoice
 	choice.Delta.SetContentString("")
-	choice.FinishReason = &relaycommon.StopFinishReason
+	choice.FinishReason = &constant.FinishReasonStop
 	response := dto.ChatCompletionsStreamResponse{
 		Id: zhipuResponse.RequestId,
 		Object: "chat.completion.chunk",

View File

@@ -10,8 +10,6 @@ import (
 	"strings"
 )
 
-var StopFinishReason = "stop"
-
 func GetFullRequestURL(baseURL string, requestURL string, channelType int) string {
 	fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL)