feat: improve format conversion; fix the Gemini and OpenAI channels when used with Claude Code
@@ -835,6 +835,7 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *dto.GeminiChatResponse) (*d
				call.SetIndex(len(choice.Delta.ToolCalls))
				choice.Delta.ToolCalls = append(choice.Delta.ToolCalls, *call)
			}
		} else if part.Thought {
			isThought = true
			texts = append(texts, part.Text)
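
Context for the SetIndex line above: OpenAI-style streaming clients, including the Claude-format conversion this commit targets, reassemble a tool call from many chunks by matching the index field on each fragment. The sketch below is illustrative only and uses a simplified stand-in type rather than the project's dto definitions; it shows the consumer-side merge that a missing or wrong index would break.

	// Simplified stand-in for the tool-call delta carried in each streamed chunk.
	type toolCallDelta struct {
		Index     int    // position assigned by SetIndex above
		ID        string // only present in the first fragment of a call
		Name      string // only present in the first fragment of a call
		Arguments string // JSON argument text, delivered in fragments
	}

	// mergeToolCallDeltas shows how a consumer stitches fragments back together:
	// fragments sharing an Index belong to the same tool call.
	func mergeToolCallDeltas(chunks [][]toolCallDelta) map[int]toolCallDelta {
		merged := make(map[int]toolCallDelta)
		for _, chunk := range chunks {
			for _, d := range chunk {
				acc := merged[d.Index]
				acc.Index = d.Index
				if d.ID != "" {
					acc.ID = d.ID
				}
				if d.Name != "" {
					acc.Name = d.Name
				}
				acc.Arguments += d.Arguments
				merged[d.Index] = acc
			}
		}
		return merged
	}
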
@@ -895,6 +896,7 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
	responseText := strings.Builder{}
	var usage = &dto.Usage{}
	var imageCount int
+	finishReason := constant.FinishReasonStop

	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		var geminiResponse dto.GeminiChatResponse
@@ -936,9 +938,21 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *

			if info.SendResponseCount == 0 {
				// send first response
-				err = handleStream(c, info, helper.GenerateStartEmptyResponse(id, createAt, info.UpstreamModelName, nil))
-				if err != nil {
-					common.LogError(c, err.Error())
-				}
+				emptyResponse := helper.GenerateStartEmptyResponse(id, createAt, info.UpstreamModelName, nil)
+				if response.IsToolCall() {
+					emptyResponse.Choices[0].Delta.ToolCalls = make([]dto.ToolCallResponse, 1)
+					emptyResponse.Choices[0].Delta.ToolCalls[0] = *response.GetFirstToolCall()
+					emptyResponse.Choices[0].Delta.ToolCalls[0].Function.Arguments = ""
+					finishReason = constant.FinishReasonToolCalls
+				}
+				err = handleStream(c, info, emptyResponse)
+				if err != nil {
+					common.LogError(c, err.Error())
+				}
+
+				response.ClearToolCalls()
+				if response.IsFinished() {
+					response.Choices[0].FinishReason = nil
+				}
			}
		}
@@ -947,7 +961,7 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
			common.LogError(c, err.Error())
		}
		if isStop {
-			_ = handleStream(c, info, helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, constant.FinishReasonStop))
+			_ = handleStream(c, info, helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, finishReason))
		}
		return true
	})
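
Taken together, the two hunks above change what a Claude-format client sees on the wire: the first streamed chunk is now primed with the first tool call (arguments cleared), the argument fragments follow, and the final chunk reports a tool_calls finish reason instead of the previously hard-coded stop. A rough sketch of that sequence; the chunk types, tool name, and arguments below are made-up stand-ins, not the project's dto structures.

	// Simplified stand-ins for the streamed chunks; not the project's dto types.
	type sketchDelta struct {
		ToolCallName string
		ToolCallArgs string
	}

	type sketchChunk struct {
		Delta        sketchDelta
		FinishReason string // empty until the final chunk
	}

	// Chunk sequence after this change for a Gemini function call relayed to Claude Code.
	var toolCallStream = []sketchChunk{
		{Delta: sketchDelta{ToolCallName: "get_weather", ToolCallArgs: ""}}, // primed first chunk, arguments cleared
		{Delta: sketchDelta{ToolCallArgs: `{"city":"Berlin"`}},              // argument fragments stream afterwards
		{Delta: sketchDelta{ToolCallArgs: `}`}},
		{FinishReason: "tool_calls"}, // previously always "stop"
	}
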
@@ -1026,13 +1040,26 @@ func GeminiChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.R
	}

	fullTextResponse.Usage = usage
-	jsonResponse, err := json.Marshal(fullTextResponse)
-	if err != nil {
-		return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
-	}
+
+	switch info.RelayFormat {
+	case relaycommon.RelayFormatOpenAI:
+		responseBody, err = common.Marshal(fullTextResponse)
+		if err != nil {
+			return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
+		}
+	case relaycommon.RelayFormatClaude:
+		claudeResp := service.ResponseOpenAI2Claude(fullTextResponse, info)
+		claudeRespStr, err := common.Marshal(claudeResp)
+		if err != nil {
+			return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
+		}
+		responseBody = claudeRespStr
+	case relaycommon.RelayFormatGemini:
+		break
+	}
-	c.Writer.Header().Set("Content-Type", "application/json")
-	c.Writer.WriteHeader(resp.StatusCode)
-	c.Writer.Write(jsonResponse)
+
+	common.IOCopyBytesGracefully(c, resp, responseBody)

	return &usage, nil
}
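
The non-stream handler above now selects the response body per relay format and writes it through a single path, which is what lets a Claude Code session receive a Claude-shaped body from a Gemini channel. Below is a generic sketch of that dispatch under two assumptions: the names are hypothetical stand-ins for the project's dto/relaycommon types and service.ResponseOpenAI2Claude, and the Gemini case is read as passing the upstream body through unchanged (the diff only shows it falling out of the switch).

	import "encoding/json"

	// Hypothetical stand-ins for the relay formats used in the diff.
	type sketchFormat int

	const (
		sketchFormatOpenAI sketchFormat = iota
		sketchFormatClaude
		sketchFormatGemini
	)

	// toClaudeShape stands in for service.ResponseOpenAI2Claude.
	func toClaudeShape(openAIResp map[string]any) map[string]any {
		return map[string]any{"type": "message", "id": openAIResp["id"]}
	}

	// encodeByFormat mirrors the switch above: marshal once per target format,
	// or keep the upstream (Gemini-native) body, then write it exactly once.
	func encodeByFormat(format sketchFormat, openAIResp map[string]any, upstreamBody []byte) ([]byte, error) {
		switch format {
		case sketchFormatOpenAI:
			return json.Marshal(openAIResp)
		case sketchFormatClaude:
			return json.Marshal(toClaudeShape(openAIResp))
		default: // sketchFormatGemini
			return upstreamBody, nil
		}
	}
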
@@ -63,10 +63,26 @@ func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayIn
	//if !strings.Contains(request.Model, "claude") {
	//	return nil, fmt.Errorf("you are using openai channel type with path /v1/messages, only claude model supported convert, but got %s", request.Model)
	//}
+	//if common.DebugEnabled {
+	//	bodyBytes := []byte(common.GetJsonString(request))
+	//	err := os.WriteFile(fmt.Sprintf("claude_request_%s.txt", c.GetString(common.RequestIdKey)), bodyBytes, 0644)
+	//	if err != nil {
+	//		println(fmt.Sprintf("failed to save request body to file: %v", err))
+	//	}
+	//}
	aiRequest, err := service.ClaudeToOpenAIRequest(*request, info)
	if err != nil {
		return nil, err
	}
+	//if common.DebugEnabled {
+	//	println(fmt.Sprintf("convert claude to openai request result: %s", common.GetJsonString(aiRequest)))
+	//	// Save request body to file for debugging
+	//	bodyBytes := []byte(common.GetJsonString(aiRequest))
+	//	err = os.WriteFile(fmt.Sprintf("claude_to_openai_request_%s.txt", c.GetString(common.RequestIdKey)), bodyBytes, 0644)
+	//	if err != nil {
+	//		println(fmt.Sprintf("failed to save request body to file: %v", err))
+	//	}
+	//}
	if info.SupportStreamOptions && info.IsStream {
		aiRequest.StreamOptions = &dto.StreamOptions{
			IncludeUsage: true,
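
The last visible change attaches StreamOptions with IncludeUsage to the converted request when the upstream channel supports stream options and the request streams; in the OpenAI streaming API this asks the upstream to append a final chunk whose usage field is filled in, which the relay needs for accounting. A minimal sketch of the resulting request body, using stand-in structs and an illustrative model name rather than the project's dto types.

	import "encoding/json"

	// Stand-ins for the request fields this conversion touches.
	type sketchStreamOptions struct {
		IncludeUsage bool `json:"include_usage"`
	}

	type sketchChatRequest struct {
		Model         string               `json:"model"`
		Stream        bool                 `json:"stream"`
		StreamOptions *sketchStreamOptions `json:"stream_options,omitempty"`
	}

	func buildStreamingRequest(model string) ([]byte, error) {
		req := sketchChatRequest{
			Model:         model, // e.g. "gpt-4o"; illustrative only
			Stream:        true,
			StreamOptions: &sketchStreamOptions{IncludeUsage: true},
		}
		// Produces: {"model":"...","stream":true,"stream_options":{"include_usage":true}}
		return json.Marshal(req)
	}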