refactor: 更新请求转换逻辑,优化工具调用解析

This commit is contained in:
somnifex
2025-09-15 23:15:46 +08:00
parent fc38c480a1
commit 7d6ba52d85
4 changed files with 16 additions and 15 deletions

View File

@@ -32,7 +32,8 @@ func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayIn
 	openaiRequest.(*dto.GeneralOpenAIRequest).StreamOptions = &dto.StreamOptions{
 		IncludeUsage: true,
 	}
-	return requestOpenAI2Ollama(c, openaiRequest.(*dto.GeneralOpenAIRequest))
+	// map to ollama chat request (Claude -> OpenAI -> Ollama chat)
+	return openAIChatToOllamaChat(c, openaiRequest.(*dto.GeneralOpenAIRequest))
 }
 func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {

View File

@@ -2,7 +2,6 @@ package ollama
 import (
 	"encoding/json"
-	"one-api/dto"
 )
 // OllamaChatMessage represents a single chat message

View File

@@ -101,18 +101,21 @@ func openAIChatToOllamaChat(c *gin.Context, r *dto.GeneralOpenAIRequest) (*Ollam
 		// history tool call result message
 		if m.Role == "tool" && m.Name != nil { cm.ToolName = *m.Name }
 		// tool calls from assistant previous message
-		if len(m.ToolCalls)>0 {
-			calls := make([]OllamaToolCall,0,len(m.ToolCalls))
-			for _, tc := range m.ToolCalls {
-				var args interface{}
-				if tc.Function.Arguments != "" { _ = json.Unmarshal([]byte(tc.Function.Arguments), &args) }
-				oc := OllamaToolCall{}
-				oc.Function.Name = tc.Function.Name
-				if args==nil { args = map[string]any{} }
-				oc.Function.Arguments = args
-				calls = append(calls, oc)
-			}
-			cm.ToolCalls = calls
-		}
+		if m.ToolCalls != nil && len(m.ToolCalls) > 0 {
+			parsed := m.ParseToolCalls()
+			if len(parsed) > 0 {
+				calls := make([]OllamaToolCall,0,len(parsed))
+				for _, tc := range parsed {
+					var args interface{}
+					if tc.Function.Arguments != "" { _ = json.Unmarshal([]byte(tc.Function.Arguments), &args) }
+					if args==nil { args = map[string]any{} }
+					oc := OllamaToolCall{}
+					oc.Function.Name = tc.Function.Name
+					oc.Function.Arguments = args
+					calls = append(calls, oc)
+				}
+				cm.ToolCalls = calls
+			}
+		}
 		chatReq.Messages = append(chatReq.Messages, cm)
 	}
@@ -165,7 +168,6 @@ func requestOpenAI2Embeddings(r dto.EmbeddingRequest) *OllamaEmbeddingRequest {
 	opts := map[string]any{}
 	if r.Temperature != nil { opts["temperature"] = r.Temperature }
 	if r.TopP != 0 { opts["top_p"] = r.TopP }
-	if r.TopK != 0 { opts["top_k"] = r.TopK }
 	if r.FrequencyPenalty != 0 { opts["frequency_penalty"] = r.FrequencyPenalty }
 	if r.PresencePenalty != 0 { opts["presence_penalty"] = r.PresencePenalty }
 	if r.Seed != 0 { opts["seed"] = int(r.Seed) }

View File

@@ -87,7 +87,6 @@ func ollamaStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http
 		// delta content
 		var content string
 		if chunk.Message != nil { content = chunk.Message.Content } else { content = chunk.Response }
-		if content != "" { aggregatedText.WriteString(content) }
 		delta := dto.ChatCompletionsStreamResponse{
 			Id: responseId,
 			Object: "chat.completion.chunk",