From a4b2b9c935146408f930eafae8219dd1a39c472c Mon Sep 17 00:00:00 2001
From: "1808837298@qq.com" <1808837298@qq.com>
Date: Wed, 19 Feb 2025 19:58:34 +0800
Subject: [PATCH] feat: Enhance Ollama channel support with additional request parameters #771

---
 relay/channel/ollama/dto.go          | 31 +++++++++++++++-------------
 relay/channel/ollama/relay-ollama.go |  3 +++
 relay/common/relay_info.go           |  2 +-
 3 files changed, 21 insertions(+), 15 deletions(-)

diff --git a/relay/channel/ollama/dto.go b/relay/channel/ollama/dto.go
index 08019115..a954c607 100644
--- a/relay/channel/ollama/dto.go
+++ b/relay/channel/ollama/dto.go
@@ -3,18 +3,21 @@ package ollama
 
 import "one-api/dto"
 
 type OllamaRequest struct {
-    Model            string         `json:"model,omitempty"`
-    Messages         []dto.Message  `json:"messages,omitempty"`
-    Stream           bool           `json:"stream,omitempty"`
-    Temperature      *float64       `json:"temperature,omitempty"`
-    Seed             float64        `json:"seed,omitempty"`
-    Topp             float64        `json:"top_p,omitempty"`
-    TopK             int            `json:"top_k,omitempty"`
-    Stop             any            `json:"stop,omitempty"`
-    Tools            []dto.ToolCall `json:"tools,omitempty"`
-    ResponseFormat   any            `json:"response_format,omitempty"`
-    FrequencyPenalty float64        `json:"frequency_penalty,omitempty"`
-    PresencePenalty  float64        `json:"presence_penalty,omitempty"`
+    Model            string             `json:"model,omitempty"`
+    Messages         []dto.Message      `json:"messages,omitempty"`
+    Stream           bool               `json:"stream,omitempty"`
+    Temperature      *float64           `json:"temperature,omitempty"`
+    Seed             float64            `json:"seed,omitempty"`
+    Topp             float64            `json:"top_p,omitempty"`
+    TopK             int                `json:"top_k,omitempty"`
+    Stop             any                `json:"stop,omitempty"`
+    Tools            []dto.ToolCall     `json:"tools,omitempty"`
+    ResponseFormat   any                `json:"response_format,omitempty"`
+    FrequencyPenalty float64            `json:"frequency_penalty,omitempty"`
+    PresencePenalty  float64            `json:"presence_penalty,omitempty"`
+    Suffix           any                `json:"suffix,omitempty"`
+    StreamOptions    *dto.StreamOptions `json:"stream_options,omitempty"`
+    Prompt           any                `json:"prompt,omitempty"`
 }
 
 type Options struct {
@@ -35,7 +38,7 @@ type OllamaEmbeddingRequest struct {
 }
 
 type OllamaEmbeddingResponse struct {
-    Error string `json:"error,omitempty"`
-    Model string `json:"model"`
+    Error     string      `json:"error,omitempty"`
+    Model     string      `json:"model"`
     Embedding [][]float64 `json:"embeddings,omitempty"`
 }
diff --git a/relay/channel/ollama/relay-ollama.go b/relay/channel/ollama/relay-ollama.go
index 4ecdd19b..5a1d50c8 100644
--- a/relay/channel/ollama/relay-ollama.go
+++ b/relay/channel/ollama/relay-ollama.go
@@ -39,6 +39,9 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) *OllamaRequest {
         ResponseFormat:   request.ResponseFormat,
         FrequencyPenalty: request.FrequencyPenalty,
         PresencePenalty:  request.PresencePenalty,
+        Prompt:           request.Prompt,
+        StreamOptions:    request.StreamOptions,
+        Suffix:           request.Suffix,
     }
 }
 
diff --git a/relay/common/relay_info.go b/relay/common/relay_info.go
index e1e3916d..007d17d6 100644
--- a/relay/common/relay_info.go
+++ b/relay/common/relay_info.go
@@ -113,7 +113,7 @@ func GenRelayInfo(c *gin.Context) *RelayInfo {
     if info.ChannelType == common.ChannelTypeOpenAI || info.ChannelType == common.ChannelTypeAnthropic ||
         info.ChannelType == common.ChannelTypeAws || info.ChannelType == common.ChannelTypeGemini ||
         info.ChannelType == common.ChannelCloudflare || info.ChannelType == common.ChannelTypeAzure ||
-        info.ChannelType == common.ChannelTypeVolcEngine {
+        info.ChannelType == common.ChannelTypeVolcEngine || info.ChannelType == common.ChannelTypeOllama {
         info.SupportStreamOptions = true
     }
     return info
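
Note: the following is a minimal, self-contained sketch (stand-in types, not the
actual one-api code) of how the fields added by this patch -- prompt, suffix and
stream_options -- are carried from an OpenAI-style request into the Ollama request
by requestOpenAI2Ollama. The real dto.GeneralOpenAIRequest and dto.StreamOptions
live in one-api/dto; the StreamOptions shape below is an assumption for
illustration only.

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in for dto.StreamOptions (assumed shape, illustration only).
type StreamOptions struct {
	IncludeUsage bool `json:"include_usage,omitempty"`
}

// Stand-in for the subset of dto.GeneralOpenAIRequest relevant to this patch.
type GeneralOpenAIRequest struct {
	Model         string         `json:"model"`
	Prompt        any            `json:"prompt,omitempty"`
	Suffix        any            `json:"suffix,omitempty"`
	StreamOptions *StreamOptions `json:"stream_options,omitempty"`
}

// Stand-in for the subset of OllamaRequest relevant to this patch.
type OllamaRequest struct {
	Model         string         `json:"model,omitempty"`
	Prompt        any            `json:"prompt,omitempty"`
	Suffix        any            `json:"suffix,omitempty"`
	StreamOptions *StreamOptions `json:"stream_options,omitempty"`
}

// convert mirrors the field mapping added to requestOpenAI2Ollama:
// the new fields are passed through unchanged.
func convert(req GeneralOpenAIRequest) *OllamaRequest {
	return &OllamaRequest{
		Model:         req.Model,
		Prompt:        req.Prompt,
		Suffix:        req.Suffix,
		StreamOptions: req.StreamOptions,
	}
}

func main() {
	// Example completion-style request exercising the new parameters.
	out, _ := json.MarshalIndent(convert(GeneralOpenAIRequest{
		Model:         "example-model",
		Prompt:        "func add(a, b int) int {",
		Suffix:        "}",
		StreamOptions: &StreamOptions{IncludeUsage: true},
	}), "", "  ")
	fmt.Println(string(out))
}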