fix: add Think field to OllamaRequest and support extra parameters in GeneralOpenAIRequest (close #1125)
This commit is contained in:
CaIon
2025-07-22 12:06:21 +08:00
parent 4d0037a40c
commit 2402715492
3 changed files with 13 additions and 3 deletions

View File

@@ -62,6 +62,8 @@ type GeneralOpenAIRequest struct {
Reasoning json.RawMessage `json:"reasoning,omitempty"` Reasoning json.RawMessage `json:"reasoning,omitempty"`
// Ali Qwen Params // Ali Qwen Params
VlHighResolutionImages json.RawMessage `json:"vl_high_resolution_images,omitempty"` VlHighResolutionImages json.RawMessage `json:"vl_high_resolution_images,omitempty"`
// 用匿名参数接收额外参数例如ollama的think参数在此接收
Extra map[string]json.RawMessage `json:"-"`
} }
func (r *GeneralOpenAIRequest) ToMap() map[string]any { func (r *GeneralOpenAIRequest) ToMap() map[string]any {

View File

@@ -1,6 +1,9 @@
package ollama package ollama
import "one-api/dto" import (
"encoding/json"
"one-api/dto"
)
type OllamaRequest struct { type OllamaRequest struct {
Model string `json:"model,omitempty"` Model string `json:"model,omitempty"`
@@ -19,6 +22,7 @@ type OllamaRequest struct {
Suffix any `json:"suffix,omitempty"` Suffix any `json:"suffix,omitempty"`
StreamOptions *dto.StreamOptions `json:"stream_options,omitempty"` StreamOptions *dto.StreamOptions `json:"stream_options,omitempty"`
Prompt any `json:"prompt,omitempty"` Prompt any `json:"prompt,omitempty"`
Think json.RawMessage `json:"think,omitempty"`
} }
type Options struct { type Options struct {

View File

@@ -50,7 +50,7 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) (*OllamaRequest, err
} else { } else {
Stop, _ = request.Stop.([]string) Stop, _ = request.Stop.([]string)
} }
return &OllamaRequest{ ollamaRequest := &OllamaRequest{
Model: request.Model, Model: request.Model,
Messages: messages, Messages: messages,
Stream: request.Stream, Stream: request.Stream,
@@ -67,7 +67,11 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) (*OllamaRequest, err
Prompt: request.Prompt, Prompt: request.Prompt,
StreamOptions: request.StreamOptions, StreamOptions: request.StreamOptions,
Suffix: request.Suffix, Suffix: request.Suffix,
}, nil }
if think, ok := request.Extra["think"]; ok {
ollamaRequest.Think = think
}
return ollamaRequest, nil
} }
func requestOpenAI2Embeddings(request dto.EmbeddingRequest) *OllamaEmbeddingRequest { func requestOpenAI2Embeddings(request dto.EmbeddingRequest) *OllamaEmbeddingRequest {