// Package openaicompat bridges OpenAI Chat Completions requests to the
// OpenAI Responses API request format.
package openaicompat
|
|
|
|
import (
|
|
"encoding/json"
|
|
"errors"
|
|
"fmt"
|
|
"strings"
|
|
|
|
"github.com/QuantumNous/new-api/common"
|
|
"github.com/QuantumNous/new-api/dto"
|
|
)
|
|
|
|
func normalizeChatImageURLToString(v any) any {
|
|
switch vv := v.(type) {
|
|
case string:
|
|
return vv
|
|
case map[string]any:
|
|
if url := common.Interface2String(vv["url"]); url != "" {
|
|
return url
|
|
}
|
|
return v
|
|
case dto.MessageImageUrl:
|
|
if vv.Url != "" {
|
|
return vv.Url
|
|
}
|
|
return v
|
|
case *dto.MessageImageUrl:
|
|
if vv != nil && vv.Url != "" {
|
|
return vv.Url
|
|
}
|
|
return v
|
|
default:
|
|
return v
|
|
}
|
|
}
|
|
|
|
func ChatCompletionsRequestToResponsesRequest(req *dto.GeneralOpenAIRequest) (*dto.OpenAIResponsesRequest, error) {
|
|
if req == nil {
|
|
return nil, errors.New("request is nil")
|
|
}
|
|
if req.Model == "" {
|
|
return nil, errors.New("model is required")
|
|
}
|
|
if req.N > 1 {
|
|
return nil, fmt.Errorf("n>1 is not supported in responses compatibility mode")
|
|
}
|
|
|
|
var instructionsParts []string
|
|
inputItems := make([]map[string]any, 0, len(req.Messages))
|
|
|
|
for _, msg := range req.Messages {
|
|
role := strings.TrimSpace(msg.Role)
|
|
if role == "" {
|
|
continue
|
|
}
|
|
|
|
if role == "tool" || role == "function" {
|
|
callID := strings.TrimSpace(msg.ToolCallId)
|
|
|
|
var output any
|
|
if msg.Content == nil {
|
|
output = ""
|
|
} else if msg.IsStringContent() {
|
|
output = msg.StringContent()
|
|
} else {
|
|
if b, err := common.Marshal(msg.Content); err == nil {
|
|
output = string(b)
|
|
} else {
|
|
output = fmt.Sprintf("%v", msg.Content)
|
|
}
|
|
}
|
|
|
|
if callID == "" {
|
|
inputItems = append(inputItems, map[string]any{
|
|
"role": "user",
|
|
"content": fmt.Sprintf("[tool_output_missing_call_id] %v", output),
|
|
})
|
|
continue
|
|
}
|
|
|
|
inputItems = append(inputItems, map[string]any{
|
|
"type": "function_call_output",
|
|
"call_id": callID,
|
|
"output": output,
|
|
})
|
|
continue
|
|
}
|
|
|
|
// Prefer mapping system/developer messages into `instructions`.
|
|
if role == "system" || role == "developer" {
|
|
if msg.Content == nil {
|
|
continue
|
|
}
|
|
if msg.IsStringContent() {
|
|
if s := strings.TrimSpace(msg.StringContent()); s != "" {
|
|
instructionsParts = append(instructionsParts, s)
|
|
}
|
|
continue
|
|
}
|
|
parts := msg.ParseContent()
|
|
var sb strings.Builder
|
|
for _, part := range parts {
|
|
if part.Type == dto.ContentTypeText && strings.TrimSpace(part.Text) != "" {
|
|
if sb.Len() > 0 {
|
|
sb.WriteString("\n")
|
|
}
|
|
sb.WriteString(part.Text)
|
|
}
|
|
}
|
|
if s := strings.TrimSpace(sb.String()); s != "" {
|
|
instructionsParts = append(instructionsParts, s)
|
|
}
|
|
continue
|
|
}
|
|
|
|
item := map[string]any{
|
|
"role": role,
|
|
}
|
|
|
|
if msg.Content == nil {
|
|
item["content"] = ""
|
|
inputItems = append(inputItems, item)
|
|
|
|
if role == "assistant" {
|
|
for _, tc := range msg.ParseToolCalls() {
|
|
if strings.TrimSpace(tc.ID) == "" {
|
|
continue
|
|
}
|
|
if tc.Type != "" && tc.Type != "function" {
|
|
continue
|
|
}
|
|
name := strings.TrimSpace(tc.Function.Name)
|
|
if name == "" {
|
|
continue
|
|
}
|
|
inputItems = append(inputItems, map[string]any{
|
|
"type": "function_call",
|
|
"call_id": tc.ID,
|
|
"name": name,
|
|
"arguments": tc.Function.Arguments,
|
|
})
|
|
}
|
|
}
|
|
continue
|
|
}
|
|
|
|
if msg.IsStringContent() {
|
|
item["content"] = msg.StringContent()
|
|
inputItems = append(inputItems, item)
|
|
|
|
if role == "assistant" {
|
|
for _, tc := range msg.ParseToolCalls() {
|
|
if strings.TrimSpace(tc.ID) == "" {
|
|
continue
|
|
}
|
|
if tc.Type != "" && tc.Type != "function" {
|
|
continue
|
|
}
|
|
name := strings.TrimSpace(tc.Function.Name)
|
|
if name == "" {
|
|
continue
|
|
}
|
|
inputItems = append(inputItems, map[string]any{
|
|
"type": "function_call",
|
|
"call_id": tc.ID,
|
|
"name": name,
|
|
"arguments": tc.Function.Arguments,
|
|
})
|
|
}
|
|
}
|
|
continue
|
|
}
|
|
|
|
parts := msg.ParseContent()
|
|
contentParts := make([]map[string]any, 0, len(parts))
|
|
for _, part := range parts {
|
|
switch part.Type {
|
|
case dto.ContentTypeText:
|
|
contentParts = append(contentParts, map[string]any{
|
|
"type": "input_text",
|
|
"text": part.Text,
|
|
})
|
|
case dto.ContentTypeImageURL:
|
|
contentParts = append(contentParts, map[string]any{
|
|
"type": "input_image",
|
|
"image_url": normalizeChatImageURLToString(part.ImageUrl),
|
|
})
|
|
case dto.ContentTypeInputAudio:
|
|
contentParts = append(contentParts, map[string]any{
|
|
"type": "input_audio",
|
|
"input_audio": part.InputAudio,
|
|
})
|
|
case dto.ContentTypeFile:
|
|
contentParts = append(contentParts, map[string]any{
|
|
"type": "input_file",
|
|
"file": part.File,
|
|
})
|
|
case dto.ContentTypeVideoUrl:
|
|
contentParts = append(contentParts, map[string]any{
|
|
"type": "input_video",
|
|
"video_url": part.VideoUrl,
|
|
})
|
|
default:
|
|
contentParts = append(contentParts, map[string]any{
|
|
"type": part.Type,
|
|
})
|
|
}
|
|
}
|
|
item["content"] = contentParts
|
|
inputItems = append(inputItems, item)
|
|
|
|
if role == "assistant" {
|
|
for _, tc := range msg.ParseToolCalls() {
|
|
if strings.TrimSpace(tc.ID) == "" {
|
|
continue
|
|
}
|
|
if tc.Type != "" && tc.Type != "function" {
|
|
continue
|
|
}
|
|
name := strings.TrimSpace(tc.Function.Name)
|
|
if name == "" {
|
|
continue
|
|
}
|
|
inputItems = append(inputItems, map[string]any{
|
|
"type": "function_call",
|
|
"call_id": tc.ID,
|
|
"name": name,
|
|
"arguments": tc.Function.Arguments,
|
|
})
|
|
}
|
|
}
|
|
}
|
|
|
|
inputRaw, err := common.Marshal(inputItems)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
var instructionsRaw json.RawMessage
|
|
if len(instructionsParts) > 0 {
|
|
instructions := strings.Join(instructionsParts, "\n\n")
|
|
instructionsRaw, _ = common.Marshal(instructions)
|
|
}
|
|
|
|
var toolsRaw json.RawMessage
|
|
if req.Tools != nil {
|
|
tools := make([]map[string]any, 0, len(req.Tools))
|
|
for _, tool := range req.Tools {
|
|
switch tool.Type {
|
|
case "function":
|
|
tools = append(tools, map[string]any{
|
|
"type": "function",
|
|
"name": tool.Function.Name,
|
|
"description": tool.Function.Description,
|
|
"parameters": tool.Function.Parameters,
|
|
})
|
|
default:
|
|
// Best-effort: keep original tool shape for unknown types.
|
|
var m map[string]any
|
|
if b, err := common.Marshal(tool); err == nil {
|
|
_ = common.Unmarshal(b, &m)
|
|
}
|
|
if len(m) == 0 {
|
|
m = map[string]any{"type": tool.Type}
|
|
}
|
|
tools = append(tools, m)
|
|
}
|
|
}
|
|
toolsRaw, _ = common.Marshal(tools)
|
|
}
|
|
|
|
var toolChoiceRaw json.RawMessage
|
|
if req.ToolChoice != nil {
|
|
switch v := req.ToolChoice.(type) {
|
|
case string:
|
|
toolChoiceRaw, _ = common.Marshal(v)
|
|
default:
|
|
var m map[string]any
|
|
if b, err := common.Marshal(v); err == nil {
|
|
_ = common.Unmarshal(b, &m)
|
|
}
|
|
if m == nil {
|
|
toolChoiceRaw, _ = common.Marshal(v)
|
|
} else if t, _ := m["type"].(string); t == "function" {
|
|
// Chat: {"type":"function","function":{"name":"..."}}
|
|
// Responses: {"type":"function","name":"..."}
|
|
if name, ok := m["name"].(string); ok && name != "" {
|
|
toolChoiceRaw, _ = common.Marshal(map[string]any{
|
|
"type": "function",
|
|
"name": name,
|
|
})
|
|
} else if fn, ok := m["function"].(map[string]any); ok {
|
|
if name, ok := fn["name"].(string); ok && name != "" {
|
|
toolChoiceRaw, _ = common.Marshal(map[string]any{
|
|
"type": "function",
|
|
"name": name,
|
|
})
|
|
} else {
|
|
toolChoiceRaw, _ = common.Marshal(v)
|
|
}
|
|
} else {
|
|
toolChoiceRaw, _ = common.Marshal(v)
|
|
}
|
|
} else {
|
|
toolChoiceRaw, _ = common.Marshal(v)
|
|
}
|
|
}
|
|
}
|
|
|
|
var parallelToolCallsRaw json.RawMessage
|
|
if req.ParallelTooCalls != nil {
|
|
parallelToolCallsRaw, _ = common.Marshal(*req.ParallelTooCalls)
|
|
}
|
|
|
|
var textRaw json.RawMessage
|
|
if req.ResponseFormat != nil && req.ResponseFormat.Type != "" {
|
|
textRaw, _ = common.Marshal(map[string]any{
|
|
"format": req.ResponseFormat,
|
|
})
|
|
}
|
|
|
|
maxOutputTokens := req.MaxTokens
|
|
if req.MaxCompletionTokens > maxOutputTokens {
|
|
maxOutputTokens = req.MaxCompletionTokens
|
|
}
|
|
|
|
var topP *float64
|
|
if req.TopP != 0 {
|
|
topP = common.GetPointer(req.TopP)
|
|
}
|
|
|
|
out := &dto.OpenAIResponsesRequest{
|
|
Model: req.Model,
|
|
Input: inputRaw,
|
|
Instructions: instructionsRaw,
|
|
MaxOutputTokens: maxOutputTokens,
|
|
Stream: req.Stream,
|
|
Temperature: req.Temperature,
|
|
Text: textRaw,
|
|
ToolChoice: toolChoiceRaw,
|
|
Tools: toolsRaw,
|
|
TopP: topP,
|
|
User: req.User,
|
|
ParallelToolCalls: parallelToolCallsRaw,
|
|
Store: req.Store,
|
|
Metadata: req.Metadata,
|
|
}
|
|
|
|
if req.ReasoningEffort != "" {
|
|
out.Reasoning = &dto.Reasoning{
|
|
Effort: req.ReasoningEffort,
|
|
Summary: "detailed",
|
|
}
|
|
}
|
|
|
|
return out, nil
|
|
}
|