Files
new-api-oiss/service/openaicompat/chat_to_responses.go
Seefs 71460cba15 feat: /v1/chat/completion -> /v1/response (#2629)
* feat: /v1/chat/completion -> /v1/response
2026-01-11 21:38:07 +08:00

263 lines
6.5 KiB
Go

package openaicompat
import (
"encoding/json"
"errors"
"fmt"
"strings"
"github.com/QuantumNous/new-api/common"
"github.com/QuantumNous/new-api/dto"
)
// normalizeChatImageURLToString unwraps the several shapes a chat image URL
// may arrive in (plain string, generic map with a "url" key, or the dto
// struct by value/pointer) and returns the bare URL string when one is
// present. Any shape it cannot unwrap is passed through unchanged.
func normalizeChatImageURLToString(v any) any {
	switch img := v.(type) {
	case string:
		return img
	case map[string]any:
		if u := common.Interface2String(img["url"]); u != "" {
			return u
		}
	case dto.MessageImageUrl:
		if img.Url != "" {
			return img.Url
		}
	case *dto.MessageImageUrl:
		if img != nil && img.Url != "" {
			return img.Url
		}
	}
	// Unknown shape, nil pointer, or empty URL: keep the original value.
	return v
}
// ChatCompletionsRequestToResponsesRequest converts a Chat Completions
// request into the equivalent Responses API request.
//
// Mapping notes:
//   - system/developer messages are folded into `instructions`;
//   - all other messages become input items (text/image/audio/file/video
//     parts are renamed to their Responses counterparts);
//   - function tools and tool_choice are reshaped to the flattened
//     Responses layout;
//   - n>1 is rejected because the Responses API has no equivalent.
//
// Returns an error when req is nil, the model is empty, n>1 is requested,
// or the converted input items cannot be marshaled.
func ChatCompletionsRequestToResponsesRequest(req *dto.GeneralOpenAIRequest) (*dto.OpenAIResponsesRequest, error) {
	if req == nil {
		return nil, errors.New("request is nil")
	}
	if req.Model == "" {
		return nil, errors.New("model is required")
	}
	if req.N > 1 {
		return nil, fmt.Errorf("n>1 is not supported in responses compatibility mode")
	}

	inputItems, instructionsParts := chatMessagesToResponsesInput(req)
	inputRaw, err := common.Marshal(inputItems)
	if err != nil {
		return nil, err
	}

	var instructionsRaw json.RawMessage
	if len(instructionsParts) > 0 {
		instructionsRaw, _ = common.Marshal(strings.Join(instructionsParts, "\n\n"))
	}

	toolsRaw := chatToolsToResponsesTools(req)
	toolChoiceRaw := chatToolChoiceToResponses(req.ToolChoice)

	var parallelToolCallsRaw json.RawMessage
	if req.ParallelTooCalls != nil {
		parallelToolCallsRaw, _ = common.Marshal(*req.ParallelTooCalls)
	}

	var textRaw json.RawMessage
	if req.ResponseFormat != nil && req.ResponseFormat.Type != "" {
		// Responses nests the response format under text.format.
		textRaw, _ = common.Marshal(map[string]any{
			"format": req.ResponseFormat,
		})
	}

	// Responses has a single max_output_tokens; take the larger of the two
	// chat-side limits so neither silently shrinks the token budget.
	maxOutputTokens := req.MaxTokens
	if req.MaxCompletionTokens > maxOutputTokens {
		maxOutputTokens = req.MaxCompletionTokens
	}

	var topP *float64
	if req.TopP != 0 {
		topP = common.GetPointer(req.TopP)
	}

	out := &dto.OpenAIResponsesRequest{
		Model:             req.Model,
		Input:             inputRaw,
		Instructions:      instructionsRaw,
		MaxOutputTokens:   maxOutputTokens,
		Stream:            req.Stream,
		Temperature:       req.Temperature,
		Text:              textRaw,
		ToolChoice:        toolChoiceRaw,
		Tools:             toolsRaw,
		TopP:              topP,
		User:              req.User,
		ParallelToolCalls: parallelToolCallsRaw,
		Store:             req.Store,
		Metadata:          req.Metadata,
	}
	// "none" means reasoning is disabled, so no reasoning block is attached.
	if req.ReasoningEffort != "" && req.ReasoningEffort != "none" {
		out.Reasoning = &dto.Reasoning{
			Effort: req.ReasoningEffort,
		}
	}
	return out, nil
}

// chatMessagesToResponsesInput splits chat messages into Responses input
// items and instruction fragments. System/developer messages contribute only
// their text to the instruction fragments; every other non-empty role becomes
// one input item.
func chatMessagesToResponsesInput(req *dto.GeneralOpenAIRequest) ([]map[string]any, []string) {
	var instructionsParts []string
	inputItems := make([]map[string]any, 0, len(req.Messages))
	for _, msg := range req.Messages {
		role := strings.TrimSpace(msg.Role)
		if role == "" {
			continue
		}
		// Prefer mapping system/developer messages into `instructions`.
		if role == "system" || role == "developer" {
			if msg.Content == nil {
				continue
			}
			if msg.IsStringContent() {
				if s := strings.TrimSpace(msg.StringContent()); s != "" {
					instructionsParts = append(instructionsParts, s)
				}
				continue
			}
			// Multi-part system message: join the text parts with newlines;
			// non-text parts are intentionally dropped here.
			var sb strings.Builder
			for _, part := range msg.ParseContent() {
				if part.Type == dto.ContentTypeText && strings.TrimSpace(part.Text) != "" {
					if sb.Len() > 0 {
						sb.WriteString("\n")
					}
					sb.WriteString(part.Text)
				}
			}
			if s := strings.TrimSpace(sb.String()); s != "" {
				instructionsParts = append(instructionsParts, s)
			}
			continue
		}

		item := map[string]any{
			"role": role,
		}
		switch {
		case msg.Content == nil:
			item["content"] = ""
		case msg.IsStringContent():
			item["content"] = msg.StringContent()
		default:
			parts := msg.ParseContent()
			contentParts := make([]map[string]any, 0, len(parts))
			for _, part := range parts {
				switch part.Type {
				case dto.ContentTypeText:
					contentParts = append(contentParts, map[string]any{
						"type": "input_text",
						"text": part.Text,
					})
				case dto.ContentTypeImageURL:
					contentParts = append(contentParts, map[string]any{
						"type":      "input_image",
						"image_url": normalizeChatImageURLToString(part.ImageUrl),
					})
				case dto.ContentTypeInputAudio:
					contentParts = append(contentParts, map[string]any{
						"type":        "input_audio",
						"input_audio": part.InputAudio,
					})
				case dto.ContentTypeFile:
					contentParts = append(contentParts, map[string]any{
						"type": "input_file",
						"file": part.File,
					})
				case dto.ContentTypeVideoUrl:
					contentParts = append(contentParts, map[string]any{
						"type":      "input_video",
						"video_url": part.VideoUrl,
					})
				default:
					// Best-effort: keep unknown parts as-is to avoid silently dropping context.
					contentParts = append(contentParts, map[string]any{
						"type": part.Type,
					})
				}
			}
			item["content"] = contentParts
		}
		inputItems = append(inputItems, item)
	}
	return inputItems, instructionsParts
}

// chatToolsToResponsesTools converts chat-style tools to the Responses
// layout. Function tools are flattened (name/description/parameters at the
// top level); unknown tool types are passed through best-effort. Returns nil
// when the request declares no tools.
func chatToolsToResponsesTools(req *dto.GeneralOpenAIRequest) json.RawMessage {
	if req.Tools == nil {
		return nil
	}
	tools := make([]map[string]any, 0, len(req.Tools))
	for _, tool := range req.Tools {
		if tool.Type == "function" {
			tools = append(tools, map[string]any{
				"type":        "function",
				"name":        tool.Function.Name,
				"description": tool.Function.Description,
				"parameters":  tool.Function.Parameters,
			})
			continue
		}
		// Best-effort: keep original tool shape for unknown types.
		var m map[string]any
		if b, err := common.Marshal(tool); err == nil {
			_ = common.Unmarshal(b, &m)
		}
		if len(m) == 0 {
			m = map[string]any{"type": tool.Type}
		}
		tools = append(tools, m)
	}
	raw, _ := common.Marshal(tools)
	return raw
}

// chatToolChoiceToResponses reshapes a chat tool_choice value for Responses.
// Chat encodes a forced function call as
// {"type":"function","function":{"name":"..."}} while Responses flattens it
// to {"type":"function","name":"..."}; strings ("auto", "none", ...) and any
// shape that cannot be flattened are marshaled through unchanged. Returns nil
// when choice is nil.
func chatToolChoiceToResponses(choice any) json.RawMessage {
	if choice == nil {
		return nil
	}
	if s, ok := choice.(string); ok {
		raw, _ := common.Marshal(s)
		return raw
	}
	passthrough := func() json.RawMessage {
		raw, _ := common.Marshal(choice)
		return raw
	}
	var m map[string]any
	if b, err := common.Marshal(choice); err == nil {
		_ = common.Unmarshal(b, &m)
	}
	if m == nil {
		return passthrough()
	}
	if t, _ := m["type"].(string); t != "function" {
		return passthrough()
	}
	// Already flattened: {"type":"function","name":"..."}.
	if name, ok := m["name"].(string); ok && name != "" {
		raw, _ := common.Marshal(map[string]any{
			"type": "function",
			"name": name,
		})
		return raw
	}
	// Chat shape: {"type":"function","function":{"name":"..."}}.
	if fn, ok := m["function"].(map[string]any); ok {
		if name, ok := fn["name"].(string); ok && name != "" {
			raw, _ := common.Marshal(map[string]any{
				"type": "function",
				"name": name,
			})
			return raw
		}
	}
	return passthrough()
}