// new-api/relay/channel/openai/adaptor.go
package openai
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"mime/multipart"
"net/http"
"net/textproto"
"one-api/common"
"one-api/constant"
"one-api/dto"
"one-api/relay/channel"
"one-api/relay/channel/ai360"
"one-api/relay/channel/lingyiwanwu"
"one-api/relay/channel/minimax"
"one-api/relay/channel/openrouter"
"one-api/relay/channel/xinference"
relaycommon "one-api/relay/common"
"one-api/relay/common_handler"
relayconstant "one-api/relay/constant"
"one-api/service"
"one-api/types"
"path/filepath"
"strings"
"github.com/gin-gonic/gin"
)
type Adaptor struct {
ChannelType int
ResponseFormat string
}
// parseReasoningEffortFromModelSuffix parses the reasoning effort level from the model name suffix.
// Supports OpenAI models: o1-mini/o3-mini/o4-mini/o1/o3, etc.
// The minimal effort level is only available for gpt-5.
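// e.g. "gpt-5-high" -> ("high", "gpt-5"); "o3-mini" -> ("", "o3-mini")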
func parseReasoningEffortFromModelSuffix(model string) (string, string) {
effortSuffixes := []string{"-high", "-minimal", "-low", "-medium"}
for _, suffix := range effortSuffixes {
if strings.HasSuffix(model, suffix) {
effort := strings.TrimPrefix(suffix, "-")
originModel := strings.TrimSuffix(model, suffix)
return effort, originModel
}
}
return "", model
}
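// ConvertGeminiRequest converts a Gemini chat request into an OpenAI chat request and then runs it through ConvertOpenAIRequest.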
func (a *Adaptor) ConvertGeminiRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeminiChatRequest) (any, error) {
// Convert the request format using service.GeminiToOpenAIRequest
openaiRequest, err := service.GeminiToOpenAIRequest(request, info)
if err != nil {
return nil, err
}
return a.ConvertOpenAIRequest(c, info, openaiRequest)
}
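// ConvertClaudeRequest converts a Claude Messages request into an OpenAI chat request, enabling stream usage reporting when the channel supports stream options.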
func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.ClaudeRequest) (any, error) {
//if !strings.Contains(request.Model, "claude") {
// return nil, fmt.Errorf("you are using openai channel type with path /v1/messages, only claude model supported convert, but got %s", request.Model)
//}
//if common.DebugEnabled {
// bodyBytes := []byte(common.GetJsonString(request))
// err := os.WriteFile(fmt.Sprintf("claude_request_%s.txt", c.GetString(common.RequestIdKey)), bodyBytes, 0644)
// if err != nil {
// println(fmt.Sprintf("failed to save request body to file: %v", err))
// }
//}
aiRequest, err := service.ClaudeToOpenAIRequest(*request, info)
if err != nil {
return nil, err
}
//if common.DebugEnabled {
// println(fmt.Sprintf("convert claude to openai request result: %s", common.GetJsonString(aiRequest)))
// // Save request body to file for debugging
// bodyBytes := []byte(common.GetJsonString(aiRequest))
// err = os.WriteFile(fmt.Sprintf("claude_to_openai_request_%s.txt", c.GetString(common.RequestIdKey)), bodyBytes, 0644)
// if err != nil {
// println(fmt.Sprintf("failed to save request body to file: %v", err))
// }
//}
if info.SupportStreamOptions && info.IsStream {
aiRequest.StreamOptions = &dto.StreamOptions{
IncludeUsage: true,
}
}
return a.ConvertOpenAIRequest(c, info, aiRequest)
}
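// Init records the channel type and, when thinking_to_content is enabled, resets the per-request thinking content state.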
func (a *Adaptor) Init(info *relaycommon.RelayInfo) {
a.ChannelType = info.ChannelType
// initialize ThinkingContentInfo when thinking_to_content is enabled
if info.ChannelSetting.ThinkingToContent {
info.ThinkingContentInfo = relaycommon.ThinkingContentInfo{
IsFirstThinkingContent: true,
SendLastThinkingContent: false,
HasSentThinkingContent: false,
}
}
}
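// GetRequestURL builds the upstream request URL: it rewrites the scheme to ws(s) for realtime requests and handles Azure deployments and the responses API, MiniMax, and custom {model} base URLs.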
func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
if info.RelayMode == relayconstant.RelayModeRealtime {
if strings.HasPrefix(info.ChannelBaseUrl, "https://") {
baseUrl := strings.TrimPrefix(info.ChannelBaseUrl, "https://")
baseUrl = "wss://" + baseUrl
info.ChannelBaseUrl = baseUrl
} else if strings.HasPrefix(info.ChannelBaseUrl, "http://") {
baseUrl := strings.TrimPrefix(info.ChannelBaseUrl, "http://")
baseUrl = "ws://" + baseUrl
info.ChannelBaseUrl = baseUrl
}
}
switch info.ChannelType {
case constant.ChannelTypeAzure:
apiVersion := info.ApiVersion
if apiVersion == "" {
apiVersion = constant.AzureDefaultAPIVersion
}
// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
requestURL := strings.Split(info.RequestURLPath, "?")[0]
requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, apiVersion)
task := strings.TrimPrefix(requestURL, "/v1/")
if info.RelayFormat == types.RelayFormatClaude {
task = strings.TrimPrefix(task, "messages")
task = "chat/completions" + task
}
// Special handling for the responses API
if info.RelayMode == relayconstant.RelayModeResponses {
responsesApiVersion := "preview"
subUrl := "/openai/v1/responses"
if strings.Contains(info.ChannelBaseUrl, "cognitiveservices.azure.com") {
subUrl = "/openai/responses"
responsesApiVersion = apiVersion
}
if info.ChannelOtherSettings.AzureResponsesVersion != "" {
responsesApiVersion = info.ChannelOtherSettings.AzureResponsesVersion
}
requestURL = fmt.Sprintf("%s?api-version=%s", subUrl, responsesApiVersion)
return relaycommon.GetFullRequestURL(info.ChannelBaseUrl, requestURL, info.ChannelType), nil
}
model_ := info.UpstreamModelName
// Channels created after 2025-05-10 do not have the dot removed from the model name.
if info.ChannelCreateTime < constant.AzureNoRemoveDotTime {
model_ = strings.Replace(model_, ".", "", -1)
}
// https://github.com/songquanpeng/one-api/issues/67
requestURL = fmt.Sprintf("/openai/deployments/%s/%s", model_, task)
if info.RelayMode == relayconstant.RelayModeRealtime {
requestURL = fmt.Sprintf("/openai/realtime?deployment=%s&api-version=%s", model_, apiVersion)
}
return relaycommon.GetFullRequestURL(info.ChannelBaseUrl, requestURL, info.ChannelType), nil
case constant.ChannelTypeMiniMax:
return minimax.GetRequestURL(info)
case constant.ChannelTypeCustom:
url := info.ChannelBaseUrl
url = strings.Replace(url, "{model}", info.UpstreamModelName, -1)
return url, nil
default:
if info.RelayFormat == types.RelayFormatClaude || info.RelayFormat == types.RelayFormatGemini {
return fmt.Sprintf("%s/v1/chat/completions", info.ChannelBaseUrl), nil
}
return relaycommon.GetFullRequestURL(info.ChannelBaseUrl, info.RequestURLPath, info.ChannelType), nil
}
}
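// SetupRequestHeader sets authentication and channel-specific headers: Azure api-key, OpenAI organization, realtime WebSocket subprotocols, and OpenRouter attribution (HTTP-Referer, X-Title).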
func (a *Adaptor) SetupRequestHeader(c *gin.Context, header *http.Header, info *relaycommon.RelayInfo) error {
channel.SetupApiRequestHeader(info, c, header)
if info.ChannelType == constant.ChannelTypeAzure {
header.Set("api-key", info.ApiKey)
return nil
}
if info.ChannelType == constant.ChannelTypeOpenAI && info.Organization != "" {
header.Set("OpenAI-Organization", info.Organization)
}
if info.RelayMode == relayconstant.RelayModeRealtime {
swp := c.Request.Header.Get("Sec-WebSocket-Protocol")
if swp != "" {
items := []string{
"realtime",
"openai-insecure-api-key." + info.ApiKey,
"openai-beta.realtime-v1",
}
header.Set("Sec-WebSocket-Protocol", strings.Join(items, ","))
//req.Header.Set("Sec-WebSocket-Key", c.Request.Header.Get("Sec-WebSocket-Key"))
//req.Header.Set("Sec-Websocket-Extensions", c.Request.Header.Get("Sec-Websocket-Extensions"))
//req.Header.Set("Sec-Websocket-Version", c.Request.Header.Get("Sec-Websocket-Version"))
} else {
header.Set("openai-beta", "realtime=v1")
header.Set("Authorization", "Bearer "+info.ApiKey)
}
} else {
header.Set("Authorization", "Bearer "+info.ApiKey)
}
if info.ChannelType == constant.ChannelTypeOpenRouter {
header.Set("HTTP-Referer", "https://www.newapi.ai")
header.Set("X-Title", "New API")
}
return nil
}
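// ConvertOpenAIRequest normalizes an OpenAI-format request for the target channel: stream options, OpenRouter usage/reasoning adaptation, Claude extended thinking, and o-series/gpt-5 parameter rules.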
func (a *Adaptor) ConvertOpenAIRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeneralOpenAIRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
if info.ChannelType != constant.ChannelTypeOpenAI && info.ChannelType != constant.ChannelTypeAzure {
request.StreamOptions = nil
}
if info.ChannelType == constant.ChannelTypeOpenRouter {
if len(request.Usage) == 0 {
request.Usage = json.RawMessage(`{"include":true}`)
}
// Adapt OpenRouter's "-thinking" model suffix
if strings.HasSuffix(info.UpstreamModelName, "-thinking") {
info.UpstreamModelName = strings.TrimSuffix(info.UpstreamModelName, "-thinking")
request.Model = info.UpstreamModelName
if len(request.Reasoning) == 0 {
reasoning := map[string]any{
"enabled": true,
}
if request.ReasoningEffort != "" && request.ReasoningEffort != "none" {
reasoning["effort"] = request.ReasoningEffort
}
marshal, err := common.Marshal(reasoning)
if err != nil {
return nil, fmt.Errorf("error marshalling reasoning: %w", err)
}
request.Reasoning = marshal
}
// Clear the now-redundant ReasoningEffort
request.ReasoningEffort = ""
} else {
if len(request.Reasoning) == 0 {
// Map the OpenAI-style ReasoningEffort into OpenRouter's reasoning object
if request.ReasoningEffort != "" {
reasoning := map[string]any{
"enabled": true,
}
if request.ReasoningEffort != "none" {
reasoning["effort"] = request.ReasoningEffort
marshal, err := common.Marshal(reasoning)
if err != nil {
return nil, fmt.Errorf("error marshalling reasoning: %w", err)
}
request.Reasoning = marshal
}
}
}
request.ReasoningEffort = ""
}
// https://docs.anthropic.com/en/api/openai-sdk#extended-thinking-support
// No exclusion for 3.5 Haiku and similar models; add one if it causes problems. Best-effort compatibility.
if request.THINKING != nil && strings.HasPrefix(info.UpstreamModelName, "anthropic") {
var thinking dto.Thinking // standard Claude Thinking format
if err := json.Unmarshal(request.THINKING, &thinking); err != nil {
return nil, fmt.Errorf("error Unmarshal thinking: %w", err)
}
// Only handle the request when thinking.Type is "enabled"
if thinking.Type == "enabled" {
// Check whether BudgetTokens is nil
if thinking.BudgetTokens == nil {
return nil, fmt.Errorf("BudgetTokens is nil when thinking is enabled")
}
reasoning := openrouter.RequestReasoning{
MaxTokens: *thinking.BudgetTokens,
}
marshal, err := common.Marshal(reasoning)
if err != nil {
return nil, fmt.Errorf("error marshalling reasoning: %w", err)
}
request.Reasoning = marshal
}
// Clear THINKING
request.THINKING = nil
}
}
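// o-series and gpt-5 models take max_completion_tokens instead of max_tokens; temperature is dropped except for gpt-5-chat-latest.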
if strings.HasPrefix(info.UpstreamModelName, "o") || strings.HasPrefix(info.UpstreamModelName, "gpt-5") {
if request.MaxCompletionTokens == 0 && request.MaxTokens != 0 {
request.MaxCompletionTokens = request.MaxTokens
request.MaxTokens = 0
}
if strings.HasPrefix(info.UpstreamModelName, "o") {
request.Temperature = nil
}
if strings.HasPrefix(info.UpstreamModelName, "gpt-5") {
if info.UpstreamModelName != "gpt-5-chat-latest" {
request.Temperature = nil
}
}
// Parse the reasoning-effort suffix from the model name
effort, originModel := parseReasoningEffortFromModelSuffix(info.UpstreamModelName)
if effort != "" {
request.ReasoningEffort = effort
info.UpstreamModelName = originModel
request.Model = originModel
}
info.ReasoningEffort = request.ReasoningEffort
// o-series models use the developer role (except o1-mini and o1-preview)
if !strings.HasPrefix(info.UpstreamModelName, "o1-mini") && !strings.HasPrefix(info.UpstreamModelName, "o1-preview") {
// Change the first message's role from system to developer
if len(request.Messages) > 0 && request.Messages[0].Role == "system" {
request.Messages[0].Role = "developer"
}
}
}
return request, nil
}
func (a *Adaptor) ConvertRerankRequest(c *gin.Context, relayMode int, request dto.RerankRequest) (any, error) {
return request, nil
}
func (a *Adaptor) ConvertEmbeddingRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.EmbeddingRequest) (any, error) {
return request, nil
}
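// ConvertAudioRequest builds the upstream audio request body: JSON for speech, a multipart form (model, other form fields, and the uploaded file) for transcription and translation.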
func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {
a.ResponseFormat = request.ResponseFormat
if info.RelayMode == relayconstant.RelayModeAudioSpeech {
jsonData, err := json.Marshal(request)
if err != nil {
return nil, fmt.Errorf("error marshalling object: %w", err)
}
return bytes.NewReader(jsonData), nil
} else {
var requestBody bytes.Buffer
writer := multipart.NewWriter(&requestBody)
writer.WriteField("model", request.Model)
// Get all parsed form fields
formData := c.Request.PostForm
// Copy each form field (except model) into the new multipart body
for key, values := range formData {
if key == "model" {
continue
}
for _, value := range values {
writer.WriteField(key, value)
}
}
// Add the file field
file, header, err := c.Request.FormFile("file")
if err != nil {
return nil, errors.New("file is required")
}
defer file.Close()
part, err := writer.CreateFormFile("file", header.Filename)
if err != nil {
return nil, errors.New("create form file failed")
}
if _, err := io.Copy(part, file); err != nil {
return nil, errors.New("copy file failed")
}
// Close the multipart writer to set the terminating boundary
writer.Close()
c.Request.Header.Set("Content-Type", writer.FormDataContentType())
return &requestBody, nil
}
}
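// ConvertImageRequest rebuilds the multipart body for image edits, copying form fields, one or more image files (image / image[] / image[n]), and an optional mask; other image modes pass through unchanged.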
func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) {
switch info.RelayMode {
case relayconstant.RelayModeImagesEdits:
var requestBody bytes.Buffer
writer := multipart.NewWriter(&requestBody)
writer.WriteField("model", request.Model)
// Reuse the already-parsed multipart form to avoid parsing it twice
mf := c.Request.MultipartForm
if mf == nil {
if _, err := c.MultipartForm(); err != nil {
return nil, errors.New("failed to parse multipart form")
}
mf = c.Request.MultipartForm
}
// Write all non-file fields
if mf != nil {
for key, values := range mf.Value {
if key == "model" {
continue
}
for _, value := range values {
writer.WriteField(key, value)
}
}
}
if mf != nil && mf.File != nil {
// Check if "image" field exists in any form, including array notation
var imageFiles []*multipart.FileHeader
var exists bool
// First check for standard "image" field
if imageFiles, exists = mf.File["image"]; !exists || len(imageFiles) == 0 {
// If not found, check for "image[]" field
if imageFiles, exists = mf.File["image[]"]; !exists || len(imageFiles) == 0 {
// If still not found, iterate through all fields to find any that start with "image["
foundArrayImages := false
for fieldName, files := range mf.File {
if strings.HasPrefix(fieldName, "image[") && len(files) > 0 {
foundArrayImages = true
imageFiles = append(imageFiles, files...)
}
}
// If no image fields found at all
if !foundArrayImages && (len(imageFiles) == 0) {
return nil, errors.New("image is required")
}
}
}
// Process all image files
for i, fileHeader := range imageFiles {
file, err := fileHeader.Open()
if err != nil {
return nil, fmt.Errorf("failed to open image file %d: %w", i, err)
}
// If multiple images, use image[] as the field name
fieldName := "image"
if len(imageFiles) > 1 {
fieldName = "image[]"
}
// Determine MIME type based on file extension
mimeType := detectImageMimeType(fileHeader.Filename)
// Create a form file with the appropriate content type
h := make(textproto.MIMEHeader)
h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, fieldName, fileHeader.Filename))
h.Set("Content-Type", mimeType)
part, err := writer.CreatePart(h)
if err != nil {
return nil, fmt.Errorf("create form part failed for image %d: %w", i, err)
}
if _, err := io.Copy(part, file); err != nil {
return nil, fmt.Errorf("copy file failed for image %d: %w", i, err)
}
// Close immediately after copying to avoid deferring inside the loop
_ = file.Close()
}
// Handle mask file if present
if maskFiles, exists := mf.File["mask"]; exists && len(maskFiles) > 0 {
maskFile, err := maskFiles[0].Open()
if err != nil {
return nil, errors.New("failed to open mask file")
}
// Determine MIME type for mask file
mimeType := detectImageMimeType(maskFiles[0].Filename)
// Create a form file with the appropriate content type
h := make(textproto.MIMEHeader)
h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="mask"; filename="%s"`, maskFiles[0].Filename))
h.Set("Content-Type", mimeType)
maskPart, err := writer.CreatePart(h)
if err != nil {
return nil, errors.New("create form file failed for mask")
}
if _, err := io.Copy(maskPart, maskFile); err != nil {
return nil, errors.New("copy mask file failed")
}
_ = maskFile.Close()
}
} else {
return nil, errors.New("no multipart form data found")
}
// Close the multipart writer to set the terminating boundary
writer.Close()
c.Request.Header.Set("Content-Type", writer.FormDataContentType())
return &requestBody, nil
default:
return request, nil
}
}
// detectImageMimeType determines the MIME type based on the file extension
func detectImageMimeType(filename string) string {
ext := strings.ToLower(filepath.Ext(filename))
switch ext {
case ".jpg", ".jpeg":
return "image/jpeg"
case ".png":
return "image/png"
case ".webp":
return "image/webp"
default:
// Try to detect from extension if possible
if strings.HasPrefix(ext, ".jp") {
return "image/jpeg"
}
// Default to png as a fallback
return "image/png"
}
}
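// ConvertOpenAIResponsesRequest strips a reasoning-effort suffix from the model name and moves it into Reasoning.Effort.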
func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) {
// Parse the reasoning-effort suffix from the model name
effort, originModel := parseReasoningEffortFromModelSuffix(request.Model)
if effort != "" {
request.Reasoning.Effort = effort
request.Model = originModel
}
return request, nil
}
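// DoRequest dispatches the request as a multipart form upload, a WebSocket session, or a plain API call depending on the relay mode.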
func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (any, error) {
if info.RelayMode == relayconstant.RelayModeAudioTranscription ||
info.RelayMode == relayconstant.RelayModeAudioTranslation ||
info.RelayMode == relayconstant.RelayModeImagesEdits {
return channel.DoFormRequest(a, c, info, requestBody)
} else if info.RelayMode == relayconstant.RelayModeRealtime {
return channel.DoWssRequest(a, c, info, requestBody)
} else {
return channel.DoApiRequest(a, c, info, requestBody)
}
}
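// DoResponse routes the upstream response to the handler for the current relay mode (realtime, audio, image, rerank, responses, or chat completions).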
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage any, err *types.NewAPIError) {
switch info.RelayMode {
case relayconstant.RelayModeRealtime:
err, usage = OpenaiRealtimeHandler(c, info)
case relayconstant.RelayModeAudioSpeech:
usage = OpenaiTTSHandler(c, resp, info)
case relayconstant.RelayModeAudioTranslation:
fallthrough
case relayconstant.RelayModeAudioTranscription:
err, usage = OpenaiSTTHandler(c, resp, info, a.ResponseFormat)
case relayconstant.RelayModeImagesGenerations, relayconstant.RelayModeImagesEdits:
usage, err = OpenaiHandlerWithUsage(c, info, resp)
case relayconstant.RelayModeRerank:
usage, err = common_handler.RerankHandler(c, info, resp)
case relayconstant.RelayModeResponses:
if info.IsStream {
usage, err = OaiResponsesStreamHandler(c, info, resp)
} else {
usage, err = OaiResponsesHandler(c, info, resp)
}
default:
if info.IsStream {
usage, err = OaiStreamHandler(c, info, resp)
} else {
usage, err = OpenaiHandler(c, info, resp)
}
}
return
}
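// GetModelList returns the model list for the configured channel type.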
func (a *Adaptor) GetModelList() []string {
switch a.ChannelType {
case constant.ChannelType360:
return ai360.ModelList
case constant.ChannelTypeLingYiWanWu:
return lingyiwanwu.ModelList
case constant.ChannelTypeMiniMax:
return minimax.ModelList
case constant.ChannelTypeXinference:
return xinference.ModelList
case constant.ChannelTypeOpenRouter:
return openrouter.ModelList
default:
return ModelList
}
}
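// GetChannelName returns the display name for the configured channel type.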
func (a *Adaptor) GetChannelName() string {
switch a.ChannelType {
case constant.ChannelType360:
return ai360.ChannelName
case constant.ChannelTypeLingYiWanWu:
return lingyiwanwu.ChannelName
case constant.ChannelTypeMiniMax:
return minimax.ChannelName
case constant.ChannelTypeXinference:
return xinference.ChannelName
case constant.ChannelTypeOpenRouter:
return openrouter.ChannelName
default:
return ChannelName
}
}