package openai import ( "bytes" "encoding/json" "errors" "fmt" "io" "mime/multipart" "net/http" "net/textproto" "one-api/common" "one-api/constant" "one-api/dto" "one-api/relay/channel" "one-api/relay/channel/ai360" "one-api/relay/channel/lingyiwanwu" "one-api/relay/channel/minimax" "one-api/relay/channel/moonshot" "one-api/relay/channel/openrouter" "one-api/relay/channel/xinference" relaycommon "one-api/relay/common" "one-api/relay/common_handler" relayconstant "one-api/relay/constant" "one-api/service" "one-api/types" "path/filepath" "strings" "github.com/gin-gonic/gin" ) type Adaptor struct { ChannelType int ResponseFormat string } // parseReasoningEffortFromModelSuffix 从模型名称中解析推理级别 // support OAI models: o1-mini/o3-mini/o4-mini/o1/o3 etc... // minimal effort only available in gpt-5 func parseReasoningEffortFromModelSuffix(model string) (string, string) { effortSuffixes := []string{"-high", "-minimal", "-low", "-medium"} for _, suffix := range effortSuffixes { if strings.HasSuffix(model, suffix) { effort := strings.TrimPrefix(suffix, "-") originModel := strings.TrimSuffix(model, suffix) return effort, originModel } } return "", model } func (a *Adaptor) ConvertGeminiRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeminiChatRequest) (any, error) { // 使用 service.GeminiToOpenAIRequest 转换请求格式 openaiRequest, err := service.GeminiToOpenAIRequest(request, info) if err != nil { return nil, err } return a.ConvertOpenAIRequest(c, info, openaiRequest) } func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.ClaudeRequest) (any, error) { //if !strings.Contains(request.Model, "claude") { // return nil, fmt.Errorf("you are using openai channel type with path /v1/messages, only claude model supported convert, but got %s", request.Model) //} //if common.DebugEnabled { // bodyBytes := []byte(common.GetJsonString(request)) // err := os.WriteFile(fmt.Sprintf("claude_request_%s.txt", c.GetString(common.RequestIdKey)), bodyBytes, 0644) // if err != nil { // println(fmt.Sprintf("failed to save request body to file: %v", err)) // } //} aiRequest, err := service.ClaudeToOpenAIRequest(*request, info) if err != nil { return nil, err } //if common.DebugEnabled { // println(fmt.Sprintf("convert claude to openai request result: %s", common.GetJsonString(aiRequest))) // // Save request body to file for debugging // bodyBytes := []byte(common.GetJsonString(aiRequest)) // err = os.WriteFile(fmt.Sprintf("claude_to_openai_request_%s.txt", c.GetString(common.RequestIdKey)), bodyBytes, 0644) // if err != nil { // println(fmt.Sprintf("failed to save request body to file: %v", err)) // } //} if info.SupportStreamOptions && info.IsStream { aiRequest.StreamOptions = &dto.StreamOptions{ IncludeUsage: true, } } return a.ConvertOpenAIRequest(c, info, aiRequest) } func (a *Adaptor) Init(info *relaycommon.RelayInfo) { a.ChannelType = info.ChannelType // initialize ThinkingContentInfo when thinking_to_content is enabled if info.ChannelSetting.ThinkingToContent { info.ThinkingContentInfo = relaycommon.ThinkingContentInfo{ IsFirstThinkingContent: true, SendLastThinkingContent: false, HasSentThinkingContent: false, } } } func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) { if info.RelayMode == relayconstant.RelayModeRealtime { if strings.HasPrefix(info.BaseUrl, "https://") { baseUrl := strings.TrimPrefix(info.BaseUrl, "https://") baseUrl = "wss://" + baseUrl info.BaseUrl = baseUrl } else if strings.HasPrefix(info.BaseUrl, "http://") { baseUrl := strings.TrimPrefix(info.BaseUrl, "http://") baseUrl = "ws://" + baseUrl info.BaseUrl = baseUrl } } switch info.ChannelType { case constant.ChannelTypeAzure: apiVersion := info.ApiVersion if apiVersion == "" { apiVersion = constant.AzureDefaultAPIVersion } // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api requestURL := strings.Split(info.RequestURLPath, "?")[0] requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, apiVersion) task := strings.TrimPrefix(requestURL, "/v1/") // 特殊处理 responses API if info.RelayMode == relayconstant.RelayModeResponses { requestURL = fmt.Sprintf("/openai/v1/responses?api-version=preview") return relaycommon.GetFullRequestURL(info.BaseUrl, requestURL, info.ChannelType), nil } model_ := info.UpstreamModelName // 2025年5月10日后创建的渠道不移除. if info.ChannelCreateTime < constant.AzureNoRemoveDotTime { model_ = strings.Replace(model_, ".", "", -1) } // https://github.com/songquanpeng/one-api/issues/67 requestURL = fmt.Sprintf("/openai/deployments/%s/%s", model_, task) if info.RelayMode == relayconstant.RelayModeRealtime { requestURL = fmt.Sprintf("/openai/realtime?deployment=%s&api-version=%s", model_, apiVersion) } return relaycommon.GetFullRequestURL(info.BaseUrl, requestURL, info.ChannelType), nil case constant.ChannelTypeMiniMax: return minimax.GetRequestURL(info) case constant.ChannelTypeCustom: url := info.BaseUrl url = strings.Replace(url, "{model}", info.UpstreamModelName, -1) return url, nil default: if info.RelayFormat == relaycommon.RelayFormatClaude || info.RelayFormat == relaycommon.RelayFormatGemini { return fmt.Sprintf("%s/v1/chat/completions", info.BaseUrl), nil } return relaycommon.GetFullRequestURL(info.BaseUrl, info.RequestURLPath, info.ChannelType), nil } } func (a *Adaptor) SetupRequestHeader(c *gin.Context, header *http.Header, info *relaycommon.RelayInfo) error { channel.SetupApiRequestHeader(info, c, header) if info.ChannelType == constant.ChannelTypeAzure { header.Set("api-key", info.ApiKey) return nil } if info.ChannelType == constant.ChannelTypeOpenAI && "" != info.Organization { header.Set("OpenAI-Organization", info.Organization) } if info.RelayMode == relayconstant.RelayModeRealtime { swp := c.Request.Header.Get("Sec-WebSocket-Protocol") if swp != "" { items := []string{ "realtime", "openai-insecure-api-key." + info.ApiKey, "openai-beta.realtime-v1", } header.Set("Sec-WebSocket-Protocol", strings.Join(items, ",")) //req.Header.Set("Sec-WebSocket-Key", c.Request.Header.Get("Sec-WebSocket-Key")) //req.Header.Set("Sec-Websocket-Extensions", c.Request.Header.Get("Sec-Websocket-Extensions")) //req.Header.Set("Sec-Websocket-Version", c.Request.Header.Get("Sec-Websocket-Version")) } else { header.Set("openai-beta", "realtime=v1") header.Set("Authorization", "Bearer "+info.ApiKey) } } else { header.Set("Authorization", "Bearer "+info.ApiKey) } if info.ChannelType == constant.ChannelTypeOpenRouter { header.Set("HTTP-Referer", "https://www.newapi.ai") header.Set("X-Title", "New API") } return nil } func (a *Adaptor) ConvertOpenAIRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeneralOpenAIRequest) (any, error) { if request == nil { return nil, errors.New("request is nil") } if info.ChannelType != constant.ChannelTypeOpenAI && info.ChannelType != constant.ChannelTypeAzure { request.StreamOptions = nil } if info.ChannelType == constant.ChannelTypeOpenRouter { if len(request.Usage) == 0 { request.Usage = json.RawMessage(`{"include":true}`) } // 适配 OpenRouter 的 thinking 后缀 if strings.HasSuffix(info.UpstreamModelName, "-thinking") { info.UpstreamModelName = strings.TrimSuffix(info.UpstreamModelName, "-thinking") request.Model = info.UpstreamModelName if len(request.Reasoning) == 0 { reasoning := map[string]any{ "enabled": true, } if request.ReasoningEffort != "" && request.ReasoningEffort != "none" { reasoning["effort"] = request.ReasoningEffort } marshal, err := common.Marshal(reasoning) if err != nil { return nil, fmt.Errorf("error marshalling reasoning: %w", err) } request.Reasoning = marshal } } else { if len(request.Reasoning) == 0 { // 适配 OpenAI 的 ReasoningEffort 格式 if request.ReasoningEffort != "" { reasoning := map[string]any{ "enabled": true, } if request.ReasoningEffort != "none" { reasoning["effort"] = request.ReasoningEffort marshal, err := common.Marshal(reasoning) if err != nil { return nil, fmt.Errorf("error marshalling reasoning: %w", err) } request.Reasoning = marshal } } } } } if strings.HasPrefix(request.Model, "o") || strings.HasPrefix(request.Model, "gpt-5") { if request.MaxCompletionTokens == 0 && request.MaxTokens != 0 { request.MaxCompletionTokens = request.MaxTokens request.MaxTokens = 0 } if strings.HasPrefix(request.Model, "o") { request.Temperature = nil } if strings.HasPrefix(request.Model, "gpt-5") { if request.Model != "gpt-5-chat-latest" { request.Temperature = nil } } // 转换模型推理力度后缀 effort, originModel := parseReasoningEffortFromModelSuffix(request.Model) if effort != "" { request.ReasoningEffort = effort request.Model = originModel } info.ReasoningEffort = request.ReasoningEffort info.UpstreamModelName = request.Model // o系列模型developer适配(o1-mini除外) if !strings.HasPrefix(request.Model, "o1-mini") && !strings.HasPrefix(request.Model, "o1-preview") { //修改第一个Message的内容,将system改为developer if len(request.Messages) > 0 && request.Messages[0].Role == "system" { request.Messages[0].Role = "developer" } } } return request, nil } func (a *Adaptor) ConvertRerankRequest(c *gin.Context, relayMode int, request dto.RerankRequest) (any, error) { return request, nil } func (a *Adaptor) ConvertEmbeddingRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.EmbeddingRequest) (any, error) { return request, nil } func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) { a.ResponseFormat = request.ResponseFormat if info.RelayMode == relayconstant.RelayModeAudioSpeech { jsonData, err := json.Marshal(request) if err != nil { return nil, fmt.Errorf("error marshalling object: %w", err) } return bytes.NewReader(jsonData), nil } else { var requestBody bytes.Buffer writer := multipart.NewWriter(&requestBody) writer.WriteField("model", request.Model) // 获取所有表单字段 formData := c.Request.PostForm // 遍历表单字段并打印输出 for key, values := range formData { if key == "model" { continue } for _, value := range values { writer.WriteField(key, value) } } // 添加文件字段 file, header, err := c.Request.FormFile("file") if err != nil { return nil, errors.New("file is required") } defer file.Close() part, err := writer.CreateFormFile("file", header.Filename) if err != nil { return nil, errors.New("create form file failed") } if _, err := io.Copy(part, file); err != nil { return nil, errors.New("copy file failed") } // 关闭 multipart 编写器以设置分界线 writer.Close() c.Request.Header.Set("Content-Type", writer.FormDataContentType()) return &requestBody, nil } } func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) { switch info.RelayMode { case relayconstant.RelayModeImagesEdits: var requestBody bytes.Buffer writer := multipart.NewWriter(&requestBody) writer.WriteField("model", request.Model) // 获取所有表单字段 formData := c.Request.PostForm // 遍历表单字段并打印输出 for key, values := range formData { if key == "model" { continue } for _, value := range values { writer.WriteField(key, value) } } // Parse the multipart form to handle both single image and multiple images if err := c.Request.ParseMultipartForm(32 << 20); err != nil { // 32MB max memory return nil, errors.New("failed to parse multipart form") } if c.Request.MultipartForm != nil && c.Request.MultipartForm.File != nil { // Check if "image" field exists in any form, including array notation var imageFiles []*multipart.FileHeader var exists bool // First check for standard "image" field if imageFiles, exists = c.Request.MultipartForm.File["image"]; !exists || len(imageFiles) == 0 { // If not found, check for "image[]" field if imageFiles, exists = c.Request.MultipartForm.File["image[]"]; !exists || len(imageFiles) == 0 { // If still not found, iterate through all fields to find any that start with "image[" foundArrayImages := false for fieldName, files := range c.Request.MultipartForm.File { if strings.HasPrefix(fieldName, "image[") && len(files) > 0 { foundArrayImages = true for _, file := range files { imageFiles = append(imageFiles, file) } } } // If no image fields found at all if !foundArrayImages && (len(imageFiles) == 0) { return nil, errors.New("image is required") } } } // Process all image files for i, fileHeader := range imageFiles { file, err := fileHeader.Open() if err != nil { return nil, fmt.Errorf("failed to open image file %d: %w", i, err) } defer file.Close() // If multiple images, use image[] as the field name fieldName := "image" if len(imageFiles) > 1 { fieldName = "image[]" } // Determine MIME type based on file extension mimeType := detectImageMimeType(fileHeader.Filename) // Create a form file with the appropriate content type h := make(textproto.MIMEHeader) h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, fieldName, fileHeader.Filename)) h.Set("Content-Type", mimeType) part, err := writer.CreatePart(h) if err != nil { return nil, fmt.Errorf("create form part failed for image %d: %w", i, err) } if _, err := io.Copy(part, file); err != nil { return nil, fmt.Errorf("copy file failed for image %d: %w", i, err) } } // Handle mask file if present if maskFiles, exists := c.Request.MultipartForm.File["mask"]; exists && len(maskFiles) > 0 { maskFile, err := maskFiles[0].Open() if err != nil { return nil, errors.New("failed to open mask file") } defer maskFile.Close() // Determine MIME type for mask file mimeType := detectImageMimeType(maskFiles[0].Filename) // Create a form file with the appropriate content type h := make(textproto.MIMEHeader) h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="mask"; filename="%s"`, maskFiles[0].Filename)) h.Set("Content-Type", mimeType) maskPart, err := writer.CreatePart(h) if err != nil { return nil, errors.New("create form file failed for mask") } if _, err := io.Copy(maskPart, maskFile); err != nil { return nil, errors.New("copy mask file failed") } } } else { return nil, errors.New("no multipart form data found") } // 关闭 multipart 编写器以设置分界线 writer.Close() c.Request.Header.Set("Content-Type", writer.FormDataContentType()) return bytes.NewReader(requestBody.Bytes()), nil default: return request, nil } } // detectImageMimeType determines the MIME type based on the file extension func detectImageMimeType(filename string) string { ext := strings.ToLower(filepath.Ext(filename)) switch ext { case ".jpg", ".jpeg": return "image/jpeg" case ".png": return "image/png" case ".webp": return "image/webp" default: // Try to detect from extension if possible if strings.HasPrefix(ext, ".jp") { return "image/jpeg" } // Default to png as a fallback return "image/png" } } func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) { // 转换模型推理力度后缀 effort, originModel := parseReasoningEffortFromModelSuffix(request.Model) if effort != "" { request.Reasoning.Effort = effort request.Model = originModel } return request, nil } func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (any, error) { if info.RelayMode == relayconstant.RelayModeAudioTranscription || info.RelayMode == relayconstant.RelayModeAudioTranslation || info.RelayMode == relayconstant.RelayModeImagesEdits { return channel.DoFormRequest(a, c, info, requestBody) } else if info.RelayMode == relayconstant.RelayModeRealtime { return channel.DoWssRequest(a, c, info, requestBody) } else { return channel.DoApiRequest(a, c, info, requestBody) } } func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage any, err *types.NewAPIError) { switch info.RelayMode { case relayconstant.RelayModeRealtime: err, usage = OpenaiRealtimeHandler(c, info) case relayconstant.RelayModeAudioSpeech: usage = OpenaiTTSHandler(c, resp, info) case relayconstant.RelayModeAudioTranslation: fallthrough case relayconstant.RelayModeAudioTranscription: err, usage = OpenaiSTTHandler(c, resp, info, a.ResponseFormat) case relayconstant.RelayModeImagesGenerations, relayconstant.RelayModeImagesEdits: usage, err = OpenaiHandlerWithUsage(c, info, resp) case relayconstant.RelayModeRerank: usage, err = common_handler.RerankHandler(c, info, resp) case relayconstant.RelayModeResponses: if info.IsStream { usage, err = OaiResponsesStreamHandler(c, info, resp) } else { usage, err = OaiResponsesHandler(c, info, resp) } default: if info.IsStream { usage, err = OaiStreamHandler(c, info, resp) } else { usage, err = OpenaiHandler(c, info, resp) } } return } func (a *Adaptor) GetModelList() []string { switch a.ChannelType { case constant.ChannelType360: return ai360.ModelList case constant.ChannelTypeMoonshot: return moonshot.ModelList case constant.ChannelTypeLingYiWanWu: return lingyiwanwu.ModelList case constant.ChannelTypeMiniMax: return minimax.ModelList case constant.ChannelTypeXinference: return xinference.ModelList case constant.ChannelTypeOpenRouter: return openrouter.ModelList default: return ModelList } } func (a *Adaptor) GetChannelName() string { switch a.ChannelType { case constant.ChannelType360: return ai360.ChannelName case constant.ChannelTypeMoonshot: return moonshot.ChannelName case constant.ChannelTypeLingYiWanWu: return lingyiwanwu.ChannelName case constant.ChannelTypeMiniMax: return minimax.ChannelName case constant.ChannelTypeXinference: return xinference.ChannelName case constant.ChannelTypeOpenRouter: return openrouter.ChannelName default: return ChannelName } }