diff --git a/relay/relay-text.go b/relay/relay-text.go
index b1c9d515..2f080033 100644
--- a/relay/relay-text.go
+++ b/relay/relay-text.go
@@ -17,6 +17,7 @@ import (
     "one-api/relay/helper"
     "one-api/service"
     "one-api/setting"
+    "one-api/setting/model_setting"
     "strings"
     "time"
 
@@ -152,33 +153,32 @@ func TextHelper(c *gin.Context) (openaiErr *dto.OpenAIErrorWithStatusCode) {
     adaptor.Init(relayInfo)
     var requestBody io.Reader
 
-    //if relayInfo.ChannelType == common.ChannelTypeOpenAI && !isModelMapped {
-    //    body, err := common.GetRequestBody(c)
-    //    if err != nil {
-    //        return service.OpenAIErrorWrapperLocal(err, "get_request_body_failed", http.StatusInternalServerError)
-    //    }
-    //    requestBody = bytes.NewBuffer(body)
-    //} else {
-    //
-    //}
-
-    convertedRequest, err := adaptor.ConvertRequest(c, relayInfo, textRequest)
-    if err != nil {
-        return service.OpenAIErrorWrapperLocal(err, "convert_request_failed", http.StatusInternalServerError)
+    if model_setting.GetGlobalSettings().PassThroughRequestEnabled {
+        body, err := common.GetRequestBody(c)
+        if err != nil {
+            return service.OpenAIErrorWrapperLocal(err, "get_request_body_failed", http.StatusInternalServerError)
+        }
+        requestBody = bytes.NewBuffer(body)
+    } else {
+        convertedRequest, err := adaptor.ConvertRequest(c, relayInfo, textRequest)
+        if err != nil {
+            return service.OpenAIErrorWrapperLocal(err, "convert_request_failed", http.StatusInternalServerError)
+        }
+        jsonData, err := json.Marshal(convertedRequest)
+        if err != nil {
+            return service.OpenAIErrorWrapperLocal(err, "json_marshal_failed", http.StatusInternalServerError)
+        }
+        requestBody = bytes.NewBuffer(jsonData)
     }
-    jsonData, err := json.Marshal(convertedRequest)
-    if err != nil {
-        return service.OpenAIErrorWrapperLocal(err, "json_marshal_failed", http.StatusInternalServerError)
-    }
-    requestBody = bytes.NewBuffer(jsonData)
-    statusCodeMappingStr := c.GetString("status_code_mapping")
 
     var httpResp *http.Response
     resp, err := adaptor.DoRequest(c, relayInfo, requestBody)
     if err != nil {
         return service.OpenAIErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
     }
 
+    statusCodeMappingStr := c.GetString("status_code_mapping")
+
     if resp != nil {
         httpResp = resp.(*http.Response)
         relayInfo.IsStream = relayInfo.IsStream || strings.HasPrefix(httpResp.Header.Get("Content-Type"), "text/event-stream")
diff --git a/setting/model_setting/global.go b/setting/model_setting/global.go
new file mode 100644
index 00000000..de2851bb
--- /dev/null
+++ b/setting/model_setting/global.go
@@ -0,0 +1,26 @@
+package model_setting
+
+import (
+    "one-api/setting/config"
+)
+
+type GlobalSettings struct {
+    PassThroughRequestEnabled bool `json:"pass_through_request_enabled"`
+}
+
+// default settings
+var defaultOpenaiSettings = GlobalSettings{
+    PassThroughRequestEnabled: false,
+}
+
+// global instance
+var globalSettings = defaultOpenaiSettings
+
+func init() {
+    // register with the global config manager
+    config.GlobalConfig.Register("global", &globalSettings)
+}
+
+func GetGlobalSettings() *GlobalSettings {
+    return &globalSettings
+}
diff --git a/web/src/components/ModelSetting.js b/web/src/components/ModelSetting.js
index 904b4015..4815abbc 100644
--- a/web/src/components/ModelSetting.js
+++ b/web/src/components/ModelSetting.js
@@ -6,6 +6,7 @@ import { API, showError, showSuccess } from '../helpers';
 import { useTranslation } from 'react-i18next';
 import SettingGeminiModel from '../pages/Setting/Model/SettingGeminiModel.js';
 import SettingClaudeModel from '../pages/Setting/Model/SettingClaudeModel.js';
+import SettingGlobalModel from '../pages/Setting/Model/SettingGlobalModel.js';
 
 const ModelSetting = () => {
   const { t } = useTranslation();
@@ -16,6 +17,7 @@ const ModelSetting = () => {
     'claude.thinking_adapter_enabled': true,
     'claude.default_max_tokens': '',
     'claude.thinking_adapter_budget_tokens_percentage': 0.8,
+    'global.pass_through_request_enabled': false,
   });
   let [loading, setLoading] = useState(false);
 
@@ -35,7 +37,7 @@ const ModelSetting = () => {
           item.value = JSON.stringify(JSON.parse(item.value), null, 2);
         }
         if (
-          item.key.endsWith('Enabled')
+          item.key.endsWith('Enabled') || item.key.endsWith('enabled')
         ) {
           newInputs[item.key] = item.value === 'true' ? true : false;
         } else {
@@ -67,6 +69,10 @@ const ModelSetting = () => {
 
   return (
     <>
+      {/* OpenAI */}
+      <Card style={{ marginTop: '10px' }}>
+        <SettingGlobalModel options={inputs} refresh={onRefresh} />
+      </Card>
       {/* Gemini */}
       <Card style={{ marginTop: '10px' }}>
         <SettingGeminiModel options={inputs} refresh={onRefresh} />
diff --git a/web/src/pages/Setting/Model/SettingGlobalModel.js b/web/src/pages/Setting/Model/SettingGlobalModel.js
new file mode 100644
index 00000000..9a3ac04c
--- /dev/null
+++ b/web/src/pages/Setting/Model/SettingGlobalModel.js
@@ -0,0 +1,98 @@
+import React, { useEffect, useState, useRef } from 'react';
+import { Button, Col, Form, Row, Spin } from '@douyinfe/semi-ui';
+import {
+  compareObjects,
+  API,
+  showError,
+  showSuccess,
+  showWarning, verifyJSON
+} from '../../../helpers';
+import { useTranslation } from 'react-i18next';
+
+export default function SettingGlobalModel(props) {
+  const { t } = useTranslation();
+
+  const [loading, setLoading] = useState(false);
+  const [inputs, setInputs] = useState({
+    'global.pass_through_request_enabled': false,
+  });
+  const refForm = useRef();
+  const [inputsRow, setInputsRow] = useState(inputs);
+
+  function onSubmit() {
+    const updateArray = compareObjects(inputs, inputsRow);
+    if (!updateArray.length) return showWarning(t('你似乎并没有修改什么'));
+    const requestQueue = updateArray.map((item) => {
+      let value = '';
+      if (typeof inputs[item.key] === 'boolean') {
+        value = String(inputs[item.key]);
+      } else {
+        value = inputs[item.key];
+      }
+      return API.put('/api/option/', {
+        key: item.key,
+        value,
+      });
+    });
+    setLoading(true);
+    Promise.all(requestQueue)
+      .then((res) => {
+        if (requestQueue.length === 1) {
+          if (res.includes(undefined)) return;
+        } else if (requestQueue.length > 1) {
+          if (res.includes(undefined)) return showError(t('部分保存失败,请重试'));
+        }
+        showSuccess(t('保存成功'));
+        props.refresh();
+      })
+      .catch(() => {
+        showError(t('保存失败,请重试'));
+      })
+      .finally(() => {
+        setLoading(false);
+      });
+  }
+
+  useEffect(() => {
+    const currentInputs = {};
+    for (let key in props.options) {
+      if (Object.keys(inputs).includes(key)) {
+        currentInputs[key] = props.options[key];
+      }
+    }
+    setInputs(currentInputs);
+    setInputsRow(structuredClone(currentInputs));
+    refForm.current.setValues(currentInputs);
+  }, [props.options]);
+
+  return (
+    <>
+      <Spin spinning={loading}>
+        <Form
+          values={inputs}
+          getFormApi={(formAPI) => (refForm.current = formAPI)}
+          style={{ marginBottom: 15 }}
+        >
+          <Form.Section text={t('全局设置')}>
+            <Row>
+              <Col span={8}>
+                <Form.Switch
+                  field={'global.pass_through_request_enabled'}
+                  label={t('启用请求透传')}
+                  size='default'
+                  onChange={(value) => setInputs({ ...inputs, 'global.pass_through_request_enabled': value })}
+                  extraText={'开启后,所有请求将直接透传给上游,不会进行任何处理(重定向和渠道适配也将失效),请谨慎开启'}
+                />
+              </Col>
+            </Row>
+            <Row>
+              <Button size='default' onClick={onSubmit}>
+                {t('保存')}
+              </Button>
+            </Row>
+          </Form.Section>
+        </Form>
+      </Spin>
+    </>
+  );
+}