feat(relay): Add pass-through request option for global settings

Author: 1808837298@qq.com
Date:   2025-03-11 17:02:35 +08:00
Parent: 8de29fbb83
Commit: 6f24dddcb2

4 changed files with 150 additions and 20 deletions

View File

@@ -17,6 +17,7 @@ import (
"one-api/relay/helper" "one-api/relay/helper"
"one-api/service" "one-api/service"
"one-api/setting" "one-api/setting"
"one-api/setting/model_setting"
"strings" "strings"
"time" "time"
@@ -152,33 +153,32 @@ func TextHelper(c *gin.Context) (openaiErr *dto.OpenAIErrorWithStatusCode) {
 	adaptor.Init(relayInfo)
 	var requestBody io.Reader
-	//if relayInfo.ChannelType == common.ChannelTypeOpenAI && !isModelMapped {
-	//	body, err := common.GetRequestBody(c)
-	//	if err != nil {
-	//		return service.OpenAIErrorWrapperLocal(err, "get_request_body_failed", http.StatusInternalServerError)
-	//	}
-	//	requestBody = bytes.NewBuffer(body)
-	//} else {
-	//
-	//}
-	convertedRequest, err := adaptor.ConvertRequest(c, relayInfo, textRequest)
-	if err != nil {
-		return service.OpenAIErrorWrapperLocal(err, "convert_request_failed", http.StatusInternalServerError)
-	}
-	jsonData, err := json.Marshal(convertedRequest)
-	if err != nil {
-		return service.OpenAIErrorWrapperLocal(err, "json_marshal_failed", http.StatusInternalServerError)
-	}
-	requestBody = bytes.NewBuffer(jsonData)
-	statusCodeMappingStr := c.GetString("status_code_mapping")
+	if model_setting.GetGlobalSettings().PassThroughRequestEnabled {
+		body, err := common.GetRequestBody(c)
+		if err != nil {
+			return service.OpenAIErrorWrapperLocal(err, "get_request_body_failed", http.StatusInternalServerError)
+		}
+		requestBody = bytes.NewBuffer(body)
+	} else {
+		convertedRequest, err := adaptor.ConvertRequest(c, relayInfo, textRequest)
+		if err != nil {
+			return service.OpenAIErrorWrapperLocal(err, "convert_request_failed", http.StatusInternalServerError)
+		}
+		jsonData, err := json.Marshal(convertedRequest)
+		if err != nil {
+			return service.OpenAIErrorWrapperLocal(err, "json_marshal_failed", http.StatusInternalServerError)
+		}
+		requestBody = bytes.NewBuffer(jsonData)
+	}
 	var httpResp *http.Response
 	resp, err := adaptor.DoRequest(c, relayInfo, requestBody)
 	if err != nil {
 		return service.OpenAIErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
 	}
+	statusCodeMappingStr := c.GetString("status_code_mapping")
 	if resp != nil {
 		httpResp = resp.(*http.Response)
 		relayInfo.IsStream = relayInfo.IsStream || strings.HasPrefix(httpResp.Header.Get("Content-Type"), "text/event-stream")
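
Read as a whole, the hunk above switches between two ways of building the upstream request body: when pass-through is enabled the raw client body is forwarded untouched, otherwise the request is converted by the channel adaptor and re-marshalled. A minimal standalone sketch of that decision (buildRequestBody is a hypothetical helper for illustration, not code from this repository):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
)

// buildRequestBody mirrors the branch added in TextHelper: with pass-through
// enabled the raw client body is forwarded as-is; otherwise the converted
// request is re-marshalled to JSON.
func buildRequestBody(passThrough bool, rawBody []byte, converted any) (io.Reader, error) {
	if passThrough {
		return bytes.NewBuffer(rawBody), nil
	}
	jsonData, err := json.Marshal(converted)
	if err != nil {
		return nil, err
	}
	return bytes.NewBuffer(jsonData), nil
}

func main() {
	raw := []byte(`{"model":"gpt-4o","stream":true}`)
	converted := map[string]any{"model": "upstream-model", "stream": true}

	for _, passThrough := range []bool{true, false} {
		body, _ := buildRequestBody(passThrough, raw, converted)
		data, _ := io.ReadAll(body)
		fmt.Printf("pass_through=%v -> %s\n", passThrough, data)
	}
}

Because ConvertRequest is only reached in the else branch, enabling pass-through also skips model mapping and channel-specific adaptation, which is why the admin UI added later in this commit warns that the option should be enabled with care.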

View File

@@ -0,0 +1,26 @@
+package model_setting
+
+import (
+	"one-api/setting/config"
+)
+
+type GlobalSettings struct {
+	PassThroughRequestEnabled bool `json:"pass_through_request_enabled"`
+}
+
+// Default configuration
+var defaultOpenaiSettings = GlobalSettings{
+	PassThroughRequestEnabled: false,
+}
+
+// Global instance
+var globalSettings = defaultOpenaiSettings
+
+func init() {
+	// Register with the global configuration manager
+	config.GlobalConfig.Register("global", &globalSettings)
+}
+
+func GetGlobalSettings() *GlobalSettings {
+	return &globalSettings
+}
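
The init function above binds the settings struct to the "global" prefix, and the field's json tag matches the option key global.pass_through_request_enabled that the admin UI (added later in this commit) saves. How the configuration manager applies such a dotted key is not shown here; a rough, self-contained sketch of one plausible mechanism (an assumption, not the code of one-api/setting/config):

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// GlobalSettings mirrors the struct registered above.
type GlobalSettings struct {
	PassThroughRequestEnabled bool `json:"pass_through_request_enabled"`
}

// applyOption is a hypothetical sketch: split "global.pass_through_request_enabled"
// into a group prefix and a field name, then unmarshal a one-field JSON object so
// the json tag selects the matching struct field.
func applyOption(groups map[string]any, key, value string) error {
	prefix, field, ok := strings.Cut(key, ".")
	if !ok {
		return fmt.Errorf("key %q has no group prefix", key)
	}
	group, ok := groups[prefix]
	if !ok {
		return fmt.Errorf("no settings group registered for %q", prefix)
	}
	payload := fmt.Sprintf(`{"%s": %s}`, field, value)
	return json.Unmarshal([]byte(payload), group)
}

func main() {
	settings := &GlobalSettings{}
	groups := map[string]any{"global": settings}

	if err := applyOption(groups, "global.pass_through_request_enabled", "true"); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", settings) // &{PassThroughRequestEnabled:true}
}

The value arrives as the string "true" or "false" because SettingGlobalModel serializes the switch with String() before saving, which is also why ModelSetting.js now treats option keys ending in "enabled" as booleans when it loads them back.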

View File

@@ -6,6 +6,7 @@ import { API, showError, showSuccess } from '../helpers';
 import { useTranslation } from 'react-i18next';
 import SettingGeminiModel from '../pages/Setting/Model/SettingGeminiModel.js';
 import SettingClaudeModel from '../pages/Setting/Model/SettingClaudeModel.js';
+import SettingGlobalModel from '../pages/Setting/Model/SettingGlobalModel.js';
 
 const ModelSetting = () => {
   const { t } = useTranslation();
@@ -16,6 +17,7 @@ const ModelSetting = () => {
     'claude.thinking_adapter_enabled': true,
     'claude.default_max_tokens': '',
     'claude.thinking_adapter_budget_tokens_percentage': 0.8,
+    'global.pass_through_request_enabled': false,
   });
   let [loading, setLoading] = useState(false);
@@ -35,7 +37,7 @@ const ModelSetting = () => {
           item.value = JSON.stringify(JSON.parse(item.value), null, 2);
         }
         if (
-          item.key.endsWith('Enabled')
+          item.key.endsWith('Enabled') || item.key.endsWith('enabled')
         ) {
           newInputs[item.key] = item.value === 'true' ? true : false;
         } else {
@@ -67,6 +69,10 @@ const ModelSetting = () => {
   return (
     <>
       <Spin spinning={loading} size='large'>
+        {/* OpenAI */}
+        <Card style={{ marginTop: '10px' }}>
+          <SettingGlobalModel options={inputs} refresh={onRefresh} />
+        </Card>
         {/* Gemini */}
         <Card style={{ marginTop: '10px' }}>
           <SettingGeminiModel options={inputs} refresh={onRefresh} />

View File

@@ -0,0 +1,98 @@
+import React, { useEffect, useState, useRef } from 'react';
+import { Button, Col, Form, Row, Spin } from '@douyinfe/semi-ui';
+import {
+  compareObjects,
+  API,
+  showError,
+  showSuccess,
+  showWarning,
+  verifyJSON,
+} from '../../../helpers';
+import { useTranslation } from 'react-i18next';
+
+export default function SettingGlobalModel(props) {
+  const { t } = useTranslation();
+  const [loading, setLoading] = useState(false);
+  const [inputs, setInputs] = useState({
+    'global.pass_through_request_enabled': false,
+  });
+  const refForm = useRef();
+  const [inputsRow, setInputsRow] = useState(inputs);
+
+  function onSubmit() {
+    const updateArray = compareObjects(inputs, inputsRow);
+    if (!updateArray.length) return showWarning(t('你似乎并没有修改什么'));
+    const requestQueue = updateArray.map((item) => {
+      let value = '';
+      if (typeof inputs[item.key] === 'boolean') {
+        value = String(inputs[item.key]);
+      } else {
+        value = inputs[item.key];
+      }
+      return API.put('/api/option/', {
+        key: item.key,
+        value,
+      });
+    });
+    setLoading(true);
+    Promise.all(requestQueue)
+      .then((res) => {
+        if (requestQueue.length === 1) {
+          if (res.includes(undefined)) return;
+        } else if (requestQueue.length > 1) {
+          if (res.includes(undefined)) return showError(t('部分保存失败,请重试'));
+        }
+        showSuccess(t('保存成功'));
+        props.refresh();
+      })
+      .catch(() => {
+        showError(t('保存失败,请重试'));
+      })
+      .finally(() => {
+        setLoading(false);
+      });
+  }
+
+  useEffect(() => {
+    const currentInputs = {};
+    for (let key in props.options) {
+      if (Object.keys(inputs).includes(key)) {
+        currentInputs[key] = props.options[key];
+      }
+    }
+    setInputs(currentInputs);
+    setInputsRow(structuredClone(currentInputs));
+    refForm.current.setValues(currentInputs);
+  }, [props.options]);
+
+  return (
+    <>
+      <Spin spinning={loading}>
+        <Form
+          values={inputs}
+          getFormApi={(formAPI) => (refForm.current = formAPI)}
+          style={{ marginBottom: 15 }}
+        >
+          <Form.Section text={t('全局设置')}>
+            <Row>
+              <Col xs={24} sm={12} md={8} lg={8} xl={8}>
+                <Form.Switch
+                  label={t('启用请求透传')}
+                  field={'global.pass_through_request_enabled'}
+                  onChange={(value) => setInputs({ ...inputs, 'global.pass_through_request_enabled': value })}
+                  extraText={'开启后,所有请求将直接透传给上游,不会进行任何处理(重定向和渠道适配也将失效),请谨慎开启'}
+                />
+              </Col>
+            </Row>
+            <Row>
+              <Button size='default' onClick={onSubmit}>
+                {t('保存')}
+              </Button>
+            </Row>
+          </Form.Section>
+        </Form>
+      </Spin>
+    </>
+  );
+}
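
For completeness, the component above persists each changed setting as a single {key, value} pair via PUT /api/option/ (the endpoint and payload shape come from the component itself). A hedged Go sketch of the equivalent call; the base URL is a placeholder and the real request is issued by the authenticated admin frontend:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// Sketch of the request SettingGlobalModel issues when the switch is saved:
// one PUT per changed option, carrying the option key and a stringified value.
func main() {
	payload, _ := json.Marshal(map[string]string{
		"key":   "global.pass_through_request_enabled",
		"value": "true",
	})

	req, err := http.NewRequest(http.MethodPut, "http://localhost:3000/api/option/", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// An admin session cookie or access token would be required here.

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}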