feat: add system prompt override functionality in channel settings and request handling #1468

CaIon
2025-08-09 12:53:06 +08:00
parent 3b61617cb1
commit 2a804b6c02
10 changed files with 153 additions and 92 deletions

View File

@@ -40,4 +40,6 @@ const (
ContextKeyUserGroup ContextKey = "user_group"
ContextKeyUsingGroup ContextKey = "group"
ContextKeyUserName ContextKey = "username"
ContextKeySystemPromptOverride ContextKey = "system_prompt_override"
)
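
The new ContextKeySystemPromptOverride constant slots into the existing typed-key list. Below is a minimal sketch of how such typed keys are commonly set and read through gin's request context; the helper names mirror the calls used later in this commit, but the project's actual implementations in the common package may differ.

```go
// Hypothetical sketch of typed context-key helpers wrapped around *gin.Context;
// the real common.SetContextKey / common.GetContextKeyBool may be implemented
// differently.
package contextkeys

import "github.com/gin-gonic/gin"

type ContextKey string

const ContextKeySystemPromptOverride ContextKey = "system_prompt_override"

func SetContextKey(c *gin.Context, key ContextKey, value any) {
	c.Set(string(key), value)
}

// GetContextKeyBool returns false when the key is absent or not a bool, which
// is why pre-seeding the flag to false at channel setup keeps later reads safe.
func GetContextKeyBool(c *gin.Context, key ContextKey) bool {
	v, ok := c.Get(string(key))
	if !ok {
		return false
	}
	b, _ := v.(bool)
	return b
}
```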

View File

@@ -6,4 +6,5 @@ type ChannelSettings struct {
Proxy string `json:"proxy"`
PassThroughBodyEnabled bool `json:"pass_through_body_enabled,omitempty"`
SystemPrompt string `json:"system_prompt,omitempty"`
SystemPromptOverride bool `json:"system_prompt_override,omitempty"`
}
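
For reference, a small self-contained sketch of how the extended settings struct round-trips through encoding/json; the sample values are illustrative only, and the field tags match the struct above.

```go
// Illustrative round-trip of ChannelSettings with the new system_prompt_override field.
package main

import (
	"encoding/json"
	"fmt"
)

type ChannelSettings struct {
	Proxy                  string `json:"proxy"`
	PassThroughBodyEnabled bool   `json:"pass_through_body_enabled,omitempty"`
	SystemPrompt           string `json:"system_prompt,omitempty"`
	SystemPromptOverride   bool   `json:"system_prompt_override,omitempty"`
}

func main() {
	raw := `{"system_prompt":"Always answer in English.","system_prompt_override":true}`
	var s ChannelSettings
	if err := json.Unmarshal([]byte(raw), &s); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", s)
	// {Proxy: PassThroughBodyEnabled:false SystemPrompt:Always answer in English. SystemPromptOverride:true}
}
```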

View File

@@ -78,6 +78,8 @@ func (r *GeneralOpenAIRequest) GetSystemRoleName() string {
if !strings.HasPrefix(r.Model, "o1-mini") && !strings.HasPrefix(r.Model, "o1-preview") {
return "developer"
}
} else if strings.HasPrefix(r.Model, "gpt-5") {
return "developer"
}
return "system"
}
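
The hunk shows only part of the function. A hedged reconstruction of how the new gpt-5 branch sits alongside the existing o1 checks follows; the outer condition is inferred from the visible context lines, not taken from the source.

```go
// Hedged reconstruction of GetSystemRoleName after this hunk; only the gpt-5
// branch is confirmed by the diff, the rest is inferred context.
func (r *GeneralOpenAIRequest) GetSystemRoleName() string {
	if strings.HasPrefix(r.Model, "o1") { // outer condition inferred, not shown in the hunk
		if !strings.HasPrefix(r.Model, "o1-mini") && !strings.HasPrefix(r.Model, "o1-preview") {
			return "developer"
		}
	} else if strings.HasPrefix(r.Model, "gpt-5") {
		return "developer"
	}
	return "system"
}
```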

View File

@@ -267,6 +267,8 @@ func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, mode
common.SetContextKey(c, constant.ContextKeyChannelKey, key)
common.SetContextKey(c, constant.ContextKeyChannelBaseUrl, channel.GetBaseURL())
common.SetContextKey(c, constant.ContextKeySystemPromptOverride, false)
// TODO: unify api_version handling
switch channel.Type {
case constant.ChannelTypeAzure:

View File

@@ -201,6 +201,26 @@ func TextHelper(c *gin.Context) (newAPIError *types.NewAPIError) {
Content: relayInfo.ChannelSetting.SystemPrompt,
}
request.Messages = append([]dto.Message{systemMessage}, request.Messages...)
} else if relayInfo.ChannelSetting.SystemPromptOverride {
common.SetContextKey(c, constant.ContextKeySystemPromptOverride, true)
// If a channel system prompt is set and override is allowed, prepend it to the user's system message
for i, message := range request.Messages {
if message.Role == request.GetSystemRoleName() {
if message.IsStringContent() {
request.Messages[i].SetStringContent(relayInfo.ChannelSetting.SystemPrompt + "\n" + message.StringContent())
} else {
contents := message.ParseContent()
contents = append([]dto.MediaContent{
{
Type: dto.ContentTypeText,
Text: relayInfo.ChannelSetting.SystemPrompt,
},
}, contents...)
request.Messages[i].Content = contents
}
break
}
}
}
}
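
To make the override behavior concrete, here is a minimal, self-contained sketch of the string-content path above: the channel's SystemPrompt is prepended to the first user-supplied system message, separated by a newline. The Message type and prependSystemPrompt helper are illustrative, not the project's actual dto types.

```go
// Minimal sketch of the prepend behavior; types and helper names are hypothetical.
package main

import "fmt"

type Message struct {
	Role    string
	Content string
}

func prependSystemPrompt(messages []Message, systemRole, channelPrompt string) []Message {
	for i, m := range messages {
		if m.Role == systemRole {
			messages[i].Content = channelPrompt + "\n" + m.Content
			break // only the first system message is touched
		}
	}
	return messages
}

func main() {
	msgs := []Message{
		{Role: "system", Content: "You are a concise assistant."},
		{Role: "user", Content: "Hello"},
	}
	msgs = prependSystemPrompt(msgs, "system", "Always answer in English.")
	fmt.Println(msgs[0].Content)
	// Output:
	// Always answer in English.
	// You are a concise assistant.
}
```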

View File

@@ -28,6 +28,12 @@ func GenerateTextOtherInfo(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, m
other["is_model_mapped"] = true
other["upstream_model_name"] = relayInfo.UpstreamModelName
}
isSystemPromptOverwritten := common.GetContextKeyBool(ctx, constant.ContextKeySystemPromptOverride)
if isSystemPromptOverwritten {
other["is_system_prompt_overwritten"] = true
}
adminInfo := make(map[string]interface{})
adminInfo["use_channel"] = ctx.GetStringSlice("use_channel")
isMultiKey := common.GetContextKeyBool(ctx, constant.ContextKeyChannelIsMultiKey)
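
The flag set in TextHelper surfaces in the per-request log details. An illustrative sketch of the resulting "other" payload is shown below; the key names match the ones written above, while the values are invented for the example.

```go
// Illustrative-only sketch of the log "other" payload once the override flag is recorded.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	other := map[string]interface{}{
		"is_model_mapped":              true,
		"upstream_model_name":          "gpt-4o",
		"is_system_prompt_overwritten": true, // only present when the flag was set in TextHelper
	}
	b, _ := json.Marshal(other)
	fmt.Println(string(b))
	// {"is_model_mapped":true,"is_system_prompt_overwritten":true,"upstream_model_name":"gpt-4o"}
}
```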

View File

@@ -131,6 +131,7 @@ const EditChannelModal = (props) => {
proxy: '',
pass_through_body_enabled: false,
system_prompt: '',
system_prompt_override: false,
};
const [batch, setBatch] = useState(false);
const [multiToSingle, setMultiToSingle] = useState(false);
@@ -340,12 +341,15 @@ const EditChannelModal = (props) => {
data.proxy = parsedSettings.proxy || '';
data.pass_through_body_enabled = parsedSettings.pass_through_body_enabled || false;
data.system_prompt = parsedSettings.system_prompt || '';
data.system_prompt_override = parsedSettings.system_prompt_override || false;
} catch (error) {
console.error('解析渠道设置失败:', error);
data.force_format = false;
data.thinking_to_content = false;
data.proxy = '';
data.pass_through_body_enabled = false;
data.system_prompt = '';
data.system_prompt_override = false;
}
} else {
data.force_format = false;
@@ -353,6 +357,7 @@ const EditChannelModal = (props) => {
data.proxy = '';
data.pass_through_body_enabled = false;
data.system_prompt = '';
data.system_prompt_override = false;
}
setInputs(data);
@@ -372,6 +377,7 @@ const EditChannelModal = (props) => {
proxy: data.proxy,
pass_through_body_enabled: data.pass_through_body_enabled,
system_prompt: data.system_prompt,
system_prompt_override: data.system_prompt_override || false,
});
// console.log(data);
} else {
@@ -573,6 +579,7 @@ const EditChannelModal = (props) => {
proxy: '',
pass_through_body_enabled: false,
system_prompt: '',
system_prompt_override: false,
});
// Reset key mode state
setKeyMode('append');
@@ -721,6 +728,7 @@ const EditChannelModal = (props) => {
proxy: localInputs.proxy || '',
pass_through_body_enabled: localInputs.pass_through_body_enabled || false,
system_prompt: localInputs.system_prompt || '',
system_prompt_override: localInputs.system_prompt_override || false,
};
localInputs.setting = JSON.stringify(channelExtraSettings);
@@ -730,6 +738,7 @@ const EditChannelModal = (props) => {
delete localInputs.proxy;
delete localInputs.pass_through_body_enabled;
delete localInputs.system_prompt;
delete localInputs.system_prompt_override;
let res;
localInputs.auto_ban = localInputs.auto_ban ? 1 : 0;
@@ -1722,6 +1731,14 @@ const EditChannelModal = (props) => {
showClear
extraText={t('用户优先:如果用户在请求中指定了系统提示词,将优先使用用户的设置')}
/>
<Form.Switch
field='system_prompt_override'
label={t('系统提示词拼接')}
checkedText={t('开')}
uncheckedText={t('关')}
onChange={(value) => handleChannelSettingsChange('system_prompt_override', value)}
extraText={t('如果用户请求中包含系统提示词,则使用此设置拼接到用户的系统提示词前面')}
/>
</Card>
</div>
</Spin>

View File

@@ -34,7 +34,6 @@ import {
getLogOther,
renderModelTag,
renderClaudeLogContent,
renderClaudeModelPriceSimple,
renderLogContent,
renderModelPriceSimple,
renderAudioModelPrice,
@@ -538,7 +537,7 @@ export const getLogsColumns = ({
);
}
let content = other?.claude
? renderClaudeModelPriceSimple(
? renderModelPriceSimple(
other.model_ratio,
other.model_price,
other.group_ratio,
@@ -547,6 +546,10 @@ export const getLogsColumns = ({
other.cache_ratio || 1.0,
other.cache_creation_tokens || 0,
other.cache_creation_ratio || 1.0,
false,
1.0,
other?.is_system_prompt_overwritten,
'claude'
)
: renderModelPriceSimple(
other.model_ratio,
@@ -555,13 +558,19 @@ export const getLogsColumns = ({
other?.user_group_ratio,
other.cache_tokens || 0,
other.cache_ratio || 1.0,
0,
1.0,
false,
1.0,
other?.is_system_prompt_overwritten,
'openai'
);
return (
<Typography.Paragraph
ellipsis={{
rows: 2,
rows: 3,
}}
style={{ maxWidth: 240 }}
style={{ maxWidth: 240, whiteSpace: 'pre-line' }}
>
{content}
</Typography.Paragraph>

View File

@@ -953,6 +953,71 @@ function getEffectiveRatio(groupRatio, user_group_ratio) {
};
}
// Shared core for simple price rendering (used by OpenAI-like and Claude-like variants)
function renderPriceSimpleCore({
modelRatio,
modelPrice = -1,
groupRatio,
user_group_ratio,
cacheTokens = 0,
cacheRatio = 1.0,
cacheCreationTokens = 0,
cacheCreationRatio = 1.0,
image = false,
imageRatio = 1.0,
isSystemPromptOverride = false
}) {
const { ratio: effectiveGroupRatio, label: ratioLabel } = getEffectiveRatio(
groupRatio,
user_group_ratio,
);
const finalGroupRatio = effectiveGroupRatio;
if (modelPrice !== -1) {
return i18next.t('价格:${{price}} * {{ratioType}}{{ratio}}', {
price: modelPrice,
ratioType: ratioLabel,
ratio: finalGroupRatio,
});
}
const parts = [];
// base: model ratio
parts.push(i18next.t('模型: {{ratio}}'));
// cache part (label differs when image input is present)
if (cacheTokens !== 0) {
parts.push(i18next.t('缓存: {{cacheRatio}}'));
}
// cache creation part (Claude specific if passed)
if (cacheCreationTokens !== 0) {
parts.push(i18next.t('缓存创建: {{cacheCreationRatio}}'));
}
// image part
if (image) {
parts.push(i18next.t('图片输入: {{imageRatio}}'));
}
parts.push(`{{ratioType}}: {{groupRatio}}`);
let result = i18next.t(parts.join(' * '), {
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: finalGroupRatio,
cacheRatio: cacheRatio,
cacheCreationRatio: cacheCreationRatio,
imageRatio: imageRatio,
})
if (isSystemPromptOverride) {
result += '\n\r' + i18next.t('系统提示覆盖');
}
return result;
}
export function renderModelPrice(
inputTokens,
completionTokens,
@@ -1245,56 +1310,26 @@ export function renderModelPriceSimple(
user_group_ratio,
cacheTokens = 0,
cacheRatio = 1.0,
cacheCreationTokens = 0,
cacheCreationRatio = 1.0,
image = false,
imageRatio = 1.0,
isSystemPromptOverride = false,
provider = 'openai',
) {
const { ratio: effectiveGroupRatio, label: ratioLabel } = getEffectiveRatio(groupRatio, user_group_ratio);
groupRatio = effectiveGroupRatio;
if (modelPrice !== -1) {
return i18next.t('价格:${{price}} * {{ratioType}}{{ratio}}', {
price: modelPrice,
ratioType: ratioLabel,
ratio: groupRatio,
});
} else {
if (image && cacheTokens !== 0) {
return i18next.t(
'模型: {{ratio}} * {{ratioType}}: {{groupRatio}} * 缓存倍率: {{cacheRatio}} * 图片输入倍率: {{imageRatio}}',
{
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
cacheRatio: cacheRatio,
imageRatio: imageRatio,
},
);
} else if (image) {
return i18next.t(
'模型: {{ratio}} * {{ratioType}}: {{groupRatio}} * 图片输入倍率: {{imageRatio}}',
{
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
imageRatio: imageRatio,
},
);
} else if (cacheTokens !== 0) {
return i18next.t(
'模型: {{ratio}} * 分组: {{groupRatio}} * 缓存: {{cacheRatio}}',
{
ratio: modelRatio,
groupRatio: groupRatio,
cacheRatio: cacheRatio,
},
);
} else {
return i18next.t('模型: {{ratio}} * {{ratioType}}{{groupRatio}}', {
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
});
}
}
return renderPriceSimpleCore({
modelRatio,
modelPrice,
groupRatio,
user_group_ratio,
cacheTokens,
cacheRatio,
cacheCreationTokens,
cacheCreationRatio,
image,
imageRatio,
isSystemPromptOverride
});
}
export function renderAudioModelPrice(
@@ -1635,46 +1670,7 @@ export function renderClaudeLogContent(
}
}
export function renderClaudeModelPriceSimple(
modelRatio,
modelPrice = -1,
groupRatio,
user_group_ratio,
cacheTokens = 0,
cacheRatio = 1.0,
cacheCreationTokens = 0,
cacheCreationRatio = 1.0,
) {
const { ratio: effectiveGroupRatio, label: ratioLabel } = getEffectiveRatio(groupRatio, user_group_ratio);
groupRatio = effectiveGroupRatio;
if (modelPrice !== -1) {
return i18next.t('价格:${{price}} * {{ratioType}}{{ratio}}', {
price: modelPrice,
ratioType: ratioLabel,
ratio: groupRatio,
});
} else {
if (cacheTokens !== 0 || cacheCreationTokens !== 0) {
return i18next.t(
'模型: {{ratio}} * {{ratioType}}: {{groupRatio}} * 缓存: {{cacheRatio}}',
{
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
cacheRatio: cacheRatio,
cacheCreationRatio: cacheCreationRatio,
},
);
} else {
return i18next.t('模型: {{ratio}} * {{ratioType}}: {{groupRatio}}', {
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
});
}
}
}
// Unified into renderModelPriceSimple; any remaining legacy call sites should pass provider='claude' instead
/**
* rehype plugin: split paragraph-level text nodes into per-word <span> elements and add a fade-in animation class.

View File

@@ -1804,5 +1804,11 @@
"已选择 {{selected}} / {{total}}": "Selected {{selected}} / {{total}}",
"新获取的模型": "New models",
"已有的模型": "Existing models",
"搜索模型": "Search models"
"搜索模型": "Search models",
"缓存: {{cacheRatio}}": "Cache: {{cacheRatio}}",
"缓存创建: {{cacheCreationRatio}}": "Cache creation: {{cacheCreationRatio}}",
"图片输入: {{imageRatio}}": "Image input: {{imageRatio}}",
"系统提示覆盖": "System prompt override",
"模型: {{ratio}}": "Model: {{ratio}}",
"专属倍率": "Exclusive group ratio"
}