Merge remote-tracking branch 'origin/alpha' into alpha

This commit is contained in:
t0ng7u
2025-08-09 13:10:20 +08:00
22 changed files with 276 additions and 193 deletions

View File

@@ -40,4 +40,6 @@ const (
ContextKeyUserGroup ContextKey = "user_group" ContextKeyUserGroup ContextKey = "user_group"
ContextKeyUsingGroup ContextKey = "group" ContextKeyUsingGroup ContextKey = "group"
ContextKeyUserName ContextKey = "username" ContextKeyUserName ContextKey = "username"
ContextKeySystemPromptOverride ContextKey = "system_prompt_override"
) )

View File

@@ -145,6 +145,22 @@ func UpdateMidjourneyTaskBulk() {
buttonStr, _ := json.Marshal(responseItem.Buttons) buttonStr, _ := json.Marshal(responseItem.Buttons)
task.Buttons = string(buttonStr) task.Buttons = string(buttonStr)
} }
// 映射 VideoUrl
task.VideoUrl = responseItem.VideoUrl
// 映射 VideoUrls - 将数组序列化为 JSON 字符串
if responseItem.VideoUrls != nil && len(responseItem.VideoUrls) > 0 {
videoUrlsStr, err := json.Marshal(responseItem.VideoUrls)
if err != nil {
common.LogError(ctx, fmt.Sprintf("序列化 VideoUrls 失败: %v", err))
task.VideoUrls = "[]" // 失败时设置为空数组
} else {
task.VideoUrls = string(videoUrlsStr)
}
} else {
task.VideoUrls = "" // 空值时清空字段
}
shouldReturnQuota := false shouldReturnQuota := false
if (task.Progress != "100%" && responseItem.FailReason != "") || (task.Progress == "100%" && task.Status == "FAILURE") { if (task.Progress != "100%" && responseItem.FailReason != "") || (task.Progress == "100%" && task.Status == "FAILURE") {
common.LogInfo(ctx, task.MjId+" 构建失败,"+task.FailReason) common.LogInfo(ctx, task.MjId+" 构建失败,"+task.FailReason)
@@ -208,6 +224,20 @@ func checkMjTaskNeedUpdate(oldTask *model.Midjourney, newTask dto.MidjourneyDto)
if oldTask.Progress != "100%" && newTask.FailReason != "" { if oldTask.Progress != "100%" && newTask.FailReason != "" {
return true return true
} }
// 检查 VideoUrl 是否需要更新
if oldTask.VideoUrl != newTask.VideoUrl {
return true
}
// 检查 VideoUrls 是否需要更新
if newTask.VideoUrls != nil && len(newTask.VideoUrls) > 0 {
newVideoUrlsStr, _ := json.Marshal(newTask.VideoUrls)
if oldTask.VideoUrls != string(newVideoUrlsStr) {
return true
}
} else if oldTask.VideoUrls != "" {
// 如果新数据没有 VideoUrls 但旧数据有,需要更新(清空)
return true
}
return false return false
} }

View File

@@ -6,4 +6,5 @@ type ChannelSettings struct {
Proxy string `json:"proxy"` Proxy string `json:"proxy"`
PassThroughBodyEnabled bool `json:"pass_through_body_enabled,omitempty"` PassThroughBodyEnabled bool `json:"pass_through_body_enabled,omitempty"`
SystemPrompt string `json:"system_prompt,omitempty"` SystemPrompt string `json:"system_prompt,omitempty"`
SystemPromptOverride bool `json:"system_prompt_override,omitempty"`
} }

View File

@@ -216,10 +216,14 @@ type GeminiEmbeddingRequest struct {
OutputDimensionality int `json:"outputDimensionality,omitempty"` OutputDimensionality int `json:"outputDimensionality,omitempty"`
} }
type GeminiEmbeddingResponse struct { type GeminiBatchEmbeddingRequest struct {
Embedding ContentEmbedding `json:"embedding"` Requests []*GeminiEmbeddingRequest `json:"requests"`
} }
type ContentEmbedding struct { type GeminiEmbedding struct {
Values []float64 `json:"values"` Values []float64 `json:"values"`
} }
type GeminiBatchEmbeddingResponse struct {
Embeddings []*GeminiEmbedding `json:"embeddings"`
}

View File

@@ -78,6 +78,8 @@ func (r *GeneralOpenAIRequest) GetSystemRoleName() string {
if !strings.HasPrefix(r.Model, "o1-mini") && !strings.HasPrefix(r.Model, "o1-preview") { if !strings.HasPrefix(r.Model, "o1-mini") && !strings.HasPrefix(r.Model, "o1-preview") {
return "developer" return "developer"
} }
} else if strings.HasPrefix(r.Model, "gpt-5") {
return "developer"
} }
return "system" return "system"
} }

View File

@@ -267,6 +267,8 @@ func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, mode
common.SetContextKey(c, constant.ContextKeyChannelKey, key) common.SetContextKey(c, constant.ContextKeyChannelKey, key)
common.SetContextKey(c, constant.ContextKeyChannelBaseUrl, channel.GetBaseURL()) common.SetContextKey(c, constant.ContextKeyChannelBaseUrl, channel.GetBaseURL())
common.SetContextKey(c, constant.ContextKeySystemPromptOverride, false)
// TODO: api_version统一 // TODO: api_version统一
switch channel.Type { switch channel.Type {
case constant.ChannelTypeAzure: case constant.ChannelTypeAzure:

View File

@@ -114,7 +114,7 @@ func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
if strings.HasPrefix(info.UpstreamModelName, "text-embedding") || if strings.HasPrefix(info.UpstreamModelName, "text-embedding") ||
strings.HasPrefix(info.UpstreamModelName, "embedding") || strings.HasPrefix(info.UpstreamModelName, "embedding") ||
strings.HasPrefix(info.UpstreamModelName, "gemini-embedding") { strings.HasPrefix(info.UpstreamModelName, "gemini-embedding") {
return fmt.Sprintf("%s/%s/models/%s:embedContent", info.BaseUrl, version, info.UpstreamModelName), nil return fmt.Sprintf("%s/%s/models/%s:batchEmbedContents", info.BaseUrl, version, info.UpstreamModelName), nil
} }
action := "generateContent" action := "generateContent"
@@ -159,29 +159,35 @@ func (a *Adaptor) ConvertEmbeddingRequest(c *gin.Context, info *relaycommon.Rela
if len(inputs) == 0 { if len(inputs) == 0 {
return nil, errors.New("input is empty") return nil, errors.New("input is empty")
} }
// process all inputs
// only process the first input geminiRequests := make([]map[string]interface{}, 0, len(inputs))
geminiRequest := dto.GeminiEmbeddingRequest{ for _, input := range inputs {
Content: dto.GeminiChatContent{ geminiRequest := map[string]interface{}{
Parts: []dto.GeminiPart{ "model": fmt.Sprintf("models/%s", info.UpstreamModelName),
{ "content": dto.GeminiChatContent{
Text: inputs[0], Parts: []dto.GeminiPart{
{
Text: input,
},
}, },
}, },
},
}
// set specific parameters for different models
// https://ai.google.dev/api/embeddings?hl=zh-cn#method:-models.embedcontent
switch info.UpstreamModelName {
case "text-embedding-004":
// except embedding-001 supports setting `OutputDimensionality`
if request.Dimensions > 0 {
geminiRequest.OutputDimensionality = request.Dimensions
} }
// set specific parameters for different models
// https://ai.google.dev/api/embeddings?hl=zh-cn#method:-models.embedcontent
switch info.UpstreamModelName {
case "text-embedding-004","gemini-embedding-exp-03-07","gemini-embedding-001":
// Only newer models introduced after 2024 support OutputDimensionality
if request.Dimensions > 0 {
geminiRequest["outputDimensionality"] = request.Dimensions
}
}
geminiRequests = append(geminiRequests, geminiRequest)
} }
return geminiRequest, nil return map[string]interface{}{
"requests": geminiRequests,
}, nil
} }
func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) { func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) {

View File

@@ -1071,7 +1071,7 @@ func GeminiEmbeddingHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *h
return nil, types.NewOpenAIError(readErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError) return nil, types.NewOpenAIError(readErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
} }
var geminiResponse dto.GeminiEmbeddingResponse var geminiResponse dto.GeminiBatchEmbeddingResponse
if jsonErr := common.Unmarshal(responseBody, &geminiResponse); jsonErr != nil { if jsonErr := common.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError) return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
} }
@@ -1079,14 +1079,16 @@ func GeminiEmbeddingHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *h
// convert to openai format response // convert to openai format response
openAIResponse := dto.OpenAIEmbeddingResponse{ openAIResponse := dto.OpenAIEmbeddingResponse{
Object: "list", Object: "list",
Data: []dto.OpenAIEmbeddingResponseItem{ Data: make([]dto.OpenAIEmbeddingResponseItem, 0, len(geminiResponse.Embeddings)),
{ Model: info.UpstreamModelName,
Object: "embedding", }
Embedding: geminiResponse.Embedding.Values,
Index: 0, for i, embedding := range geminiResponse.Embeddings {
}, openAIResponse.Data = append(openAIResponse.Data, dto.OpenAIEmbeddingResponseItem{
}, Object: "embedding",
Model: info.UpstreamModelName, Embedding: embedding.Values,
Index: i,
})
} }
// calculate usage // calculate usage

View File

@@ -54,8 +54,7 @@ func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Header, info *relaycommon.RelayInfo) error { func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Header, info *relaycommon.RelayInfo) error {
channel.SetupApiRequestHeader(info, c, req) channel.SetupApiRequestHeader(info, c, req)
token := getZhipuToken(info.ApiKey) req.Set("Authorization", "Bearer "+info.ApiKey)
req.Set("Authorization", token)
return nil return nil
} }

View File

@@ -1,69 +1,10 @@
package zhipu_4v package zhipu_4v
import ( import (
"github.com/golang-jwt/jwt"
"one-api/common"
"one-api/dto" "one-api/dto"
"strings" "strings"
"sync"
"time"
) )
// https://open.bigmodel.cn/doc/api#chatglm_std
// chatglm_std, chatglm_lite
// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke
// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke
var zhipuTokens sync.Map
var expSeconds int64 = 24 * 3600
// getZhipuToken returns a signed JWT for the given Zhipu API key, caching
// tokens per key in zhipuTokens and reusing each one until its recorded
// expiry time passes.
//
// The key is expected in the form "<id>.<secret>"; a key that does not
// split into exactly two parts logs an error and yields an empty string
// (the caller then fails upstream auth instead of panicking).
func getZhipuToken(apikey string) string {
// Fast path: reuse a cached token that has not expired yet.
data, ok := zhipuTokens.Load(apikey)
if ok {
tokenData := data.(tokenData)
if time.Now().Before(tokenData.ExpiryTime) {
return tokenData.Token
}
}
// Split "<id>.<secret>": id goes into the claims, secret signs the JWT.
split := strings.Split(apikey, ".")
if len(split) != 2 {
common.SysError("invalid zhipu key: " + apikey)
return ""
}
id := split[0]
secret := split[1]
// exp and timestamp claims are expressed in milliseconds since the epoch.
expMillis := time.Now().Add(time.Duration(expSeconds)*time.Second).UnixNano() / 1e6
expiryTime := time.Now().Add(time.Duration(expSeconds) * time.Second)
timestamp := time.Now().UnixNano() / 1e6
payload := jwt.MapClaims{
"api_key": id,
"exp": expMillis,
"timestamp": timestamp,
}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, payload)
// "sign_type" is a non-standard JWT header set alongside HS256,
// presumably required by Zhipu's auth scheme — TODO confirm against API docs.
token.Header["alg"] = "HS256"
token.Header["sign_type"] = "SIGN"
tokenString, err := token.SignedString([]byte(secret))
if err != nil {
// Signing failed: return empty; upstream request will be rejected.
return ""
}
// Cache the freshly signed token together with its expiry for reuse.
zhipuTokens.Store(apikey, tokenData{
Token: tokenString,
ExpiryTime: expiryTime,
})
return tokenString
}
func requestOpenAI2Zhipu(request dto.GeneralOpenAIRequest) *dto.GeneralOpenAIRequest { func requestOpenAI2Zhipu(request dto.GeneralOpenAIRequest) *dto.GeneralOpenAIRequest {
messages := make([]dto.Message, 0, len(request.Messages)) messages := make([]dto.Message, 0, len(request.Messages))
for _, message := range request.Messages { for _, message := range request.Messages {

View File

@@ -201,6 +201,26 @@ func TextHelper(c *gin.Context) (newAPIError *types.NewAPIError) {
Content: relayInfo.ChannelSetting.SystemPrompt, Content: relayInfo.ChannelSetting.SystemPrompt,
} }
request.Messages = append([]dto.Message{systemMessage}, request.Messages...) request.Messages = append([]dto.Message{systemMessage}, request.Messages...)
} else if relayInfo.ChannelSetting.SystemPromptOverride {
common.SetContextKey(c, constant.ContextKeySystemPromptOverride, true)
// 如果有系统提示,且允许覆盖,则拼接到前面
for i, message := range request.Messages {
if message.Role == request.GetSystemRoleName() {
if message.IsStringContent() {
request.Messages[i].SetStringContent(relayInfo.ChannelSetting.SystemPrompt + "\n" + message.StringContent())
} else {
contents := message.ParseContent()
contents = append([]dto.MediaContent{
{
Type: dto.ContentTypeText,
Text: relayInfo.ChannelSetting.SystemPrompt,
},
}, contents...)
request.Messages[i].Content = contents
}
break
}
}
} }
} }

View File

@@ -28,6 +28,12 @@ func GenerateTextOtherInfo(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, m
other["is_model_mapped"] = true other["is_model_mapped"] = true
other["upstream_model_name"] = relayInfo.UpstreamModelName other["upstream_model_name"] = relayInfo.UpstreamModelName
} }
isSystemPromptOverwritten := common.GetContextKeyBool(ctx, constant.ContextKeySystemPromptOverride)
if isSystemPromptOverwritten {
other["is_system_prompt_overwritten"] = true
}
adminInfo := make(map[string]interface{}) adminInfo := make(map[string]interface{})
adminInfo["use_channel"] = ctx.GetStringSlice("use_channel") adminInfo["use_channel"] = ctx.GetStringSlice("use_channel")
isMultiKey := common.GetContextKeyBool(ctx, constant.ContextKeyChannelIsMultiKey) isMultiKey := common.GetContextKeyBool(ctx, constant.ContextKeyChannelIsMultiKey)

View File

@@ -131,6 +131,7 @@ const EditChannelModal = (props) => {
proxy: '', proxy: '',
pass_through_body_enabled: false, pass_through_body_enabled: false,
system_prompt: '', system_prompt: '',
system_prompt_override: false,
}; };
const [batch, setBatch] = useState(false); const [batch, setBatch] = useState(false);
const [multiToSingle, setMultiToSingle] = useState(false); const [multiToSingle, setMultiToSingle] = useState(false);
@@ -340,12 +341,15 @@ const EditChannelModal = (props) => {
data.proxy = parsedSettings.proxy || ''; data.proxy = parsedSettings.proxy || '';
data.pass_through_body_enabled = parsedSettings.pass_through_body_enabled || false; data.pass_through_body_enabled = parsedSettings.pass_through_body_enabled || false;
data.system_prompt = parsedSettings.system_prompt || ''; data.system_prompt = parsedSettings.system_prompt || '';
data.system_prompt_override = parsedSettings.system_prompt_override || false;
} catch (error) { } catch (error) {
console.error('解析渠道设置失败:', error); console.error('解析渠道设置失败:', error);
data.force_format = false; data.force_format = false;
data.thinking_to_content = false; data.thinking_to_content = false;
data.proxy = ''; data.proxy = '';
data.pass_through_body_enabled = false; data.pass_through_body_enabled = false;
data.system_prompt = '';
data.system_prompt_override = false;
} }
} else { } else {
data.force_format = false; data.force_format = false;
@@ -353,6 +357,7 @@ const EditChannelModal = (props) => {
data.proxy = ''; data.proxy = '';
data.pass_through_body_enabled = false; data.pass_through_body_enabled = false;
data.system_prompt = ''; data.system_prompt = '';
data.system_prompt_override = false;
} }
setInputs(data); setInputs(data);
@@ -372,6 +377,7 @@ const EditChannelModal = (props) => {
proxy: data.proxy, proxy: data.proxy,
pass_through_body_enabled: data.pass_through_body_enabled, pass_through_body_enabled: data.pass_through_body_enabled,
system_prompt: data.system_prompt, system_prompt: data.system_prompt,
system_prompt_override: data.system_prompt_override || false,
}); });
// console.log(data); // console.log(data);
} else { } else {
@@ -573,6 +579,7 @@ const EditChannelModal = (props) => {
proxy: '', proxy: '',
pass_through_body_enabled: false, pass_through_body_enabled: false,
system_prompt: '', system_prompt: '',
system_prompt_override: false,
}); });
// 重置密钥模式状态 // 重置密钥模式状态
setKeyMode('append'); setKeyMode('append');
@@ -721,6 +728,7 @@ const EditChannelModal = (props) => {
proxy: localInputs.proxy || '', proxy: localInputs.proxy || '',
pass_through_body_enabled: localInputs.pass_through_body_enabled || false, pass_through_body_enabled: localInputs.pass_through_body_enabled || false,
system_prompt: localInputs.system_prompt || '', system_prompt: localInputs.system_prompt || '',
system_prompt_override: localInputs.system_prompt_override || false,
}; };
localInputs.setting = JSON.stringify(channelExtraSettings); localInputs.setting = JSON.stringify(channelExtraSettings);
@@ -730,6 +738,7 @@ const EditChannelModal = (props) => {
delete localInputs.proxy; delete localInputs.proxy;
delete localInputs.pass_through_body_enabled; delete localInputs.pass_through_body_enabled;
delete localInputs.system_prompt; delete localInputs.system_prompt;
delete localInputs.system_prompt_override;
let res; let res;
localInputs.auto_ban = localInputs.auto_ban ? 1 : 0; localInputs.auto_ban = localInputs.auto_ban ? 1 : 0;
@@ -1722,6 +1731,14 @@ const EditChannelModal = (props) => {
showClear showClear
extraText={t('用户优先:如果用户在请求中指定了系统提示词,将优先使用用户的设置')} extraText={t('用户优先:如果用户在请求中指定了系统提示词,将优先使用用户的设置')}
/> />
<Form.Switch
field='system_prompt_override'
label={t('系统提示词拼接')}
checkedText={t('开')}
uncheckedText={t('关')}
onChange={(value) => handleChannelSettingsChange('system_prompt_override', value)}
extraText={t('如果用户请求中包含系统提示词,则使用此设置拼接到用户的系统提示词前面')}
/>
</Card> </Card>
</div> </div>
</Spin> </Spin>

View File

@@ -211,6 +211,7 @@ export const getTaskLogsColumns = ({
copyText, copyText,
openContentModal, openContentModal,
isAdminUser, isAdminUser,
openVideoModal,
}) => { }) => {
return [ return [
{ {
@@ -342,7 +343,13 @@ export const getTaskLogsColumns = ({
const isUrl = typeof text === 'string' && /^https?:\/\//.test(text); const isUrl = typeof text === 'string' && /^https?:\/\//.test(text);
if (isSuccess && isVideoTask && isUrl) { if (isSuccess && isVideoTask && isUrl) {
return ( return (
<a href={text} target="_blank" rel="noopener noreferrer"> <a
href="#"
onClick={e => {
e.preventDefault();
openVideoModal(text);
}}
>
{t('点击预览视频')} {t('点击预览视频')}
</a> </a>
); );

View File

@@ -39,6 +39,7 @@ const TaskLogsTable = (taskLogsData) => {
handlePageSizeChange, handlePageSizeChange,
copyText, copyText,
openContentModal, openContentModal,
openVideoModal,
isAdminUser, isAdminUser,
t, t,
COLUMN_KEYS, COLUMN_KEYS,
@@ -51,6 +52,7 @@ const TaskLogsTable = (taskLogsData) => {
COLUMN_KEYS, COLUMN_KEYS,
copyText, copyText,
openContentModal, openContentModal,
openVideoModal,
isAdminUser, isAdminUser,
}); });
}, [ }, [
@@ -58,6 +60,7 @@ const TaskLogsTable = (taskLogsData) => {
COLUMN_KEYS, COLUMN_KEYS,
copyText, copyText,
openContentModal, openContentModal,
openVideoModal,
isAdminUser, isAdminUser,
]); ]);

View File

@@ -37,7 +37,14 @@ const TaskLogsPage = () => {
<> <>
{/* Modals */} {/* Modals */}
<ColumnSelectorModal {...taskLogsData} /> <ColumnSelectorModal {...taskLogsData} />
<ContentModal {...taskLogsData} /> <ContentModal {...taskLogsData} isVideo={false} />
{/* Video preview modal */} {/* 新增:视频预览弹窗 */}
<ContentModal
isModalOpen={taskLogsData.isVideoModalOpen}
setIsModalOpen={taskLogsData.setIsVideoModalOpen}
modalContent={taskLogsData.videoUrl}
isVideo={true}
/>
<Layout> <Layout>
<CardPro <CardPro

View File

@@ -24,6 +24,7 @@ const ContentModal = ({
isModalOpen, isModalOpen,
setIsModalOpen, setIsModalOpen,
modalContent, modalContent,
isVideo,
}) => { }) => {
return ( return (
<Modal <Modal
@@ -34,7 +35,11 @@ const ContentModal = ({
bodyStyle={{ height: '400px', overflow: 'auto' }} bodyStyle={{ height: '400px', overflow: 'auto' }}
width={800} width={800}
> >
<p style={{ whiteSpace: 'pre-line' }}>{modalContent}</p> {isVideo ? (
<video src={modalContent} controls style={{ width: '100%' }} autoPlay />
) : (
<p style={{ whiteSpace: 'pre-line' }}>{modalContent}</p>
)}
</Modal> </Modal>
); );
}; };

View File

@@ -34,7 +34,6 @@ import {
getLogOther, getLogOther,
renderModelTag, renderModelTag,
renderClaudeLogContent, renderClaudeLogContent,
renderClaudeModelPriceSimple,
renderLogContent, renderLogContent,
renderModelPriceSimple, renderModelPriceSimple,
renderAudioModelPrice, renderAudioModelPrice,
@@ -538,7 +537,7 @@ export const getLogsColumns = ({
); );
} }
let content = other?.claude let content = other?.claude
? renderClaudeModelPriceSimple( ? renderModelPriceSimple(
other.model_ratio, other.model_ratio,
other.model_price, other.model_price,
other.group_ratio, other.group_ratio,
@@ -547,6 +546,10 @@ export const getLogsColumns = ({
other.cache_ratio || 1.0, other.cache_ratio || 1.0,
other.cache_creation_tokens || 0, other.cache_creation_tokens || 0,
other.cache_creation_ratio || 1.0, other.cache_creation_ratio || 1.0,
false,
1.0,
other?.is_system_prompt_overwritten,
'claude'
) )
: renderModelPriceSimple( : renderModelPriceSimple(
other.model_ratio, other.model_ratio,
@@ -555,13 +558,19 @@ export const getLogsColumns = ({
other?.user_group_ratio, other?.user_group_ratio,
other.cache_tokens || 0, other.cache_tokens || 0,
other.cache_ratio || 1.0, other.cache_ratio || 1.0,
0,
1.0,
false,
1.0,
other?.is_system_prompt_overwritten,
'openai'
); );
return ( return (
<Typography.Paragraph <Typography.Paragraph
ellipsis={{ ellipsis={{
rows: 2, rows: 3,
}} }}
style={{ maxWidth: 240 }} style={{ maxWidth: 240, whiteSpace: 'pre-line' }}
> >
{content} {content}
</Typography.Paragraph> </Typography.Paragraph>

View File

@@ -215,14 +215,16 @@ export async function getOAuthState() {
export async function onOIDCClicked(auth_url, client_id, openInNewTab = false) { export async function onOIDCClicked(auth_url, client_id, openInNewTab = false) {
const state = await getOAuthState(); const state = await getOAuthState();
if (!state) return; if (!state) return;
const redirect_uri = `${window.location.origin}/oauth/oidc`; const url = new URL(auth_url);
const response_type = 'code'; url.searchParams.set('client_id', client_id);
const scope = 'openid profile email'; url.searchParams.set('redirect_uri', `${window.location.origin}/oauth/oidc`);
const url = `${auth_url}?client_id=${client_id}&redirect_uri=${redirect_uri}&response_type=${response_type}&scope=${scope}&state=${state}`; url.searchParams.set('response_type', 'code');
url.searchParams.set('scope', 'openid profile email');
url.searchParams.set('state', state);
if (openInNewTab) { if (openInNewTab) {
window.open(url); window.open(url.toString(), '_blank');
} else { } else {
window.location.href = url; window.location.href = url.toString();
} }
} }

View File

@@ -953,6 +953,71 @@ function getEffectiveRatio(groupRatio, user_group_ratio) {
}; };
} }
/**
 * Shared core for rendering a short, human-readable price summary.
 * Used by both the OpenAI-like and Claude-like "simple price" variants.
 *
 * @param {object} opts
 * @param {number} opts.modelRatio          model ratio multiplier
 * @param {number} [opts.modelPrice=-1]     fixed per-call price; -1 means "use ratios"
 * @param {number} opts.groupRatio          group ratio
 * @param {number} opts.user_group_ratio    user-specific group ratio (may override groupRatio)
 * @param {number} [opts.cacheTokens=0]     cached tokens; factor shown only when non-zero
 * @param {number} [opts.cacheRatio=1.0]    cache ratio
 * @param {number} [opts.cacheCreationTokens=0]  Claude cache-creation tokens
 * @param {number} [opts.cacheCreationRatio=1.0] Claude cache-creation ratio
 * @param {boolean} [opts.image=false]      whether an image-input factor applies
 * @param {number} [opts.imageRatio=1.0]    image input ratio
 * @param {boolean} [opts.isSystemPromptOverride=false] append "system prompt override" note
 * @returns {string} localized summary, e.g. "Model: 2 * Group: 1"
 */
function renderPriceSimpleCore({
  modelRatio,
  modelPrice = -1,
  groupRatio,
  user_group_ratio,
  cacheTokens = 0,
  cacheRatio = 1.0,
  cacheCreationTokens = 0,
  cacheCreationRatio = 1.0,
  image = false,
  imageRatio = 1.0,
  isSystemPromptOverride = false
}) {
  const { ratio: effectiveGroupRatio, label: ratioLabel } = getEffectiveRatio(
    groupRatio,
    user_group_ratio,
  );

  // Fixed-price models short-circuit: price * group ratio.
  if (modelPrice !== -1) {
    return i18next.t('价格:${{price}} * {{ratioType}}{{ratio}}', {
      price: modelPrice,
      ratioType: ratioLabel,
      ratio: effectiveGroupRatio,
    });
  }

  // Translate and interpolate each factor individually, then join.
  // (Translating the joined string in one t() call would look up a
  // composite key that does not exist in the locale files, and would
  // re-translate already-translated part text.)
  const values = {
    ratio: modelRatio,
    cacheRatio: cacheRatio,
    cacheCreationRatio: cacheCreationRatio,
    imageRatio: imageRatio,
  };
  const parts = [i18next.t('模型: {{ratio}}', values)];
  if (cacheTokens !== 0) {
    parts.push(i18next.t('缓存: {{cacheRatio}}', values));
  }
  // cache creation part (Claude specific if passed)
  if (cacheCreationTokens !== 0) {
    parts.push(i18next.t('缓存创建: {{cacheCreationRatio}}', values));
  }
  if (image) {
    parts.push(i18next.t('图片输入: {{imageRatio}}', values));
  }
  parts.push(`${ratioLabel}: ${effectiveGroupRatio}`);

  let result = parts.join(' * ');
  if (isSystemPromptOverride) {
    // Plain '\n' (the original '\n\r' left a stray CR char);
    // the consumer renders with `white-space: pre-line`.
    result += '\n' + i18next.t('系统提示覆盖');
  }
  return result;
}
export function renderModelPrice( export function renderModelPrice(
inputTokens, inputTokens,
completionTokens, completionTokens,
@@ -1245,56 +1310,26 @@ export function renderModelPriceSimple(
user_group_ratio, user_group_ratio,
cacheTokens = 0, cacheTokens = 0,
cacheRatio = 1.0, cacheRatio = 1.0,
cacheCreationTokens = 0,
cacheCreationRatio = 1.0,
image = false, image = false,
imageRatio = 1.0, imageRatio = 1.0,
isSystemPromptOverride = false,
provider = 'openai',
) { ) {
const { ratio: effectiveGroupRatio, label: ratioLabel } = getEffectiveRatio(groupRatio, user_group_ratio); return renderPriceSimpleCore({
groupRatio = effectiveGroupRatio; modelRatio,
if (modelPrice !== -1) { modelPrice,
return i18next.t('价格:${{price}} * {{ratioType}}{{ratio}}', { groupRatio,
price: modelPrice, user_group_ratio,
ratioType: ratioLabel, cacheTokens,
ratio: groupRatio, cacheRatio,
}); cacheCreationTokens,
} else { cacheCreationRatio,
if (image && cacheTokens !== 0) { image,
return i18next.t( imageRatio,
'模型: {{ratio}} * {{ratioType}}: {{groupRatio}} * 缓存倍率: {{cacheRatio}} * 图片输入倍率: {{imageRatio}}', isSystemPromptOverride
{ });
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
cacheRatio: cacheRatio,
imageRatio: imageRatio,
},
);
} else if (image) {
return i18next.t(
'模型: {{ratio}} * {{ratioType}}: {{groupRatio}} * 图片输入倍率: {{imageRatio}}',
{
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
imageRatio: imageRatio,
},
);
} else if (cacheTokens !== 0) {
return i18next.t(
'模型: {{ratio}} * 分组: {{groupRatio}} * 缓存: {{cacheRatio}}',
{
ratio: modelRatio,
groupRatio: groupRatio,
cacheRatio: cacheRatio,
},
);
} else {
return i18next.t('模型: {{ratio}} * {{ratioType}}{{groupRatio}}', {
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
});
}
}
} }
export function renderAudioModelPrice( export function renderAudioModelPrice(
@@ -1635,46 +1670,7 @@ export function renderClaudeLogContent(
} }
} }
export function renderClaudeModelPriceSimple( // 已统一至 renderModelPriceSimple若仍有遗留引用请改为传入 provider='claude'
modelRatio,
modelPrice = -1,
groupRatio,
user_group_ratio,
cacheTokens = 0,
cacheRatio = 1.0,
cacheCreationTokens = 0,
cacheCreationRatio = 1.0,
) {
const { ratio: effectiveGroupRatio, label: ratioLabel } = getEffectiveRatio(groupRatio, user_group_ratio);
groupRatio = effectiveGroupRatio;
if (modelPrice !== -1) {
return i18next.t('价格:${{price}} * {{ratioType}}{{ratio}}', {
price: modelPrice,
ratioType: ratioLabel,
ratio: groupRatio,
});
} else {
if (cacheTokens !== 0 || cacheCreationTokens !== 0) {
return i18next.t(
'模型: {{ratio}} * {{ratioType}}: {{groupRatio}} * 缓存: {{cacheRatio}}',
{
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
cacheRatio: cacheRatio,
cacheCreationRatio: cacheCreationRatio,
},
);
} else {
return i18next.t('模型: {{ratio}} * {{ratioType}}: {{groupRatio}}', {
ratio: modelRatio,
ratioType: ratioLabel,
groupRatio: groupRatio,
});
}
}
}
/** /**
* rehype 插件:将段落等文本节点拆分为逐词 <span>,并添加淡入动画 class。 * rehype 插件:将段落等文本节点拆分为逐词 <span>,并添加淡入动画 class。

View File

@@ -65,6 +65,10 @@ export const useTaskLogsData = () => {
const [isModalOpen, setIsModalOpen] = useState(false); const [isModalOpen, setIsModalOpen] = useState(false);
const [modalContent, setModalContent] = useState(''); const [modalContent, setModalContent] = useState('');
// Video preview modal state
const [isVideoModalOpen, setIsVideoModalOpen] = useState(false);
const [videoUrl, setVideoUrl] = useState('');
// Form state // Form state
const [formApi, setFormApi] = useState(null); const [formApi, setFormApi] = useState(null);
let now = new Date(); let now = new Date();
@@ -250,6 +254,12 @@ export const useTaskLogsData = () => {
setIsModalOpen(true); setIsModalOpen(true);
}; };
// Open the video preview modal for the given video URL.
const openVideoModal = (url) => {
setVideoUrl(url);
setIsVideoModalOpen(true);
};
// Initialize data // Initialize data
useEffect(() => { useEffect(() => {
const localPageSize = parseInt(localStorage.getItem('task-page-size')) || ITEMS_PER_PAGE; const localPageSize = parseInt(localStorage.getItem('task-page-size')) || ITEMS_PER_PAGE;
@@ -271,6 +281,11 @@ export const useTaskLogsData = () => {
setIsModalOpen, setIsModalOpen,
modalContent, modalContent,
// 新增:视频弹窗状态
isVideoModalOpen,
setIsVideoModalOpen,
videoUrl,
// Form state // Form state
formApi, formApi,
setFormApi, setFormApi,
@@ -297,6 +312,7 @@ export const useTaskLogsData = () => {
refresh, refresh,
copyText, copyText,
openContentModal, openContentModal,
openVideoModal, // 新增
enrichLogs, enrichLogs,
syncPageData, syncPageData,

View File

@@ -1804,5 +1804,11 @@
"已选择 {{selected}} / {{total}}": "Selected {{selected}} / {{total}}", "已选择 {{selected}} / {{total}}": "Selected {{selected}} / {{total}}",
"新获取的模型": "New models", "新获取的模型": "New models",
"已有的模型": "Existing models", "已有的模型": "Existing models",
"搜索模型": "Search models" "搜索模型": "Search models",
"缓存: {{cacheRatio}}": "Cache: {{cacheRatio}}",
"缓存创建: {{cacheCreationRatio}}": "Cache creation: {{cacheCreationRatio}}",
"图片输入: {{imageRatio}}": "Image input: {{imageRatio}}",
"系统提示覆盖": "System prompt override",
"模型: {{ratio}}": "Model: {{ratio}}",
"专属倍率": "Exclusive group ratio"
} }