feat: Enhance logging functionality with group support

- Added a new 'group' parameter to RecordConsumeLog, GetAllLogs, GetUserLogs, and SumUsedQuota to allow more granular, per-group log tracking and filtering.
- Updated the logs table component to display the group of each log entry and added a group filter to the search form.
- Refactored the related call sites to pass the new group parameter, ensuring consistent handling across the application.
- Initialized the group column identifier with dialect-specific quoting so group filters work on MySQL and PostgreSQL alike (see the sketch below).
Author: CalciumIon
Date: 2024-12-24 14:48:11 +08:00
Parent: cf3287a10a
Commit: 7180e6f114
10 changed files with 96 additions and 54 deletions
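Because 'group' is a reserved word in both MySQL and PostgreSQL, the new column cannot be referenced bare in a WHERE clause; the log model now picks a dialect-specific quoting for it once at startup and reuses that string when building filters. Below is a minimal, self-contained sketch of the same pattern, not the project's actual code: the usingPostgreSQL flag, the trimmed Log struct, and the filterByGroup helper are illustrative stand-ins (the real code uses common.UsingPostgreSQL and inlines the check in GetAllLogs, GetUserLogs, and SumUsedQuota), and SQLite is used here only so the sketch runs without a database server.

package main

import (
	"fmt"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// usingPostgreSQL stands in for the real common.UsingPostgreSQL flag.
var usingPostgreSQL = false

// groupCol holds the dialect-specific quoting of the reserved word "group".
var groupCol string

func init() {
	if usingPostgreSQL {
		groupCol = `"group"` // PostgreSQL quotes identifiers with double quotes
	} else {
		groupCol = "`group`" // MySQL (and SQLite) accept backtick-quoted identifiers
	}
}

// Log is a trimmed-down stand-in for the model in the diff.
type Log struct {
	Id        int    `json:"id" gorm:"primaryKey"`
	ModelName string `json:"model_name"`
	Group     string `json:"group" gorm:"index"`
}

// filterByGroup mirrors the `if group != ""` checks added to GetAllLogs,
// GetUserLogs, and SumUsedQuota: the group filter is applied only when a
// group was actually supplied.
func filterByGroup(tx *gorm.DB, group string) *gorm.DB {
	if group != "" {
		tx = tx.Where(groupCol+" = ?", group)
	}
	return tx
}

func main() {
	// An in-memory SQLite database keeps the sketch self-contained.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	if err := db.AutoMigrate(&Log{}); err != nil {
		panic(err)
	}
	db.Create(&Log{ModelName: "gpt-4o-mini", Group: "default"})
	db.Create(&Log{ModelName: "gpt-4o-mini", Group: "vip"})

	var logs []Log
	filterByGroup(db, "default").Find(&logs)
	fmt.Printf("found %d log(s) in group 'default'\n", len(logs))
}

Because the filter is added only when a non-empty group is supplied, callers that pass an empty string keep their previous behavior, and quoting the identifier once at startup keeps the string concatenation in the query builders simple.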


@@ -4,7 +4,6 @@ import (
crand "crypto/rand"
"encoding/base64"
"fmt"
"github.com/google/uuid"
"html/template"
"log"
"math/big"
@@ -15,6 +14,8 @@ import (
"strconv"
"strings"
"time"
"github.com/google/uuid"
)
func OpenBrowser(url string) {


@@ -141,7 +141,8 @@ func testChannel(channel *model.Channel, testModel string) (err error, openAIErr
milliseconds := tok.Sub(tik).Milliseconds()
consumedTime := float64(milliseconds) / 1000.0
other := service.GenerateTextOtherInfo(c, meta, modelRatio, 1, completionRatio, modelPrice)
model.RecordConsumeLog(c, 1, channel.Id, usage.PromptTokens, usage.CompletionTokens, testModel, "模型测试", quota, "模型测试", 0, quota, int(consumedTime), false, other)
model.RecordConsumeLog(c, 1, channel.Id, usage.PromptTokens, usage.CompletionTokens, testModel, "模型测试",
quota, "模型测试", 0, quota, int(consumedTime), false, "default", other)
common.SysLog(fmt.Sprintf("testing channel #%d, response: \n%s", channel.Id, string(respBody)))
return nil, nil
}


@@ -25,7 +25,8 @@ func GetAllLogs(c *gin.Context) {
tokenName := c.Query("token_name")
modelName := c.Query("model_name")
channel, _ := strconv.Atoi(c.Query("channel"))
logs, total, err := model.GetAllLogs(logType, startTimestamp, endTimestamp, modelName, username, tokenName, (p-1)*pageSize, pageSize, channel)
group := c.Query("group")
logs, total, err := model.GetAllLogs(logType, startTimestamp, endTimestamp, modelName, username, tokenName, (p-1)*pageSize, pageSize, channel, group)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
@@ -63,7 +64,8 @@ func GetUserLogs(c *gin.Context) {
endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)
tokenName := c.Query("token_name")
modelName := c.Query("model_name")
logs, total, err := model.GetUserLogs(userId, logType, startTimestamp, endTimestamp, modelName, tokenName, (p-1)*pageSize, pageSize)
group := c.Query("group")
logs, total, err := model.GetUserLogs(userId, logType, startTimestamp, endTimestamp, modelName, tokenName, (p-1)*pageSize, pageSize, group)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
@@ -146,7 +148,8 @@ func GetLogsStat(c *gin.Context) {
username := c.Query("username")
modelName := c.Query("model_name")
channel, _ := strconv.Atoi(c.Query("channel"))
stat := model.SumUsedQuota(logType, startTimestamp, endTimestamp, modelName, username, tokenName, channel)
group := c.Query("group")
stat := model.SumUsedQuota(logType, startTimestamp, endTimestamp, modelName, username, tokenName, channel, group)
//tokenNum := model.SumUsedToken(logType, startTimestamp, endTimestamp, modelName, username, "")
c.JSON(http.StatusOK, gin.H{
"success": true,
@@ -168,7 +171,8 @@ func GetLogsSelfStat(c *gin.Context) {
tokenName := c.Query("token_name")
modelName := c.Query("model_name")
channel, _ := strconv.Atoi(c.Query("channel"))
quotaNum := model.SumUsedQuota(logType, startTimestamp, endTimestamp, modelName, username, tokenName, channel)
group := c.Query("group")
quotaNum := model.SumUsedQuota(logType, startTimestamp, endTimestamp, modelName, username, tokenName, channel, group)
//tokenNum := model.SumUsedToken(logType, startTimestamp, endTimestamp, modelName, username, tokenName)
c.JSON(200, gin.H{
"success": true,


@@ -12,6 +12,16 @@ import (
"gorm.io/gorm"
)
var groupCol string
func init() {
if common.UsingPostgreSQL {
groupCol = `"group"`
} else {
groupCol = "`group`"
}
}
type Log struct {
Id int `json:"id" gorm:"index:idx_created_at_id,priority:1"`
UserId int `json:"user_id" gorm:"index"`
@@ -28,6 +38,7 @@ type Log struct {
IsStream bool `json:"is_stream" gorm:"default:false"`
ChannelId int `json:"channel" gorm:"index"`
TokenId int `json:"token_id" gorm:"default:0;index"`
Group string `json:"group" gorm:"index"`
Other string `json:"other"`
}
@@ -70,7 +81,9 @@ func RecordLog(userId int, logType int, content string) {
}
}
func RecordConsumeLog(ctx context.Context, userId int, channelId int, promptTokens int, completionTokens int, modelName string, tokenName string, quota int, content string, tokenId int, userQuota int, useTimeSeconds int, isStream bool, other map[string]interface{}) {
func RecordConsumeLog(ctx context.Context, userId int, channelId int, promptTokens int, completionTokens int,
modelName string, tokenName string, quota int, content string, tokenId int, userQuota int, useTimeSeconds int,
isStream bool, group string, other map[string]interface{}) {
common.LogInfo(ctx, fmt.Sprintf("record consume log: userId=%d, 用户调用前余额=%d, channelId=%d, promptTokens=%d, completionTokens=%d, modelName=%s, tokenName=%s, quota=%d, content=%s", userId, userQuota, channelId, promptTokens, completionTokens, modelName, tokenName, quota, content))
if !common.LogConsumeEnabled {
return
@@ -92,6 +105,7 @@ func RecordConsumeLog(ctx context.Context, userId int, channelId int, promptToke
TokenId: tokenId,
UseTime: useTimeSeconds,
IsStream: isStream,
Group: group,
Other: otherStr,
}
err := LOG_DB.Create(log).Error
@@ -105,7 +119,7 @@ func RecordConsumeLog(ctx context.Context, userId int, channelId int, promptToke
}
}
func GetAllLogs(logType int, startTimestamp int64, endTimestamp int64, modelName string, username string, tokenName string, startIdx int, num int, channel int) (logs []*Log, total int64, err error) {
func GetAllLogs(logType int, startTimestamp int64, endTimestamp int64, modelName string, username string, tokenName string, startIdx int, num int, channel int, group string) (logs []*Log, total int64, err error) {
var tx *gorm.DB
if logType == LogTypeUnknown {
tx = LOG_DB
@@ -130,6 +144,9 @@ func GetAllLogs(logType int, startTimestamp int64, endTimestamp int64, modelName
if channel != 0 {
tx = tx.Where("channel_id = ?", channel)
}
if group != "" {
tx = tx.Where(groupCol+" = ?", group)
}
err = tx.Model(&Log{}).Count(&total).Error
if err != nil {
return nil, 0, err
@@ -141,7 +158,7 @@ func GetAllLogs(logType int, startTimestamp int64, endTimestamp int64, modelName
return logs, total, err
}
func GetUserLogs(userId int, logType int, startTimestamp int64, endTimestamp int64, modelName string, tokenName string, startIdx int, num int) (logs []*Log, total int64, err error) {
func GetUserLogs(userId int, logType int, startTimestamp int64, endTimestamp int64, modelName string, tokenName string, startIdx int, num int, group string) (logs []*Log, total int64, err error) {
var tx *gorm.DB
if logType == LogTypeUnknown {
tx = LOG_DB.Where("user_id = ?", userId)
@@ -160,6 +177,9 @@ func GetUserLogs(userId int, logType int, startTimestamp int64, endTimestamp int
if endTimestamp != 0 {
tx = tx.Where("created_at <= ?", endTimestamp)
}
if group != "" {
tx = tx.Where(groupCol+" = ?", group)
}
err = tx.Model(&Log{}).Count(&total).Error
if err != nil {
return nil, 0, err
@@ -193,7 +213,7 @@ type Stat struct {
Tpm int `json:"tpm"`
}
func SumUsedQuota(logType int, startTimestamp int64, endTimestamp int64, modelName string, username string, tokenName string, channel int) (stat Stat) {
func SumUsedQuota(logType int, startTimestamp int64, endTimestamp int64, modelName string, username string, tokenName string, channel int, group string) (stat Stat) {
tx := LOG_DB.Table("logs").Select("sum(quota) quota")
// 为rpm和tpm创建单独的查询
@@ -221,6 +241,10 @@ func SumUsedQuota(logType int, startTimestamp int64, endTimestamp int64, modelNa
tx = tx.Where("channel_id = ?", channel)
rpmTpmQuery = rpmTpmQuery.Where("channel_id = ?", channel)
}
if group != "" {
tx = tx.Where(groupCol+" = ?", group)
rpmTpmQuery = rpmTpmQuery.Where(groupCol+" = ?", group)
}
tx = tx.Where("type = ?", LogTypeConsume)
rpmTpmQuery = rpmTpmQuery.Where("type = ?", LogTypeConsume)


@@ -208,7 +208,8 @@ func RelaySwapFace(c *gin.Context) *dto.MidjourneyResponse {
other := make(map[string]interface{})
other["model_price"] = modelPrice
other["group_ratio"] = groupRatio
model.RecordConsumeLog(ctx, userId, channelId, 0, 0, modelName, tokenName, quota, logContent, tokenId, userQuota, 0, false, other)
model.RecordConsumeLog(ctx, userId, channelId, 0, 0, modelName, tokenName,
quota, logContent, tokenId, userQuota, 0, false, group, other)
model.UpdateUserUsedQuotaAndRequestCount(userId, quota)
channelId := c.GetInt("channel_id")
model.UpdateChannelUsedQuota(channelId, quota)
@@ -513,7 +514,8 @@ func RelayMidjourneySubmit(c *gin.Context, relayMode int) *dto.MidjourneyRespons
other := make(map[string]interface{})
other["model_price"] = modelPrice
other["group_ratio"] = groupRatio
model.RecordConsumeLog(ctx, userId, channelId, 0, 0, modelName, tokenName, quota, logContent, tokenId, userQuota, 0, false, other)
model.RecordConsumeLog(ctx, userId, channelId, 0, 0, modelName, tokenName,
quota, logContent, tokenId, userQuota, 0, false, group, other)
model.UpdateUserUsedQuotaAndRequestCount(userId, quota)
channelId := c.GetInt("channel_id")
model.UpdateChannelUsedQuota(channelId, quota)


@@ -385,7 +385,7 @@ func postConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, modelN
}
other := service.GenerateTextOtherInfo(ctx, relayInfo, modelRatio, groupRatio, completionRatio, modelPrice)
model.RecordConsumeLog(ctx, relayInfo.UserId, relayInfo.ChannelId, promptTokens, completionTokens, logModel,
tokenName, quota, logContent, relayInfo.TokenId, userQuota, int(useTimeSeconds), relayInfo.IsStream, other)
tokenName, quota, logContent, relayInfo.TokenId, userQuota, int(useTimeSeconds), relayInfo.IsStream, relayInfo.Group, other)
//if quota != 0 {
//


@@ -126,7 +126,8 @@ func RelayTaskSubmit(c *gin.Context, relayMode int) (taskErr *dto.TaskError) {
other := make(map[string]interface{})
other["model_price"] = modelPrice
other["group_ratio"] = groupRatio
model.RecordConsumeLog(ctx, relayInfo.UserId, relayInfo.ChannelId, 0, 0, modelName, tokenName, quota, logContent, relayInfo.TokenId, userQuota, 0, false, other)
model.RecordConsumeLog(ctx, relayInfo.UserId, relayInfo.ChannelId, 0, 0,
modelName, tokenName, quota, logContent, relayInfo.TokenId, userQuota, 0, false, relayInfo.Group, other)
model.UpdateUserUsedQuotaAndRequestCount(relayInfo.UserId, quota)
model.UpdateChannelUsedQuota(relayInfo.ChannelId, quota)
}


@@ -12,7 +12,6 @@ func GenerateTextOtherInfo(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, m
other["group_ratio"] = groupRatio
other["completion_ratio"] = completionRatio
other["model_price"] = modelPrice
other["group"] = relayInfo.Group
other["frt"] = float64(relayInfo.FirstResponseTime.UnixMilli() - relayInfo.StartTime.UnixMilli())
adminInfo := make(map[string]interface{})
adminInfo["use_channel"] = ctx.GetStringSlice("use_channel")


@@ -139,7 +139,7 @@ func PostWssConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, mod
}
other := GenerateWssOtherInfo(ctx, relayInfo, usage, modelRatio, groupRatio, completionRatio, audioRatio, audioCompletionRatio, modelPrice)
model.RecordConsumeLog(ctx, relayInfo.UserId, relayInfo.ChannelId, usage.InputTokens, usage.OutputTokens, logModel,
tokenName, quota, logContent, relayInfo.TokenId, userQuota, int(useTimeSeconds), relayInfo.IsStream, other)
tokenName, quota, logContent, relayInfo.TokenId, userQuota, int(useTimeSeconds), relayInfo.IsStream, relayInfo.Group, other)
}
func PostAudioConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo,
@@ -208,5 +208,5 @@ func PostAudioConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo,
}
other := GenerateAudioOtherInfo(ctx, relayInfo, usage, modelRatio, groupRatio, completionRatio, audioRatio, audioCompletionRatio, modelPrice)
model.RecordConsumeLog(ctx, relayInfo.UserId, relayInfo.ChannelId, usage.PromptTokens, usage.CompletionTokens, logModel,
tokenName, quota, logContent, relayInfo.TokenId, userQuota, int(useTimeSeconds), relayInfo.IsStream, other)
tokenName, quota, logContent, relayInfo.TokenId, userQuota, int(useTimeSeconds), relayInfo.IsStream, relayInfo.Group, other)
}


@@ -222,19 +222,27 @@ const LogsTable = () => {
dataIndex: 'group',
render: (text, record, index) => {
if (record.type === 0 || record.type === 2) {
let other = JSON.parse(record.other);
if (other === null) {
return <></>;
}
if (other.group !== undefined) {
if (record.group) {
return (
<>
{renderGroup(other.group)}
{renderGroup(record.group)}
</>
);
} else {
return <></>;
}
} else {
let other = JSON.parse(record.other);
if (other === null) {
return <></>;
}
if (other.group !== undefined) {
return (
<>
{renderGroup(other.group)}
</>
);
} else {
return <></>;
}
}
} else {
return <></>;
}
@@ -417,6 +425,7 @@ const LogsTable = () => {
start_timestamp: timestamp2string(getTodayStartTimestamp()),
end_timestamp: timestamp2string(now.getTime() / 1000 + 3600),
channel: '',
group: '',
});
const {
username,
@@ -425,6 +434,7 @@ const LogsTable = () => {
start_timestamp,
end_timestamp,
channel,
group,
} = inputs;
const [stat, setStat] = useState({
@@ -433,13 +443,14 @@ const LogsTable = () => {
});
const handleInputChange = (value, name) => {
// console.log('name:', name, 'value:', value);
setInputs((inputs) => ({ ...inputs, [name]: value }));
};
const getLogSelfStat = async () => {
let localStartTimestamp = Date.parse(start_timestamp) / 1000;
let localEndTimestamp = Date.parse(end_timestamp) / 1000;
let url = `/api/log/self/stat?type=${logType}&token_name=${token_name}&model_name=${model_name}&start_timestamp=${localStartTimestamp}&end_timestamp=${localEndTimestamp}`;
let url = `/api/log/self/stat?type=${logType}&token_name=${token_name}&model_name=${model_name}&start_timestamp=${localStartTimestamp}&end_timestamp=${localEndTimestamp}&group=${group}`;
url = encodeURI(url);
let res = await API.get(url);
const { success, message, data } = res.data;
@@ -453,7 +464,7 @@ const LogsTable = () => {
const getLogStat = async () => {
let localStartTimestamp = Date.parse(start_timestamp) / 1000;
let localEndTimestamp = Date.parse(end_timestamp) / 1000;
let url = `/api/log/stat?type=${logType}&username=${username}&token_name=${token_name}&model_name=${model_name}&start_timestamp=${localStartTimestamp}&end_timestamp=${localEndTimestamp}&channel=${channel}`;
let url = `/api/log/stat?type=${logType}&username=${username}&token_name=${token_name}&model_name=${model_name}&start_timestamp=${localStartTimestamp}&end_timestamp=${localEndTimestamp}&channel=${channel}&group=${group}`;
url = encodeURI(url);
let res = await API.get(url);
const { success, message, data } = res.data;
@@ -596,9 +607,9 @@ const LogsTable = () => {
let localStartTimestamp = Date.parse(start_timestamp) / 1000;
let localEndTimestamp = Date.parse(end_timestamp) / 1000;
if (isAdminUser) {
url = `/api/log/?p=${startIdx}&page_size=${pageSize}&type=${logType}&username=${username}&token_name=${token_name}&model_name=${model_name}&start_timestamp=${localStartTimestamp}&end_timestamp=${localEndTimestamp}&channel=${channel}`;
url = `/api/log/?p=${startIdx}&page_size=${pageSize}&type=${logType}&username=${username}&token_name=${token_name}&model_name=${model_name}&start_timestamp=${localStartTimestamp}&end_timestamp=${localEndTimestamp}&channel=${channel}&group=${group}`;
} else {
url = `/api/log/self/?p=${startIdx}&page_size=${pageSize}&type=${logType}&token_name=${token_name}&model_name=${model_name}&start_timestamp=${localStartTimestamp}&end_timestamp=${localEndTimestamp}`;
url = `/api/log/self/?p=${startIdx}&page_size=${pageSize}&type=${logType}&token_name=${token_name}&model_name=${model_name}&start_timestamp=${localStartTimestamp}&end_timestamp=${localEndTimestamp}&group=${group}`;
}
url = encodeURI(url);
const res = await API.get(url);
@@ -682,10 +693,25 @@ const LogsTable = () => {
</Header>
<Form layout='horizontal' style={{ marginTop: 10 }}>
<>
<Form.Section>
<div style={{ display: 'flex', marginBottom: 10 }}>
<Form.DatePicker
field="range_timestamp"
label={t('时间范围')}
initValue={[start_timestamp, end_timestamp]}
value={[start_timestamp, end_timestamp]}
type="dateTimeRange"
name="range_timestamp"
onChange={(value) => {
handleInputChange(value[0], 'start_timestamp');
handleInputChange(value[1], 'end_timestamp');
}}
/>
</div>
</Form.Section>
<Form.Input
field='token_name'
label={t('令牌名称')}
style={{ width: 176 }}
value={token_name}
placeholder={t('可选值')}
name='token_name'
@@ -694,39 +720,24 @@ const LogsTable = () => {
<Form.Input
field='model_name'
label={t('模型名称')}
style={{ width: 176 }}
value={model_name}
placeholder={t('可选值')}
name='model_name'
onChange={(value) => handleInputChange(value, 'model_name')}
/>
<Form.DatePicker
field='start_timestamp'
label={t('起始时间')}
style={{ width: 272 }}
initValue={start_timestamp}
value={start_timestamp}
type='dateTime'
name='start_timestamp'
onChange={(value) => handleInputChange(value, 'start_timestamp')}
/>
<Form.DatePicker
field='end_timestamp'
fluid
label={t('结束时间')}
style={{ width: 272 }}
initValue={end_timestamp}
value={end_timestamp}
type='dateTime'
name='end_timestamp'
onChange={(value) => handleInputChange(value, 'end_timestamp')}
<Form.Input
field='group'
label={t('分组')}
value={group}
placeholder={t('可选值')}
name='group'
onChange={(value) => handleInputChange(value, 'group')}
/>
{isAdminUser && (
<>
<Form.Input
field='channel'
label={t('渠道 ID')}
style={{ width: 176 }}
value={channel}
placeholder={t('可选值')}
name='channel'
@@ -735,7 +746,6 @@ const LogsTable = () => {
<Form.Input
field='username'
label={t('用户名称')}
style={{ width: 176 }}
value={username}
placeholder={t('可选值')}
name='username'