Add complete project files

Includes all source code, configuration files, Docker configuration, documentation, and frontend assets for the Go API project

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
nosqli
2025-08-21 23:33:10 +08:00
parent 69af9723d9
commit fb44c8bf03
513 changed files with 90387 additions and 0 deletions

286
middleware/auth.go Normal file

@@ -0,0 +1,286 @@
package middleware
import (
"fmt"
"net/http"
"one-api/common"
"one-api/model"
"strconv"
"strings"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
)
func validUserInfo(username string, role int) bool {
// check username is empty
if strings.TrimSpace(username) == "" {
return false
}
if !common.IsValidateRole(role) {
return false
}
return true
}
func authHelper(c *gin.Context, minRole int) {
session := sessions.Default(c)
username := session.Get("username")
role := session.Get("role")
id := session.Get("id")
status := session.Get("status")
useAccessToken := false
if username == nil {
// Check access token
accessToken := c.Request.Header.Get("Authorization")
if accessToken == "" {
c.JSON(http.StatusUnauthorized, gin.H{
"success": false,
"message": "无权进行此操作,未登录且未提供 access token",
})
c.Abort()
return
}
user := model.ValidateAccessToken(accessToken)
if user != nil && user.Username != "" {
if !validUserInfo(user.Username, user.Role) {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "无权进行此操作,用户信息无效",
})
c.Abort()
return
}
// Token is valid
username = user.Username
role = user.Role
id = user.Id
status = user.Status
useAccessToken = true
} else {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "无权进行此操作,access token 无效",
})
c.Abort()
return
}
}
// get header New-Api-User
apiUserIdStr := c.Request.Header.Get("New-Api-User")
if apiUserIdStr == "" {
c.JSON(http.StatusUnauthorized, gin.H{
"success": false,
"message": "无权进行此操作,未提供 New-Api-User",
})
c.Abort()
return
}
apiUserId, err := strconv.Atoi(apiUserIdStr)
if err != nil {
c.JSON(http.StatusUnauthorized, gin.H{
"success": false,
"message": "无权进行此操作,New-Api-User 格式错误",
})
c.Abort()
return
}
if id != apiUserId {
c.JSON(http.StatusUnauthorized, gin.H{
"success": false,
"message": "无权进行此操作,New-Api-User 与登录用户不匹配",
})
c.Abort()
return
}
if status.(int) == common.UserStatusDisabled {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "用户已被封禁",
})
c.Abort()
return
}
if role.(int) < minRole {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "无权进行此操作,权限不足",
})
c.Abort()
return
}
if !validUserInfo(username.(string), role.(int)) {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "无权进行此操作,用户信息无效",
})
c.Abort()
return
}
c.Set("username", username)
c.Set("role", role)
c.Set("id", id)
c.Set("group", session.Get("group"))
c.Set("use_access_token", useAccessToken)
//userCache, err := model.GetUserCache(id.(int))
//if err != nil {
// c.JSON(http.StatusOK, gin.H{
// "success": false,
// "message": err.Error(),
// })
// c.Abort()
// return
//}
//userCache.WriteContext(c)
c.Next()
}
func TryUserAuth() func(c *gin.Context) {
return func(c *gin.Context) {
session := sessions.Default(c)
id := session.Get("id")
if id != nil {
c.Set("id", id)
}
c.Next()
}
}
func UserAuth() func(c *gin.Context) {
return func(c *gin.Context) {
authHelper(c, common.RoleCommonUser)
}
}
func AdminAuth() func(c *gin.Context) {
return func(c *gin.Context) {
authHelper(c, common.RoleAdminUser)
}
}
func RootAuth() func(c *gin.Context) {
return func(c *gin.Context) {
authHelper(c, common.RoleRootUser)
}
}
func WssAuth(c *gin.Context) {
}
func TokenAuth() func(c *gin.Context) {
return func(c *gin.Context) {
// first, check whether this is a WebSocket upgrade
if c.Request.Header.Get("Sec-WebSocket-Protocol") != "" {
// Sec-WebSocket-Protocol: realtime, openai-insecure-api-key.sk-xxx, openai-beta.realtime-v1
// read sk from Sec-WebSocket-Protocol
key := c.Request.Header.Get("Sec-WebSocket-Protocol")
parts := strings.Split(key, ",")
for _, part := range parts {
part = strings.TrimSpace(part)
if strings.HasPrefix(part, "openai-insecure-api-key") {
key = strings.TrimPrefix(part, "openai-insecure-api-key.")
break
}
}
c.Request.Header.Set("Authorization", "Bearer "+key)
}
// Claude-compatible endpoint: the path contains /v1/messages
if strings.Contains(c.Request.URL.Path, "/v1/messages") {
// read the key from the x-api-key header
key := c.Request.Header.Get("x-api-key")
if key != "" {
c.Request.Header.Set("Authorization", "Bearer "+key)
}
}
// Gemini API: read the key from the query string
if strings.HasPrefix(c.Request.URL.Path, "/v1beta/models/") || strings.HasPrefix(c.Request.URL.Path, "/v1/models/") {
skKey := c.Query("key")
if skKey != "" {
c.Request.Header.Set("Authorization", "Bearer "+skKey)
}
// or from the x-goog-api-key header
xGoogKey := c.Request.Header.Get("x-goog-api-key")
if xGoogKey != "" {
c.Request.Header.Set("Authorization", "Bearer "+xGoogKey)
}
}
key := c.Request.Header.Get("Authorization")
parts := make([]string, 0)
key = strings.TrimPrefix(key, "Bearer ")
if key == "" || key == "midjourney-proxy" {
key = c.Request.Header.Get("mj-api-secret")
key = strings.TrimPrefix(key, "Bearer ")
key = strings.TrimPrefix(key, "sk-")
parts = strings.Split(key, "-")
key = parts[0]
} else {
key = strings.TrimPrefix(key, "sk-")
parts = strings.Split(key, "-")
key = parts[0]
}
token, err := model.ValidateUserToken(key)
if token != nil {
id := c.GetInt("id")
if id == 0 {
c.Set("id", token.UserId)
}
}
if err != nil {
abortWithOpenAiMessage(c, http.StatusUnauthorized, err.Error())
return
}
userCache, err := model.GetUserCache(token.UserId)
if err != nil {
abortWithOpenAiMessage(c, http.StatusInternalServerError, err.Error())
return
}
userEnabled := userCache.Status == common.UserStatusEnabled
if !userEnabled {
abortWithOpenAiMessage(c, http.StatusForbidden, "用户已被封禁")
return
}
userCache.WriteContext(c)
err = SetupContextForToken(c, token, parts...)
if err != nil {
return
}
c.Next()
}
}
func SetupContextForToken(c *gin.Context, token *model.Token, parts ...string) error {
if token == nil {
return fmt.Errorf("token is nil")
}
c.Set("id", token.UserId)
c.Set("token_id", token.Id)
c.Set("token_key", token.Key)
c.Set("token_name", token.Name)
c.Set("token_unlimited_quota", token.UnlimitedQuota)
if !token.UnlimitedQuota {
c.Set("token_quota", token.RemainQuota)
}
if token.ModelLimitsEnabled {
c.Set("token_model_limit_enabled", true)
c.Set("token_model_limit", token.GetModelLimitsMap())
} else {
c.Set("token_model_limit_enabled", false)
}
c.Set("allow_ips", token.GetIpLimitsMap())
c.Set("token_group", token.Group)
if len(parts) > 1 {
if model.IsAdmin(token.UserId) {
c.Set("specific_channel_id", parts[1])
} else {
abortWithOpenAiMessage(c, http.StatusForbidden, "普通用户不支持指定渠道")
return fmt.Errorf("普通用户不支持指定渠道")
}
}
return nil
}
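
The role-gated helpers (UserAuth, AdminAuth, RootAuth) and TokenAuth are meant to be attached to different route groups. A minimal wiring sketch, assuming the sessions middleware is already installed on the engine; the paths and commented-out handlers are placeholders, not the project's real routes (those live elsewhere in this commit):

package router

import (
	"one-api/middleware"

	"github.com/gin-gonic/gin"
)

func setUpRoutesSketch(server *gin.Engine) {
	api := server.Group("/api")
	api.Use(middleware.GlobalAPIRateLimit())

	userRoutes := api.Group("/user")
	userRoutes.Use(middleware.UserAuth()) // session or access token, role >= common user
	// userRoutes.GET("/self", controller.GetSelf)

	adminRoutes := api.Group("/admin")
	adminRoutes.Use(middleware.AdminAuth()) // role >= admin

	rootRoutes := api.Group("/option")
	rootRoutes.Use(middleware.RootAuth()) // role == root

	relay := server.Group("/v1")
	relay.Use(middleware.TokenAuth()) // sk- keys, x-api-key, or Gemini query/header keys
	// relay.POST("/chat/completions", relaycontroller.Relay)
}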

16
middleware/cache.go Normal file

@@ -0,0 +1,16 @@
package middleware
import (
"github.com/gin-gonic/gin"
)
func Cache() func(c *gin.Context) {
return func(c *gin.Context) {
if c.Request.RequestURI == "/" {
c.Header("Cache-Control", "no-cache")
} else {
c.Header("Cache-Control", "max-age=604800") // one week
}
c.Next()
}
}
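
A quick httptest check of the header behaviour, written as if it lived in a _test.go file of this package; the asset path is illustrative:

package middleware

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/gin-gonic/gin"
)

func TestCacheHeaderSketch(t *testing.T) {
	gin.SetMode(gin.TestMode)
	r := gin.New()
	r.Use(Cache())
	r.GET("/", func(c *gin.Context) { c.Status(http.StatusOK) })
	r.GET("/assets/app.js", func(c *gin.Context) { c.Status(http.StatusOK) })

	// The index page must always be revalidated.
	w := httptest.NewRecorder()
	r.ServeHTTP(w, httptest.NewRequest(http.MethodGet, "/", nil))
	if got := w.Header().Get("Cache-Control"); got != "no-cache" {
		t.Errorf("index page: got %q, want no-cache", got)
	}

	// Everything else is cacheable for a week.
	w = httptest.NewRecorder()
	r.ServeHTTP(w, httptest.NewRequest(http.MethodGet, "/assets/app.js", nil))
	if got := w.Header().Get("Cache-Control"); got != "max-age=604800" {
		t.Errorf("static asset: got %q, want max-age=604800", got)
	}
}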

15
middleware/cors.go Normal file

@@ -0,0 +1,15 @@
package middleware
import (
"github.com/gin-contrib/cors"
"github.com/gin-gonic/gin"
)
func CORS() gin.HandlerFunc {
config := cors.DefaultConfig()
config.AllowAllOrigins = true
config.AllowCredentials = true
config.AllowMethods = []string{"GET", "POST", "PUT", "DELETE", "OPTIONS"}
config.AllowHeaders = []string{"*"}
return cors.New(config)
}

331
middleware/distributor.go Normal file

@@ -0,0 +1,331 @@
package middleware
import (
"errors"
"fmt"
"net/http"
"one-api/common"
"one-api/constant"
"one-api/dto"
"one-api/model"
relayconstant "one-api/relay/constant"
"one-api/service"
"one-api/setting"
"one-api/setting/ratio_setting"
"one-api/types"
"strconv"
"strings"
"time"
"github.com/gin-gonic/gin"
)
type ModelRequest struct {
Model string `json:"model"`
Group string `json:"group,omitempty"`
}
func Distribute() func(c *gin.Context) {
return func(c *gin.Context) {
allowIpsMap := common.GetContextKeyStringMap(c, constant.ContextKeyTokenAllowIps)
if len(allowIpsMap) != 0 {
clientIp := c.ClientIP()
if _, ok := allowIpsMap[clientIp]; !ok {
abortWithOpenAiMessage(c, http.StatusForbidden, "您的 IP 不在令牌允许访问的列表中")
return
}
}
var channel *model.Channel
channelId, ok := common.GetContextKey(c, constant.ContextKeyTokenSpecificChannelId)
modelRequest, shouldSelectChannel, err := getModelRequest(c)
if err != nil {
abortWithOpenAiMessage(c, http.StatusBadRequest, "Invalid request, "+err.Error())
return
}
userGroup := common.GetContextKeyString(c, constant.ContextKeyUserGroup)
tokenGroup := common.GetContextKeyString(c, constant.ContextKeyTokenGroup)
if tokenGroup != "" {
// check common.UserUsableGroups[userGroup]
if _, ok := setting.GetUserUsableGroups(userGroup)[tokenGroup]; !ok {
abortWithOpenAiMessage(c, http.StatusForbidden, fmt.Sprintf("令牌分组 %s 已被禁用", tokenGroup))
return
}
// check group in common.GroupRatio
if !ratio_setting.ContainsGroupRatio(tokenGroup) {
if tokenGroup != "auto" {
abortWithOpenAiMessage(c, http.StatusForbidden, fmt.Sprintf("分组 %s 已被弃用", tokenGroup))
return
}
}
userGroup = tokenGroup
}
common.SetContextKey(c, constant.ContextKeyUsingGroup, userGroup)
if ok {
id, err := strconv.Atoi(channelId.(string))
if err != nil {
abortWithOpenAiMessage(c, http.StatusBadRequest, "无效的渠道 Id")
return
}
channel, err = model.GetChannelById(id, true)
if err != nil {
abortWithOpenAiMessage(c, http.StatusBadRequest, "无效的渠道 Id")
return
}
if channel.Status != common.ChannelStatusEnabled {
abortWithOpenAiMessage(c, http.StatusForbidden, "该渠道已被禁用")
return
}
} else {
// Select a channel for the user
// check token model mapping
modelLimitEnable := common.GetContextKeyBool(c, constant.ContextKeyTokenModelLimitEnabled)
if modelLimitEnable {
s, ok := common.GetContextKey(c, constant.ContextKeyTokenModelLimit)
var tokenModelLimit map[string]bool
if ok {
tokenModelLimit = s.(map[string]bool)
} else {
tokenModelLimit = map[string]bool{}
}
if tokenModelLimit != nil {
if _, ok := tokenModelLimit[modelRequest.Model]; !ok {
abortWithOpenAiMessage(c, http.StatusForbidden, "该令牌无权访问模型 "+modelRequest.Model)
return
}
} else {
// token model limit is empty, all models are not allowed
abortWithOpenAiMessage(c, http.StatusForbidden, "该令牌无权访问任何模型")
return
}
}
if shouldSelectChannel {
var selectGroup string
channel, selectGroup, err = model.CacheGetRandomSatisfiedChannel(c, userGroup, modelRequest.Model, 0)
if err != nil {
showGroup := userGroup
if userGroup == "auto" {
showGroup = fmt.Sprintf("auto(%s)", selectGroup)
}
message := fmt.Sprintf("当前分组 %s 下对于模型 %s 无可用渠道", showGroup, modelRequest.Model)
// an error together with a non-nil channel indicates a database consistency problem
if channel != nil {
common.SysError(fmt.Sprintf("渠道不存在:%d", channel.Id))
message = "数据库一致性已被破坏,请联系管理员"
}
// an error with a nil channel simply means no channel is available
abortWithOpenAiMessage(c, http.StatusServiceUnavailable, message)
return
}
if channel == nil {
abortWithOpenAiMessage(c, http.StatusServiceUnavailable, fmt.Sprintf("当前分组 %s 下对于模型 %s 无可用渠道(数据库一致性已被破坏)", userGroup, modelRequest.Model))
return
}
}
}
common.SetContextKey(c, constant.ContextKeyRequestStartTime, time.Now())
SetupContextForSelectedChannel(c, channel, modelRequest.Model)
c.Next()
}
}
func getModelRequest(c *gin.Context) (*ModelRequest, bool, error) {
var modelRequest ModelRequest
shouldSelectChannel := true
var err error
if strings.Contains(c.Request.URL.Path, "/mj/") {
relayMode := relayconstant.Path2RelayModeMidjourney(c.Request.URL.Path)
if relayMode == relayconstant.RelayModeMidjourneyTaskFetch ||
relayMode == relayconstant.RelayModeMidjourneyTaskFetchByCondition ||
relayMode == relayconstant.RelayModeMidjourneyNotify ||
relayMode == relayconstant.RelayModeMidjourneyTaskImageSeed {
shouldSelectChannel = false
} else {
midjourneyRequest := dto.MidjourneyRequest{}
err = common.UnmarshalBodyReusable(c, &midjourneyRequest)
if err != nil {
return nil, false, err
}
midjourneyModel, mjErr, success := service.GetMjRequestModel(relayMode, &midjourneyRequest)
if mjErr != nil {
return nil, false, fmt.Errorf(mjErr.Description)
}
if midjourneyModel == "" {
if !success {
return nil, false, fmt.Errorf("无效的请求, 无法解析模型")
} else {
// task fetch, task fetch by condition, notify
shouldSelectChannel = false
}
}
modelRequest.Model = midjourneyModel
}
c.Set("relay_mode", relayMode)
} else if strings.Contains(c.Request.URL.Path, "/suno/") {
relayMode := relayconstant.Path2RelaySuno(c.Request.Method, c.Request.URL.Path)
if relayMode == relayconstant.RelayModeSunoFetch ||
relayMode == relayconstant.RelayModeSunoFetchByID {
shouldSelectChannel = false
} else {
modelName := service.CoverTaskActionToModelName(constant.TaskPlatformSuno, c.Param("action"))
modelRequest.Model = modelName
}
c.Set("platform", string(constant.TaskPlatformSuno))
c.Set("relay_mode", relayMode)
} else if strings.Contains(c.Request.URL.Path, "/v1/video/generations") {
err = common.UnmarshalBodyReusable(c, &modelRequest)
var platform string
var relayMode int
if strings.HasPrefix(modelRequest.Model, "jimeng") {
platform = string(constant.TaskPlatformJimeng)
relayMode = relayconstant.Path2RelayJimeng(c.Request.Method, c.Request.URL.Path)
if relayMode == relayconstant.RelayModeJimengFetchByID {
shouldSelectChannel = false
}
} else {
platform = string(constant.TaskPlatformKling)
relayMode = relayconstant.Path2RelayKling(c.Request.Method, c.Request.URL.Path)
if relayMode == relayconstant.RelayModeKlingFetchByID {
shouldSelectChannel = false
}
}
c.Set("platform", platform)
c.Set("relay_mode", relayMode)
} else if strings.HasPrefix(c.Request.URL.Path, "/v1beta/models/") || strings.HasPrefix(c.Request.URL.Path, "/v1/models/") {
// Gemini API path handling: /v1beta/models/gemini-2.0-flash:generateContent
relayMode := relayconstant.RelayModeGemini
modelName := extractModelNameFromGeminiPath(c.Request.URL.Path)
if modelName != "" {
modelRequest.Model = modelName
}
c.Set("relay_mode", relayMode)
} else if !strings.HasPrefix(c.Request.URL.Path, "/v1/audio/transcriptions") && !strings.HasPrefix(c.Request.URL.Path, "/v1/images/edits") {
err = common.UnmarshalBodyReusable(c, &modelRequest)
}
if err != nil {
return nil, false, errors.New("无效的请求, " + err.Error())
}
if strings.HasPrefix(c.Request.URL.Path, "/v1/realtime") {
//wss://api.openai.com/v1/realtime?model=gpt-4o-realtime-preview-2024-10-01
modelRequest.Model = c.Query("model")
}
if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") {
if modelRequest.Model == "" {
modelRequest.Model = "text-moderation-stable"
}
}
if strings.HasSuffix(c.Request.URL.Path, "embeddings") {
if modelRequest.Model == "" {
modelRequest.Model = c.Param("model")
}
}
if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") {
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, "dall-e")
} else if strings.HasPrefix(c.Request.URL.Path, "/v1/images/edits") {
modelRequest.Model = common.GetStringIfEmpty(c.PostForm("model"), "gpt-image-1")
}
if strings.HasPrefix(c.Request.URL.Path, "/v1/audio") {
relayMode := relayconstant.RelayModeAudioSpeech
if strings.HasPrefix(c.Request.URL.Path, "/v1/audio/speech") {
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, "tts-1")
} else if strings.HasPrefix(c.Request.URL.Path, "/v1/audio/translations") {
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, c.PostForm("model"))
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, "whisper-1")
relayMode = relayconstant.RelayModeAudioTranslation
} else if strings.HasPrefix(c.Request.URL.Path, "/v1/audio/transcriptions") {
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, c.PostForm("model"))
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, "whisper-1")
relayMode = relayconstant.RelayModeAudioTranscription
}
c.Set("relay_mode", relayMode)
}
if strings.HasPrefix(c.Request.URL.Path, "/pg/chat/completions") {
// playground chat completions
err = common.UnmarshalBodyReusable(c, &modelRequest)
if err != nil {
return nil, false, errors.New("无效的请求, " + err.Error())
}
common.SetContextKey(c, constant.ContextKeyTokenGroup, modelRequest.Group)
}
return &modelRequest, shouldSelectChannel, nil
}
func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, modelName string) *types.NewAPIError {
c.Set("original_model", modelName) // for retry
if channel == nil {
return types.NewError(errors.New("channel is nil"), types.ErrorCodeGetChannelFailed)
}
common.SetContextKey(c, constant.ContextKeyChannelId, channel.Id)
common.SetContextKey(c, constant.ContextKeyChannelName, channel.Name)
common.SetContextKey(c, constant.ContextKeyChannelType, channel.Type)
common.SetContextKey(c, constant.ContextKeyChannelCreateTime, channel.CreatedTime)
common.SetContextKey(c, constant.ContextKeyChannelSetting, channel.GetSetting())
common.SetContextKey(c, constant.ContextKeyChannelParamOverride, channel.GetParamOverride())
if nil != channel.OpenAIOrganization && *channel.OpenAIOrganization != "" {
common.SetContextKey(c, constant.ContextKeyChannelOrganization, *channel.OpenAIOrganization)
}
common.SetContextKey(c, constant.ContextKeyChannelAutoBan, channel.GetAutoBan())
common.SetContextKey(c, constant.ContextKeyChannelModelMapping, channel.GetModelMapping())
common.SetContextKey(c, constant.ContextKeyChannelStatusCodeMapping, channel.GetStatusCodeMapping())
key, index, newAPIError := channel.GetNextEnabledKey()
if newAPIError != nil {
return newAPIError
}
if channel.ChannelInfo.IsMultiKey {
common.SetContextKey(c, constant.ContextKeyChannelIsMultiKey, true)
common.SetContextKey(c, constant.ContextKeyChannelMultiKeyIndex, index)
}
// c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", key))
common.SetContextKey(c, constant.ContextKeyChannelKey, key)
common.SetContextKey(c, constant.ContextKeyChannelBaseUrl, channel.GetBaseURL())
// TODO: unify api_version handling
switch channel.Type {
case constant.ChannelTypeAzure:
c.Set("api_version", channel.Other)
case constant.ChannelTypeVertexAi:
c.Set("region", channel.Other)
case constant.ChannelTypeXunfei:
c.Set("api_version", channel.Other)
case constant.ChannelTypeGemini:
c.Set("api_version", channel.Other)
case constant.ChannelTypeAli:
c.Set("plugin", channel.Other)
case constant.ChannelCloudflare:
c.Set("api_version", channel.Other)
case constant.ChannelTypeMokaAI:
c.Set("api_version", channel.Other)
case constant.ChannelTypeCoze:
c.Set("bot_id", channel.Other)
}
return nil
}
// extractModelNameFromGeminiPath extracts the model name from a Gemini API URL path.
// Input:  /v1beta/models/gemini-2.0-flash:generateContent
// Output: gemini-2.0-flash
func extractModelNameFromGeminiPath(path string) string {
// locate "/models/" in the path
modelsPrefix := "/models/"
modelsIndex := strings.Index(path, modelsPrefix)
if modelsIndex == -1 {
return ""
}
// start extracting right after "/models/"
startIndex := modelsIndex + len(modelsPrefix)
if startIndex >= len(path) {
return ""
}
// the model name ends at the first ":", if any
colonIndex := strings.Index(path[startIndex:], ":")
if colonIndex == -1 {
// no ":" found: return everything after "/models/"
return path[startIndex:]
}
// return the model name portion
return path[startIndex : startIndex+colonIndex]
}
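
Since extractModelNameFromGeminiPath is a pure function, a small table-driven test pins down its behaviour; a sketch with illustrative cases:

package middleware

import "testing"

func TestExtractModelNameFromGeminiPathSketch(t *testing.T) {
	cases := map[string]string{
		"/v1beta/models/gemini-2.0-flash:generateContent":       "gemini-2.0-flash",
		"/v1beta/models/gemini-2.0-flash:streamGenerateContent": "gemini-2.0-flash",
		"/v1/models/gemini-1.5-pro":                             "gemini-1.5-pro", // no action suffix
		"/v1beta/models/":                                       "",               // nothing after the prefix
		"/v1beta/openai/chat/completions":                       "",               // no "/models/" segment
	}
	for path, want := range cases {
		if got := extractModelNameFromGeminiPath(path); got != want {
			t.Errorf("extractModelNameFromGeminiPath(%q) = %q, want %q", path, got, want)
		}
	}
}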

38
middleware/gzip.go Normal file

@@ -0,0 +1,38 @@
package middleware
import (
"compress/gzip"
"github.com/andybalholm/brotli"
"github.com/gin-gonic/gin"
"io"
"net/http"
)
func DecompressRequestMiddleware() gin.HandlerFunc {
return func(c *gin.Context) {
if c.Request.Body == nil || c.Request.Method == http.MethodGet {
c.Next()
return
}
switch c.GetHeader("Content-Encoding") {
case "gzip":
gzipReader, err := gzip.NewReader(c.Request.Body)
if err != nil {
c.AbortWithStatus(http.StatusBadRequest)
return
}
defer gzipReader.Close()
// Replace the request body with the decompressed data
c.Request.Body = io.NopCloser(gzipReader)
c.Request.Header.Del("Content-Encoding")
case "br":
reader := brotli.NewReader(c.Request.Body)
c.Request.Body = io.NopCloser(reader)
c.Request.Header.Del("Content-Encoding")
}
// Continue processing the request
c.Next()
}
}
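
For reference, this is the kind of client request the middleware handles: the JSON body is gzip-compressed and Content-Encoding advertises it. A minimal sketch; the host, port, endpoint, and key are placeholders:

package main

import (
	"bytes"
	"compress/gzip"
	"log"
	"net/http"
)

func main() {
	// Compress the request body.
	var buf bytes.Buffer
	zw := gzip.NewWriter(&buf)
	if _, err := zw.Write([]byte(`{"model":"gpt-4o-mini","messages":[{"role":"user","content":"hi"}]}`)); err != nil {
		log.Fatal(err)
	}
	if err := zw.Close(); err != nil {
		log.Fatal(err)
	}

	req, err := http.NewRequest(http.MethodPost, "http://localhost:3000/v1/chat/completions", &buf)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Content-Encoding", "gzip") // triggers the gzip branch above
	req.Header.Set("Authorization", "Bearer sk-placeholder")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	log.Println("status:", resp.Status)
}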


@@ -0,0 +1,47 @@
package middleware
import (
"bytes"
"encoding/json"
"io"
"one-api/common"
"one-api/constant"
"github.com/gin-gonic/gin"
)
func KlingRequestConvert() func(c *gin.Context) {
return func(c *gin.Context) {
var originalReq map[string]interface{}
if err := common.UnmarshalBodyReusable(c, &originalReq); err != nil {
c.Next()
return
}
model, _ := originalReq["model_name"].(string)
prompt, _ := originalReq["prompt"].(string)
unifiedReq := map[string]interface{}{
"model": model,
"prompt": prompt,
"metadata": originalReq,
}
jsonData, err := json.Marshal(unifiedReq)
if err != nil {
c.Next()
return
}
// Rewrite request body and path
c.Request.Body = io.NopCloser(bytes.NewBuffer(jsonData))
c.Request.URL.Path = "/v1/video/generations"
if image, ok := originalReq["image"]; !ok || image == "" {
c.Set("action", constant.TaskActionTextGenerate)
}
// We have to reset the request body for the next handlers
c.Set(common.KeyRequestBody, jsonData)
c.Next()
}
}
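
A rough test-style sketch of the rewrite. It assumes common.UnmarshalBodyReusable only needs the raw JSON body, and the incoming Kling-style path is illustrative; the real route registration lives elsewhere in this commit:

package middleware

import (
	"io"
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"

	"github.com/gin-gonic/gin"
)

func TestKlingRequestConvertSketch(t *testing.T) {
	gin.SetMode(gin.TestMode)
	r := gin.New()
	r.POST("/kling/v1/videos/text2video", KlingRequestConvert(), func(c *gin.Context) {
		body, _ := io.ReadAll(c.Request.Body)
		c.JSON(http.StatusOK, gin.H{"path": c.Request.URL.Path, "body": string(body)})
	})

	in := strings.NewReader(`{"model_name":"kling-v1","prompt":"a cat surfing","duration":5}`)
	req := httptest.NewRequest(http.MethodPost, "/kling/v1/videos/text2video", in)
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)

	// The downstream handler should observe path "/v1/video/generations" and a body of the form
	// {"metadata":{...original fields...},"model":"kling-v1","prompt":"a cat surfing"}.
	t.Log(w.Body.String())
}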

25
middleware/logger.go Normal file

@@ -0,0 +1,25 @@
package middleware
import (
"fmt"
"github.com/gin-gonic/gin"
"one-api/common"
)
func SetUpLogger(server *gin.Engine) {
server.Use(gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string {
var requestID string
if param.Keys != nil {
requestID, _ = param.Keys[common.RequestIdKey].(string) // tolerate a missing id instead of panicking
}
return fmt.Sprintf("[GIN] %s | %s | %3d | %13v | %15s | %7s %s\n",
param.TimeStamp.Format("2006/01/02 - 15:04:05"),
requestID,
param.StatusCode,
param.Latency,
param.ClientIP,
param.Method,
param.Path,
)
}))
}
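
The formatter pulls the request id back out of param.Keys, so it pairs with the RequestId() middleware defined later in this commit. An illustrative bootstrap (not the project's actual main.go):

package main

import (
	"one-api/middleware"

	"github.com/gin-gonic/gin"
)

func main() {
	server := gin.New()
	server.Use(middleware.RequestId()) // stores an id under common.RequestIdKey on every request
	middleware.SetUpLogger(server)     // the formatter above prints that id in each access-log line
	_ = server.Run(":3000")
}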


@@ -0,0 +1,199 @@
package middleware
import (
"context"
"fmt"
"net/http"
"one-api/common"
"one-api/common/limiter"
"one-api/constant"
"one-api/setting"
"strconv"
"time"
"github.com/gin-gonic/gin"
"github.com/go-redis/redis/v8"
)
const (
ModelRequestRateLimitCountMark = "MRRL"
ModelRequestRateLimitSuccessCountMark = "MRRLS"
)
// checkRedisRateLimit checks the per-key request limit stored in Redis
func checkRedisRateLimit(ctx context.Context, rdb *redis.Client, key string, maxCount int, duration int64) (bool, error) {
// maxCount == 0 means unlimited
if maxCount == 0 {
return true, nil
}
// current count
length, err := rdb.LLen(ctx, key).Result()
if err != nil {
return false, err
}
// below the limit: allow the request
if length < int64(maxCount) {
return true, nil
}
// check the time window
oldTimeStr, _ := rdb.LIndex(ctx, key, -1).Result()
oldTime, err := time.Parse(timeFormat, oldTimeStr)
if err != nil {
return false, err
}
nowTimeStr := time.Now().Format(timeFormat)
nowTime, err := time.Parse(timeFormat, nowTimeStr)
if err != nil {
return false, err
}
// still inside the window and at the limit: reject the request
subTime := nowTime.Sub(oldTime).Seconds()
if int64(subTime) < duration {
rdb.Expire(ctx, key, time.Duration(setting.ModelRequestRateLimitDurationMinutes)*time.Minute)
return false, nil
}
return true, nil
}
// recordRedisRequest records a request timestamp in Redis
func recordRedisRequest(ctx context.Context, rdb *redis.Client, key string, maxCount int) {
// maxCount == 0: nothing to record
if maxCount == 0 {
return
}
now := time.Now().Format(timeFormat)
rdb.LPush(ctx, key, now)
rdb.LTrim(ctx, key, 0, int64(maxCount-1))
rdb.Expire(ctx, key, time.Duration(setting.ModelRequestRateLimitDurationMinutes)*time.Minute)
}
// redisRateLimitHandler is the Redis-backed rate-limit handler
func redisRateLimitHandler(duration int64, totalMaxCount, successMaxCount int) gin.HandlerFunc {
return func(c *gin.Context) {
userId := strconv.Itoa(c.GetInt("id"))
ctx := context.Background()
rdb := common.RDB
// 1. check the successful-request limit
successKey := fmt.Sprintf("rateLimit:%s:%s", ModelRequestRateLimitSuccessCountMark, userId)
allowed, err := checkRedisRateLimit(ctx, rdb, successKey, successMaxCount, duration)
if err != nil {
fmt.Println("检查成功请求数限制失败:", err.Error())
abortWithOpenAiMessage(c, http.StatusInternalServerError, "rate_limit_check_failed")
return
}
if !allowed {
abortWithOpenAiMessage(c, http.StatusTooManyRequests, fmt.Sprintf("您已达到请求数限制:%d分钟内最多请求%d次", setting.ModelRequestRateLimitDurationMinutes, successMaxCount))
return
}
// 2. check and record the total-request limit with a token-bucket limiter (skipped when totalMaxCount is 0)
if totalMaxCount > 0 {
totalKey := fmt.Sprintf("rateLimit:%s", userId)
// initialize the limiter
tb := limiter.New(ctx, rdb)
allowed, err = tb.Allow(
ctx,
totalKey,
limiter.WithCapacity(int64(totalMaxCount)*duration),
limiter.WithRate(int64(totalMaxCount)),
limiter.WithRequested(duration),
)
if err != nil {
fmt.Println("检查总请求数限制失败:", err.Error())
abortWithOpenAiMessage(c, http.StatusInternalServerError, "rate_limit_check_failed")
return
}
if !allowed {
abortWithOpenAiMessage(c, http.StatusTooManyRequests, fmt.Sprintf("您已达到总请求数限制:%d分钟内最多请求%d次(包括失败次数),请检查您的请求是否正确", setting.ModelRequestRateLimitDurationMinutes, totalMaxCount))
return
}
}
// 3. handle the request
c.Next()
// 4. if the request succeeded, record it against the success counter
if c.Writer.Status() < 400 {
recordRedisRequest(ctx, rdb, successKey, successMaxCount)
}
}
}
// memoryRateLimitHandler is the in-memory rate-limit handler
func memoryRateLimitHandler(duration int64, totalMaxCount, successMaxCount int) gin.HandlerFunc {
inMemoryRateLimiter.Init(time.Duration(setting.ModelRequestRateLimitDurationMinutes) * time.Minute)
return func(c *gin.Context) {
userId := strconv.Itoa(c.GetInt("id"))
totalKey := ModelRequestRateLimitCountMark + userId
successKey := ModelRequestRateLimitSuccessCountMark + userId
// 1. check the total-request limit (skipped when totalMaxCount is 0)
if totalMaxCount > 0 && !inMemoryRateLimiter.Request(totalKey, totalMaxCount, duration) {
c.Status(http.StatusTooManyRequests)
c.Abort()
return
}
// 2. check the successful-request limit
// use a probe key for the check so the real success counter is not touched here
checkKey := successKey + "_check"
if !inMemoryRateLimiter.Request(checkKey, successMaxCount, duration) {
c.Status(http.StatusTooManyRequests)
c.Abort()
return
}
// 3. handle the request
c.Next()
// 4. if the request succeeded, record it against the real success counter
if c.Writer.Status() < 400 {
inMemoryRateLimiter.Request(successKey, successMaxCount, duration)
}
}
}
// ModelRequestRateLimit is the per-user model request rate-limit middleware
func ModelRequestRateLimit() func(c *gin.Context) {
return func(c *gin.Context) {
// check on every request whether rate limiting is enabled
if !setting.ModelRequestRateLimitEnabled {
c.Next()
return
}
// compute the limit parameters
duration := int64(setting.ModelRequestRateLimitDurationMinutes * 60)
totalMaxCount := setting.ModelRequestRateLimitCount
successMaxCount := setting.ModelRequestRateLimitSuccessCount
// resolve the effective group
group := common.GetContextKeyString(c, constant.ContextKeyTokenGroup)
if group == "" {
group = common.GetContextKeyString(c, constant.ContextKeyUserGroup)
}
// per-group rate-limit overrides, if configured
groupTotalCount, groupSuccessCount, found := setting.GetGroupRateLimit(group)
if found {
totalMaxCount = groupTotalCount
successMaxCount = groupSuccessCount
}
// pick the rate-limit handler based on the storage backend
if common.RedisEnabled {
redisRateLimitHandler(duration, totalMaxCount, successMaxCount)(c)
} else {
memoryRateLimitHandler(duration, totalMaxCount, successMaxCount)(c)
}
}
}
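
A sketch of where this sits in a relay route group, assuming it runs after TokenAuth (which puts the user id and token group on the context) and before Distribute; the order and paths are illustrative, the real registration lives elsewhere in this commit:

package router

import (
	"one-api/middleware"

	"github.com/gin-gonic/gin"
)

func setRelayRouterSketch(server *gin.Engine) {
	relay := server.Group("/v1")
	relay.Use(middleware.TokenAuth())             // resolves the sk- key, user id, and token group
	relay.Use(middleware.ModelRequestRateLimit()) // per-user / per-group request limits
	relay.Use(middleware.Distribute())            // picks an upstream channel for the requested model
	// relay.POST("/chat/completions", relaycontroller.Relay)
}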

113
middleware/rate-limit.go Normal file

@@ -0,0 +1,113 @@
package middleware
import (
"context"
"fmt"
"github.com/gin-gonic/gin"
"net/http"
"one-api/common"
"time"
)
var timeFormat = "2006-01-02T15:04:05.000Z"
var inMemoryRateLimiter common.InMemoryRateLimiter
var defNext = func(c *gin.Context) {
c.Next()
}
func redisRateLimiter(c *gin.Context, maxRequestNum int, duration int64, mark string) {
ctx := context.Background()
rdb := common.RDB
key := "rateLimit:" + mark + c.ClientIP()
listLength, err := rdb.LLen(ctx, key).Result()
if err != nil {
fmt.Println(err.Error())
c.Status(http.StatusInternalServerError)
c.Abort()
return
}
if listLength < int64(maxRequestNum) {
rdb.LPush(ctx, key, time.Now().Format(timeFormat))
rdb.Expire(ctx, key, common.RateLimitKeyExpirationDuration)
} else {
oldTimeStr, _ := rdb.LIndex(ctx, key, -1).Result()
oldTime, err := time.Parse(timeFormat, oldTimeStr)
if err != nil {
fmt.Println(err)
c.Status(http.StatusInternalServerError)
c.Abort()
return
}
nowTimeStr := time.Now().Format(timeFormat)
nowTime, err := time.Parse(timeFormat, nowTimeStr)
if err != nil {
fmt.Println(err)
c.Status(http.StatusInternalServerError)
c.Abort()
return
}
// time.Since will return negative number!
// See: https://stackoverflow.com/questions/50970900/why-is-time-since-returning-negative-durations-on-windows
if int64(nowTime.Sub(oldTime).Seconds()) < duration {
rdb.Expire(ctx, key, common.RateLimitKeyExpirationDuration)
c.Status(http.StatusTooManyRequests)
c.Abort()
return
} else {
rdb.LPush(ctx, key, time.Now().Format(timeFormat))
rdb.LTrim(ctx, key, 0, int64(maxRequestNum-1))
rdb.Expire(ctx, key, common.RateLimitKeyExpirationDuration)
}
}
}
func memoryRateLimiter(c *gin.Context, maxRequestNum int, duration int64, mark string) {
key := mark + c.ClientIP()
if !inMemoryRateLimiter.Request(key, maxRequestNum, duration) {
c.Status(http.StatusTooManyRequests)
c.Abort()
return
}
}
func rateLimitFactory(maxRequestNum int, duration int64, mark string) func(c *gin.Context) {
if common.RedisEnabled {
return func(c *gin.Context) {
redisRateLimiter(c, maxRequestNum, duration, mark)
}
} else {
// It's safe to call multi times.
inMemoryRateLimiter.Init(common.RateLimitKeyExpirationDuration)
return func(c *gin.Context) {
memoryRateLimiter(c, maxRequestNum, duration, mark)
}
}
}
func GlobalWebRateLimit() func(c *gin.Context) {
if common.GlobalWebRateLimitEnable {
return rateLimitFactory(common.GlobalWebRateLimitNum, common.GlobalWebRateLimitDuration, "GW")
}
return defNext
}
func GlobalAPIRateLimit() func(c *gin.Context) {
if common.GlobalApiRateLimitEnable {
return rateLimitFactory(common.GlobalApiRateLimitNum, common.GlobalApiRateLimitDuration, "GA")
}
return defNext
}
func CriticalRateLimit() func(c *gin.Context) {
return rateLimitFactory(common.CriticalRateLimitNum, common.CriticalRateLimitDuration, "CT")
}
func DownloadRateLimit() func(c *gin.Context) {
return rateLimitFactory(common.DownloadRateLimitNum, common.DownloadRateLimitDuration, "DW")
}
func UploadRateLimit() func(c *gin.Context) {
return rateLimitFactory(common.UploadRateLimitNum, common.UploadRateLimitDuration, "UP")
}
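
When Redis is disabled the factory falls back to the in-memory limiter. A rough test-style sketch of the expected behaviour, assuming common.RedisEnabled is false and that the in-memory limiter counts each call and rejects once the per-window maximum is reached:

package middleware

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/gin-gonic/gin"
)

func TestRateLimitFactorySketch(t *testing.T) {
	gin.SetMode(gin.TestMode)
	r := gin.New()
	r.Use(rateLimitFactory(2, 60, "TEST")) // at most 2 requests per 60 seconds per client IP
	r.GET("/ping", func(c *gin.Context) { c.Status(http.StatusOK) })

	var codes []int
	for i := 0; i < 3; i++ {
		w := httptest.NewRecorder()
		r.ServeHTTP(w, httptest.NewRequest(http.MethodGet, "/ping", nil))
		codes = append(codes, w.Code)
	}
	// Expected: 200, 200, 429; the third request from the same IP is rejected.
	if codes[2] != http.StatusTooManyRequests {
		t.Errorf("third request: got %d, want %d", codes[2], http.StatusTooManyRequests)
	}
}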

28
middleware/recover.go Normal file

@@ -0,0 +1,28 @@
package middleware
import (
"fmt"
"github.com/gin-gonic/gin"
"net/http"
"one-api/common"
"runtime/debug"
)
func RelayPanicRecover() gin.HandlerFunc {
return func(c *gin.Context) {
defer func() {
if err := recover(); err != nil {
common.SysError(fmt.Sprintf("panic detected: %v", err))
common.SysError(fmt.Sprintf("stacktrace from panic: %s", string(debug.Stack())))
c.JSON(http.StatusInternalServerError, gin.H{
"error": gin.H{
"message": fmt.Sprintf("Panic detected, error: %v. Please submit an issue here: https://github.com/Calcium-Ion/new-api", err),
"type": "new_api_panic",
},
})
c.Abort()
}
}()
c.Next()
}
}

18
middleware/request-id.go Normal file

@@ -0,0 +1,18 @@
package middleware
import (
"context"
"github.com/gin-gonic/gin"
"one-api/common"
)
func RequestId() func(c *gin.Context) {
return func(c *gin.Context) {
id := common.GetTimeString() + common.GetRandomString(8)
c.Set(common.RequestIdKey, id)
ctx := context.WithValue(c.Request.Context(), common.RequestIdKey, id)
c.Request = c.Request.WithContext(ctx)
c.Header(common.RequestIdKey, id)
c.Next()
}
}
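
Downstream code can read the id either from the gin context or from the request's context.Context. A small illustrative handler (package, route, and handler name are made up):

package controller

import (
	"net/http"

	"one-api/common"

	"github.com/gin-gonic/gin"
)

func PingSketch(c *gin.Context) {
	id := c.GetString(common.RequestIdKey)                              // set by middleware.RequestId
	ctxId, _ := c.Request.Context().Value(common.RequestIdKey).(string) // same id, via context.Context
	c.JSON(http.StatusOK, gin.H{"request_id": id, "ctx_request_id": ctxId})
}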

41
middleware/stats.go Normal file

@@ -0,0 +1,41 @@
package middleware
import (
"sync/atomic"
"github.com/gin-gonic/gin"
)
// HTTPStats holds process-wide HTTP statistics
type HTTPStats struct {
activeConnections int64
}
var globalStats = &HTTPStats{}
// StatsMiddleware tracks the number of in-flight requests
func StatsMiddleware() gin.HandlerFunc {
return func(c *gin.Context) {
// increment the active-connection count
atomic.AddInt64(&globalStats.activeConnections, 1)
// make sure it is decremented when the request finishes
defer func() {
atomic.AddInt64(&globalStats.activeConnections, -1)
}()
c.Next()
}
}
// StatsInfo is the exported statistics snapshot
type StatsInfo struct {
ActiveConnections int64 `json:"active_connections"`
}
// GetStats returns the current statistics
func GetStats() StatsInfo {
return StatsInfo{
ActiveConnections: atomic.LoadInt64(&globalStats.activeConnections),
}
}
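
An illustrative way to expose the gauge; the route is made up and the project may surface it differently:

package main

import (
	"net/http"

	"one-api/middleware"

	"github.com/gin-gonic/gin"
)

func main() {
	server := gin.New()
	server.Use(middleware.StatsMiddleware()) // every in-flight request bumps the counter
	server.GET("/api/status/connections", func(c *gin.Context) {
		c.JSON(http.StatusOK, middleware.GetStats()) // {"active_connections": N}
	})
	_ = server.Run(":3000")
}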


@@ -0,0 +1,80 @@
package middleware
import (
"encoding/json"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
"net/http"
"net/url"
"one-api/common"
)
type turnstileCheckResponse struct {
Success bool `json:"success"`
}
func TurnstileCheck() gin.HandlerFunc {
return func(c *gin.Context) {
if common.TurnstileCheckEnabled {
session := sessions.Default(c)
turnstileChecked := session.Get("turnstile")
if turnstileChecked != nil {
c.Next()
return
}
response := c.Query("turnstile")
if response == "" {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "Turnstile token 为空",
})
c.Abort()
return
}
rawRes, err := http.PostForm("https://challenges.cloudflare.com/turnstile/v0/siteverify", url.Values{
"secret": {common.TurnstileSecretKey},
"response": {response},
"remoteip": {c.ClientIP()},
})
if err != nil {
common.SysError(err.Error())
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": err.Error(),
})
c.Abort()
return
}
defer rawRes.Body.Close()
var res turnstileCheckResponse
err = json.NewDecoder(rawRes.Body).Decode(&res)
if err != nil {
common.SysError(err.Error())
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": err.Error(),
})
c.Abort()
return
}
if !res.Success {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "Turnstile 校验失败,请刷新重试!",
})
c.Abort()
return
}
session.Set("turnstile", true)
err = session.Save()
if err != nil {
c.JSON(http.StatusOK, gin.H{
"message": "无法保存会话信息,请重试",
"success": false,
})
return
}
}
c.Next()
}
}

29
middleware/utils.go Normal file

@@ -0,0 +1,29 @@
package middleware
import (
"fmt"
"github.com/gin-gonic/gin"
"one-api/common"
)
func abortWithOpenAiMessage(c *gin.Context, statusCode int, message string) {
userId := c.GetInt("id")
c.JSON(statusCode, gin.H{
"error": gin.H{
"message": common.MessageWithRequestId(message, c.GetString(common.RequestIdKey)),
"type": "new_api_error",
},
})
c.Abort()
common.LogError(c.Request.Context(), fmt.Sprintf("user %d | %s", userId, message))
}
func abortWithMidjourneyMessage(c *gin.Context, statusCode int, code int, description string) {
c.JSON(statusCode, gin.H{
"description": description,
"type": "new_api_error",
"code": code,
})
c.Abort()
common.LogError(c.Request.Context(), description)
}