refactor: Introduce pre-consume quota and unify relay handlers

This commit introduces a major architectural refactoring to improve quota management, centralize logging, and streamline the relay handling logic.

Key changes:
- **Pre-consume Quota:** Implements a new mechanism that checks and reserves user quota *before* the request is made to the upstream provider, so deductions are more accurate and concurrent requests cannot push a user past their limit (see the quota sketch after this list).

- **Unified Relay Handlers:** Refactors the relay logic to use generic handlers (e.g., `ChatHandler`, `ImageHandler`) instead of provider-specific implementations, which significantly reduces code duplication and makes new channels easier to add (see the handler sketch after this list).

- **Centralized Logger:** Introduces a dedicated `logger` package and migrates all system logging calls to it, moving that responsibility out of the `common` package (see the logger sketch after this list).

- **Code Reorganization:** DTOs are generalized (e.g., `dalle.go` -> `openai_image.go`) and utility code is moved to more appropriate packages (e.g., `common/http.go` -> `service/http.go`) for better code structure.
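
To make the pre-consume bullet concrete, here is a minimal, self-contained sketch of the reserve-then-settle pattern. All names in it (`quotaStore`, `PreConsume`, `PostConsume`) are illustrative assumptions, not the actual types or functions introduced by this commit.

```go
package main

import (
    "errors"
    "fmt"
    "sync"
)

// quotaStore is a stand-in for the real quota backend; it exists only to
// illustrate the reserve-then-settle pattern.
type quotaStore struct {
    mu      sync.Mutex
    balance map[int]int // userID -> remaining quota
}

var ErrInsufficientQuota = errors.New("insufficient quota")

// PreConsume atomically reserves amount before any upstream call is made,
// so concurrent requests cannot overdraw the same balance.
func (s *quotaStore) PreConsume(userID, amount int) error {
    s.mu.Lock()
    defer s.mu.Unlock()
    if s.balance[userID] < amount {
        return ErrInsufficientQuota
    }
    s.balance[userID] -= amount
    return nil
}

// PostConsume settles the request: it refunds the difference between the
// reserved amount and what the upstream response actually cost.
func (s *quotaStore) PostConsume(userID, reserved, actual int) {
    s.mu.Lock()
    defer s.mu.Unlock()
    s.balance[userID] += reserved - actual
}

func main() {
    store := &quotaStore{balance: map[int]int{1: 1000}}

    reserved := 300 // estimate derived from the request's token count
    if err := store.PreConsume(1, reserved); err != nil {
        fmt.Println("rejected before any upstream call:", err)
        return
    }

    // ... relay the request upstream; suppose it actually used 120 ...
    store.PostConsume(1, reserved, 120)
    fmt.Println("remaining quota:", store.balance[1]) // 820
}
```

The key property is that the balance check and the reservation happen under the same lock (or the same atomic database update), so two concurrent requests cannot both pass the check against the same remaining quota.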
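
The unified handlers can be read as one generic flow per request type plus a thin per-provider adaptor. The `Adaptor` interface below is an assumption sketched for illustration; only the handler names (`ChatHandler`, `ImageHandler`) come from this commit.

```go
package main

import "fmt"

// Adaptor is an assumed per-channel abstraction: a provider supplies request
// conversion and usage parsing, while the generic handler owns the shared
// flow (quota pre-consume, relaying, logging, settlement).
type Adaptor interface {
    ConvertRequest(req []byte) ([]byte, error)
    ParseUsage(resp []byte) (usedTokens int, err error)
}

// ChatHandler sketches a generic handler: one code path for every provider,
// so adding a channel means writing an Adaptor, not another handler.
func ChatHandler(a Adaptor, req []byte) (int, error) {
    upstreamReq, err := a.ConvertRequest(req)
    if err != nil {
        return 0, fmt.Errorf("convert request: %w", err)
    }
    resp, err := relayToUpstream(upstreamReq) // placeholder for the HTTP relay
    if err != nil {
        return 0, fmt.Errorf("relay: %w", err)
    }
    return a.ParseUsage(resp)
}

// relayToUpstream stands in for the real upstream HTTP call.
func relayToUpstream(req []byte) ([]byte, error) { return req, nil }

// echoAdaptor is a toy channel implementation used only to exercise the flow.
type echoAdaptor struct{}

func (echoAdaptor) ConvertRequest(req []byte) ([]byte, error) { return req, nil }
func (echoAdaptor) ParseUsage(resp []byte) (int, error)       { return len(resp), nil }

func main() {
    used, err := ChatHandler(echoAdaptor{}, []byte("hello"))
    fmt.Println(used, err) // 5 <nil>
}
```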
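
For the logger migration, the hunks below only show `logger.LogError(nil, msg)` call sites, so this sketch of the package's shape is a guess; in particular the `context.Context` parameter type is an assumption that merely matches the `nil` passed at those call sites.

```go
// Package logger: a rough sketch of the centralized package's shape, not the
// real implementation introduced by this commit.
package logger

import (
    "context"
    "log"
)

// LogError writes an error-level line; a real implementation would also pull
// request metadata (request ID, user, channel) out of ctx.
func LogError(ctx context.Context, msg string) {
    _ = ctx
    log.Println("[ERROR]", msg)
}
```
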

Author: CaIon
Date: 2025-08-14 20:05:06 +08:00
Commit: e2037ad756 (parent: 17bab355e4)
113 changed files with 3095 additions and 2518 deletions


@@ -2,7 +2,10 @@ package dto
 import (
     "encoding/json"
+    "github.com/gin-gonic/gin"
     "one-api/common"
+    "one-api/logger"
+    "one-api/types"
     "strings"
 )
@@ -14,19 +17,75 @@ type GeminiChatRequest struct {
     SystemInstructions *GeminiChatContent `json:"systemInstruction,omitempty"`
 }
+func (r *GeminiChatRequest) GetTokenCountMeta() *types.TokenCountMeta {
+    var files []*types.FileMeta = make([]*types.FileMeta, 0)
+    var maxTokens int
+    if r.GenerationConfig.MaxOutputTokens > 0 {
+        maxTokens = int(r.GenerationConfig.MaxOutputTokens)
+    }
+    var inputTexts []string
+    for _, content := range r.Contents {
+        for _, part := range content.Parts {
+            if part.Text != "" {
+                inputTexts = append(inputTexts, part.Text)
+            }
+            if part.InlineData != nil && part.InlineData.Data != "" {
+                if strings.HasPrefix(part.InlineData.MimeType, "image/") {
+                    files = append(files, &types.FileMeta{
+                        FileType: types.FileTypeImage,
+                        Data: part.InlineData.Data,
+                    })
+                } else if strings.HasPrefix(part.InlineData.MimeType, "audio/") {
+                    files = append(files, &types.FileMeta{
+                        FileType: types.FileTypeAudio,
+                        Data: part.InlineData.Data,
+                    })
+                } else if strings.HasPrefix(part.InlineData.MimeType, "video/") {
+                    files = append(files, &types.FileMeta{
+                        FileType: types.FileTypeVideo,
+                        Data: part.InlineData.Data,
+                    })
+                } else {
+                    files = append(files, &types.FileMeta{
+                        FileType: types.FileTypeFile,
+                        Data: part.InlineData.Data,
+                    })
+                }
+            }
+        }
+    }
+    inputText := strings.Join(inputTexts, "\n")
+    return &types.TokenCountMeta{
+        CombineText: inputText,
+        Files: files,
+        MaxTokens: maxTokens,
+    }
+}
+func (r *GeminiChatRequest) IsStream(c *gin.Context) bool {
+    if c.Query("alt") == "sse" {
+        return true
+    }
+    return false
+}
 func (r *GeminiChatRequest) GetTools() []GeminiChatTool {
     var tools []GeminiChatTool
     if strings.HasSuffix(string(r.Tools), "[") {
         // is array
         if err := common.Unmarshal(r.Tools, &tools); err != nil {
-            common.LogError(nil, "error_unmarshalling_tools: "+err.Error())
+            logger.LogError(nil, "error_unmarshalling_tools: "+err.Error())
             return nil
         }
     } else if strings.HasPrefix(string(r.Tools), "{") {
         // is object
         singleTool := GeminiChatTool{}
         if err := common.Unmarshal(r.Tools, &singleTool); err != nil {
-            common.LogError(nil, "error_unmarshalling_single_tool: "+err.Error())
+            logger.LogError(nil, "error_unmarshalling_single_tool: "+err.Error())
             return nil
         }
         tools = []GeminiChatTool{singleTool}
@@ -43,7 +102,7 @@ func (r *GeminiChatRequest) SetTools(tools []GeminiChatTool) {
     // Marshal the tools to JSON
     data, err := common.Marshal(tools)
     if err != nil {
-        common.LogError(nil, "error_marshalling_tools: "+err.Error())
+        logger.LogError(nil, "error_marshalling_tools: "+err.Error())
         return
     }
     r.Tools = data
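
The new `GetTokenCountMeta` above is what feeds the pre-consume step: a caller can turn the request into a quota estimate before contacting Gemini. The estimator below is purely illustrative (the chars-per-token and per-file constants are invented), not code from this commit.

```go
package relay

import "one-api/dto"

// estimatePreConsumeQuota is a hypothetical caller of the new method: it turns
// the request's TokenCountMeta into a rough token estimate for reservation.
func estimatePreConsumeQuota(req *dto.GeminiChatRequest) int {
    meta := req.GetTokenCountMeta()
    tokens := len(meta.CombineText) / 4 // crude chars-per-token approximation
    tokens += 256 * len(meta.Files)     // flat per-file surcharge (assumed)
    if meta.MaxTokens > 0 {
        tokens += meta.MaxTokens // leave room for the capped completion
    }
    return tokens
}
```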